├── .npmignore
├── assets
│   ├── powers-dre.png
│   ├── alkali-logo-old.png
│   └── alkali-logo.svg
├── src
│   ├── util
│   │   ├── when.ts
│   │   ├── errors.ts
│   │   ├── bufferStream.ts
│   │   ├── WeakValueMap.ts
│   │   └── process.ts
│   ├── http-server
│   │   ├── url-encoded.ts
│   │   ├── text.ts
│   │   ├── MessagePack.ts
│   │   ├── dpack.ts
│   │   ├── html.ts
│   │   ├── media.ts
│   │   └── JSONStream.ts
│   ├── configure.ts
│   ├── index.ts
│   ├── storage
│   │   ├── Database.ts
│   │   ├── redis.ts
│   │   └── level.ts
│   ├── RequestContext.ts
│   ├── UpdateProgress.ts
│   ├── Aggregator.ts
│   ├── ExpirationStrategy.ts
│   ├── Reduced.ts
│   ├── KeyIndex.ts
│   └── Persisted.ts
├── rollup.config.js
├── tests
│   ├── model
│   │   ├── TestProcess.js
│   │   ├── TestCached.js
│   │   ├── CountryCity.js
│   │   └── Test2.js
│   ├── unit.js
│   ├── Persisted.js
│   ├── second-process.js
│   ├── Relation.js
│   ├── Process.js
│   ├── Index.js
│   └── performance.js
├── tsconfig.json
├── client
│   ├── resource-viewer.html
│   └── resource-viewer.js
├── LICENSE
├── .gitignore
├── package.json
├── yarn.lock
└── README.md
/.npmignore:
--------------------------------------------------------------------------------
1 | # Dependency directories
2 | node_modules/
3 | tests/db/
4 |
--------------------------------------------------------------------------------
/assets/powers-dre.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kriszyp/cobase/master/assets/powers-dre.png
--------------------------------------------------------------------------------
/assets/alkali-logo-old.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kriszyp/cobase/master/assets/alkali-logo-old.png
--------------------------------------------------------------------------------
/src/util/when.ts:
--------------------------------------------------------------------------------
1 | export default function when(promise, callback, errback?) {
2 | if (promise && promise.then) {
3 | return errback ?
4 | promise.then(callback, errback) :
5 | promise.then(callback)
6 | }
7 | return callback(promise)
8 | }
9 |
--------------------------------------------------------------------------------
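A usage sketch (not a file in this repository): `when` lets a single code path handle both plain values and promises, avoiding a forced microtask when a value is already available. The `getUser` cache below is hypothetical.

    import when from './src/util/when'

    const cache = new Map<number, { name: string }>([[1, { name: 'ten' }]])
    // returns a cached value synchronously, or a promise on a cache miss
    function getUser(id: number) {
        return cache.get(id) || Promise.resolve({ name: 'user-' + id })
    }

    // when() invokes the callback immediately for plain values,
    // or chains it with .then() for promises
    const hit = when(getUser(1), user => user.name)  // 'ten', computed synchronously
    const miss = when(getUser(2), user => user.name) // a Promise resolving to 'user-2'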
/rollup.config.js:
--------------------------------------------------------------------------------
1 | export default [
2 | {
3 | input: "dist/index.js",
4 | output: [
5 | {
6 | file: "dist/index.cjs",
7 | format: "cjs"
8 | }
9 | ]
10 | }
11 | ];
12 |
--------------------------------------------------------------------------------
/src/http-server/url-encoded.ts:
--------------------------------------------------------------------------------
1 | import { encode, decode } from 'querystring'
2 | export const urlEncodedMediaType = {
3 | q: 0.8,
4 | parse: (content) => content.length > 0 ? decode(content) : undefined, // tolerate empty requests
5 | serialize(data, connection) {
6 | return encode(data)
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/src/http-server/text.ts:
--------------------------------------------------------------------------------
1 | import { Readable } from 'stream'
2 |
3 | export const textMediaType = {
4 | parse: (content) => content, // just the text
5 | q: 0.1,
6 | serialize: (content) => {
7 | if (content?.next)
8 | return Readable.from(content)
9 | return content
10 | }, // just the text
11 | }
12 |
--------------------------------------------------------------------------------
/tests/model/TestProcess.js:
--------------------------------------------------------------------------------
1 | console.log('TestProcess module')
2 | const { Persisted, runInProcess } = require('../..')
3 | Persisted.dbFolder = 'tests/db'
4 | class TestProcess extends Persisted {
5 |
6 | }
7 | TestProcess.start()
8 | exports.TestProcess = TestProcess
9 | exports.TestProcessWithExtra = TestProcess.cacheWith({ extra: 'test' })
10 | exports.TestProcessByName = exports.TestProcessWithExtra.index('name')
11 |
--------------------------------------------------------------------------------
/src/configure.ts:
--------------------------------------------------------------------------------
1 | import { configure as configurePersisted } from './Persisted.js'
2 | import { setRequestContextClass } from './RequestContext.js'
3 | export function configure(options) {
4 | if (options.dbFolder || options.cacheDbFolder || options.doesInitialization !== undefined) {
5 | configurePersisted(options)
6 | }
7 | if (options.RequestContext) {
8 | setRequestContextClass(options.RequestContext)
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
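A call sketch mirroring tests/unit.js (the RequestContext subclass illustrating the second branch is hypothetical):

    import { configure, RequestContext } from 'cobase'

    class AuditedRequestContext extends RequestContext {}

    configure({
        dbFolder: 'tests/db',                  // forwarded to Persisted's configure()
        RequestContext: AuditedRequestContext, // installed via setRequestContextClass()
    })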
/src/http-server/MessagePack.ts:
--------------------------------------------------------------------------------
1 | import { PackrStream, decode } from 'msgpackr'
2 | import { Readable } from 'stream'
3 |
4 | export const msgpackMediaType = {
5 | q: 0.96,
6 | parse: (content) => content.length > 0 ? decode(content) : undefined, // tolerate empty requests
7 | serialize(data, connection) {
8 | connection.response.headers['Transfer-Encoding'] = 'chunked'
9 | var stream = new PackrStream()
10 | stream.end(data)
11 | return stream
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/tests/unit.js:
--------------------------------------------------------------------------------
1 | if (typeof assert === 'undefined') { assert = require('chai').assert }
2 | const inspector = require('inspector')
3 | inspector.open(9329, null, true)
4 | const { configure } = require('..')
5 | const { removeSync } = require('fs-extra')
6 | removeSync('tests/db')
7 | configure({
8 | dbFolder: 'tests/db'
9 | })
10 |
11 | require('./Persisted')
12 | //require('./Process')
13 | //require('./Relation')
14 | require('./Index')
15 | //require('./performance')
16 |
--------------------------------------------------------------------------------
/tests/model/TestCached.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Cached } = require('../..')
2 | Persisted.updatingProcessModule = 'tests/updating-process'
3 | Cached.updatingProcessModule = 'tests/updating-process'
4 | class Test extends Persisted {
5 | }
6 | exports.Test = Test
7 | Test.version = 1
8 | Test.ready
9 | class TestCached extends Cached.from(Test) {
10 | transform(test) {
11 | return {
12 | upperName: test.name.toUpperCase()
13 | }
14 | }
15 | }
16 | TestCached.version = 1
17 | exports.TestCached = TestCached
18 | TestCached.ready
19 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compileOnSave": true,
3 | "compilerOptions": {
4 | "target": "ES2017",
5 | "module": "esnext",
6 | "declaration": true,
7 | "moduleResolution": "node",
8 | "stripInternal": true,
9 | "lib": ["ES2018", "ScriptHost", "DOM"],
10 | "outDir": "dist",
11 | "sourceMap": true
12 | },
13 | "files": [
14 | "./node_modules/@types/node/index.d.ts"
15 | ],
16 | "include": [
17 | "src/**/*"
18 | ],
19 | "exclude": [
20 | "node_modules",
21 | "dist"
22 | ]
23 | }
24 |
--------------------------------------------------------------------------------
/src/util/errors.ts:
--------------------------------------------------------------------------------
1 | export class ExtendableError extends Error {
2 | constructor(message) {
3 | super(message)
4 | this.stack = (new Error()).stack
5 | this.name = this.constructor.name
6 | }
7 | }
8 | export class ShareChangeError extends Error {}
9 |
10 | export class AccessError extends ExtendableError {
11 | get status() {
12 | return 403
13 | }
14 | get isExternal() {
15 | return true
16 | }
17 | }
18 |
19 | export class UnauthenticatedError extends ExtendableError {
20 | get status() {
21 | return 401
22 | }
23 | get isExternal() {
24 | return true
25 | }
26 | }
27 | export class ConcurrentModificationError extends ExtendableError {
28 | }
29 |
--------------------------------------------------------------------------------
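A consumption sketch (the handle() wrapper is hypothetical): `status` maps an error to an HTTP response code, and `isExternal` marks its message as safe to return to clients.

    import { AccessError, UnauthenticatedError } from './src/util/errors'

    function handle(connection, run: () => any) {
        try {
            return run()
        } catch (error) {
            // AccessError -> 403, UnauthenticatedError -> 401, everything else -> 500
            connection.status = error.isExternal ? error.status : 500
            connection.response.content = error.isExternal ? error.message : 'Internal error'
        }
    }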
/src/util/bufferStream.ts:
--------------------------------------------------------------------------------
1 | export let maxRequestBody = 10000000 // default to 10MB
2 | interface ErrorWithStatus extends Error {
3 | status: number
4 | }
5 | export function bufferStream(stream) {
6 | return new Promise((resolve, reject) => {
7 | var chunks = []
8 | var length = 0
9 | stream.on('data', (data) => {
10 | chunks.push(data)
11 | length += data.length
12 | if (length > maxRequestBody) {
13 | stream.connection.destroy()
14 | const error = new Error('Request Entity Too Large') as ErrorWithStatus
15 | error.status = 413
16 | reject(error)
17 | }
18 | })
19 | stream.on('end', () => {
20 | resolve(Buffer.concat(chunks, length))
21 | })
22 | })
23 | }
24 |
--------------------------------------------------------------------------------
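src/http-server/media.ts consumes this as `bufferStream(connection.req)`; a standalone sketch with a plain Node HTTP server (the port is arbitrary):

    import { createServer } from 'http'
    import { bufferStream } from './src/util/bufferStream'

    createServer(async (req, res) => {
        try {
            const body = await bufferStream(req) // one Buffer holding the whole request body
            res.end('received ' + body.length + ' bytes')
        } catch (error) {
            res.statusCode = error.status || 500 // 413 once maxRequestBody is exceeded
            res.end(error.message)
        }
    }).listen(8080)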
/src/http-server/dpack.ts:
--------------------------------------------------------------------------------
1 | import { createSerializeStream, parse } from 'dpack'
2 | import { Readable } from 'stream'
3 |
4 | export const dpackMediaType = {
5 | parse: (content, connection) => {
6 | try {
7 | return content.length > 0 ? parse(content) : undefined // tolerate empty requests
8 | } catch(error) {
9 | console.error('Parse error', error.toString(), 'content-length', connection.request.headers['content-length'], 'content.length', content.length, 'content', content)
10 | throw error
11 | }
12 | },
13 | serialize(data, connection, parameters) {
14 | connection.response.headers['Transfer-Encoding'] = 'chunked'
15 | var stream = createSerializeStream()
16 | stream.end(data)
17 | return stream
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/client/resource-viewer.html:
--------------------------------------------------------------------------------
[HTML markup was stripped from this capture; only the page title "Resource" survives. The viewer script this page loads is client/resource-viewer.js.]
--------------------------------------------------------------------------------
/tests/model/CountryCity.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Cached } = require('../..')
2 | class City extends Persisted {
3 |
4 | }
5 | class Country extends Persisted {
6 |
7 | }
8 | class CityWithCountry extends City.cacheWith({
9 | countryId: '',
10 | country: Country.relatedBy('countryId'), // Country.relatedByAll('countryIds') for multiple values
11 | }) {}
12 | CityWithCountry.version = 1
13 | const CountryWithCities = Country.cacheWith({
14 | cities: City.relatesBy('countryId')
15 | })
16 | CountryWithCities.version = 1
17 |
18 | City.start()
19 | exports.City = City
20 | Country.start()
21 | exports.Country = Country
22 | CityWithCountry.start()
23 | exports.CityWithCountry = CityWithCountry
24 | CountryWithCities.start()
25 | exports.CountryWithCities = CountryWithCities
26 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
1 | export { Index, KeyIndex } from './KeyIndex.js'
2 | export { Persisted, Cached, Persistable, secureAccess, writeCommonStructures, getCurrentStatus, allStores } from './Persisted.js'
3 | export { RequestContext } from './RequestContext.js'
4 | export { Reduced } from './Reduced.js'
5 | export { AccessError, UnauthenticatedError } from './util/errors.js'
6 | export { JSONStream } from './http-server/JSONStream.js'
7 | export { media, mediaTypes } from './http-server/media.js'
8 | export { default as ExpirationStrategy } from './ExpirationStrategy.js'
9 | export { Aggregator } from './Aggregator.js'
10 | export { configure } from './configure.js'
11 | export { bufferStream } from './util/bufferStream.js'
12 | export { WeakValueMap } from './util/WeakValueMap.js'
13 | export { default as when } from './util/when.js'
--------------------------------------------------------------------------------
/tests/Persisted.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Cached } = require('..')
2 | const { Test, TestCached } = require('./model/TestCached')
3 | suite('Persisted', function() {
4 | this.timeout(1000000)
5 | suiteSetup(() => {
6 | })
7 |
8 | test('standalone table', () => {
9 | Test.for(10).put({ name: 'ten' })
10 | return Test.for(10).then(value => {
11 | assert.equal(value.name, 'ten')
12 | return Test.instanceIds.then(ids => {
13 | assert.deepEqual(ids, [10])
14 | })
15 | })
16 | })
17 | test('cached transform', () => {
18 | return TestCached.for(10).then(value => {
19 | assert.equal(value.upperName, 'TEN')
20 | })
21 | })
22 |
23 | /*
24 | suiteTeardown(() => {
25 | console.log('teardown persisted')
26 | return Promise.all([
27 | Test.db.close(),
28 | TestCached.db.close()
29 | ])
30 | })*/
31 | })
32 |
--------------------------------------------------------------------------------
/src/storage/Database.ts:
--------------------------------------------------------------------------------
1 | import ArrayLikeIterable from '../util/ArrayLikeIterable'
2 |
3 | export interface Database {
4 | getSync(id, asBuffer?): any
5 | get(id): Promise<any>
6 | putSync(id, value): void
7 | put(id, value): void
8 | removeSync(id): void
9 | remove(id): void
10 | iterable(options: IterableOptions): ArrayLikeIterable
11 | batch(operations: OperationsArray): void
12 | close(): void
13 | clear(): void
14 | transaction(executor: () => void): void
15 | }
16 | export interface IterableOptions {
17 | start: Buffer,
18 | end?: Buffer,
19 | values?: boolean
20 | reverse?: boolean
21 | valueAsBuffer?: boolean
22 | waitForInitialization?: boolean
23 | limit?: number
24 | }
25 | export interface OperationsArray extends Array<{
26 | type: string
27 | key: any
28 | value?: any
29 | }> {
30 | byteCount?: number
31 | }
32 |
--------------------------------------------------------------------------------
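A minimal in-memory sketch of this interface for tests (the Map-backed store is an assumption; real backends return an ArrayLikeIterable over a sorted key range, which this flattens to an array):

    import { Database, IterableOptions, OperationsArray } from './src/storage/Database'

    export function openMemoryDb(): Database {
        const store = new Map<any, any>()
        return {
            getSync: (id) => store.get(id),
            get: (id) => Promise.resolve(store.get(id)),
            putSync(id, value) { store.set(id, value) },
            put(id, value) { store.set(id, value) },
            removeSync(id) { store.delete(id) },
            remove(id) { store.delete(id) },
            // real implementations honor start/end/reverse/limit over sorted keys
            iterable(options: IterableOptions) {
                return [...store.entries()].map(([key, value]) => ({ key, value })) as any
            },
            batch(operations: OperationsArray) {
                for (const op of operations)
                    op.type === 'put' ? store.set(op.key, op.value) : store.delete(op.key)
            },
            close() {},
            clear() { store.clear() },
            transaction(executor) { executor() },
        }
    }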
/tests/second-process.js:
--------------------------------------------------------------------------------
1 | const inspector = require('inspector')
2 | inspector.open(9330, null, true)
3 | const { configure } = require('..')
4 | configure({
5 | dbFolder: 'tests/db'
6 | })
7 |
8 | const { TestProcess, TestProcessByName } = require('./model/TestProcess')
9 | function start() {
10 | console.log('started second process')
11 | }
12 | process.on('message', (data) => {
13 | console.log('child got message', data)
14 | if (data.action == 'put10') {
15 | TestProcess.for(10).put({ name: 'ten' })
16 | process.send({ completed: 'put10' })
17 | }
18 | if (data.action == 'delete10') {
19 | TestProcess.for(10).delete()
20 | process.send({ completed: 'delete10' })
21 | }
22 | if (data.action == 'change10') {
23 | TestProcess.for(10).put({ name: 'change b'})
24 | process.send({ completed: 'change10' })
25 | }
26 | })
27 |
28 | start()
29 |
--------------------------------------------------------------------------------
/src/storage/redis.ts:
--------------------------------------------------------------------------------
1 | import * as redis from 'redis'
2 | const client = redis.createClient({url: 'redis://pFw0Zm5Z+MsvcKfoqp2r3lOd:Wl3kimoV3nZ8L+qs3g=@uswe-redis.redis.cache.windows.net:6380?ssl=True'})
3 |
4 | export default {
5 | get(db, id) {
6 | return new Promise((resolve, reject) => {
7 | client.get(db.id + '~' + id, (err, value) => {
8 | if (err) {
9 | console.error('error', err)
10 | reject(err)
11 | } else {
12 | resolve(value)
13 | }
14 | })
15 | })
16 | },
17 | put(db, id, value) {
18 | return new Promise((resolve, reject) => {
19 | client.set(db.id + '~' + id, value, (err, value) => {
20 | if (err) {
21 | console.error('error', err)
22 | reject(err)
23 | } else {
24 | resolve(value)
25 | }
26 | })
27 | })
28 | },
29 | open(name) {
30 | return {
31 | id: name
32 | }
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/RequestContext.ts:
--------------------------------------------------------------------------------
1 | import * as alkali from 'alkali'; const { Context } = alkali.Context ? alkali : alkali.default
2 | export class RequestContext extends Context {
3 | preferJSON: boolean // used by JSONStream to assist with direct JSON transfers
4 | request: any
5 | session: any
6 | constructor(request, session, user?) {
7 | super(user)
8 | this.request = request
9 | this.session = session
10 | }
11 | newContext() {
12 | return new this.constructor(this.request, this.session, this.subject)
13 | }
14 | get expectedVersions(): Map<any, Set<any>> {
15 | const session = this.session || (this.session = {})
16 | return session.expectedVersions || (session.expectedVersions = {})
17 | }
18 | }
19 | export let CurrentRequestContext = RequestContext
20 | export function setRequestContextClass(Context) {
21 | CurrentRequestContext = Context
22 | }
23 |
24 | export const DEFAULT_CONTEXT = {
25 | expectedVersions: {}
26 | }
27 |
--------------------------------------------------------------------------------
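A construction sketch (the request and session literals are placeholders): newContext() clones the context for follow-on work while preserving the request, session, and subject.

    import { RequestContext } from 'cobase'

    const context = new RequestContext({ url: '/resource/10' }, {} /* session */)
    context.expectedVersions           // lazily creates session.expectedVersions
    const child = context.newContext() // same request/session, fresh Context instance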
/tests/model/Test2.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Persistable, Index, Reduced } = require('../..')
2 | Persisted.dbFolder = 'tests/db'
3 | Persistable.dbFolder = 'tests/db'
4 | class Test2 extends Persisted {
5 |
6 | }
7 | Test2.version = 1
8 | Test2.start()
9 | class TestByType extends Index.from(Test2) {
10 | static indexBy(test) {
11 | return test.isEven ? 'even' : 'odd'
12 | }
13 | }
14 | TestByType.version = 1
15 | TestByType.start()
16 | let reduceCalls = 0
17 | class SumOfNumbersByType extends Reduced.from(TestByType) {
18 | reduceBy(a, b) {
19 | reduceCalls++
20 | return {
21 | number: a.number + b.number
22 | }
23 | }
24 | transform(total) {
25 | return total.number
26 | }
27 | static get reduceCalls() {
28 | return reduceCalls
29 | }
30 | }
31 | SumOfNumbersByType.version = 1
32 | SumOfNumbersByType.start()
33 | exports.Test2 = Test2
34 | exports.TestByType = TestByType
35 | exports.SumOfNumbersByType = SumOfNumbersByType
36 | exports.getReduceCalls = function() {
37 | return reduceCalls
38 | }
39 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Kris Zyp
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/src/http-server/html.ts:
--------------------------------------------------------------------------------
1 | let resourceEditor
2 | import fs from 'fs'
3 | import { PackrStream } from 'msgpackr'
4 | export const htmlMediaType = {
5 | q: 0.2,
6 | serialize(data, connection, parameters) {
7 | // TODO: Cache this
8 | connection.response.headers['transfer-encoding'] = 'chunked'
9 | var stream = new PackrStream()
10 | stream.push(fs.readFileSync(require.resolve('../../client/resource-viewer.html')))
11 | const push = stream.push
12 | stream.push = function(chunk) {
13 | if (chunk) {
14 | push.call(stream, '') // .replace(/(["/\n\r\\])/g, '\\$1'
15 | } else {
16 | push.call(stream, '')
17 | push.call(stream, null)
18 | }
19 | }
20 | stream.end(data)
21 | return stream
22 | }
23 | }
24 | export function sendResourceEditor(connection) {
25 | connection.body = connection.response.content = fs.readFileSync(require.resolve('alkali/dist/index')) + '\n' +
26 | fs.readFileSync(require.resolve('dpack/dist/index')) + '\n' +
27 | fs.readFileSync(require.resolve('../../client/resource-viewer.js'))
28 | }
29 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | dist
8 |
9 | # Runtime data
10 | pids
11 | *.pid
12 | *.seed
13 | *.pid.lock
14 |
15 | # Directory for instrumented libs generated by jscoverage/JSCover
16 | lib-cov
17 |
18 | # Coverage directory used by tools like istanbul
19 | coverage
20 |
21 | # nyc test coverage
22 | .nyc_output
23 |
24 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
25 | .grunt
26 |
27 | # Bower dependency directory (https://bower.io/)
28 | bower_components
29 |
30 | # node-waf configuration
31 | .lock-wscript
32 |
33 | # Compiled binary addons (http://nodejs.org/api/addons.html)
34 | build/Release
35 |
36 | # Dependency directories
37 | node_modules/
38 | jspm_packages/
39 |
40 | package-lock.json
41 |
42 | # Typescript v1 declaration files
43 | typings/
44 |
45 | # Optional npm cache directory
46 | .npm
47 |
48 | # Optional eslint cache
49 | .eslintcache
50 |
51 | # Optional REPL history
52 | .node_repl_history
53 |
54 | # Output of 'npm pack'
55 | *.tgz
56 |
57 | # Yarn Integrity file
58 | .yarn-integrity
59 |
60 | # dotenv environment variables file
61 | .env
62 |
63 | tests/db/
64 |
65 | portal/types/dtos.ts
--------------------------------------------------------------------------------
/src/util/WeakValueMap.ts:
--------------------------------------------------------------------------------
1 | // this provides a weak-valued map to ensure we only have a single instance of an object per id, but can still be GC-ed
2 | import { WeakLRUCache } from 'weak-lru-cache'
3 | export let WeakValueMap
4 | try {
5 | let allInstances = []
6 | WeakValueMap = function() {
7 | let map = new WeakLRUCache({
8 | expirer: false,
9 | })
10 | allInstances.push(map)
11 | return map
12 | }
13 | WeakValueMap.getStatus = function() {
14 | let mapStats = []
15 | for (let map of allInstances) {
16 | let size = 0
17 | let count = 0
18 | for (let key of map.keys()) {
19 | let value = map.get(key)
20 | size += value && value.approximateSize || 100
21 | count++
22 | }
23 | if (count > 0) {
24 | mapStats.push({
25 | name: map.name,
26 | size,
27 | count
28 | })
29 | }
30 | }
31 | return mapStats
32 | }
33 | } catch (error) {
34 | console.warn('No weak value map available, this can be used for development, but weak value maps should be enabled for production use', error.toString())
35 | WeakValueMap = Map
36 | WeakValueMap.getStatus = function() {
37 | return 'WeakValueMap failed to load'
38 | }
39 | WeakValueMap.prototype._keysAsArray = function() {
40 | return Array.from(this.keys())
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
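A usage sketch: one map per table holds a single live instance per id while still permitting GC. get/set follow the Map-style API of weak-lru-cache's WeakLRUCache (the fallback path is a plain Map).

    import { WeakValueMap } from 'cobase'

    const instancesById = new WeakValueMap()
    instancesById.set(10, { id: 10, name: 'ten' })
    instancesById.get(10)    // the same instance until it is garbage-collected
    WeakValueMap.getStatus() // [{ name, size, count }] per map, or a fallback string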
/assets/alkali-logo.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/kriszyp/cobase/master/assets/alkali-logo.svg
--------------------------------------------------------------------------------
/tests/Relation.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Persistable, Cached } = require('..')
2 | const { removeSync } = require('fs-extra')
3 | const { City, Country, CityWithCountry, CountryWithCities } = require('./model/CountryCity')
4 | suite('Persisted', function() {
5 | this.timeout(20000)
6 | Persisted.dbFolder = 'tests/db'
7 | Persistable.dbFolder = 'tests/db'
8 | Cached.dbFolder = 'tests/db'
9 | suiteSetup(() => {
10 | return Promise.all([
11 | CityWithCountry.ready,
12 | CountryWithCities.ready
13 | ])
14 | })
15 |
16 | test('many-to-one', async () => {
17 | Country.set('usa', {name: 'USA'})
18 | Country.set('france', {name: 'France'})
19 | City.set('la', {name: 'LA', countryId: 'usa'})
20 | City.set('slc', {name: 'SLC', countryId: 'usa'})
21 | City.set('paris', {name: 'Paris', countryId: 'france'})
22 | let la = await CityWithCountry.for('la')
23 | assert.equal(la.name, 'LA')
24 | assert.equal(la.country.name, 'USA')
25 | })
26 | test('one-to-many', async () => {
27 | let usa = await CountryWithCities.for('usa')
28 | assert.equal(usa.name, 'USA')
29 | assert.equal(usa.cities[0].name, 'LA')
30 | assert.equal(usa.cities[1].name, 'SLC')
31 | })
32 |
33 | /*
34 | suiteTeardown(() => {
35 | console.log('teardown persisted')
36 | return Promise.all([
37 | Test.db.close(),
38 | TestCached.db.close()
39 | ])
40 | })*/
41 | })
42 |
--------------------------------------------------------------------------------
/src/UpdateProgress.ts:
--------------------------------------------------------------------------------
1 | /* A HasUpdateProgress instance can be an entity, an event, a context/session, or an index class */
2 | export interface HasUpdateProgress {
3 | updatesInProgress: Map<any, Promise<any>>
4 | }
5 | export function whenClassIsReady(Source, context: HasUpdateProgress): Promise<any> {
6 | let whenReady = context.updatesInProgress && context.updatesInProgress.get(Source)
7 | if (whenReady && !whenReady.resultRegistered) {
8 | context.updatesInProgress.set(Source, whenReady = whenReady.then(newProgress => {
9 | if (context.updatesInProgress.get(Source) == whenReady) {
10 | context.updatesInProgress.delete(Source)
11 | }
12 | if (newProgress) {
13 | mergeProgress(context, newProgress)
14 | }
15 | }))
16 | whenReady.resultRegistered = true
17 | }
18 | return whenReady
19 | }
20 | export function mergeProgress(target: HasUpdateProgress, source: HasUpdateProgress) {
21 | if (source.updatesInProgress) {
22 | for (const [Source, promise] of source.updatesInProgress) {
23 | registerProcessing(target, Source, promise)
24 | }
25 | }
26 | }
27 | export function registerProcessing(target: HasUpdateProgress, Source: Function, promise: Promise<any>) {
28 | if (!target.updatesInProgress) {
29 | target.updatesInProgress = new Map()
30 | }
31 | target.updatesInProgress.set(Source, promise)
32 | }
33 | export const DEFAULT_CONTEXT = {
34 | updatesInProgress: new Map()
35 | }
36 |
--------------------------------------------------------------------------------
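A flow sketch using only the exports above (FakeSource and fakeWork are hypothetical): a writer registers in-flight work against a context, and whenClassIsReady() lets a reader await it once, removing the map entry when it settles.

    import { registerProcessing, whenClassIsReady, DEFAULT_CONTEXT } from './src/UpdateProgress'

    class FakeSource {}
    const fakeWork = new Promise(resolve => setTimeout(resolve, 10))

    registerProcessing(DEFAULT_CONTEXT, FakeSource, fakeWork)
    // resolves once the registered work settles, then cleans up the map entry
    whenClassIsReady(FakeSource, DEFAULT_CONTEXT).then(() =>
        console.log('FakeSource updates have flushed for this context'))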
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "cobase",
3 | "author": "Kris Zyp",
4 | "version": "0.10.22",
5 | "description": "Compositional data stores of reactive JavaScript transform, index, reduce, and join functions, built on NodeJS, Alkali, and LMDB",
6 | "license": "MIT",
7 | "repository": {
8 | "type": "git",
9 | "url": "http://github.com/kriszyp/cobase"
10 | },
11 | "scripts": {
12 | "test": "mocha tests/unit -u tdd",
13 | "build": "tsc || rollup -c"
14 | },
15 | "type": "module",
16 | "main": "./dist/index.js",
17 | "types": "./dist/index.d.ts",
18 | "exports": {
19 | ".": {
20 | "require": "./dist/index.cjs",
21 | "import": "./dist/index.js"
22 | },
23 | "./index.js": {
24 | "require": "./dist/index.cjs",
25 | "import": "./dist/index.js"
26 | },
27 | "./dist/util/WeakValueMap": {
28 | "require": "./dist/index.cjs",
29 | "import": "./dist/index.js"
30 | },
31 | "./dist/util/when": {
32 | "require": "./dist/index.cjs",
33 | "import": "./dist/index.js"
34 | }
35 | },
36 | "dependencies": {
37 | "msgpackr": "^1.5.4",
38 | "lmdb": "^2.3.2"
39 | },
40 | "peerDependencies": {
41 | "alkali": "^1.1.2"
42 | },
43 | "optionalDependencies": {
44 | "weak-lru-cache": "^1.2.0"
45 | },
46 | "devDependencies": {
47 | "@types/node": "latest",
48 | "chai": "^4",
49 | "fs-extra": "^9.0.0",
50 | "mocha": "^5",
51 | "typescript": "^4.4.3",
52 | "rollup": "^1.20.3"
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/tests/Process.js:
--------------------------------------------------------------------------------
1 | const { Persisted, createProcess, registerProcesses } = require('..')
2 | const { TestProcess, TestProcessByName } = require('./model/TestProcess')
3 | const { removeSync } = require('fs-extra')
4 | const { fork } = require('child_process')
5 | let childProcess
6 | suite('Process', function() {
7 | this.timeout(2000000)
8 | Persisted.dbFolder = 'tests/db'
9 | // Persistable.dbFolder = 'tests/db'
10 | suiteSetup(() => {
11 | childProcess = fork('tests/second-process', [], {
12 | env: process.env,
13 | execArgv:['--stack-trace-limit=100'],
14 | stdio: [0, 1, 2, 'ipc'],
15 | })
16 |
17 | process.on('exit', () => childProcess.kill())
18 | console.log('created child test process')
19 | })
20 |
21 | test('run-in-process', () => {
22 | return sendMessage('put10').then(() => {
23 | return TestProcess.for(10).then(value => {
24 | console.log('got response')
25 | assert.equal(value.name, 'ten')
26 | return TestProcess.instanceIds.then(ids => {
27 | assert.deepEqual(ids, [10])
28 | return sendMessage('delete10').then(() => delay(10)).then(() => {
29 | return TestProcess.for(10).then(value => {
30 | assert.isUndefined(value)
31 | })
32 | })
33 | })
34 | })
35 | })
36 | })
37 | test('run-in-process with index', () => {
38 | return sendMessage('put10').then(() => {
39 | console.log('got response for index of ten')
40 | TestProcess.for(10).put({ name: 'change a' })
41 | return sendMessage('change10').then(() => delay(10)).then(() =>
42 | Promise.all([
43 | TestProcessByName.for('change a').then(value => {
44 | assert.equal(value.length, 0)
45 | }),
46 | TestProcessByName.for('change b').then(value => {
47 | assert.equal(value.length, 1)
48 | })
49 | ])
50 | )
51 | })
52 | })
53 |
54 | suiteTeardown(() => {
55 | childProcess.kill()
56 | })
57 | })
58 |
59 | function sendMessage(action) {
60 | childProcess.send({ action })
61 | return new Promise(resolve => childProcess.on('message', (data) => {
62 | if (data.completed == action) {
63 | resolve()
64 | }
65 | }))
66 | }
67 | function delay(ms) {
68 | return new Promise(resolve => setTimeout(resolve, ms))
69 | }
70 |
--------------------------------------------------------------------------------
/src/Aggregator.ts:
--------------------------------------------------------------------------------
1 | import { PersistedBase, Persisted, Cached } from './Persisted.js'
2 | import when from './util/when.js'
3 | const INITIALIZING_LAST_KEY = Buffer.from([1, 7])
4 |
5 | export class Aggregator extends PersistedBase {
6 | static updateAggregate(previousEntry, entry) {
7 | }
8 | static forValue(id, entry) {
9 | return this.tryForQueueEntry(id, () => {
10 | this.updateAggregate(entry.previousValue, entry.value)
11 | })
12 | }
13 | static forQueueEntry(id) {
14 | return this.tryForQueueEntry(id, () => {
15 | return when(this.sources[0].get(id), value => this.updateAggregate(null, value))
16 | // if (complete) {
17 | // complete.commit()
18 | // }
19 | })
20 | }
21 | static get(id, mode?) {
22 | let entry = this.db.getEntry(id, mode ? 2: 0)
23 | // don't use versions
24 | return entry && entry.value
25 | }
26 |
27 | static fetchAllIds() {
28 | return []
29 | }
30 | static from(...sources) {
31 | return Cached.from.apply(this, sources)
32 | }
33 | static derivedFrom(...sources: Array<any>) {
34 | for (let source of sources) {
35 | if (source.notifies) {
36 | if (!this.sources)
37 | this.sources = []
38 | this.sources.push(source)
39 | } else if (typeof source === 'function') {
40 | this.updateAggregate = source
41 | } else {
42 | Object.assign(this, source)
43 | }
44 | }
45 | this.start()
46 | }
47 | static openDatabase() {
48 | this.sources[0].openChildDB(this, { cache: true })
49 | return false // is not root
50 | }
51 | static getIdsFromKey(key) {
52 | return this.sources[0].getIdsFromKey(key)
53 | }
54 | static updateDBVersion() {
55 | if (!this.sources[0].wasReset) // only reindex if the source didn't do it for us
56 | this.db.putSync(INITIALIZING_LAST_KEY, this.resumeFromKey = true)
57 | super.updateDBVersion()
58 | }
59 |
60 | static clearEntries() {
61 | // don't really have any way of doing this right now
62 | }
63 | static resumeQueue() {
64 | this.state = 'waiting for upstream source to build'
65 | // explicitly wait for source to finish resuming before our own resuming
66 | return when(this.sources[0].resumePromise, () =>
67 | super.resumeQueue())
68 | }
69 |
70 | static updated(event, by) {
71 | // don't do anything, we don't want these events to propagate through here, and we do indexing based on upstream queue
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/tests/Index.js:
--------------------------------------------------------------------------------
1 | const { Persisted, Persistable, Index, Reduced } = require('..')
2 | const { removeSync } = require('fs-extra')
3 | const { Test2, TestByType, SumOfNumbersByType, getReduceCalls } = require('./model/Test2')
4 | suite('Index', function() {
5 | this.timeout(2000000)
6 | suiteSetup(() => {
7 | // removeSync('tests/db')
8 | return Promise.all([
9 | Test2.ready,
10 | TestByType.ready,
11 | SumOfNumbersByType.ready,
12 | ]).then(() => {
13 | console.log('stores ready')
14 | var promises = []
15 | for (let i = 1; i < 11; i++)
16 | promises.push(Test2.for(i).put({
17 | isEven: i % 2 == 0,
18 | number: i,
19 | }))
20 | // return TestIndex.whenReadableAfter(Test2)
21 | return Promise.all(promises)
22 | })
23 | })
24 |
25 | test('index', () => {
26 | return TestByType.for('even').then(value => {
27 | assert.isTrue(value[0].isEven)
28 | assert.equal(value.length, 5)
29 | Test2.for(12).put({isEven: true, number: 12})
30 | return TestByType.for('even').then(value => {
31 | assert.isTrue(value[5].isEven)
32 | assert.equal(value.length, 6)
33 | Test2.remove(12)
34 | return TestByType.for('even').then(value => {
35 | assert.equal(value.length, 5)
36 | })
37 | })
38 | })
39 | })
40 | test('index-reduce', async () => {
41 | let value = await SumOfNumbersByType.for('even')
42 | assert.equal(value, 30)
43 | assert.isTrue(getReduceCalls() < 10)
44 |
45 | Test2.remove(4)
46 | value = await SumOfNumbersByType.for('even')
47 | assert.equal(value, 26)
48 | assert.isTrue(getReduceCalls() < 10)
49 |
50 | Test2.for(8).put({ description: 'changing 8 to 10', isEven: true, number: 10})
51 | value = await SumOfNumbersByType.for('even')
52 | assert.equal(value, 28)
53 | assert.isTrue(getReduceCalls() < 10)
54 |
55 | Test2.for(12).put({ name: 'twelve', isEven: true, number: 12})
56 | value = await SumOfNumbersByType.for('even')
57 | assert.equal(value, 40)
58 | assert.isTrue(getReduceCalls() < 13)
59 |
60 | Test2.remove(2)
61 | value = await SumOfNumbersByType.for('even')
62 | assert.equal(value, 38)
63 | assert.isTrue(getReduceCalls() < 16)
64 |
65 | Test2.remove(6)
66 | Test2.remove(8)
67 | Test2.remove(10)
68 | Test2.remove(12)
69 | value = await SumOfNumbersByType.for('even')
70 | assert.equal(value, undefined)
71 | assert.isTrue(getReduceCalls() < 16)
72 |
73 | Test2.for(4).put({ name: 'four', isEven: true, number: 4})
74 | value = await SumOfNumbersByType.for('even')
75 | assert.equal(value, 4)
76 | assert.isTrue(getReduceCalls() < 18)
77 |
78 | })
79 |
80 | /*
81 | suiteTeardown(() => {
82 | return Promise.all([
83 | Test2.db.close(),
84 | TestIndex.db.close()
85 | ])
86 | })*/
87 | })
88 |
--------------------------------------------------------------------------------
/tests/performance.js:
--------------------------------------------------------------------------------
1 | const { removeSync } = require('fs-extra')
2 | //const { open: openLevel } = require('../dist/storage/level')
3 | const { open: openLmdb } = require('../dist/storage/lmdb')
4 | //const { deflateSync, inflateSync } = require('zlib')
5 | const { compressSync: deflateSync, uncompressSync: inflateSync } = require('snappy')
6 | var fs = require('fs')
7 | var sampleData = JSON.parse(fs.readFileSync(__dirname + '/../../dpack/tests/samples/study.json'))
8 |
9 | var sampleBuffer = require('dpack').serialize(sampleData)
10 | var getRandomBuffer = () => {
11 | var size = Math.random() * 30000 >> 0
12 | var randomBuffer = Buffer.allocUnsafe(size)
13 | for (var i = 0; i< size; i++) {
14 | randomBuffer[i] = Math.random() * 256 >> 0
15 | }
16 | return randomBuffer
17 | }
18 | suite('performance', function() {
19 | removeSync('tests/db')
20 |
21 | // const level = openLevel('tests/db/test-level')
22 | const lmdb = openLmdb('tests/db/test-lmdb')
23 | suiteSetup(() => {
24 | lmdb.clear()
25 | // return level.clear()
26 | })
27 |
28 | let testString = 'test'
29 | for (let i = 0; i < 500; i++) {
30 | testString += 'some string data' + i
31 | }
32 |
33 | /* test('level-write', () => {
34 | for (let i = 0; i < 10000; i++) {
35 | level.putSync(Buffer.from((i % 1000).toString()), Buffer.from(testString + i))
36 | }
37 | })
38 | test('level-read', () => {
39 | for (let i = 0; i < 10000; i++) {
40 | level.getSync(Buffer.from((i % 1000).toString()))
41 | }
42 | })
43 | test('level-read-write', () => {
44 | for (let i = 0; i < 10000; i++) {
45 | level.putSync(Buffer.from((i % 1000).toString()), Buffer.from(testString + i))
46 | level.getSync(Buffer.from((i % 1000).toString()))
47 | }
48 | })
49 | test('level-batch', () => {
50 | let operations = []
51 | for (let i = 0; i < 10000; i++) {
52 | operations.push({
53 | type: 'put',
54 | key: Buffer.from((i % 1000).toString()),
55 | value: Buffer.from(testString + i)
56 | })
57 | }
58 | return level.batch(operations)
59 | })*/
60 | test('lmdb-write', () => {
61 | let last
62 | for (let i = 0; i < 10000; i++) {
63 | last= lmdb.put(Buffer.from((i % 1000).toString()), sampleBuffer)
64 | }
65 | return last
66 | })
67 | test('lmdb-read', () => {
68 | for (let i = 0; i < 10000; i++) {
69 | global.test = lmdb.get(Buffer.from((i % 1000).toString()), {
70 | onShareInvalidate(arg) {
71 | console.log('onShareInvalidate', arg)
72 | }
73 | })
74 | }
75 | })
76 | test('lmdb-read-write', () => {
77 | console.log('lmdb.get', lmdb.get.length)
78 | for (let i = 0; i < 10000; i++) {
79 | lmdb.put(Buffer.from((i % 1000).toString()), sampleBuffer)
80 | lmdb.get(Buffer.from((i % 1000).toString()))
81 | }
82 | })
83 | test('lmdb-batch', function() {
84 | let start = Date.now()
85 | this.timeout(10000)
86 | let operations = []
87 | for (let i = 0; i < 10000; i++) {
88 | operations.push({
89 | type: 'put',
90 | key: Buffer.from((i % 1000).toString()),
91 | value: sampleBuffer
92 | })
93 | }
94 | console.log('set up operations', Date.now() -start)
95 | let promise = lmdb.batch(operations)
96 | console.log('after batch', Date.now() -start)
97 | return promise.then(() => console.log('after commit', Date.now() -start))
98 | })
99 | })
100 |
--------------------------------------------------------------------------------
/src/http-server/media.ts:
--------------------------------------------------------------------------------
1 | import when from '../util/when.js'
2 | import { bufferStream } from '../util/bufferStream.js'
3 | import { jsonMediaType } from './JSONStream.js'
4 | import { textMediaType } from './text.js'
5 | import { htmlMediaType, sendResourceEditor } from './html.js'
6 | import { urlEncodedMediaType } from './url-encoded.js'
7 |
8 | export const mediaTypes = new Map()
9 | export function media(connection, next) {
10 | let request = connection.request
11 | if (connection.path.indexOf('cobase-resource-viewer') > -1) {
12 | return sendResourceEditor(connection)
13 | }
14 | let headers = request.headers
15 | const options = {
16 | charset: 'utf8'
17 | }
18 | const contentType = headers['content-type']
19 | if (contentType) {
20 | let [mimeType, optionsString] = contentType.split(/\s*;\s*/)
21 | if (optionsString) {
22 | optionsString.replace(/([^=]+)=([^;]+)/g, (t, name, value) =>
23 | options[name] = value)
24 | }
25 | let parser = mediaTypes.get(mimeType)
26 | if (!parser || (!parser.parse && !parser.handleRequest)) {
27 | if (headers['content-length'] == '0') {
28 | parser = EMPTY_MEDIA_PARSER
29 | } else {
30 | connection.status = 415
31 | connection.response.content = 'Unsupported media type ' + mimeType
32 | return
33 | }
34 | }
35 | if (parser.handleRequest) {
36 | return when(parser.handleRequest(connection), () =>
37 | when(next(), (returnValue) => serializer(returnValue, connection)))
38 | }
39 | return bufferStream(connection.req).then(data => {
40 | connection.request.data = parser.parse(data.toString(options.charset), connection)
41 | return when(next(), (returnValue) => serializer(returnValue, connection))
42 | })
43 | }
44 | return when(next(), (returnValue) => serializer(returnValue, connection))
45 | }
46 | function serializer(returnValue, connection) {
47 | returnValue = connection.data !== undefined ? connection.data :
48 | connection.response.data !== undefined ? connection.response.data : returnValue
49 | if (returnValue === undefined)
50 | return // nothing to serialize
51 | let requestHeaders = connection.request.headers
52 | let acceptHeader = connection.query?.accept || requestHeaders.accept || '*/*'
53 | let responseHeaders = connection.response.headers
54 | responseHeaders.vary = (responseHeaders.vary ? responseHeaders.vary + ',' : '') + 'Accept'
55 | let bestSerializer = jsonMediaType // default for now, TODO: return a 415
56 | let bestQuality = 0
57 | let bestType = 'application/json' // default
58 | let bestParameters
59 | const acceptTypes = acceptHeader.split(/\s*,\s*/);
60 | for (const acceptType of acceptTypes) {
61 | const [type, ...parameterParts] = acceptType.split(/\s*;\s*/)
62 | let clientQuality = 1
63 | const parameters = { q: 1 }
64 | for(const part of parameterParts) {
65 | const equalIndex = part.indexOf('=')
66 | parameters[part.substring(0, equalIndex)] = part.substring(equalIndex + 1)
67 | }
68 | clientQuality = +parameters.q
69 | const serializer = mediaTypes.get(type)
70 | if (serializer) {
71 | const quality = (serializer.q || 1) * clientQuality
72 | if (quality > bestQuality) {
73 | bestSerializer = serializer
74 | bestType = type
75 | bestQuality = quality
76 | bestParameters = parameters
77 | }
78 | }
79 | }
80 | if (connection.response.set) {
81 | connection.response.set('content-type', bestType.startsWith('text') ? bestType + '; charset=utf8' : bestType)
82 | connection.response.set('vary', 'Accept')
83 | } else {
84 | responseHeaders['content-type'] = bestType
85 | }
86 | try {
87 | connection.response.body = connection.response.content = bestSerializer.serialize(returnValue, connection, bestParameters)
88 | } catch (error) {
89 | console.error(error)
90 | connection.response.body = connection.response.content = 'Error serializing: ' + error.toString()
91 | }
92 | }
93 |
94 | mediaTypes.set('application/json', jsonMediaType)
95 | mediaTypes.set('text/plain', textMediaType)
96 | mediaTypes.set('text/html', htmlMediaType)
97 | mediaTypes.set('application/x-www-form-urlencoded', urlEncodedMediaType)
98 | const EMPTY_MEDIA_PARSER = {
99 | parse() {
100 | }
101 | }
102 |
103 |
--------------------------------------------------------------------------------
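A registration sketch for a custom type, following the shape of the built-in handlers (the text/csv handler is hypothetical): parse() receives the decoded request body, serialize() the handler's return value, and q is the server-side weight multiplied with the client's Accept q during negotiation.

    import { mediaTypes } from 'cobase'

    mediaTypes.set('text/csv', {
        q: 0.5,
        parse: (content) => content.split('\n').map(line => line.split(',')),
        serialize(data, connection, parameters) {
            return data.map(row => row.join(',')).join('\n')
        },
    })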
/client/resource-viewer.js:
--------------------------------------------------------------------------------
1 | /*dpack.fetch(location.href).then(function(response) {
2 | return response.text()
3 | }).then(function(text) {
4 | window.text = text
5 | //console.log(window.data = data)
6 | for (var i = 0; i < text.length; i++) {
7 | if (text.charCodeAt(i) !== data.charCodeAt(i)) {
8 | console.log('Differ at',i, text.charCodeAt(i), data.charCodeAt(i))
9 | break
10 | }
11 | }
12 | document.getElementById('details').append(renderExpandedObject(dpack.parse(text)))
13 | })*/
14 | var source = ''
15 | window.nextData = function(nextChunk) {
16 | source += nextChunk
17 | }
18 | window.finished = function() {
19 | document.getElementById('details').append(renderExpandedObject(window.data = dpack.parse(source)))
20 | }
21 | HTMLElement.prototype.append = alkali.append
22 |
23 | var Table = alkali.Table
24 | var TR = alkali.TR
25 | var TD = alkali.TD
26 | var THead = alkali.THead
27 | var TH = alkali.TH
28 | var Span = alkali.Span
29 | var Div = alkali.Div
30 | function renderExpandedObject(object) {
31 | var rows = []
32 | for (var key in object) {
33 | let value = object[key]
34 | rows.push(TR([
35 | TD('.expando-cell', [
36 | (value && typeof value === 'object') ? Div('.expando') : null
37 | ]),
38 | TD('.property', [
39 | Span([key, ': ']),
40 | Span('.value-summary', [renderValue(value)])
41 | ])
42 | ], {
43 | onclick: function(event) {
44 | event.stopPropagation()
45 | if (!this.hasExpanded) {
46 | this.hasExpanded = true
47 | this.append(TD('.property-expanded', {
48 | }, [ value.constructor === Array ?
49 | renderExpandedArray(value) : renderExpandedObject(value) ]))
50 | }
51 | if (this.expanded) {
52 | this.className = this.className.replace(/expanded/g, '') + ' collapsed'
53 | this.expanded = false
54 | } else {
55 | this.className = this.className.replace(/collapsed/g, '') + ' expanded'
56 | this.expanded = true
57 | }
58 | }
59 | }))
60 | }
61 | return Table(rows)
62 | }
63 | function renderExpandedArray(array) {
64 | var rows = []
65 | var columns = []
66 | var first = array[0]
67 | if (!first || typeof first != 'object') {
68 | return renderExpandedObject(array)
69 | }
70 | for (var key in first) {
71 | columns.push(key)
72 | }
73 | rows.push(THead([
74 | TR([TH()].concat(columns.map(column => TH([column]))))
75 | ]))
76 |
77 | for (var index in array) {
78 | let item = array[index]
79 | var cells = [
80 | TD('.expando-cell', [
81 | (item && typeof item === 'object') ? Div('.expando') : null
82 | ])]
83 | for (var key in item) {
84 | cells.push(TD('.property', [
85 | Span('.value-summary', [renderValue(item[key])])
86 | ]))
87 | }
88 | rows.push(TR(cells, {
89 | onclick: function(event) {
90 | event.stopPropagation()
91 | if (!this.hasExpanded) {
92 | this.hasExpanded = true
93 | this.append(TD('.property-expanded', {
94 | colSpan: columns.length
95 | }, [ renderExpandedObject(item) ]))
96 | }
97 | if (this.expanded) {
98 | this.className = this.className.replace(/expanded/g, '') + ' collapsed'
99 | this.expanded = false
100 | } else {
101 | this.className = this.className.replace(/collapsed/g, '') + ' expanded'
102 | this.expanded = true
103 | }
104 | }
105 | }))
106 | }
107 | return Table(rows)
108 | }
109 |
110 | function renderValue(value) {
111 | if (value && typeof value === 'object') {
112 | var description
113 | if (value.constructor == Array) {
114 | description = 'Array (' + value.length + ')'
115 | if (value.length === 1) {
116 | return description + ': ' + renderValue(value[0])
117 | } else if (value.length > 1) {
118 | var first = value[0]
119 | if (first && typeof first === 'object') {
120 | return description + ' of {' + Object.keys(first).join(', ') + '}'
121 | } else {
122 | return description + ': ' + value.join(', ').slice(0, 100)
123 | }
124 | }
125 | } else {
126 | description = '{'
127 | for (var key in value) {
128 | if (description) {
129 | description += ', '
130 | }
131 | description += key + ': ' + renderValue(value[key])
132 | if (description.length > 100) {
133 | description = description.slice(0, 100)
134 | break
135 | }
136 | }
137 | description += '}'
138 | return description
139 | }
140 | } else {
141 | return JSON.stringify(value)
142 | }
143 | }
144 |
--------------------------------------------------------------------------------
/src/ExpirationStrategy.ts:
--------------------------------------------------------------------------------
1 | // a prioritized least recently used cache replacement/expiration strategy
2 | // this is implemented as a probabilistic multi-step descent towards expiration, with prioritized
3 | // entry positions into the queue (allows repeatedly accessed, smaller objects
4 | // to expire slower)
5 |
6 | // number of entries in the cache. reduce this to reduce cache size (less memory)
7 | let CACHE_ENTRIES = 30000
8 | // this is the mid-point size in the cache (roughly half of cache entries will
9 | // be smaller, half larger). Reduce this to force large entries out faster (less memory)
10 | const NOMINAL_SIZE = 100
11 | // this is the speed of decay towards expiration. Reducing this will result in
12 | // more "accurate" expiration timing, but will also increase the overhead of the
13 | // algorithm
14 | const DECAY_RATE = 2.7
15 |
16 | const DECAY_INTERVAL = 10000
17 | const DECAY_REMOVAL = 10
18 | const PRIORITY = Symbol('priority')
19 | const EMPTY_SLOT = {
20 | set priority(priority) {
21 | },
22 | get priority() {
23 | return -1
24 | },
25 | }
26 |
27 | let offset = 0 // starting offset, shouldn't need to adjust
28 | class ExpirationStrategy {
29 | cache = []
30 |
31 | constructor() {
32 | // periodically clean out entries so they decay over time as well.
33 | setInterval(() => {
34 | for (let i = 0; i < DECAY_REMOVAL; i++) {
35 | this.useEntry(EMPTY_SLOT, Math.random())
36 | }
37 | }, DECAY_INTERVAL).unref()
38 | }
39 |
40 | useEntry(entity, size) {
41 | if (!isFinite(size)) {
42 | size = 100
43 | }
44 | let lastPriority = entity.priority
45 | if (lastPriority > -1) {
46 | // remove from old slot if it is currently in cache
47 | this.cache[entity.priority] = null
48 | }
49 | // define new priority
50 | // to prevent duplicate sizes from colliding, we add a revolving offset
51 | // this is an 8-bit revolving offset. We could add more bits if we wanted
52 | // to further reduce collisions, but minimizing offset bits actually helps leave "room"
53 | // for multi-occurence entries to stick around longer, and more offset bits could cause overlaps
54 | // in sizes which slightly reduces accuracy
55 | // offset = (offset + 157) & 255
56 | // calculate new priority/slot, placing large entries closer to expiration, smaller objects further from expiration
57 | let adjustedSize = size / NOMINAL_SIZE
58 | let priority = entity.priority = Math.floor(CACHE_ENTRIES / (1 +
59 | (lastPriority > -1 ?
60 | (adjustedSize + CACHE_ENTRIES / lastPriority) / 3 :
61 | adjustedSize)))
62 | while(entity) {
63 | // iteratively place entries in cache, pushing old entries closer to the end of the queue
64 | priority = entity.priority = Math.floor(priority / DECAY_RATE)
65 | if (priority == -1) {
66 | return
67 | }
68 | let entityToMove = this.cache[priority]
69 | this.cache[priority] = entity
70 | entity = entityToMove
71 | if (priority == 0 && entity) {
72 | // moved out of the queue, clear it from the cache
73 | entity.priority = -1 // keep priority a number type for more efficient class structure
74 | return
75 | }
76 | }
77 | }
78 | deleteEntry(entity) {
79 | if (entity.priority > -1) {
80 | // remove from old slot if it is currently in cache
81 | this.cache[entity.priority] = null
82 | }
83 | }
84 | isCached(entity) {
85 | return entity.priority > -1
86 | }
87 | getSnapshot() {
88 | let totalSize = 0
89 | let size
90 | return {
91 | entries: this.cache.map(entity => entity && ({
92 | id: entity.id,
93 | type: entity.constructor.name,
94 | size: (size = entity.approximateSize, (totalSize += (size || 1000)), size)
95 | })),
96 | totalSize
97 | }
98 | }
99 | set cachedEntrySize(size) {
100 | CACHE_ENTRIES = size
101 | }
102 | get cachedEntrySize() {
103 | return CACHE_ENTRIES
104 | }
105 | clearEntireCache() {
106 | // clear the entire cache. This is useful for finding memory leaks
107 | for (let i in this.cache) {
108 | let entry = this.cache[i]
109 | if (entry) {
110 | this.cache[i] = null
111 | }
112 | }
113 | }
114 | static defaultInstance = new ExpirationStrategy()
115 | }
116 | export default ExpirationStrategy
117 |
118 | /*
119 | This is the test I used to determine the optimal multiplier for spatial diversity:
120 | for (var multiplier = 0; multiplier < 255; multiplier++) {
121 | entries = []
122 | sum = 0
123 | for (var i = 0; i < 256; i++) {
124 | let n = ((i * multiplier) & 255)
125 | for (var j = 0; j < 128; j++) {
126 | if (entries[(n - j + 256) & 255] || entries[(n + j + 256) & 255]) {
127 | sum += j
128 | //console.log(j)
129 | break
130 | }
131 | }
132 | entries[n] = true
133 | }
134 | if (sum > 800) {
135 | console.log(multiplier, sum)
136 | }
137 | }
138 | */
--------------------------------------------------------------------------------
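A usage sketch of the default instance (the entry literal is hypothetical; any object with a numeric priority field works): useEntry() files an entry into a slot derived from its size, and every call probabilistically pushes older entries toward expiration.

    import ExpirationStrategy from './src/ExpirationStrategy'

    const strategy = ExpirationStrategy.defaultInstance
    const entry = { id: 10, priority: -1, approximateSize: 250 }

    strategy.useEntry(entry, entry.approximateSize) // larger sizes land closer to expiration
    strategy.isCached(entry)    // true while the entry still occupies a slot
    strategy.deleteEntry(entry) // explicit removal clears its slot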
/src/http-server/JSONStream.ts:
--------------------------------------------------------------------------------
1 | import { Readable } from 'stream'
2 | import alkali from 'alkali'; const { Context } = alkali
3 | import when from '../util/when.js'
4 | import { RequestContext } from '../RequestContext.js'
5 | const BUFFER_SIZE = 10000
6 | const COMMA = Buffer.from(',')
7 | const OPEN_CURLY = Buffer.from('{')
8 | const CLOSE_CURLY = Buffer.from('}')
9 | const OPEN_BRACKET = Buffer.from('[')
10 | const CLOSE_BRACKET = Buffer.from(']')
11 |
12 | // a readable stream for serializing a set of variables to a JSON stream
13 | export class JSONStream extends Readable {
14 | context: RequestContext
15 | buffer: {}[]
16 | bufferSize: number
17 | iterator: any
18 | id: number
19 | private _amReading: boolean; private done: boolean
20 |
21 | constructor(options) {
22 | // Calls the stream.Readable(options) constructor
23 | super(options)
24 | this.context = new RequestContext(null, options.session)
25 | this.context.preferJSON = true
26 | this.buffer = []
27 | this.bufferSize = 0
28 | this.iterator = this.serialize(options.value, true)
29 | this.id = Math.random()
30 | }
31 |
32 | *serialize(object, containsVariables?: boolean) {
33 | // using a generator to serialize JSON for convenience of recursive pause and resume functionality
34 | // serialize a value to an iterator that can be consumed by streaming API
35 | if (object && typeof object === 'object') {
36 | if (object.asJSON) {
37 | yield object.asJSON
38 | return
39 | }
40 | if (object[Symbol.iterator] && !object.then) {
41 | yield '['
42 | let first = true
43 | if (object[Symbol.asyncIterator] && !(object instanceof Array)) {
44 | let iterator = object[Symbol.asyncIterator]()
45 | let iteratorResult
46 | do {
47 | iteratorResult = iterator.next()
48 | if (iteratorResult.then) {
49 | yield iteratorResult.then(result => {
50 | iteratorResult = result
51 | return ''
52 | })
53 | }
54 | if (iteratorResult.done) {
55 | yield ']'
56 | return
57 | } else {
58 | if (first) {
59 | first = false
60 | } else {
61 | yield ','
62 | }
63 | yield* this.serialize(iteratorResult.value)
64 | }
65 | } while(true)
66 | }
67 | for (let element of object) {
68 | if (first) {
69 | first = false
70 | } else {
71 | yield ','
72 | }
73 | yield* this.serialize(element)
74 | }
75 | yield ']'
76 | return
77 | }
78 | containsVariables = containsVariables || object.containsVariables
79 | object = object.valueOf()
80 | if (!object)
81 | yield JSON.stringify(object)
82 | else if (object.then) {
83 | try {
84 | yield this.context.executeWithin(() =>
85 | object.then(object => this.serialize(object, containsVariables), handleError))
86 | } catch (error) {
87 | yield handleError(error)
88 | }
89 | } else if (object.asJSON) {
90 | yield object.asJSON
91 | } else if (containsVariables) {
92 | yield '{'
93 | let first = true
94 | for (let key in object) {
95 | if (first) {
96 | first = false
97 | } else {
98 | yield ','
99 | }
100 | yield JSON.stringify(key) + ':'
101 | yield* this.serialize(object[key])
102 | }
103 | yield '}'
104 | } else {
105 | yield JSON.stringify(object)
106 | }
107 | } else {
108 | yield JSON.stringify(object)
109 | }
110 | }
111 |
112 | _read() {
113 | if (this._amReading) {
114 | // I don't know why _read is called from within a push call, but if we are already reading, ignore the call
115 | return
116 | }
117 | this._amReading = true
118 | if (this.done) {
119 | return this.push(null)
120 | }
121 | when(this.readIterator(this.iterator), done => {
122 | if (done) {
123 | this.done = true
124 | this.push(null)
125 | } else {
126 | this._amReading = false
127 | }
128 | }, error => {
129 | console.error(error)
130 | this.done = true
131 | this.push(error.toString())
132 | this.push(null)
133 | })
134 | }
135 |
136 | push(content) {
137 | if (content === null || content instanceof Buffer) {
138 | if (this.bufferSize > 0)
139 | this.flush()
140 | return super.push(content)
141 | }
142 | this.bufferSize += content.length || content.toString().length
143 | this.buffer.push(content)
144 | if (this.bufferSize > BUFFER_SIZE) {
145 | return this.flush()
146 | }
147 | return true
148 | }
149 |
150 | flush() {
151 | let pushResult = super.push(this.buffer.join(''))
152 | this.buffer = []
153 | this.bufferSize = 0
154 | return pushResult
155 | }
156 |
157 | readIterator(iterator) {
158 | try { // eventually we should be able to just put this around iterator.next()
159 | let nextString
160 | if (iterator.childIterator) {
161 | // resuming in a child iterator
162 | return when(this.readIterator(iterator.childIterator), done => {
163 | if (done) {
164 | iterator.childIterator = null
165 | // continue on with the current iterator
166 | return this.readIterator(iterator)
167 | }
168 | })
169 | }
170 | do {
171 | let stepReturn = iterator.next()
172 | if (stepReturn.done) {
173 | return true
174 | }
175 | nextString = stepReturn.value
176 | if (nextString == null) {
177 | nextString = 'null'
178 | } else {
179 | if (nextString.then) {
180 | this.flush()
181 | return Promise.resolve(nextString).then((resolved) => {
182 | if (resolved && typeof resolved.return === 'function') {
183 | iterator.childIterator = resolved
184 | return this.readIterator(iterator)
185 | } else if (this.push(resolved + '')) {
186 | return this.readIterator(iterator)
187 | } // else return false
188 | })
189 | }
190 | if (typeof nextString.return === 'function') {
191 | iterator.childIterator = nextString
192 | return this.readIterator(iterator)
193 | }
194 | }
195 | } while (this.push(nextString))
196 | } catch (error) {
197 | console.error(error)
198 | this.push(error.toString())
199 | this.push(null)
200 | return true
201 | }
202 | }
203 | }
204 |
205 | function handleError(error) {
206 | console.error(error)
207 | return JSON.stringify(error.toString())
208 | }
209 |
210 | export const jsonMediaType = {
211 | q: 0.9, // prefer dpack
212 | parse: (content) => content.length > 0 ? JSON.parse(content) : undefined, // tolerate empty requests
213 | serialize(data, connection, parameters) {
214 | connection.response.headers['Transfer-Encoding'] = 'chunked'
215 | return new JSONStream({
216 | value: data,
217 | session: connection.session
218 | })
219 | }
220 | }
221 |
--------------------------------------------------------------------------------
/src/storage/level.ts:
--------------------------------------------------------------------------------
1 | import * as fs from 'fs-extra'
2 | import * as leveldown from 'leveldown'
3 | import ArrayLikeIterable from '../util/ArrayLikeIterable'
4 | import { Database } from './Database'
5 |
6 | const STARTING_ARRAY = [null]
7 | const AS_STRING = {
8 | asBuffer: false
9 | }
10 | export const allDbs = new Map()
11 | function genericErrorHandler(err) {
12 | if (err) {
13 | console.error(err)
14 | }
15 | }
16 |
17 | export function open(name): Database {
18 | let location = './' + name
19 | try {
20 | fs.statSync(location)
21 | } catch (error) {
22 | fs.mkdirsSync(location)
23 | }
24 | try {
25 | fs.removeSync(location + '/LOCK') // clean up any old locks
26 | } catch(e) {}
27 | let db = leveldown(location)
28 | db.openSync()
29 | const cobaseDb = {
30 | db,
31 | bytesRead: 0,
32 | bytesWritten: 0,
33 | reads: 0,
34 | writes: 0,
35 | getSync(id, asBuffer) {
36 | try {
37 | let result = db.getSync(id, asBuffer ? undefined : AS_STRING)
38 | this.bytesRead += result && result.length || 1
39 | this.reads++
40 | return (asBuffer && result) ? Buffer.from(result) : result
41 | } catch (error) {
42 | if (error.message.startsWith('NotFound')) {
43 | return
44 | } else {
45 | throw error
46 | }
47 | }
48 | },
49 | get(id) {
50 | if (this.reads % Math.round(Math.min(this.reads * 100 / (this.bytesRead + 1) + 1, 5)) !== 0)
51 | 				return this.getSync(id, true) // heuristically use a sync get (faster for small values) when past reads have returned very small values on average
52 | return new Promise((resolve, reject) => {
53 | this.reads++
54 | let callback = (err, value) => {
55 | if (err) {
56 | if (err.message.startsWith('NotFound')) {
57 | resolve(null)
58 | } else {
59 | if (err.message.indexOf('Corruption') == 0) {
60 | alterDatabase('repair')
61 | }
62 | console.error('error', err, db.location)
63 | if (err.message.indexOf('not open') > 0) {
64 | // process.exit()
65 | }
66 | reject(err)
67 | }
68 | } else {
69 | this.bytesRead += value && value.length || 1
70 | resolve(value)
71 | }
72 | }
73 | db.get(id, callback)
74 | })
75 | },
76 | putSync(id, value) {
77 | this.bytesWritten += value && value.length || 0
78 | this.writes++
79 | db.putSync(id, value)
80 | },
81 | put(id, value) {
82 | this.bytesWritten += value && value.length || 0
83 | this.writes++
84 | return new Promise((resolve, reject) => {
85 | let callbacks = []
86 | db.put(id, value, (err, value) => {
87 | if (err) {
88 | if (err.message.indexOf('Corruption') == 0) {
89 | alterDatabase('repair')
90 | }
91 | reject(err)
92 | } else {
93 | resolve(value)
94 | }
95 | })
96 | })
97 | },
98 | remove(id) {
99 | this.writes++
100 | return new Promise((resolve, reject) => {
101 | db.del(id, (err, value) => {
102 | if (err) {
103 | if (err.notFound) {
104 | resolve(null)
105 | } else {
106 | if (err.message.indexOf('Corruption') == 0) {
107 | alterDatabase('repair')
108 | }
109 | reject(err)
110 | }
111 | } else {
112 | resolve(value)
113 | }
114 | })
115 | })
116 | },
117 | removeSync(id) {
118 | this.writes++
119 | return db.delSync(id)
120 | },
121 | iterator(options) {
122 | return db.iterator(options)
123 | },
124 | iterable(options) {
125 | let iterable = new ArrayLikeIterable()
126 | iterable[Symbol.iterator] = (async) => {
127 | options.valueAsBuffer = false
128 | let iterator = db.iterator(options)
129 | let array = STARTING_ARRAY
130 | let i = 1
131 | let finished
132 | return {
133 | next() {
134 | let length = array.length
135 | if (i === length) {
136 | if (finished || i === 0) {
137 | if (!this.ended) {
138 | this.ended = true
139 | iterator.binding.end(genericErrorHandler)
140 | }
141 | return { done: true }
142 | } else {
143 | if (async) {
144 | return new Promise((resolve, reject) =>
145 | iterator.binding.next((err, nextArray, nextFinished) => {
146 | cobaseDb.reads++
147 | if (err) {
148 | reject(err)
149 | } else {
150 | array = nextArray
151 | finished = nextFinished
152 | i = 0
153 | resolve(this.next())
154 | }
155 | }))
156 | } else {
157 | console.log('calling nextSync')
158 | array = iterator.binding.nextSync()
159 | console.log('finished nextSync', array.length)
160 | i = 0
161 | finished = array.finished // defined as a property on the sync api
162 | return this.next()
163 | }
164 | }
165 | }
166 | let key = array[length - ++i]
167 | let value = array[length - ++i]
168 | cobaseDb.bytesRead += value && value.length || 0
169 | return {
170 | value: {
171 | key, value
172 | }
173 | }
174 | },
175 | return() {
176 | console.log('return called on iterator', this.ended)
177 | if (!this.ended) {
178 | this.ended = true
179 | iterator.binding.end(genericErrorHandler)
180 | }
181 | return { done: true }
182 | },
183 | throw() {
184 | console.log('throw called on iterator', this.ended)
185 | if (!this.ended) {
186 | this.ended = true
187 | iterator.binding.end(genericErrorHandler)
188 | }
189 | return { done: true }
190 | }
191 | }
192 | }
193 | return iterable
194 | },
195 | iterateSync(options, callback) {
196 | // This currently causes Node to crash
197 | if (!leveldown.fixed)
198 | throw new Error('Unstable function')
199 | options.keyAsBuffer = false
200 | options.valueAsBuffer = false
201 | let iterator = db.iterator(options)
202 | let nextResult
203 |
204 | while ((nextResult = iterator.nextSync()).length > 0) {
205 | if (options.gt == '0')
206 | console.log('next returned',nextResult)
207 | for (let i = 0, l = nextResult.length; i < l;) {
208 | let value = nextResult[i++]
209 | let key = nextResult[i++]
210 | callback(key, value)
211 | }
212 | }
213 | if (options.gt == '0')
214 | console.log('end')
215 | // clean up iterator
216 | iterator.endSync()
217 | },
218 | batchSync(operations) {
219 | return db.batchSync(operations)
220 | },
221 | batch(operations) {
222 | this.writes += operations.length
223 | this.bytesWritten += operations.reduce((a, b) => a + (b.value && b.value.length || 0), 0)
224 | return new Promise((resolve, reject) => {
225 | db.batch(operations, (err, value) => {
226 | if (err) {
227 | if (err.message.indexOf('Corruption') == 0) {
228 | alterDatabase('repair')
229 | }
230 | reject(err)
231 | } else {
232 | resolve(value)
233 | }
234 | })
235 | })
236 | },
237 | close() {
238 | return new Promise((resolve, reject) =>
239 | db.close((err, value) => {
240 | if (err)
241 | reject(err)
242 | else
243 | resolve()
244 | }))
245 | },
246 | clear() {
247 | console.log('clearing db', db.location)
248 | return this.iterable({
249 | values: false,
250 | }).map(({ key }) => ({
251 | key,
252 | type: 'del'
253 | })).asArray.then(operations => {
254 | console.log('deleting', operations.length, 'entries')
255 | return this.batch(operations)
256 | })
257 | }
258 | }
259 | allDbs.set(name, cobaseDb)
260 | return cobaseDb
261 |
262 | function alterDatabase(action) {
263 | if (db.repairing) {
264 | return db.repairing
265 | }
266 | let location = db.location
267 | console.info(action + 'ing database at ' + location)
268 | return db.repairing = new Promise((resolve, reject) => {
269 | // suspend all activity on the db
270 | let queued = []
271 | let originalGet = db.get
272 | db.get = function(...args) {
273 | console.log('queueing get', location)
274 | queued.push(() => {
275 | console.log('finishing get')
276 | db.get(...args)
277 | })
278 | }
279 | let originalPut = db.put
280 | db.put = function(...args) {
281 | console.log('queueing put', location)
282 | queued.push(() => {
283 | console.log('finishing put')
284 | db.put(...args)
285 | })
286 | }
287 | let originalDel = db.del
288 | db.del = function(...args) {
289 | console.log('queueing del', location)
290 | queued.push(() => {
291 | console.log('finishing del')
292 | db.del(...args)
293 | })
294 | }
295 | let originalBatch = db.batch
296 | db.batch = function(...args) {
297 | console.log('queueing batch', location)
298 | queued.push(() => {
299 | console.log('finishing batch')
300 | db.batch(...args)
301 | })
302 | }
303 | // close it down
304 | db.close((error) => {
305 | if (error) {
306 | console.error('Error closing db', error)
307 | }
308 | // do the repair
309 | leveldown[action](location, (err) => {
310 | if (err) {
311 | console.error('Failed to ' + action + ' database at ' + location, err)
312 | } else {
313 | console.info('Finished ' + action + 'ing database at ' + location)
314 | }
315 | db.open((error) => {
316 | if (error) {
317 | console.error('Error opening db', error)
318 | reject(error)
319 | }
320 | // resume
321 | db.repairing = false
322 | console.info('Resuming database operations at ' + location)
323 | db.get = originalGet
324 | db.put = originalPut
325 | db.batch = originalBatch
326 | db.del = originalDel
327 | for (let action of queued) {
328 | action()
329 | }
330 | resolve()
331 | })
332 | })
333 | })
334 | })
335 | }
336 | }
337 |
--------------------------------------------------------------------------------
/src/util/process.ts:
--------------------------------------------------------------------------------
1 | import { fork } from 'child_process'
2 | import when from './when.js'
3 | import * as net from 'net'
4 | import * as path from 'path'
5 | import { PackrStream, UnpackrStream } from 'msgpackr'
6 | import * as alkali from 'alkali'; const { spawn, UpdateEvent, currentContext } = alkali.Variable ? alkali : alkali.default
7 |
8 | let pipeServerStarted
9 | const classMap = new Map()
10 | const streamByPidClass = new Map()
11 | const streamsByClass = new Map()
12 | const waitingRequests = new Map()
13 | const whenProcessConnected = new Map<number, Promise<any>>() // process id -> promise of a connected serializing stream
14 |
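 | // per-process pipe path: a unix domain socket under /tmp, or a named pipe on Windows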
15 | const getPipePath = (processId) => path.join(path.sep == '/' ? '/tmp' : '\\\\?\\pipe', 'cobase-' + processId)
16 | let nextRequestId = 1
17 |
18 | function startPipeClient(processId, Class) {
19 | let whenConnected
20 | if (whenProcessConnected.has(processId)) {
21 | whenConnected = whenProcessConnected.get(processId)
22 | } else {
23 | whenConnected = new Promise((resolve, reject) => {
24 | const tryToConnect = (retries) => {
25 | const socket = net.createConnection(getPipePath(processId))
26 | let parsedStream = socket.pipe(new UnpackrStream()).on('error', (error) => {
27 | console.error('Error in pipe client socket', error)
28 | })
29 | let serializingStream = new PackrStream()
30 | serializingStream.pipe(socket)
31 | serializingStream.pid = processId
32 | let connected
33 | socket.on('error', (error) => {
34 | if (connected)
35 | console.error(error) // shouldn't happen after a connection
36 | else {
37 | if (retries > 2)
38 | reject(error)
39 | else
40 | setTimeout(() => {
41 | tryToConnect(retries + 1)
42 | }, 1500)
43 | }
44 | }).on('connect', () => {
45 | connected = true
46 | console.debug('Connected to process', processId)
47 | resolve(serializingStream)
48 | })
49 | socket.on('close', (event) => {
50 | serializingStream.emit('close', event)
51 | })
52 | socket.unref()
53 | parsedStream.on('data', (message) => {
54 | onMessage(message, serializingStream)
55 | })
56 | }
57 | tryToConnect(0)
58 | })
59 | whenProcessConnected.set(processId, whenConnected)
60 | }
61 | return whenConnected.then(stream => {
62 | attachClass(stream, Class, processId)
63 | // declare this class listens on this stream, by sending out a process identification
64 | stream.write({
65 | className: Class.name,
66 | pid: process.pid
67 | })
68 | })
69 | }
70 |
71 |
72 | function startPipeServer() {
73 | if (pipeServerStarted)
74 | return
75 | pipeServerStarted = true
76 | net.createServer((socket) => {
77 | socket.pipe(new UnpackrStream()).on('data', (message) => {
78 | onMessage(message, serializingStream)
79 | })
80 | let serializingStream = new PackrStream()
81 | socket.on('close', (event) => {
82 | serializingStream.emit('close', event)
83 | })
84 | serializingStream.pipe(socket)
85 | serializingStream.isIncoming = true
86 | }).on('error', (err) => {
87 | // handle errors here
88 | throw err;
89 | }).listen(getPipePath(process.pid))
90 | }
91 |
92 | function attachClasses(stream) {
93 | for (const [className, Class] of classMap) {
94 | attachClass(stream, Class, className)
95 | }
96 | }
97 | function attachClass(stream, Class, processId) {
98 | stream.pid = processId
99 | const className = Class.name
100 | let streams = streamsByClass.get(className)
101 | if (!streams) {
102 | streamsByClass.set(className, streams = [])
103 | }
104 | streams.push(stream)
105 | streamByPidClass.set(processId + '-' + className, stream)
106 | const otherProcesses = Class.otherProcesses || (Class.otherProcesses = [])
107 | if (!otherProcesses.includes(processId) && processId !== process.pid) {
108 | otherProcesses.push(processId)
109 | }
110 | let queue = []
111 | let timeoutToWrite
112 | const updater = {
113 | updated(event, by) {
114 | // TODO: debounce
115 | //console.log('sending update event', className, process.pid)
116 | let id = by && by.id
117 | if (id && event.source && by.constructor === event.source.constructor && !event.sourceProcess) {
118 | try {
119 | const eventToSerialize = Object.assign({}, event, {
120 | instanceId: id,
121 | method: 'updated',
122 | className,
123 | type: event.type,
124 | })
125 | delete eventToSerialize.visited
126 | delete eventToSerialize.source
127 | if (eventToSerialize.sources) {
128 | eventToSerialize.sources = Array.from(eventToSerialize.sources).map(source => ({
129 | id: source.id,
130 | typeName: source.constructor.name,
131 | }))
132 | }
133 | delete eventToSerialize.previousValues
134 | delete eventToSerialize.target
135 | delete eventToSerialize.oldValue
136 | delete eventToSerialize.whenWritten
137 | when(event.whenWritten, () => {
138 | queue.push(eventToSerialize)
139 | if (!timeoutToWrite)
140 | timeoutToWrite = setTimeout(() => {
141 | timeoutToWrite = null
142 | stream.write(queue)
143 | queue = []
144 | }, 10)
145 | })
146 | } catch(error) {
147 | // TODO: Not sure how we get in this state
148 | console.warn(error)
149 | Class.stopNotifies(updater)
150 | }
151 | }
152 | },
153 | stream,
154 | Class
155 | }
156 | Class.notifies(updater)
157 | Class.sendBroadcast = notification => {
158 | for (const stream of streams) {
159 | notification.className = className
160 | stream.write(notification)
161 | }
162 | }
163 | Class.sendRequestToProcess = (pid, message) => {
164 | const requestId = message.requestId = nextRequestId++
165 | message.className = Class.name
166 | const stream = streamByPidClass.get(pid + '-' + className)
167 | if (!stream) {
168 | return // TODO: If it is undefined wait for a connection
169 | }
170 | stream.write(message)
171 | return new Promise((resolve, reject) => waitingRequests.set(requestId, { resolve, reject }))
172 | }
173 | Class.sendRequestToAllProcesses = (message) => {
174 | message.className = Class.name
175 | return Promise.all(streams.map(stream => {
176 | const requestId = message.requestId = nextRequestId++
177 | stream.write(message)
178 | return new Promise((resolve, reject) => waitingRequests.set(requestId, { resolve, reject }))
179 | }))
180 | }
181 | stream.setMaxListeners(1000) // we are going to be adding a lot here
182 | stream.on('close', () => {
183 | Class.stopNotifies(updater)
184 | streams.splice(streams.indexOf(stream), 1)
185 | let otherProcessIndex = otherProcesses.indexOf(processId)
186 | if (otherProcessIndex > -1)
187 | otherProcesses.splice(otherProcessIndex, 1)
188 | streamByPidClass.delete(processId + '-' + className)
189 | })
190 | }
191 |
192 | function onMessage(message, stream) {
193 | try {
194 | if (message instanceof Array) {
195 | for (let part of message)
196 | onMessage(part, stream)
197 | return
198 | }
199 | const { requestId, responseId, className, instanceId } = message
200 |
201 | if (responseId) {
202 | const resolver = waitingRequests.get(responseId)
203 | waitingRequests.delete(responseId)
204 | return resolver.resolve(message)
205 | }
206 | let target = classMap.get(className)
207 | if (target) {
208 | if (requestId) {
209 | try {
210 | when(target.receiveRequest(message), (result) => {
211 | result = result || {}
212 | result.responseId = requestId
213 | stream.write(result)
214 | }, (error) => {
215 | stream.write({
216 | error,
217 | responseId: requestId
218 | })
219 | })
220 | } catch (error) {
221 | stream.write({
222 | error,
223 | responseId: requestId
224 | })
225 | }
226 | } else {
227 | if (message.type) {
228 | const event = new UpdateEvent()
229 | event.sourceProcess = stream.pid
230 | event.source = target
231 | Object.assign(event, message)
232 | if (message.sources) {
233 | event.sources = message.sources.map(source => ({
234 | id: source.id,
235 | constructor: classMap.get(source.typeName)
236 | }))
237 | }
238 | target.updated(event, instanceId && { id: instanceId })
239 | } else if (message.pid) {
240 | attachClass(stream, target, message.pid)
241 | } else {
242 | target.update(message)
243 | }
244 | }
245 | } else {
246 | console.warn('Unknown message received', message)
247 | }
248 | } catch(error) {
249 | console.error('Handling message error', error)
250 | }
251 | }
252 |
253 | export function registerClass(Class) {
254 | startPipeServer() // Maybe start it in the next event turn so you can turn it off in single process environment?
255 | classMap.set(Class.name, Class)
256 | }
257 |
258 | export function addProcess(pid, Class) {
259 | return startPipeClient(pid, Class)
260 | }
261 |
262 | /*function onCloseSocket(stream, processId) {
263 | const pid = stream.pid
264 | let index = streams.indexOf(stream)
265 | if (index > -1)
266 | streams.splice(index, 1)
267 | let removed = 0
268 | for (let updater of updaters) {
269 | if (updater.stream === stream) {
270 | //console.log('stop notifications for', process.pid, 'from', pid)
271 | removed++
272 | updater.Class.stopNotifies(updater)
273 | index = updaters.indexOf(updater)
274 | if (index > -1)
275 | updaters.splice(index, 1)
276 | }
277 | }
278 | console.log('socket close from', process.pid, 'to', processId, pid, 'removed updaters', removed)
279 | 	streamByPid.set(pid, null) // set it to null, so we know it once existed and is dead
280 | }
281 | */
282 | /*
283 | // every child process should be ready to join the network
284 | process.on('message', (data) => {
285 | if (data.enterNetwork) {
286 | console.log('Received request to start pipe server')
287 | // create pipe server
288 | startPipeServer()
289 | // need to send confirmation that it is set up.
290 | process.send({
291 | enteredNetwork: true
292 | })
293 | } else if (data.connectToProcess) {
294 | startPipeClient(data.connectToProcess)
295 | }
296 | })
297 | */
298 |
--------------------------------------------------------------------------------
/src/Reduced.ts:
--------------------------------------------------------------------------------
1 | import { Cached, Persisted } from './Persisted.js'
2 | import when from './util/when.js'
 | import { toBufferKey, fromBufferKey } from 'ordered-binary' // key encoding helpers used below (assumed import source)
 | import { serialize, parse, parseLazy, createParser, createSerializer } from 'dpack' // value serialization helpers used below (assumed import source)
3 | const INVALIDATED_VALUE = Buffer.from([])
4 | const SEPARATOR_BYTE = Buffer.from([30]) // record separator control character
5 | const REDUCED_INDEX_PREFIX_BYTE = Buffer.from([3])
6 | const CHILDREN = 2
7 |
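 | // Reduced maintains a tree of partial reduction nodes (splitting a node every CHILDREN
 | // entries), so updating a single index entry only requires O(log n) re-reduction rather than a full rescan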
8 | export class Reduced extends Cached {
9 | static startingValue = undefined
10 | /**
11 | * This defines the reduce function that accumulates index entry values into a single value
12 | */
13 | reduceBy(a, b) {
14 | return null
15 | }
16 |
17 | 	static derivedFrom(...sources: Array<any>) {
18 | for (let source of sources) {
19 | if (source.notifies) {
20 | if (!this.sources)
21 | this.sources = []
22 | this.sources.push(source)
23 | } else if (typeof source === 'function') {
24 | this.reduceBy = source
25 | } else {
26 | Object.assign(this, source)
27 | }
28 | }
29 | this.start()
30 | }
31 | // rewrite the source to be the computed reduced value
32 | // this allows transform to still execute using the result
33 | get source() {
34 | return this.reducingSource || (this.reducingSource = {
35 | valueOf: this.getReducedEntry.bind(this)
36 | })
37 | }
38 |
39 | set source(source) {
40 | this.indexSource = source
41 | }
42 |
43 | getReducedEntry() {
44 | return this.transaction(async (db, put) => {
45 | if (this.rootLevel > -1) {
46 | const indexKey = toBufferKey(this.id)
47 | const { split, noChildren, accumulator } = await this.reduceRange(this.rootLevel, Buffer.from([1]), Buffer.from([255]), put)
48 | if (split) // splitting the root node, just bump up the level number
49 | this.rootLevel++
50 | else if (noChildren) {
51 | // if all children go away, return to a root level of 1
52 | // we don't ever incrementally reduce depth, and if we are decreasing children,
52 | 		// we can represent a single child with an arbitrarily deep single-child-at-every-level
54 | // tree
55 | this.rootLevel = 1
56 | }
57 | // now it should be written to the node
58 | // this should be done by Cached: Class.dbPut(this.id, version + ',' + this.rootLevel + ',' + serialize(accumulator))
59 | return accumulator
60 | }
61 | })
62 | }
63 |
64 | async reduceRange(level, rangeStartKey: Buffer, rangeEndKey: Buffer, put) {
65 | let iterator
66 | const Class = this.constructor
67 | const db = Class.db
68 | const indexBufferKey = toBufferKey(this.id)
69 | if (level === 1) {
70 | // leaf-node, go to source index
71 | iterator = this.indexSource._getIndexedValues({
72 | start: Buffer.concat([indexBufferKey, SEPARATOR_BYTE, rangeStartKey]),
73 | end: Buffer.concat([indexBufferKey, SEPARATOR_BYTE, rangeEndKey]),
74 | }, true)[Symbol.iterator]()
75 | } else {
76 | // mid-node, use our own nodes/ranges here
77 | iterator = db.getRange({
78 | start: Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level - 1]), indexBufferKey, SEPARATOR_BYTE, rangeStartKey]),
79 | end: Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level - 1]), indexBufferKey, SEPARATOR_BYTE, rangeEndKey]),
80 | reverse: false,
81 | }).map(({ key, value }) => {
82 | let [, startKey, endKey] = fromBufferKey(key.slice(2), true)
83 | return {
84 | level: key[1],
85 | key: startKey,
86 | endKey,
87 | value: value.length > 0 ? parse(value) : INVALIDATED_VALUE,
88 | }
89 | })[Symbol.iterator]()
90 | }
91 | let next
92 | let version = Date.now()
93 | let firstOfSection = true
94 | let split = false
95 | let lastDividingKey = rangeStartKey
96 | let accumulator
97 | let totalAccumulator
98 | let childrenProcessed = 0 // asynchronously iterate
99 | while(!(next = iterator.next()).done) {
100 | let { key, endKey, value } = next.value
101 | if (value && value.then) // if the index has references to variables, need to resolve them
102 | value = await value
103 |
104 | childrenProcessed++
105 | if (childrenProcessed > CHILDREN) {
106 | childrenProcessed = 0
107 | let nextDividingKey = endKey || key
108 | put(Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level]), indexBufferKey, SEPARATOR_BYTE, lastDividingKey, SEPARATOR_BYTE, lastDividingKey = toBufferKey(nextDividingKey)]),
109 | serialize(accumulator))
110 | if (!split)
111 | totalAccumulator = accumulator // start with existing accumulation
112 | else
113 | totalAccumulator = this.reduceBy(totalAccumulator, accumulator)
114 | split = true
115 | firstOfSection = true
116 | }
117 |
118 | if (value == INVALIDATED_VALUE) {
119 | const result = await this.reduceRange(level - 1, toBufferKey(key), toBufferKey(endKey), put)
120 | value = result.accumulator
121 | put(Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level - 1]), indexBufferKey, SEPARATOR_BYTE, toBufferKey(key), SEPARATOR_BYTE, toBufferKey(endKey)]),
122 | result.split || result.noChildren ?
123 | undefined :// if it is a split, we have to remove the existing node
124 | serialize(value)) // otherwise write our value
125 | if (result.noChildren) {
126 | continue
127 | }
128 | }
129 | if (firstOfSection) {
130 | accumulator = value
131 | } else {
132 | accumulator = await this.reduceBy(accumulator, value)
133 | }
134 | firstOfSection = false
135 | }
136 | // store the last accumulated value if we are splitting
137 | if (split) {
138 | put(Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level]), indexBufferKey, SEPARATOR_BYTE, lastDividingKey, SEPARATOR_BYTE, rangeEndKey]),
139 | serialize(accumulator))
140 | // do one final merge of the sectional accumulator into the total to determine what to return
141 | accumulator = await this.reduceBy(totalAccumulator, accumulator)
142 | }
143 | return { split, accumulator, version, noChildren: !split && firstOfSection }
144 | }
145 |
146 | updated(event) {
147 | if (event && event.sources) {
148 | for (let source of event.sources) {
149 | this.invalidateEntry(source.id, event.version)
150 | }
151 | } else {
152 | // delete the entire tree
153 | let level = 1
154 | let hasEntries
155 | do {
156 | const indexBufferKey = toBufferKey(this.id)
157 | const Class = this.constructor
158 | const db = Class.db
159 | const iterator = db.getRange({
160 | start: Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level]), indexBufferKey, SEPARATOR_BYTE, Buffer.from([1])]),
161 | end: Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([level]), indexBufferKey, SEPARATOR_BYTE, Buffer.from([255])]),
162 | reverse: false,
163 | })[Symbol.iterator]()
164 | let next
165 | hasEntries = false
166 | while(!(next = iterator.next()).done) {
167 | db.remove(next.value.key)
168 | hasEntries = true
169 | }
170 | level++
171 | }
172 | while (hasEntries)
173 |
174 | }
175 | if (!this.rootLevel)
176 | this.rootLevel = 1
177 | super.updated(event)
178 | }
179 |
180 | invalidateEntry(sourceKey, version) {
181 | return this.transaction(async (db, put) => {
182 | // get the computed entry so we know how many levels we have
183 | let level = this.rootLevel
184 | if (!level) {
185 | let data = this.constructor.db.get(toBufferKey(this.id))
186 | if (data) {
187 | const parser = createParser()
188 | 					parser.setSource(data.slice(0, 28).toString(), 0) // the lazy version only reads the first few bytes to get the version
189 | const version = parser.read()
190 | level = parser.hasMoreData() && parser.read()
191 | } else {
192 | return // no entry, no levels
193 | }
194 | }
195 | for (let i = 1; i < level; i++) {
196 | let sourceKeyBuffer = toBufferKey(sourceKey)
197 | let [ nodeToInvalidate ] = await db.getRange({
198 | start: Buffer.concat([REDUCED_INDEX_PREFIX_BYTE, Buffer.from([i]), toBufferKey(this.id), SEPARATOR_BYTE, sourceKeyBuffer, Buffer.from([255])]),
199 | values: false,
200 | reverse: true,
201 | limit: 1,
202 | }).asArray
203 | put(nodeToInvalidate.key, INVALIDATED_VALUE)
204 | }
205 | // this should happen in the super.updated call
206 | //put(this.id, version + ',' + level)
207 | })
208 | 		// rebalancing nodes will take place when we do the actual reduce operation
209 | }
210 | parseEntryValue(buffer) {
211 | if (buffer) {
212 | const parser = createParser()
213 | 			parser.setSource(buffer.slice(0, 28).toString(), 0) // the lazy version only reads the first few bytes to get the version
214 | const version = parser.read()
215 | this.rootLevel = parser.hasMoreData() && parser.read()
216 | if (parser.hasMoreData()) {
217 | return {
218 | version,
219 | data: parseLazy(buffer.slice(parser.getOffset()), parser),
220 | buffer,
221 | }
222 | } else {
223 | // stored as an invalidated version
224 | return {
225 | version,
226 | buffer,
227 | }
228 | }
229 | } else {
230 | return {}
231 | }
232 | }
233 | serializeEntryValue(version, object) {
234 | const serializer = createSerializer()
235 | serializer.serialize(version)
236 | serializer.serialize(this.rootLevel || 1)
237 | if (object)
238 | serializer.serialize(object)
239 | return serializer.getSerialized()
240 | }
241 |
242 | transaction(action) {
243 | const Class = this.constructor
244 | const db = Class.db
245 | return this.currentTransaction = when(this.currentTransaction, () => {
246 | let operations = []
247 | const put = (key, value) => {
248 | operations.push({
249 | type: value === undefined ? 'del' : 'put',
250 | key,
251 | value
252 | })
253 | }
254 | let result = action(db, put)
255 | return result.then((result) => {
256 | return when(db.batch(operations),
257 | () => {//this.currentTransaction = null
258 | return result
259 | },
260 | (error) => {
261 | console.error(error)
262 | //this.currentTransaction = null
263 | return result
264 | })
265 | })
266 | //return result
267 | })
268 | }
269 | }
270 |
271 | Cached.reduce = Persisted.reduce = function(name: string, reduceFunction: (accumulator, nextValue) => any) {
272 | let reduced = this['reduced-' + name]
273 | if (reduced) {
274 | return reduced
275 | }
276 | reduced = this['reduced-' + name] = class extends Reduced.from(this) {
277 | static reduceBy(a, b) {
278 | return reduceFunction.call(this, a, b)
279 | }
280 | }
281 | Object.defineProperty(reduced, 'name', { value: this.name + '-reduced-' + name })
282 | return reduced
283 | }
284 |
--------------------------------------------------------------------------------
/yarn.lock:
--------------------------------------------------------------------------------
1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
2 | # yarn lockfile v1
3 |
4 |
5 | "@types/estree@*":
6 | version "0.0.50"
7 | resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.50.tgz#1e0caa9364d3fccd2931c3ed96fdbeaa5d4cca83"
8 | integrity sha512-C6N5s2ZFtuZRj54k2/zyRhNDjJwwcViAM3Nbm8zjBpbqAdZ00mr0CFxvSKeO8Y/e03WVFLpQMdHYVfUd6SB+Hw==
9 |
10 | "@types/node@*", "@types/node@latest":
11 | version "16.11.6"
12 | resolved "https://registry.yarnpkg.com/@types/node/-/node-16.11.6.tgz#6bef7a2a0ad684cf6e90fcfe31cecabd9ce0a3ae"
13 | integrity sha512-ua7PgUoeQFjmWPcoo9khiPum3Pd60k4/2ZGXt18sm2Slk0W0xZTqt5Y0Ny1NyBiN1EVQ/+FaF9NcY4Qe6rwk5w==
14 |
15 | acorn@^7.1.0:
16 | version "7.4.1"
17 | resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
18 | integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
19 |
20 | assertion-error@^1.1.0:
21 | version "1.1.0"
22 | resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b"
23 | integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==
24 |
25 | at-least-node@^1.0.0:
26 | version "1.0.0"
27 | resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
28 | integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
29 |
30 | balanced-match@^1.0.0:
31 | version "1.0.2"
32 | resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
33 | integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
34 |
35 | brace-expansion@^1.1.7:
36 | version "1.1.11"
37 | resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
38 | integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
39 | dependencies:
40 | balanced-match "^1.0.0"
41 | concat-map "0.0.1"
42 |
43 | browser-stdout@1.3.1:
44 | version "1.3.1"
45 | resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.1.tgz#baa559ee14ced73452229bad7326467c61fabd60"
46 | integrity sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==
47 |
48 | chai@^4:
49 | version "4.3.4"
50 | resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.4.tgz#b55e655b31e1eac7099be4c08c21964fce2e6c49"
51 | integrity sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==
52 | dependencies:
53 | assertion-error "^1.1.0"
54 | check-error "^1.0.2"
55 | deep-eql "^3.0.1"
56 | get-func-name "^2.0.0"
57 | pathval "^1.1.1"
58 | type-detect "^4.0.5"
59 |
60 | check-error@^1.0.2:
61 | version "1.0.2"
62 | resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82"
63 | integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=
64 |
65 | commander@2.15.1:
66 | version "2.15.1"
67 | resolved "https://registry.yarnpkg.com/commander/-/commander-2.15.1.tgz#df46e867d0fc2aec66a34662b406a9ccafff5b0f"
68 | integrity sha512-VlfT9F3V0v+jr4yxPc5gg9s62/fIVWsd2Bk2iD435um1NlGMYdVCq+MjcXnhYq2icNOizHr1kK+5TI6H0Hy0ag==
69 |
70 | concat-map@0.0.1:
71 | version "0.0.1"
72 | resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
73 | integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
74 |
75 | debug@3.1.0:
76 | version "3.1.0"
77 | resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
78 | integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==
79 | dependencies:
80 | ms "2.0.0"
81 |
82 | deep-eql@^3.0.1:
83 | version "3.0.1"
84 | resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df"
85 | integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==
86 | dependencies:
87 | type-detect "^4.0.0"
88 |
89 | diff@3.5.0:
90 | version "3.5.0"
91 | resolved "https://registry.yarnpkg.com/diff/-/diff-3.5.0.tgz#800c0dd1e0a8bfbc95835c202ad220fe317e5a12"
92 | integrity sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==
93 |
94 | escape-string-regexp@1.0.5:
95 | version "1.0.5"
96 | resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"
97 | integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=
98 |
99 | fs-extra@^9.0.0:
100 | version "9.1.0"
101 | resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d"
102 | integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==
103 | dependencies:
104 | at-least-node "^1.0.0"
105 | graceful-fs "^4.2.0"
106 | jsonfile "^6.0.1"
107 | universalify "^2.0.0"
108 |
109 | fs.realpath@^1.0.0:
110 | version "1.0.0"
111 | resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
112 | integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8=
113 |
114 | get-func-name@^2.0.0:
115 | version "2.0.0"
116 | resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41"
117 | integrity sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=
118 |
119 | glob@7.1.2:
120 | version "7.1.2"
121 | resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.2.tgz#c19c9df9a028702d678612384a6552404c636d15"
122 | integrity sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==
123 | dependencies:
124 | fs.realpath "^1.0.0"
125 | inflight "^1.0.4"
126 | inherits "2"
127 | minimatch "^3.0.4"
128 | once "^1.3.0"
129 | path-is-absolute "^1.0.0"
130 |
131 | graceful-fs@^4.1.6, graceful-fs@^4.2.0:
132 | version "4.2.8"
133 | resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.8.tgz#e412b8d33f5e006593cbd3cee6df9f2cebbe802a"
134 | integrity sha512-qkIilPUYcNhJpd33n0GBXTB1MMPp14TxEsEs0pTrsSVucApsYzW5V+Q8Qxhik6KU3evy+qkAAowTByymK0avdg==
135 |
136 | growl@1.10.5:
137 | version "1.10.5"
138 | resolved "https://registry.yarnpkg.com/growl/-/growl-1.10.5.tgz#f2735dc2283674fa67478b10181059355c369e5e"
139 | integrity sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==
140 |
141 | has-flag@^3.0.0:
142 | version "3.0.0"
143 | resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
144 | integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0=
145 |
146 | he@1.1.1:
147 | version "1.1.1"
148 | resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
149 | integrity sha1-k0EP0hsAlzUVH4howvJx80J+I/0=
150 |
151 | inflight@^1.0.4:
152 | version "1.0.6"
153 | resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
154 | integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=
155 | dependencies:
156 | once "^1.3.0"
157 | wrappy "1"
158 |
159 | inherits@2:
160 | version "2.0.4"
161 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
162 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
163 |
164 | jsonfile@^6.0.1:
165 | version "6.1.0"
166 | resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae"
167 | integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==
168 | dependencies:
169 | universalify "^2.0.0"
170 | optionalDependencies:
171 | graceful-fs "^4.1.6"
172 |
173 | lmdb@^2.0.0-beta2:
174 | version "2.0.0-beta2-win-ia32"
175 | resolved "https://registry.yarnpkg.com/lmdb/-/lmdb-2.0.0-beta2-win-ia32.tgz#d5efe2fbe0127ffb68f34cb79304633ea3bceb6b"
176 | integrity sha512-dNeMQkcJ1xh5xGgidbfy+Z0e00+Aiu2JbLoV9qxpIPeMWYRsizlRFbCQpO/WJBnKKYcFv6SVppWRJwWgpOc5qQ==
177 | dependencies:
178 | nan "^2.14.2"
179 | node-gyp-build "^4.2.3"
180 | ordered-binary "^1.1.0"
181 | weak-lru-cache "^1.1.0"
182 | optionalDependencies:
183 | msgpackr "^1.4.7"
184 |
185 | minimatch@3.0.4, minimatch@^3.0.4:
186 | version "3.0.4"
187 | resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083"
188 | integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==
189 | dependencies:
190 | brace-expansion "^1.1.7"
191 |
192 | minimist@0.0.8:
193 | version "0.0.8"
194 | resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
195 | integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
196 |
197 | mkdirp@0.5.1:
198 | version "0.5.1"
199 | resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
200 | integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
201 | dependencies:
202 | minimist "0.0.8"
203 |
204 | mocha@^5:
205 | version "5.2.0"
206 | resolved "https://registry.yarnpkg.com/mocha/-/mocha-5.2.0.tgz#6d8ae508f59167f940f2b5b3c4a612ae50c90ae6"
207 | integrity sha512-2IUgKDhc3J7Uug+FxMXuqIyYzH7gJjXECKe/w43IGgQHTSj3InJi+yAA7T24L9bQMRKiUEHxEX37G5JpVUGLcQ==
208 | dependencies:
209 | browser-stdout "1.3.1"
210 | commander "2.15.1"
211 | debug "3.1.0"
212 | diff "3.5.0"
213 | escape-string-regexp "1.0.5"
214 | glob "7.1.2"
215 | growl "1.10.5"
216 | he "1.1.1"
217 | minimatch "3.0.4"
218 | mkdirp "0.5.1"
219 | supports-color "5.4.0"
220 |
221 | ms@2.0.0:
222 | version "2.0.0"
223 | resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
224 | integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
225 |
226 | msgpackr-extract@^1.0.14:
227 | version "1.0.15"
228 | resolved "https://registry.yarnpkg.com/msgpackr-extract/-/msgpackr-extract-1.0.15.tgz#3010a3ff0b033782d525116071b6c32864a79db2"
229 | integrity sha512-vgJgzFva0/4/mt84wXf3CRCDPHKqiqk5t7/kVSjk/V2IvwSjoStHhxyq/b2+VrWcch3sxiNQOJEWXgI86Fm7AQ==
230 | dependencies:
231 | nan "^2.14.2"
232 | node-gyp-build "^4.2.3"
233 |
234 | msgpackr@^1.4.4, msgpackr@^1.4.7:
235 | version "1.4.7"
236 | resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.4.7.tgz#d802ade841e7d2e873000b491cdda6574a3d5748"
237 | integrity sha512-bhC8Ed1au3L3oHaR/fe4lk4w7PLGFcWQ5XY/Tk9N6tzDRz8YndjCG68TD8zcvYZoxNtw767eF/7VpaTpU9kf9w==
238 | optionalDependencies:
239 | msgpackr-extract "^1.0.14"
240 |
241 | nan@^2.14.2:
242 | version "2.15.0"
243 | resolved "https://registry.yarnpkg.com/nan/-/nan-2.15.0.tgz#3f34a473ff18e15c1b5626b62903b5ad6e665fee"
244 | integrity sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==
245 |
246 | node-gyp-build@^4.2.3:
247 | version "4.3.0"
248 | resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.3.0.tgz#9f256b03e5826150be39c764bf51e993946d71a3"
249 | integrity sha512-iWjXZvmboq0ja1pUGULQBexmxq8CV4xBhX7VDOTbL7ZR4FOowwY/VOtRxBN/yKxmdGoIp4j5ysNT4u3S2pDQ3Q==
250 |
251 | once@^1.3.0:
252 | version "1.4.0"
253 | resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
254 | integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
255 | dependencies:
256 | wrappy "1"
257 |
258 | ordered-binary@^1.1.0:
259 | version "1.1.3"
260 | resolved "https://registry.yarnpkg.com/ordered-binary/-/ordered-binary-1.1.3.tgz#11dbc0a4cb7f8248183b9845e031b443be82571e"
261 | integrity sha512-tDTls+KllrZKJrqRXUYJtIcWIyoQycP7cVN7kzNNnhHKF2bMKHflcAQK+pF2Eb1iVaQodHxqZQr0yv4HWLGBhQ==
262 |
263 | path-is-absolute@^1.0.0:
264 | version "1.0.1"
265 | resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
266 | integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18=
267 |
268 | pathval@^1.1.1:
269 | version "1.1.1"
270 | resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d"
271 | integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==
272 |
273 | rollup@^1.20.3:
274 | version "1.32.1"
275 | resolved "https://registry.yarnpkg.com/rollup/-/rollup-1.32.1.tgz#4480e52d9d9e2ae4b46ba0d9ddeaf3163940f9c4"
276 | integrity sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==
277 | dependencies:
278 | "@types/estree" "*"
279 | "@types/node" "*"
280 | acorn "^7.1.0"
281 |
282 | supports-color@5.4.0:
283 | version "5.4.0"
284 | resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54"
285 | integrity sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==
286 | dependencies:
287 | has-flag "^3.0.0"
288 |
289 | type-detect@^4.0.0, type-detect@^4.0.5:
290 | version "4.0.8"
291 | resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
292 | integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==
293 |
294 | typescript@^4.4.3:
295 | version "4.4.4"
296 | resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.4.4.tgz#2cd01a1a1f160704d3101fd5a58ff0f9fcb8030c"
297 | integrity sha512-DqGhF5IKoBl8WNf8C1gu8q0xZSInh9j1kJJMqT3a94w1JzVaBU4EXOSMrz9yDqMT0xt3selp83fuFMQ0uzv6qA==
298 |
299 | universalify@^2.0.0:
300 | version "2.0.0"
301 | resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717"
302 | integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ==
303 |
304 | weak-lru-cache@^1.1.0, weak-lru-cache@^1.1.3:
305 | version "1.1.3"
306 | resolved "https://registry.yarnpkg.com/weak-lru-cache/-/weak-lru-cache-1.1.3.tgz#8a691884501b611d2b5aeac1ee5a011b2a97d9a8"
307 | integrity sha512-5LDIv+sr6uzT94Hhcq7Qv7gt3jxol4iMWUqOgJSLYbB5oO7bTSMqIBtKsytm8N2BufYOdJw86/qu+SDfbo/wKQ==
308 |
309 | wrappy@1:
310 | version "1.0.2"
311 | resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
312 | integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
313 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | Cobase is a composable layered system of JavaScript-defined, cached, persisted data transform functions and aggregations for building fast, efficient JavaScript-mapped relational data storage. Cobase is a reactive cache using Alkali's reactive API for entities. Cobase uses four basic functional/processing constructs to build scalable relational data caches that can be queried and accessed efficiently at scale:
3 |
4 | * Join
5 | * Map
6 | * Index
7 | * Reduce
8 |
9 |
10 | From these constructs we can build data structures that can aggregate and index data from multiple tables and be queried in fast, scalable O(log n) time/space.
11 |
12 | There are several key goals and philosophies that have shaped cobase:
13 | * REST-oriented programming architecture that composes data stores by layering caching stores with a uniform interface, ideal for use as a caching/transformation middle tier to put in front of a simple backend storage.
14 | * NoSQL/Document-based DB-functionality with relational capability.
15 | * Scalable, performant data querying is achieved by ensuring data has been properly indexed to quickly satisfy queries; data indexing and transformation, with full JS functionality, is the central focus of the API design.
16 | * Defaulting to in-process data store access (via LevelDB) for extremely efficient/fast access to indexed/transformed data.
17 |
18 | ## Getting Started
19 | First, install:
20 |
21 | ```sh
22 | npm install cobase
23 | ```
24 |
25 | And then we can begin creating basic persisted data structures. A basic table or data store can be constructed by simply creating a class that extends `Persisted`:
26 |
27 | ```js
28 | import { Persisted } from 'cobase'
29 | class Project extends Persisted {
30 |
31 | }
32 | ```
33 |
34 | And then we can begin adding data to it.
35 |
36 | ```js
37 | let newProject = Project.add({ name: 'Learn cobase', description: '...' })
38 | ```
39 |
40 | The most common API for interacting with your persisted class instances is to get an instance for a given id/key, and then change it or retrieve its value:
41 |
42 | ```js
43 | Project.for(1).valueOf() -> returns the project with id of 1
44 | // or shorthand:
45 | Project.get(1)
46 | ```
47 |
48 | Generally, stores return results asynchronously, providing a promise for the requested data.
49 |
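 | Because entities are thenables (see the Cobase API section below), they can also be awaited directly; a brief sketch using the `Project` class from above:
 |
 | ```js
 | const project = await Project.for(1) // resolves to the current value of project 1
 | ```
 |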
50 | We can then build various layers of transforms that use this as our data source. We could also define our base data source from an external data source (SQL database, S3 storage, etc.), and much of Cobase is optimized around this type of architecture, but here we are using our own internal storage for the sake of examples.
51 |
52 | ## Composing Layers
53 | ### Map Transform
54 | The first two compositional functions are available by defining a transforming and caching store, created by extending `Cached`. This directly maps a source data store to transformed, cached data. For example, if we wanted to select some properties from a data source and cache them, we could do:
55 |
56 | ```js
57 | import { Cached } from 'cobase'
58 | class ProjectSummary extends Cached.from(Project) {
59 | transform(project) {
60 | // transforms projects from the source data
61 | return { // just keep the name
62 | name: project.name
63 | }
64 | }
65 | static transformVersion = 1 // we can assign a version so that we can increment it to indicate changes in the transform
66 | }
67 | ```
68 |
69 | The resulting data store will lazily compute and cache these transformed summary objects, providing fast access on repeated accesses.
70 |
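 | For example (a small usage sketch, assuming a `projectId` variable):
 |
 | ```js
 | const summary = await ProjectSummary.for(projectId) // the transform runs on first access
 | const again = await ProjectSummary.for(projectId) // subsequently served from the cache
 | ```
 |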
71 | When defining a transform/cached entry, you should specify a `transformVersion` (as above) that is incremented whenever the transform is changed, so that the table can be recomputed when it changes.
72 |
73 | ### Join
74 | The next compositional function is the join, which is achieved by simply providing multiple data sources to the `Cached` base class. The sources are combined by id and passed into the `transform` function.
75 |
76 | ```js
77 | import { Cached } from 'cobase'
78 | class ProjectSummary extends Cached.from(Project, ExtraProjectInfo) {
79 | transform(project, extraProjectInfo) {
80 | // transforms projects from the source data
81 | return { // just keep the name
82 | name: project.name,
83 | extraInfo: extraProjectInfo
84 | }
85 | }
86 | }
87 | ```
88 |
89 | This allows you to compose a new table of data that is joined from two other tables. This can be used for a variety of situations, although generally, a join is most useful when it is combined with an index/map function that can index by a foreign key to relate two different data sources.
90 |
91 | ### Index
92 | The third function is an indexing function, which maps one key-valued data source to a generated set of key-values organized under different keys. An index is created by extending the `Indexed` class and defining a `static` `indexBy` method (make sure you define it as `static`!). Imagine we have another store that holds a table of tasks, each with a `projectId` referencing the project it belongs to. We can index the tasks store by project id:
93 |
94 | ```js
95 | import { Indexed } from 'cobase'
96 | class TasksByProject extends Indexed({ Source: Task }) {
97 | static indexBy(task) { // make sure you define this with static
98 | return task.projectId // this will index tasks by project ids
99 | }
100 | }
101 | ```
102 |
103 | We have now created an index of tasks by project id. We can join this to the project store to create a relationally connected, transformed store of cached data:
104 |
105 | ```js
106 | class ProjectWithTasks extends Cached.from(Project, TasksByProject) {
107 | transform(project, tasks) {
108 | return {
109 | name: project.name,
110 | tasks
111 | }
112 | }
113 | }
114 | ```
115 |
116 | This is also a fully reactive index; any changes to a project or its tasks will automatically propagate through the layers of the index and caches so that `ProjectWithTasks` stays up-to-date.
117 |
118 | When `indexBy` simply returns a key, the index defaults to generating a store whose values come from the source values. That means that in this case, the indexed key will be the project id, and the value will be the array of tasks with that project id. However, `indexBy` supports a number of different ways to specify keys *and* values. First, `indexBy` can return multiple keys (rather than just the single key in the example above). This can be done by simply returning an array of keys, as in the sketch below.
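 |
 | For instance, a hypothetical `tags` array property could be used to index a task under each of its tags by returning it directly:
 |
 | ```js
 | static indexBy(task) {
 | 	return task.tags // an array of keys: the task is indexed under every tag
 | }
 | ```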
119 |
120 | We can also specify the values in the indexed table. Again, if no value is specified, it defaults to the input data source (stored as a reference for efficiency). However, we can specify both the key and value by returning an object with `key` and `value` properties. Furthermore, if we want multiple keys and values generated, we can return an array of objects with `key`/`value` properties.
121 |
122 | For example:
123 |
124 | ```js
125 | static indexBy(task) {
126 | return task.projectIds.map(projectId => ({ // if this was a many to many relationship, with multiple project ids
127 | key: projectId, // index using the project id as the key
128 | value: { // if we wanted our index to just store the name and ids of the tasks
129 | id: task.id,
130 | name: task.name
131 | }
132 | }))
133 | }
134 | static transformVersion = 1 // transform version should be used with an Index as well, to indicate changes to the transform
135 | ```
136 |
137 | And if we accessed a `ProjectWithTasks` by a project id, this would return a promise to an array of the ids and names of the tasks referencing this project:
138 |
139 | ```js
140 | ProjectWithTasks.get(projectId)
141 | ```
142 |
143 | ### Reduce
144 | This function provides efficient aggregation of indices, merging multiple values per index key with a reduction operator. Without a reduce function, an index just returns an array of the entries for a given index key, but a reduce function provides a custom function to aggregate values under an index key. The `Reduced` class uses an `Index` class as a source, and aggregates the values of an index entry in `O(log n)` time using a tree/graph reduction algorithm. The `Reduced` class should define a source index, and a `reduceBy(a, b)` method that takes two input values and reduces them to one that it returns. The `Reduced` class extends the `Cached` class and can optionally include a `transform` method to transform the total reduced value.
145 |
146 | For example, if we wanted to compute the total estimated time of the tasks in a project, this could become very expensive to recompute if there are a large number of tasks in a project (`O(n)` after any update). However, a `Reduced` class can maintain this sum with incremental updates in `O(log n)` time.
147 |
148 | ```js
149 | import { Reduced } from 'cobase'
150 |
151 | class ProjectTotalHours extends Reduced.from(TasksByProject) {
152 | reduceBy(taskA, taskB) {
153 | return { // the returned value can be passed into subsequent reduceBy calls, so we make it the same type as the inputs
154 | hours: taskA.hours + taskB.hours
155 | }
156 | }
157 | transform(total) {
158 | // now finally get just the hours
159 | return total.hours
160 | }
161 | }
162 | ProjectTotalHours.for(projectId).valueOf() -> get the total hours of the tasks for a project
163 | ```
164 |
165 | Note that the `reduceBy` function is slightly different from a JavaScript `reduce` function in that both of its inputs may be the output of previous `reduceBy` calls. A `reduceBy` operation must be side-effect free, commutative (order of execution can be rearranged), associative (grouping of execution can be rearranged), and referentially transparent (the same inputs should always produce the same output).
166 |
167 | A reduce operation can also be written shorthand from a source entity:
168 |
169 | ```js
170 | const ProjectTotalHours = TasksByProject.reduce('totalHours', (a, b) => ({ hours: a.hours + b.hours }))
171 | ```
172 |
173 | ## Relational Properties
174 | The relational property definitions provide a convenient mechanism for defining related entities, built on cobase's index and join functionality. A derived entity that is transformed/cached from a source entity can be created with the `cacheWith` method, with property definitions that reference other entities. These referencing property relations are defined by the `relatedBy` and `relatesBy` methods. The appropriate index will be created based on the provided foreign key, to join the indices and produce a cached entity based on the joined data. For example, if we had `Task` and `Project` entity classes, where the `Task` class had a foreign key referencing projects in the `projectId` property, we could create a `ProjectWithTasks` class that included a project and all its associated tasks:
175 |
176 | ```js
177 | class ProjectWithTasks extends Project.cacheWith({
178 | tasks: Task.relatesBy('projectId')
179 | }) {
180 |
181 | }
182 | ```
183 |
184 | Likewise we could define a `TaskWithProject` that defined an entity with the task and the project data it is associated with:
185 |
186 | ```js
187 | export class TaskWithProject extends Task.cacheWith({
188 | project: Project.relatedBy('projectId')
189 | }) {}
190 | ```
191 |
192 | Note the distinction between the two relation definitions:
193 | `TargetEntity.relatesBy(foreignKey)` - This defines a relationship where the foreign key is defined on the `TargetEntity` that will be included as a property. This adds a property containing an array of the target entities that reference the parent entity.
194 | `TargetEntity.relatedBy(foreignKey)` - This defines a relationship where the foreign key is defined on the source class, in a property that references the `TargetEntity`. This adds a property with a single target entity if the foreign key is a single value, or an array of target entities if the foreign key is an array of ids.
195 |
196 | In both cases, the foreign key can be either a single (string or number) value, or an array of values if there is a many-to-many relationship.
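
For example, a many-to-many relation can be expressed with an array foreign key. Here is a minimal sketch using a hypothetical `Tag` entity and a hypothetical `tagIds` array property on tasks (neither is defined in the examples above):

```js
// hypothetical: each task stores an array of tag ids in its `tagIds` property
class TaskWithTags extends Task.cacheWith({
  tags: Tag.relatedBy('tagIds') // array foreign key -> array of Tag entities
}) {
}
```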
197 |
198 | ## Cobase API
199 |
200 | Cobase entities extend the Alkali Variable API, and consequently inherit its full API, as [described here](https://github.com/kriszyp/alkali#variable-api). For example, the following methods are available on cobase entity instances:
201 | `valueOf()` - This retrieves the current value of this entity. If it is available synchronously, it will return the value directly. Otherwise, if it requires asynchronous resolution (if the store or transform is async, and it is not cached in memory), it will return a promise to the resolved value.
202 | `then(onFulfilled, onRejected)` - This also retrieves the current value of the entity, using the standard promise API/callback. This means all entities can be treated as promises/thenables and used in places that accept promises, including with the `await` operator.
203 | `updated(event?)` - This can be called to indicate that the entity has been updated and its transform needs to be re-executed to retrieve its value.
204 | `subscribe((event) => void)` - Subscribe to an entity and listen for changes.
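
A short sketch of how these instance methods compose (assuming the `Task` entity from the earlier examples):

```js
let task = Task.for(taskId)
let value = await task // thenable: resolves to the current value
task.subscribe(event => {
  console.log('task changed', event)
})
task.updated() // mark as stale; the transform re-runs on the next access
```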
205 |
206 | In addition, the following methods are available as *`static`* methods on entity classes:
207 | `for(id)` - Return the entity for the given id.
208 | `get(id)` - Shorthand for `Entity.for(id).valueOf()`.
209 | `set(id, value)` - Shorthand for `Entity.for(id).put(value)`.
210 | `instanceIds` - This property returns a variable array (VArray) with the ids of all the available instances of the entity.
211 | `subscribe((event) => void)` - Subscribe to any changes to any instances of this class.
212 | `index(propertyName)` - This returns an `Index` class defined such that it indexes this class using the provided property name, or an `indexBy` function (which can be referenced by the provided name).
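
And a corresponding sketch of the static methods (again assuming the `Task` entity from earlier; the `done` property is hypothetical):

```js
let task = await Task.get(taskId) // read the value for an id
await Task.set(taskId, { ...task, done: true }) // write a new value for an id
const TasksByProject = Task.index('projectId') // index instances by a property
Task.subscribe(event => console.log('some Task instance changed', event))
```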
213 |
214 | ## Connecting to different databases
215 | Cobase, using LevelDB, provides a capable data storage system, and makes it easy to build a compositional data system. However, for many applications, it may be desirable to use cobase's compositional transforms on top of an existing database system with transactional capabilities, integrated backup, and/or access to existing/legacy data. In fact, this type of cross-server, compositional data layering, where cobase acts as a transforming, caching middle tier in front of a database, is what cobase is optimized for.
216 |
217 | To connect cobase to an existing database, we can create a cached class that retrieves data from the database as a transform, is notified of changes to the data, and delivers any requests for data modifications. Here is an outline of what a connector class that implements a connection to another database looks like:
218 |
219 | ```js
220 | // we aren't using any other "source" entities, since our transform method will handle retrieving data
221 | class Task extends Cached {
222 | transform() {
223 | // this is the main method that is called when an object is first accessed or changed, and can retrieve data from the db
224 | // this.id has the id of this object
225 | let id = this.id
226 | // do a database query to get our data, with something like this (note that we can return a promise; async transforms are fine):
227 | return sqlDatabase.query('SELECT * FROM TASK WHERE id = $1', [id]).then(rows => {
228 | // return the first row
229 | return rows[0]
230 | })
231 | }
232 |
233 | static transformVersion = 1 // increment this if we ever change the transform
234 |
235 | static initialize() {
236 | // we can hook into initialize to do any setup
237 | // In particular it is important to make sure we notify this class of any data changes.
238 | // This can be implemented by setting up a database trigger, or some other mechanism to
239 | // notify of data updates.
240 | // Another simple approach (although not the most efficient/optimal) could be to simply
241 | // poll for updates:
242 | let lastUpdate = Date.now()
243 | setInterval(() => sqlDatabase.query('SELECT * FROM TASK WHERE UPDATED > $1', [lastUpdate]).then(rows => {
244 | lastUpdate = Date.now()
245 | // for each row that was updated, call updated(),
246 | // which will mark the entity in cobase as updated
247 | // so it will be re-retrieved on next access
248 | for (let updatedRow of rows) {
249 | this.for(updatedRow.id).updated()
250 | }
251 | }), 10000) // poll every 10 seconds
252 | return super.initialize()
253 | }
254 |
255 | static fetchAllIds() {
256 | // If you need to be able to get a list of all the object ids, this can be implemented to fetch them
257 | return sqlDatabase.query('SELECT id FROM TASK').then(rows => rows.map(row => row.id))
258 | }
259 | // alternatively you can implement a static resetAll that retrieves all the objects and assigns each with is()
260 |
261 | put() {
262 | // we could implement methods for updates, if they will go through cobase (updates may instead go directly to the database)
263 | return sqlDatabase.execute('INSERT INTO TASK...')
264 | }
265 | }
266 | ```
267 |
268 | ## Context
269 | ### Sessions (Operation Sequencing)
270 | In the cobase composition architecture, when an entity is updated, added, or removed, there may be some delay before any indices or reduce computations are finished, since these typically take place asynchronously. When you access one of these downstream data sources, it will normally wait for these operations to finish, so it can return data that is consistent with any preceding operations. However, it can be preferable to create separate "sessions" so that when you retrieve data, you don't need to wait for operations that are outside the scope of the current session to finish. This facilitates a type of "eventual consistency" where everything inside the session maintains true consistency, but the performance of accessing data is prioritized over waiting for other sessions' operations to finish. A session can be used to create a context, which can then be executed around value retrievals or updates:
271 |
272 | ```js
273 | import { Context } from 'alkali'
274 | let mySession = {}
275 | new Context(mySession).executeWithin(() => {
276 | someTask.updated()
277 | TasksByProject.valueOf() // will wait for any changes from the someTask update to be indexed, but not for any operations outside this session
278 | })
279 | ```
280 |
281 | Without specifying a session, all operations will be performed (and sequenced) within a single default session.
282 |
283 | ## Memory Model
284 | Cobase ensures a direct one-to-one mapping between an entity id/key and an entity object instance. This effectively means multiple calls to get an instance by id will never return a different object or copy of the data:
285 |
286 | ```js
287 | Task.for(3) === Task.for(3) // always true
288 | ```
289 |
290 | This protects against data inconsistency between two different instances of the same data, and greatly simplifies the logic of dealing with mapped objects.
291 |
292 | Cobase actually integrates with garbage collection (using weak value maps) to allow unused/unreferenced objects to be collected (so cobase doesn't leak memory after data is accessed). This means object instances can be collected, while still guaranteeing that at most one instance of an object per id/key is ever in memory at a time.
293 |
294 | Cobase uses a size-prioritized, multi-step, exponentially decaying least frequently/recently used policy to keep recently and frequently used data in memory, effectively providing a GC-coordinated in-memory cache built from the object instances. The amount of memory used for keeping recently used object instances in memory can be adjusted by setting `ExpirationStrategy.cachedEntrySize`. The default value of 20000 keeps up to 20000 entries in memory:
295 |
296 | ```js
297 | ExpirationStrategy.cachedEntrySize = 40000 // use twice the default amount of caching
298 | ```
299 |
300 | The weak value map mechanism and LRU caching strategy are described in [more detail here](https://dev.doctorevidence.com/an-introduction-to-weak-value-maps-40e108b44e1c).
301 |
302 | ## Integration with an HTTP/Web Server
303 | Cobase provides utilities for efficient delivery of data in a web server. This mainly consists of a middleware component (built on Koa) that performs content negotiation and efficiently streams JSON, with support for advanced optimizations including direct binary transfer from the DB to streams, and backpressure. It can be used by including cobase's `media` export as middleware; downstream apps/middleware can then access `connection.request.data` for the parsed request data and set the response data on `connection.response.data`, and the middleware will serialize it to the appropriate content type as specified by the client (defaulting to JSON).
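
A minimal sketch of wiring this into a Koa app (the route logic and port are placeholders; `connection` is the Koa context, per the description above):

```js
import Koa from 'koa'
import { media } from 'cobase'

const app = new Koa()
app.use(media) // content negotiation, request parsing, streaming serialization
app.use(async (connection) => {
  // the media middleware has already parsed the request body
  let input = connection.request.data
  // set the response data; media serializes it to the negotiated content type
  connection.response.data = { received: input }
})
app.listen(8080)
```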
304 |
305 | Additional content/media handlers can be defined using the exported `mediaTypes` `Map`, by setting a media handler with the content type as the key and an object with `serialize` and/or `parse` methods as the value:
306 |
307 | ```js
308 | import { mediaTypes, media } from 'cobase'
309 | mediaTypes.set('text/html', {
310 | serialize(data, connection) {
311 | return // some HTML we generate from data
312 | },
313 | parse(html) {
314 | // probably wouldn't try to parse HTML from a request, but if we wanted to
315 | }
316 | })
317 |
318 | koaApp.use(media)
319 | ```
320 |
--------------------------------------------------------------------------------
/src/KeyIndex.ts:
--------------------------------------------------------------------------------
1 | import * as alkali from 'alkali'; const { VArray, ReplacedEvent, UpdateEvent, getNextVersion } = alkali.VArray ? alkali : alkali.default
2 | import { Persistable, INVALIDATED_ENTRY, VERSION, Invalidated } from './Persisted.js'
3 | import when from './util/when.js'
4 | import ExpirationStrategy from './ExpirationStrategy.js'
5 | import { OperationsArray, IterableOptions, Database } from './storage/Database.js'
6 | import { asBinary } from 'lmdb'
7 | //import { mergeProgress, registerProcessing, whenClassIsReady, DEFAULT_CONTEXT } from './UpdateProgress'
8 |
9 | const expirationStrategy = ExpirationStrategy.defaultInstance
10 | const DEFAULT_INDEXING_CONCURRENCY = 40
11 | const INITIALIZING_LAST_KEY = Buffer.from([1, 7])
12 | const INDEXING_MODE = { indexing: true }
13 | const DEFAULT_INDEXING_DELAY = 60
14 | const INITIALIZATION_SOURCE = 'is-initializing'
15 | const INITIALIZATION_SOURCE_SET = new Set([INITIALIZATION_SOURCE])
16 | const COMPRESSED_STATUS_24 = 254
17 | const QUEUED_UPDATES = Symbol.for('qu')
18 | export interface IndexRequest {
19 | previousEntry?: any
20 | pendingProcesses?: number[]
21 | deleted?: boolean
22 | sources?: Set<any>
23 | version: number
24 | triggers?: Set<any>
25 | previousValues?: Map<any, any>
26 | value: {}
27 | by?: any
28 | resolveOnCompletion?: Function[]
29 | }
30 | interface IndexEntryUpdate {
31 | sources: Set<any>
32 | triggers?: Set<any>
33 | }
34 |
35 | class InitializingIndexRequest implements IndexRequest {
36 | version: number
37 | constructor(version) {
38 | this.version = version
39 | }
40 | get triggers() {
41 | return INITIALIZATION_SOURCE_SET
42 | }
43 | get previousVersion() {
44 | return -1
45 | }
46 | }
47 |
48 | const versionToDate = (version) =>
49 | new Date(version / 256 + 1500000000000).toLocaleString()
50 |
51 | export const Index = ({ Source }) => {
52 | const Index = class extends KeyIndex {}
53 | Source.updateWithPrevious = true
54 | Index.sources = [ Index.source = Source ]
55 | Index.eventLog = []
56 | Index.queuedUpdateArrays = []
57 | Index.allQueuedUpdates = new Set()
58 | return Index
59 | }
60 |
61 | export class KeyIndex extends Persistable.as(VArray) {
62 | version: number
63 | averageConcurrencyLevel: number
64 | static whenProcessingComplete: Promise<any> // promise for the completion of processing in the current indexing task for this index
65 | static whenCommitted: Promise<any> // promise for when an update received by this index has been fully committed (to disk)
66 | static indexingProcess: Promise<any>
67 | static eventLog = []
68 | static queuedUpdateArrays = []
69 | static derivedFrom(...sources: Array<any>) {
70 | for (let source of sources) {
71 | if (source.notifies) {
72 | if (!this.sources)
73 | this.sources = []
74 | this.sources.push(this.source = source)
75 | source.updateWithPrevious = true
76 | } else if (typeof source === 'function') {
77 | this.indexBy = source
78 | } else {
79 | Object.assign(this, source)
80 | }
81 | }
82 | /* static whenProcessingComplete: Promise // promise for the completion of processing in current indexing task for this index,
83 | whenCommitted: Promise // promise for when an update received by this index has been fully committed (to disk),
84 | indexingProcess: Promise*/
85 | this.eventLog = []
86 | this.queuedUpdateArrays = []
87 | this.allQueuedUpdates = new Set()
88 | this.start()
89 | }
90 |
91 | static forValue(id, transition) {
92 | return this.tryForQueueEntry(id, () => this.indexEntry(id, transition))
93 | }
94 | static forQueueEntry(id) {
95 | return this.tryForQueueEntry(id, () =>
96 | this.indexEntry(id).then(complete => {
97 | if (complete) {
98 | complete.commit()
99 | }
100 | })
101 | )
102 | }
103 | static async indexEntry(id, transition) {
104 | let { previousValue: previousData, deleted, sources, triggers, fromVersion: previousVersion } = transition || {}
105 | let operations: OperationsArray = []
106 | let eventUpdateSources = []
107 |
108 | let toRemove = new Map()
109 | // TODO: handle delta, for optimized index updates
110 | // this is for recording changed entities and removing the values that previously had been indexed
111 | let previousEntries
112 | try {
113 | if (previousData != null) { // if no data, then presumably no references to clear
114 | // use the same mapping function to determine values to remove
115 | previousEntries = previousData === undefined ? previousData : this.indexBy(previousData, id)
116 | if (previousEntries && previousEntries.then)
117 | previousEntries = await previousEntries
118 | if (typeof previousEntries == 'object' && previousEntries) {
119 | previousEntries = this.normalizeEntries(previousEntries)
120 | for (let entry of previousEntries) {
121 | let previousValue = entry.value
122 | previousValue = this.db.encoder.encode(previousValue)
123 | toRemove.set(typeof entry === 'object' ? entry.key : entry, previousValue)
124 | }
125 | } else if (previousEntries != null) {
126 | toRemove.set(previousEntries, this.db.encoder.encode(previousEntries))
127 | }
128 | }
129 | } catch(error) {
130 | if (error.isTemporary)
131 | throw error
132 | //if (transition && transition.version !== version) return // don't log errors from invalidated states
133 | this.warn('Error indexing previous value', this.source.name, 'for', this.name, id, error)
134 | }
135 | //if (indexRequest && indexRequest.version !== version) return // if at any point it is invalidated, break out
136 | let entries
137 | if (!deleted) {
138 | let attempts = 0
139 | let data
140 | try {
141 | data = transition ? transition.value : this.source.get(id, INDEXING_MODE)
142 | if (data && data.then)
143 | data = await data
144 | } catch(error) {
145 | if (error.isTemporary)
146 | throw error
147 | try {
148 | // try again
149 | data = transition ? transition.value : await this.source.get(id, INDEXING_MODE)
150 | } catch(error) {
151 | //if (indexRequest && indexRequest.version !== version) return // if at any point it is invalidated, break out
152 | this.warn('Error retrieving value needing to be indexed', error, 'for', this.name, id)
153 | data = undefined
154 | }
155 | }
156 | //if (indexRequest && indexRequest.version !== version) return // if at any point it is invalidated, break out
157 | // let the indexBy define how we get the set of values to index
158 | try {
159 | entries = data === undefined ? data : this.indexBy(data, id)
160 | if (entries && entries.then)
161 | entries = await entries
162 | } catch(error) {
163 | if (error.isTemporary)
164 | throw error
165 | //if (indexRequest && indexRequest.version !== version) return // if at any point it is invalidated, break out
166 | this.warn('Error indexing value', error, 'for', this.name, id)
167 | entries = undefined
168 | }
169 | entries = this.normalizeEntries(entries)
170 | let first = true
171 | for (let entry of entries) {
172 | // we use the composite key, so we can quickly traverse all the entries under a certain key
173 | let key = typeof entry === 'object' ? entry.key : entry // TODO: Maybe at some point we support dates as keys
174 | // TODO: If toRemove has the key, that means the key exists, and we don't need to do anything, as long as the value matches (if there is no value might be a reasonable check)
175 | let removedValue = toRemove.get(key)
176 | // a value of '' is treated as a reference to the source object, so should always be treated as a change
177 | let value = this.db.encoder.encode(entry.value)
178 | first = false
179 | if (removedValue != null)
180 | toRemove.delete(key)
181 | let isChanged = removedValue == null || !value.equals(removedValue)
182 | if (isChanged || entry.value == null || this.alwaysUpdate) {
183 | if (isChanged) {
184 | let fullKey = [ key, id ]
185 | operations.push({
186 | key: fullKey,
187 | value: value
188 | })
189 | operations.byteCount = (operations.byteCount || 0) + value.length + fullKey.length
190 | }
191 | if (!this.resumePromise && this.listeners && this.listeners.length > 0)
192 | eventUpdateSources.push({ key, sources, triggers })
193 | }
194 | }
195 | }
196 | for (let [key] of toRemove) {
197 | /*if (fullKey.length > 510) {
198 | console.error('Too large of key indexing', this.name, key)
199 | continue
200 | }*/
201 | operations.push({
202 | key: [ key, id ]
203 | })
204 | if (!this.resumePromise && this.listeners && this.listeners.length > 0)
205 | eventUpdateSources.push({ key, sources, triggers })
206 | }
207 | if (Index.onIndexEntry) {
208 | Index.onIndexEntry(this.name, id, transition && transition.version, previousEntries, entries)
209 | }
210 | return {
211 | commit: () => {
212 | let batchFinished
213 | for (let operation of operations) {
214 | if (operation.value)
215 | batchFinished = this.db.put(operation.key, asBinary(operation.value))
216 | else
217 | batchFinished = this.db.remove(operation.key)
218 | }
219 | if (eventUpdateSources.length > 0) {
220 | return (batchFinished || Promise.resolve()).then(() =>
221 | this.queueUpdates(eventUpdateSources))
222 | }
223 | }
224 | }
225 | }
226 | static pendingEvents = new Map()
227 |
228 | static normalizeEntries(entries) {
229 | if (typeof entries != 'object') {
230 | // allow single primitive key
231 | return entries == null ? [] : [ entries ]
232 | } else if (entries instanceof Map) {
233 | return Array.from(entries).map(([ key, value ]) => ({ key, value }))
234 | } else if (!(entries instanceof Array)) {
235 | // single object
236 | if (entries === null)
237 | return []
238 | return [entries]
239 | }
240 | return entries
241 | }
242 |
243 | static log(...args) {
244 | this.eventLog.push(args.join(' ') + ' ' + new Date().toLocaleString())
245 | console.log(...args)
246 | }
247 | static warn(...args) {
248 | this.eventLog.push(args.join(' ') + ' ' + new Date().toLocaleString())
249 | console.warn(...args)
250 | }
251 |
252 | static updated(event, by) {
253 | // don't do anything, we don't want these events to propagate through here, and we do indexing based on upstream queue
254 | }
255 |
256 | static queueUpdates(eventSources) {
257 | if (!this.unsavedUpdates) {
258 | this.unsavedUpdates = []
259 | this.unsavedUpdates.id = this.unsavedQueuedId = (this.unsavedQueuedId || 0) + 1
260 | if (this.queuedUpdateArrays.length == 0)
261 | setTimeout(() =>
262 | this.processQueuedUpdates())
263 | this.queuedUpdateArrays.push(this.unsavedUpdates)
264 | }
265 | for ( const { key, triggers, sources } of eventSources) {
266 | if (!this.allQueuedUpdates.has(key)) {
267 | this.allQueuedUpdates.add(key)
268 | this.unsavedUpdates.push(key)
269 | }
270 | }
271 | }
272 | static async processQueuedUpdates() {
273 | let inProgress = []
274 | let delayMs = 0
275 | while (this.queuedUpdateArrays.length) {
276 | let updateArray = this.queuedUpdateArrays[0]
277 | let l = updateArray.length
278 | for (let i = updateArray.sent || 0; i < l; i++) {
279 | let key = updateArray[i]
280 | try {
281 | let event = new ReplacedEvent()
282 | this.allQueuedUpdates.delete(key)
283 | let start = process.hrtime.bigint()
284 | super.updated(event, { // send downstream
285 | id: key,
286 | constructor: this
287 | })
288 | let whenWritten = event.whenWritten
289 | if (whenWritten) {
290 | inProgress.push(event.whenWritten)
291 | if (inProgress.length > 20) {
292 | await Promise.all(inProgress)
293 | inProgress = []
294 | }
295 | }
296 | await delay(Number(process.hrtime.bigint() - start) / 1000000) // delay about the same amount of time the update took
297 | } catch (error) {
298 | this.warn('Error sending index updates', error)
299 | }
300 | }
301 | if (updateArray.written) {
302 | Promise.all(inProgress).then(() =>
303 | this.db.remove([QUEUED_UPDATES, updateArray.id]))
304 | this.queuedUpdateArrays.shift()
305 | }
306 | else {
307 | if (l == updateArray.length) {
308 | await Promise.all(inProgress)
309 | inProgress = []
310 | }
311 | if (l == updateArray.length && this.unsavedUpdates == updateArray) {
312 | this.unsavedUpdates = null // we sent all the entries before it was written, restart a new array
313 | this.queuedUpdateArrays.shift()
314 | }
315 | else
316 | updateArray.sent = l
317 | }
318 | }
319 | }
320 | static sendUpdates(eventSources) {
321 | let updatedIndexEntries = new Map()
322 | // aggregate them by key so as to minimize the number of events we send
323 | nextEvent: for ( const { key, triggers, sources } of eventSources) {
324 | let entry = updatedIndexEntries.get(key)
325 | if (!entry) {
326 | updatedIndexEntries.set(key, entry = {
327 | sources: new Set(),
328 | triggers: new Set(),
329 | })
330 | }
331 | if (triggers) {
332 | for (let trigger of triggers) {
333 | entry.triggers.add(trigger)
334 | if (trigger === INITIALIZATION_SOURCE) {
335 | continue nextEvent // don't record sources for initialization
336 | }
337 | }
338 | }
339 | if (sources)
340 | for (let source of sources)
341 | entry.sources.add(source)
342 | }
343 |
344 | let updatedIndexEntriesArray = Array.from(updatedIndexEntries).reverse()
345 | updatedIndexEntries = new Map()
346 | let indexedEntry
347 | while ((indexedEntry = updatedIndexEntriesArray.pop())) {
348 | try {
349 | let event = new ReplacedEvent()
350 | let indexEntryUpdate: IndexEntryUpdate = indexedEntry[1]
351 | event.sources = indexEntryUpdate.sources
352 | event.triggers = Array.from(indexEntryUpdate.triggers)
353 | super.updated(event, { // send downstream
354 | id: indexedEntry[0],
355 | constructor: this
356 | })
357 | } catch (error) {
358 | this.warn('Error sending index updates', error)
359 | }
360 | }
361 | this.instanceIds.updated()
362 | }
363 |
364 | static get(id) {
365 | // First: ensure that all the source instances are up-to-date
366 | return when(this.source.whenUpdatedInContext(true), () => {
367 | let iterable = this._getIndexedValues({
368 | start: id, // the range of everything starting with id-
369 | end: [id, '\u{ffff}'],
370 | })
371 | return this.returnsIterables ? iterable : iterable.asArray
372 | })
373 | }
374 |
375 | static getIndexedKeys(id) {
376 | return this._getIndexedValues({
377 | start: id, // the range of everything starting with id-
378 | end: [id, '\u{ffff}'],
379 | values: false,
380 | }, true)
381 | }
382 |
383 | static getIndexedValues(range: IterableOptions) {
384 | range = range || {}
385 | if (!this.initialized && range.waitForInitialization) {
386 | return this.start().then(() => this.getIndexedValues(range))
387 | }
388 | if (range.start === undefined)
389 | range.start = true
390 | return when(!range.noWait && this.whenUpdatedInContext(), () =>
391 | this._getIndexedValues(range, !range.onlyValues, range.useFullKey))
392 | }
393 |
394 | // Get a range of indexed entries for this id (used by Reduced)
395 | static _getIndexedValues(range: IterableOptions, returnKeys?: boolean, useFullKey?: boolean) {
396 | const db: Database = this.db
397 | let approximateSize = 0
398 | let promises = []
399 | return db.getRange(range).map(({ key, value }) => {
400 | let [, sourceId] = key
401 | /*if (range.recordApproximateSize) {
402 | let approximateSize = approximateSize += key.length + (value && value.length || 10)
403 | }*/
404 | let parsedValue = value == null ? this.source.get(sourceId) : value
405 | if (parsedValue && parsedValue.then) {
406 | return parsedValue.then(parsedValue => returnKeys ? {
407 | key: useFullKey ? key : sourceId,
408 | value: parsedValue,
409 | } : parsedValue)
410 | }
411 | return returnKeys ? {
412 | key: useFullKey ? key : sourceId,
413 | value: parsedValue,
414 | } : parsedValue
415 | })
416 | }
417 | /**
418 | * Indexing function, that defines the keys and values used in the indexed table.
419 | * This should be implemented by Index subclasses, and should be safe/functional
420 | * method with referential integrity (always returns the same results with same inputs),
421 | * as it is used to determine key/values on both addition and removal of entities.
422 | * @param data The object to be indexed
423 | * @return The return value can be an array of objects, where each object has a `key` and a `value`. It can only be an array of simple strings or numbers, if it is merely keys that need to be indexed, or even be a just a string (or number), if only a single key should be indexed
424 | **/
425 | static indexBy(data: {}, sourceKey: string | number | boolean): Array<{ key: string | number, value: any} | string | number> | IterableIterator<any> | string | number {
426 | return null
427 | }
428 |
429 | static whenUpdatedInContext(context?) {
430 | return this.source.whenUpdatedInContext(true)
431 | }
432 |
433 | // static returnsIterables = true // maybe at some point default this to on
434 |
435 | static getInstanceIdsAndVersionsSince(version) {
436 | // There is no version tracking with indices.
437 | // however, indices always do send updates, and as long as we wait until we are ready and finished with initial indexing
438 | // downstream tables should have received all the updates they need to proceed
439 | //console.log('getInstanceIdsAndVersionsSince from KeyIndex', this.name, version)
440 | return this.ready.then(() => {
441 | //this.log('getInstanceIdsAndVersionsSince ready from KeyIndex', this.name, version)
442 | if (version == 0) { // if we are starting from scratch, we can return everything
443 | return when(this.getInstanceIds(), idsAndVersions => {
444 | idsAndVersions = idsAndVersions.map(id => ({
445 | id,
446 | version: getNextVersion(),
447 | }))
448 | idsAndVersions.isFullReset = true
449 | return idsAndVersions
450 | })
451 | }
452 | return []
453 | })
454 | }
455 |
456 | clearCache() {
457 | this.cachedValue = undefined
458 | this.cachedVersion = -1
459 | }
460 |
461 | valueOf() {
462 | return when(super.valueOf(true), (value) => {
463 | expirationStrategy.useEntry(this, (this.approximateSize || 100) * 10) // multiply by 10 because generally we want to expire index values pretty quickly
464 | return value
465 | })
466 | }
467 |
468 | static openDatabase() {
469 | let db = this.source.openChildDB(this)
470 | db.on('beforecommit', () => {
471 | if (this.unsavedUpdates && this.unsavedUpdates.length > 0) {
472 | this.unsavedUpdates.written = true
473 | db.put([QUEUED_UPDATES, this.unsavedQueuedId], this.unsavedUpdates)
474 | this.unsavedUpdates = null
475 | }
476 | })
477 | return false // is not root
478 | }
479 | static async initializeData() {
480 | await super.initializeData()
481 | let hadQueuedUpdates = this.queuedUpdateArrays.length > 0
482 | for (let { key, value } of this.db.getRange({
483 | start: [QUEUED_UPDATES, 0],
484 | end: [QUEUED_UPDATES, 1e30],
485 |
486 | })) {
487 | value.id = key[1]
488 | value.written = true
489 | this.unsavedQueuedId = Math.max(this.unsavedQueuedId || 0, value.id)
490 | this.queuedUpdateArrays.push(value)
491 | for (let updateId of value) {
492 | this.allQueuedUpdates.add(updateId)
493 | }
494 | }
495 | if (this.queuedUpdateArrays.length > 0 && !hadQueuedUpdates)
496 | this.processQueuedUpdates()
497 | }
498 | static getIdsFromKey(key) {
499 | return this.source.getIdsFromKey(key)
500 | }
501 | static updateDBVersion() {
502 | if (!this.source.wasReset) { // only reindex if the source didn't do it for us
503 | this.resumeFromKey = true
504 | this.db.putSync(INITIALIZING_LAST_KEY, true)
505 | }
506 | super.updateDBVersion()
507 | }
508 |
509 | static resumeQueue() {
510 | this.state = 'waiting for upstream source to build'
511 | // explicitly wait for source to finish resuming before our own resuming
512 | return when(this.source.resumePromise, () =>
513 | super.resumeQueue())
514 | }
515 |
516 | static async clearEntries(set) {
517 | let result
518 | let db = this.db
519 | let i = 1
520 | try {
521 | for (let key of db.getRange({
522 | start: true,
523 | values: false,
524 | })) {
525 | let [, sourceId] = key
526 | if (set.has(sourceId)) {
527 | result = db.removeSync(key)
528 | }
529 | if (i++ % 100000 == 0)
530 | await delay()
531 | }
532 | } catch(error) {
533 | console.error(error)
534 | }
535 | return result // just need to wait for last one to finish (guarantees all others are finished)
536 | }
537 |
538 | static myEarliestPendingVersion = 0 // have we registered our process, and at what version
539 | static whenAllConcurrentlyIndexing?: Promise // promise if we are waiting for the initial indexing process to join the concurrent indexing mode
540 | static loadVersions() {
541 | // don't load versions
542 | }
543 | resetCache() {
544 | // don't reset any in the db, we are incrementally updating
545 | this.cachedValue = undefined
546 | this.updateVersion()
547 | }
548 |
549 | static get instances() {
550 | // don't load from disk
551 | return this._instances || (this._instances = [])
552 | }
553 |
554 | static getInstanceIds(range?: IterableOptions) {
555 | let db = this.db
556 | range = range || {}
557 | if (range.start === undefined)
558 | range.start = true
559 | let whenReady
560 | if (range.waitForAllIds) {
561 | whenReady = when(this.ready, () => when(this.resumePromise, () => this.whenProcessingComplete))
562 | }
563 | let lastKey
564 | range.values = false
565 | return when(whenReady, () =>
566 | db.getRange(range).map(( key ) => key[0]).filter(key => {
567 | if (key !== lastKey) { // skip multiple entries under one key
568 | lastKey = key
569 | return true
570 | }
571 | }))
572 | }
573 | }
574 | Index.from = (Source) => Index({ Source })
575 | export default Index
576 |
577 | const withTimeout = (promise, ms) => Promise.race([promise, new Promise((resolve, reject) =>
578 | setTimeout(() => reject(new Error('Timeout waiting for indexing synchronization')), ms))])
579 |
580 | let currentlyProcessing = new Set()
581 |
582 | class IndexingCompletionEvent extends UpdateEvent {
583 | type = 'indexing-completion'
584 | }
585 | // write a 64-bit uint (could be optimized/improved)
586 | function writeUInt(buffer, number, offset?) {
587 | buffer.writeUIntBE(number, (offset || 0) + 2, 6)
588 | }
589 | // read a 64-bit uint (could be optimized/improved)
590 | function readUInt(buffer, offset?) {
591 | return buffer.readUIntBE((offset || 0) + 2, 6)
592 | }
593 |
594 | class IteratorThenMap<K, V> implements Map<K, V> {
595 | didIterator: boolean
596 | iterator: any
597 | deferredMap: Map<K, V>
598 | iterable: Iterable<any>
599 | deletedCount: number
600 | constructor(iterable, length, deferredMap) {
601 | this.iterable = iterable
602 | this.deferredMap = deferredMap || new Map()
603 | this.deferredMap.isReplaced = true
604 | this.deletedCount = 0
605 | this.length = length
606 | }
607 | [Symbol.iterator]() {
608 | if (this.didIterator) {
609 | return this.deferredMap[Symbol.iterator]()
610 | } else {
611 | this.didIterator = true
612 | return this.iterator = this.iterable.map(([id, value]) => {
613 | this.deferredMap.set(id, value)
614 | return [id, value]
615 | })[Symbol.iterator]()
616 | }
617 | }
618 | get size() {
619 | if (this.iterator && this.iterator.done)
620 | return this.deferredMap.size
621 | return this.length - this.deletedCount + this.deferredMap.size
622 | }
623 | set(id: K, value: V) {
624 | return this.deferredMap.set(id, value)
625 | }
626 | get(id) {
627 | return this.deferredMap.get(id)
628 | }
629 | delete(id) {
630 | this.deletedCount++
631 | return this.deferredMap.delete(id)
632 | }
633 | }
634 | const delay = (ms?) => new Promise(resolve => ms >= 1 ? setTimeout(resolve, ms) : setImmediate(resolve))
635 |
--------------------------------------------------------------------------------
/src/Persisted.ts:
--------------------------------------------------------------------------------
1 | import * as alkali from 'alkali';
2 | const { Transform, VPromise, VArray, Variable, spawn, currentContext, NOT_MODIFIED, getNextVersion, ReplacedEvent, DeletedEvent, AddedEvent, UpdateEvent, Context } = alkali.Variable ? alkali : alkali.default
3 | import { open, compareKeys } from 'lmdb'
4 | import when from './util/when.js'
5 | import { WeakValueMap } from './util/WeakValueMap.js'
6 | import ExpirationStrategy from './ExpirationStrategy.js'
7 | import * as fs from 'fs'
8 | import * as crypto from 'crypto'
9 | import * as CBOR from 'cbor-x'
10 | import { AccessError, ConcurrentModificationError, ShareChangeError } from './util/errors.js'
11 | import { Database, IterableOptions, OperationsArray } from './storage/Database.js'
12 | //import { mergeProgress } from './UpdateProgress'
13 | import { registerClass, addProcess } from './util/process.js'
14 | import { DEFAULT_CONTEXT, RequestContext } from './RequestContext.js'
15 | import { platform } from 'os'
16 |
17 | let getCurrentContext = () => currentContext
18 |
19 | const DEFAULT_INDEXING_DELAY = 20
20 | const DEFAULT_INDEXING_CONCURRENCY = 20
21 | const expirationStrategy = ExpirationStrategy.defaultInstance
22 | const instanceIdsMap = new WeakValueMap()
23 | const DB_VERSION_KEY = Buffer.from([1, 1]) // table metadata
24 | const INITIALIZING_PROCESS_KEY = Buffer.from([1, 4])
25 | // everything after 9 is cleared when a db is cleared
26 | const SHARED_STRUCTURE_KEY = Buffer.from([1, 10])
27 | const INITIALIZING_LAST_KEY = Buffer.from([1, 7])
28 | const INITIALIZATION_SOURCE = 'is-initializing'
29 | const INITIALIZATION_SOURCE_SET = new Set([INITIALIZATION_SOURCE]) // referenced by InitializingIndexRequest below
29 | const DISCOVERED_SOURCE = 'is-discovered'
30 | const SHARED_MEMORY_THRESHOLD = 1024
31 | export const INVALIDATED_ENTRY = { state: 'invalidated'}
32 | const INDEXING_MODE = {}
33 | const INVALIDATED_STATE = 1
34 | const ONLY_COMMITTED = 1
35 | const NO_CACHE = 2
36 | const AS_SOURCE = {}
37 | const EXTENSION = '.mdb'
38 | const DB_FORMAT_VERSION = 0
39 | export const allStores = new Map()
40 |
41 | export const ENTRY = Symbol('entry')
42 |
43 | let globalDoesInitialization
44 |
45 | global.cache = expirationStrategy // help with debugging
46 |
47 | export interface IndexRequest {
48 | previousEntry?: any
49 | pendingProcesses?: number[]
50 | deleted?: boolean
51 | sources?: Set<any>
52 | version: number
53 | triggers?: Set<any>
54 | fromValues?: Map<any, any>
55 | by?: any
56 | resolveOnCompletion?: Function[]
57 | }
58 | interface IndexEntryUpdate {
59 | sources: Set<any>
60 | triggers?: Set<any>
61 | }
62 |
63 | class InitializingIndexRequest implements IndexRequest {
64 | version: number
65 | constructor(version) {
66 | this.version = version
67 | }
68 | get triggers() {
69 | return INITIALIZATION_SOURCE_SET
70 | }
71 | get previousVersion() {
72 | return -1
73 | }
74 | }
75 |
76 | class InstanceIds extends Transform.as(VArray) {
77 | Class: any
78 | cachedValue: any
79 | cachedVersion: any
80 | transform() {
81 | return when(when(this.Class.resetProcess, () => this.Class.whenWritten), () => this.Class.getInstanceIds().asArray)
82 | }
83 | getValue() {
84 | return when(super.getValue(true), ids => {
85 | expirationStrategy.useEntry(this, ids.length)
86 | return ids
87 | })
88 | }
89 | valueOf() {
90 | return super.valueOf(true) // always allow promises to be returned
91 | }
92 | clearCache() {
93 | this.cachedValue = undefined
94 | this.cachedVersion = -1
95 | }
96 | }
97 |
98 | const MakePersisted = (Base) => secureAccess(class extends Base {
99 | static DB: any
100 | static syncVersion: number
101 | static Cached: any
102 | _cachedValue: any
103 | _cachedVersion: number
104 | _versions: any
105 | version: number
106 | static useWeakMap = true
107 | static whenWritten: Promise<any>
108 | static dbFolder = 'cachedb'
109 | static db: Database
110 | db: Database
111 | repetitiveGets: boolean
112 |
113 | static updatingProcessConnection: {
114 | sendMessage(data: any): Promise<any>
115 | }
116 |
117 | constructor(id) {
118 | super()
119 | if (id == null) {
120 | throw new TypeError('No id provided')
121 | }
122 | if (this.constructor === Persisted) {
123 | throw new TypeError('Can not directly instantiate abstract Persisted class')
124 | }
125 | if (this.checkSourceVersions)
126 | this.readyState = 'invalidated' // start in this state for items that might not be updated so freshly loaded entities don't bypass version checks
127 | this.id = id
128 | let sources = this.constructor.sources
129 | if (sources) {
130 | for (let i = 0; i < sources.length; i++) {
131 | this['source' + (i ? i : '')] = sources[i].for(id)
132 | }
133 | }
134 | }
135 |
136 | get staysUpdated() {
137 | return true
138 | }
139 |
140 | static get defaultInstance() {
141 | return this._defaultInstance || (this._defaultInstance = new Variable())
142 | }
143 |
144 | static for(id) {
145 | if (id > 0 && typeof id === 'string' || id == null) {
146 | throw new Error('Id should be a number or non-numeric string: ' + id + ' for ' + this.name)
147 | }
148 | let instancesById = this.instancesById
149 | if (!instancesById) {
150 | this.ready
151 | instancesById = this.instancesById
152 | }
153 | let instance = instancesById.getValue(id)
154 | if (!instance) {
155 | instance = new this(id)
156 | instancesById.setValue(id, instance)
157 | }
158 | return instance
159 | }
160 |
161 | static getByIds(ids) {
162 | // for optimized access to a set of ids
163 | if (!(ids instanceof Array))
164 | ids = Array.from(ids)
165 | let i = 0, l = ids.length
166 | let values = []
167 | let promised = []
168 | const getNext = () => {
169 | while (i < l) {
170 | let value = this.for(ids[i])
171 | if (value && value.then) {
172 | // a promise, put in the list of parallel promises
173 | let promisedI = i++
174 | promised.push(value.then(value => {
175 | values[promisedI] = value
176 | }))
177 | if (promised.length > (this.MAX_CONCURRENCY || 100)) {
178 | let promisedToFinish = promised
179 | promised = []
180 | return Promise.all(promisedToFinish).then(getNext)
181 | }
182 | } else {
183 | values[i++] = value
184 | }
185 | }
186 | if (promised.length > 0) {
187 | return Promise.all(promised)
188 | }
189 | }
190 | return when(getNext(), () => values)
191 | }
192 |
193 | static index(propertyName: string, indexBy?: (value, sourceKey) => any) {
194 | let index = this['index-' + propertyName]
195 | if (index) {
196 | return index
197 | }
198 | index = this['index-' + propertyName] = class extends Index({ Source : this }) {
199 | static indexBy(entity, sourceKey) {
200 | return indexBy ? indexBy(entity, sourceKey) : entity[propertyName]
201 | }
202 | }
203 | Object.defineProperty(index, 'name', { value: this.name + '-index-' + propertyName })
204 | index.start()
205 | return index
206 | }
207 |
208 | /* static with(properties) {
209 | let DerivedClass = super.with(properties)
210 | DerivedClass.sources = [this]
211 | let hasRelatedProperties
212 | for (let key of properties) {
213 | let property = properties[key]
214 | if (property.initialized) {
215 | property.initialized(this)
216 | hasRelatedProperties = true
217 | }
218 |
219 | }
220 | if (hasRelatedProperties) {
221 | DerivedClass.prototype.transform = function(data, ...propertySources) {
222 | for (let propertySource of propertySources) {
223 | data[DerivedClass.sources[i].key] = propertySource
224 | }
225 | return data
226 | }
227 | }
228 | return DerivedClass
229 | }*/
230 |
231 | static relatesBy(foreignKey: string) {
232 | let TargetClass = this
233 | function relatesBy() {}
234 | relatesBy.defineAs = function(propertyName, Parent) {
235 | let RelatedIndex = TargetClass.index(foreignKey)
236 | let sourceIndex = Parent.sources.push(RelatedIndex) - 1
237 | let existingTransform = Parent.prototype.transform
238 | Parent.prototype.transform = function(primaryData) {
239 | if (existingTransform) {
240 | primaryData = existingTransform.apply(this, arguments)
241 | }
242 | let source = arguments[sourceIndex]
243 | return Object.assign({ [propertyName]: source }, primaryData)
244 | }
245 | Parent.assign({
246 | [propertyName]: VArray.of(TargetClass)
247 | })
248 | }
249 | return relatesBy
250 | }
251 |
252 | static relatedBy(foreignKey: string) {
253 | let TargetClass = this
254 | function relatedBy() {}
255 | relatedBy.defineAs = function(propertyName, Parent) {
256 | let ParentSource = Parent.sources[0]
257 | let RelatedIndex = ParentSource.index(foreignKey)
258 | let existingTransform = Parent.prototype.transform
259 | Parent.prototype.transform = function(primaryData) {
260 | if (existingTransform) {
261 | primaryData = existingTransform.apply(this, arguments)
262 | }
263 | return when(primaryData, primaryData => {
264 | let reference = foreignKey.call ? foreignKey(primaryData) : primaryData[foreignKey]
265 | return (reference instanceof Array ?
266 | Promise.all(reference.map(ref => TargetClass.for(ref))) :
267 | TargetClass.for(reference)).then(relatedValue =>
268 | Object.assign({ [propertyName]: relatedValue }, primaryData))
269 | })
270 | }
271 | TargetClass.notifies({
272 | updated(event, by) {
273 | RelatedIndex.for(by.id).getIndexedKeys().map(fromId => {
274 | Parent.for(fromId).updated(event)
275 | }).resolveData()
276 | }
277 | })
278 | Parent.assign({
279 | [propertyName]: TargetClass
280 | })
281 | }
282 | return relatedBy
283 | }
284 |
285 | static cacheWith(properties) {
286 | const CachedWith = Cached.from(this).assign(properties)
287 | Object.defineProperty(CachedWith, 'name', {
288 | value: this.name + '-with-' + Object.keys(properties).filter(key => properties[key] && properties[key].defineAs).join('-')
289 | })
290 | CachedWith.start()
291 | return CachedWith
292 | }
293 |
294 | transform(source) {
295 | return
296 | }
297 |
298 | static updatesRecorded(event) {
299 | return (event && event.updatesInProgress) ? Promise.all(event.updatesInProgress) : Promise.resolve()
300 | }
301 |
302 | delete() {
303 | return this.constructor.remove(this.id)
304 | }
305 |
306 | reset(action) {
307 | this.updated()
308 | }
309 |
310 | static get ready() {
311 | return this.start()
312 | }
313 | static start() {
314 | if (this.name == '_a')
315 | return
316 | if (!this.hasOwnProperty('_ready')) {
317 | let resolver
318 | this._ready = Promise.resolve(this.initialize())
319 | this._ready.then(() => {
320 | //console.log(this.name, 'is ready and initialized')
321 | this.initialized = true
322 | }, (error) => {
323 | console.error('Error initializing', this.name, error)
324 | })
325 | }
326 | return this._ready
327 | }
328 |
329 | static clearAllData() {
330 | let db = this.db
331 | let count = 0
332 | return db.clearAsync().then(() =>
333 | console.debug('Cleared the database', this.name, ', rebuilding'))
334 | }
335 |
336 | static register(sourceCode?: { id?: string, version?: number }) {
337 | // check the transform hash
338 | if (sourceCode) {
339 | let moduleFilename = sourceCode.id || sourceCode
340 | if (sourceCode.version) {
341 | // manually provide hash
342 | this.version = sourceCode.version
343 | } else if (typeof moduleFilename == 'string') {
344 | // create a hash from the module source
345 | this.version = fs.statSync(moduleFilename).mtime.getTime()
346 | let hmac = crypto.createHmac('sha256', 'cobase')
347 | hmac.update(fs.readFileSync(moduleFilename, { encoding: 'utf8' }))
348 | this.transformHash = hmac.digest('hex')
349 | }
350 | }
351 | return this.ready
352 | }
353 |
354 | static get doesInitialization() {
355 | return this._doesInitialization === undefined ? globalDoesInitialization : this._doesInitialization
356 | }
357 | static set doesInitialization(flag) {
358 | this._doesInitialization = flag
359 | }
360 | static initializeRootDB() {
361 | const db = this.rootDB
362 | this.rootStore = this
363 | // TODO: Might be better use Buffer.allocUnsafeSlow(6)
364 | const processKey = this.processKey = Buffer.from([1, 3, (process.pid >> 24) & 0xff, (process.pid >> 16) & 0xff, (process.pid >> 8) & 0xff, process.pid & 0xff])
365 | let initializingProcess
366 | initializingProcess = db.get(INITIALIZING_PROCESS_KEY)
367 | initializingProcess = initializingProcess && +initializingProcess.toString()
368 | this.otherProcesses = Array.from(db.getRange({
369 | start: Buffer.from([1, 3]),
370 | end: INITIALIZING_PROCESS_KEY,
371 | }).map(({key, value}) => (key[2] << 24) + (key[3] << 16) + (key[4] << 8) + key[5])).filter(pid => !isNaN(pid))
372 | if (!db.readOnly) {
373 | db.putSync(processKey, '1') // register process, in ready state
374 | if ((!initializingProcess || !this.otherProcesses.includes(initializingProcess)) && this.doesInitialization !== false) {
375 | initializingProcess = null
376 | db.putSync(INITIALIZING_PROCESS_KEY, process.pid.toString())
377 | }
378 | }
379 | if (this.otherProcesses.includes(process.pid)) {
380 | //console.warn('otherProcesses includes self')
381 | this.otherProcesses.splice(this.otherProcesses.indexOf(process.pid), 1)
382 | }
383 | this.initializingProcess = initializingProcess
384 | this.whenUpgraded = Promise.resolve()
385 | const waitForUpgraded = () => {
386 | let whenUpgraded = this.whenUpgraded
387 | whenUpgraded.then(() => setTimeout(() => {
388 | if (whenUpgraded == this.whenUpgraded && !db.readOnly)
389 | try {
390 | this.db.removeSync(INITIALIZING_PROCESS_KEY)
391 | } catch (error) {
392 | console.warn(error.toString())
393 | }
394 | else
395 | return waitForUpgraded()
396 | }, 10))
397 | }
398 | waitForUpgraded()
399 | }
400 |
401 | static getStructureVersion() {
402 | // default version handling is just to get the static version and hash it with source versions, but this can be overridden with something
403 | // that gets this asynchronously or uses other logic
404 | let aggregateVersion = 0
405 | for (let Source of this.sources || []) {
406 | let version = Source.getStructureVersion && Source.getStructureVersion() || 0
407 | aggregateVersion = (aggregateVersion ^ version) * 1049011 + (aggregateVersion / 5555555 >>> 0)
408 | }
409 | return aggregateVersion ^ (this.version || 0)
410 | }
411 | static openDatabase() {
412 | const options = {
413 | compression: true,
414 | useFloat32: 3, // DECIMAL_ROUND
415 | sharedStructuresKey: SHARED_STRUCTURE_KEY,
416 | cache: { clearKeptInterval: 20 },
417 | noMemInit: true,
418 | //encoder: CBOR,
419 | //overlappingSync: platform() != 'win32',
420 | useWritemap: false,
421 | //readOnly: true,
422 | }
423 | if (this.maxSharedStructures)
424 | options.maxSharedStructures = this.maxSharedStructures
425 | if (this.shouldShareStructure)
426 | options.shouldShareStructure = this.shouldShareStructure
427 | if (this.maxDbs)
428 | options.maxDbs = this.maxDbs
429 | if (this.useWritemap)
430 | options.useWritemap = this.useWritemap
431 | if (this.useFloat32)
432 | options.useFloat32 = this.useFloat32
433 | if (clearOnStart) {
434 | console.info('Completely clearing', this.name)
435 | options.clearOnStart = true
436 | }
437 | this.rootDB = open(this.dbFolder + '/' + this.name + EXTENSION, options)
438 |
439 | Object.assign(this, this.rootDB.get(DB_VERSION_KEY))
440 | this.prototype.db = this.db = this.openDB(this, { useVersions: true, cache: { clearKeptInterval: 20 } })
441 | return true
442 | }
443 |
444 | static initialize() {
445 | this.instancesById = new (this.useWeakMap ? WeakValueMap : Map)()
446 |
447 | clearTimeout(this._registerTimeout)
448 | if (allStores.get(this.name)) {
449 | throw new Error(this.name + ' already registered')
450 | }
451 | if (!storesObject[this.name])
452 | storesObject[this.name] = this
453 | allStores.set(this.name, this)
454 | for (let Source of this.sources || []) {
455 | if (Source.start)
456 | Source.start()
457 | Source.notifies(this)
458 | }
459 | let isRoot = this.openDatabase()
460 | this.instancesById.name = this.name
461 | return when(this.getStructureVersion(), structureVersion => {
462 | this.expectedDBVersion = (structureVersion || 0) ^ (DB_FORMAT_VERSION << 12)
463 | this.rootDB.transactionSync(() => {
464 | console.debug('start txn',this.name, process.pid)
465 | return new Promise(resolve => this.releaseStartTxn = () => {
466 | console.debug('done txn',this.name, process.pid)
467 | resolve(null)
468 | })
469 |
470 | })
471 | if (isRoot)
472 | this.initializeRootDB()
473 | let initializingProcess = this.rootStore.initializingProcess
474 | const db = this.db
475 | registerClass(this)
476 |
477 | let whenEachProcess = []
478 | for (const pid of this.otherProcesses) {
479 | whenEachProcess.push(addProcess(pid, this).catch(() =>
480 | this.cleanupDeadProcessReference(pid, initializingProcess)))
481 | }
482 | if (this.doesInitialization === false)
483 | this.releaseStartTxn()
484 | // make sure these are inherited
485 | if (initializingProcess || this.doesInitialization === false) {
486 | // there is another process handling initialization
487 | return when(whenEachProcess.length > 0 && Promise.all(whenEachProcess), (results) => {
488 | this.releaseStartTxn()
489 | console.debug('Connected to each process complete and finished reset initialization')
490 | })
491 | }
492 | return this.doDataInitialization().then(() => {
493 | this.releaseStartTxn()
494 | })
495 | }, (error) => {
496 | console.error('Error getting database version', error)
497 | })
498 | }
499 |
500 | static doDataInitialization() {
501 | let whenThisUpgraded
502 | try {
503 |
504 | whenThisUpgraded = when(this.initializeData(), () => {
505 | }, (error) => {
506 | console.error('Error initializing database for', this.name, error)
507 | })
508 | } catch (error) {
509 | console.error(error)
510 | whenThisUpgraded = Promise.resolve()
511 | }
512 | this.rootStore.whenUpgraded = this.rootStore.whenUpgraded.then(() => whenThisUpgraded)
513 | return whenThisUpgraded
514 | }
515 | static cleanupDeadProcessReference(pid, initializingProcess) {
516 | // error connecting to another process, which means it is dead/old and we need to clean up
517 | // and possibly take over initialization
518 | let index = this.otherProcesses.indexOf(pid)
519 | const db = this.rootDB
520 | if (index > -1) {
521 | this.otherProcesses.splice(index, 1)
522 | let deadProcessKey = Buffer.from([1, 3, (pid >> 24) & 0xff, (pid >> 16) & 0xff, (pid >> 8) & 0xff, pid & 0xff])
523 | let invalidationState = db.get(deadProcessKey)
524 | if (this.doesInitialization !== false && !db.readOnly) {
525 | db.removeSync(deadProcessKey)
526 | }
527 | }
528 | if (initializingProcess == pid && this.doesInitialization !== false) {
529 | let doInit
530 | //db.transactionSync(() => {
531 | // make sure it is still the initializing process
532 | initializingProcess = db.get(Buffer.from([1, 4]))
533 | initializingProcess = initializingProcess && +initializingProcess.toString()
534 | if (initializingProcess == pid && !db.readOnly) {
535 | // take over the initialization process
536 | // console.log('Taking over initialization of', this.name, 'from process', initializingProcess)
537 | initializingProcess = process.pid
538 | db.putSync(INITIALIZING_PROCESS_KEY, initializingProcess.toString())
539 | doInit = true
540 |
541 | }
542 | //})
543 | if (initializingProcess == process.pid) {
544 | return this.doDataInitialization()
545 | }
546 | }
547 |
548 | }
549 |
550 | static async reset() {
551 | if (this.sources && this.sources[0])
552 | this.sources[0].wasReset = false
553 | await this.clearAllData()
554 | await this.resetAll()
555 | this.updateDBVersion()
556 | this.resumeQueue()
557 | }
558 | static async initializeData() {
559 | const db = this.db
560 | this.state = 'initializing data'
561 | //console.log('comparing db versions', this.name, this.dbVersion, this.expectedDBVersion)
562 | if (this.dbVersion == this.expectedDBVersion) {
563 | // up to date, all done
564 | this.releaseStartTxn()
565 | } else {
566 | console.log('transform/database version mismatch, resetting db table', this.name, this.expectedDBVersion, this.dbVersion, this.version)
567 | this.wasReset = true
568 | this.resumeFromKey = true
569 | this.startVersion = getNextVersion()
570 | const clearDb = !!this.dbVersion // if there was previous state, clear out all entries
571 | await this.clearAllData()
572 | this.releaseStartTxn()
573 | await this.resetAll()
574 | this.updateDBVersion()
575 | }
576 | let readyPromises = []
577 | for (let Source of this.sources || []) {
578 | readyPromises.push(Source.ready)
579 | }
580 | await Promise.all(readyPromises)
581 | this.resumePromise = this.resumeQueue() // don't wait for this, it has its own separate promise system
582 | }
583 |
584 |
585 | valueOf(mode) {
586 | return super.valueOf(mode || true)
587 | }
588 |
589 | getValue(mode) {
590 | return this.constructor.get(this.id, mode)
591 | }
592 |
593 | gotValue(value) {
594 | // bypass any variable checks, since the data is coming from a DB
595 | return value
596 | }
597 | updated(event = new ReplacedEvent(), by?) {
598 | if (!event.visited) {
599 | event.visited = new Set() // TODO: Would like to remove this at some point
600 | }
601 | if (!event.source) {
602 | event.source = this
603 | }
604 | let context = getCurrentContext()
605 | if (context && !event.triggers && context.connectionId) {
606 | event.triggers = [ context.connectionId ]
607 | }
608 |
609 | let Class = this.constructor as PersistedType
610 | super.updated(event, by)
611 | Class.updated(event, this) // main handling occurs here
612 | // notify class listeners too
613 | return event
614 | }
615 |
616 | static instanceSetUpdated(event) {
617 | let instanceIds = instanceIdsMap.getValue(this.name)
618 | if (instanceIds) {
619 | instanceIds.updated(event)
620 | }
621 | }
622 |
623 | static clearEntryCache(id) {
624 | let entryCache = this._entryCache
625 | if (entryCache) {
626 | let entry = entryCache.get(id)
627 | if (entry !== undefined) {
628 | expirationStrategy.deleteEntry(entry)
629 | entryCache.delete(id)
630 | }
631 | }
632 | }
633 |
634 | static invalidateEntry(id, event) {
635 | this.clearEntryCache(id)
636 | }
637 |
638 | static update(id, event) {
639 | // this an easier way to manually call the updated process
640 | return this.updated(new ReplacedEvent(), { id })
641 | }
642 |
643 | static updated(event = new ReplacedEvent(), by?) {
644 | if (!event.visited) {
645 | event.visited = new Set() // TODO: Would like to remove this at some point
646 | }
647 | if (event.visited.has(this)) {
648 | return event
649 | }
650 | event.visited.add(this)
651 | let context = getCurrentContext()
652 | if (context && !event.triggers && context.connectionId) {
653 | event.triggers = [ context.connectionId ]
654 | }
655 |
656 | if (event && !event.version) {
657 | event.version = getNextVersion()
658 | }
659 | let id = by && by.id
660 | let nextBy = {
661 | id,
662 | constructor: this
663 | }
664 | if (!event.source) {
665 | event.source = nextBy
666 | }
667 | if (event.type === 'discovered' || event.type === 'added' || event.type === 'deleted') {
668 | this.instanceSetUpdated(event)
669 | }
670 | if (event.type === 'reload-entry') {
671 | if (event.doUpdate && by.constructor == this)
672 | this.invalidateEntry(id, event, by)
673 | } else if (event.type === 'discovered') {
674 | // if we are being notified of ourself being created or directly set, ignore it
675 | // do nothing
676 | } else if (id) {
677 | this.invalidateEntry(id, event, by)
678 | }
679 | if (id) {
680 | let instance
681 | instance = this.instancesById.getValue(id)
682 | if (instance)
683 | instance.updated(event, nextBy)
684 | }
685 | for (let listener of this.listeners || []) {
686 | listener.updated(event, nextBy)
687 | }
688 |
689 | if (!context || !context.expectedVersions) {
690 | context = DEFAULT_CONTEXT
691 | }
692 | context.expectedVersions[this.name] = event.version
693 | const whenUpdateProcessed = event.whenUpdateProcessed
694 | if (whenUpdateProcessed) {
695 | this.whenUpdateProcessed = whenUpdateProcessed
696 | }
697 |
698 | return event
699 | }
700 |
701 | static saveDBVersions() {
702 | this.rootDB.putSync(DB_VERSION_KEY, {
703 | dbVersion: this.dbVersion,
704 | childStores: this.childStores && this.childStores.map && this.childStores.map(childStore => ({
705 | name: childStore.name,
706 | dbVersion: childStore.dbVersion,
707 | }))
708 | })
709 | }
710 |
711 | static updateDBVersion() {
712 | let version = this.startVersion
713 | this.dbVersion = this.expectedDBVersion
714 | console.debug('saving db version', this.name, this.dbVersion)
715 | this.rootStore.saveDBVersions()
716 | return version
717 | }
718 |
719 | notifies(target) {
720 | let context = getCurrentContext()
721 | if (context) {
722 | (this.listenersWithContext || (this.listenersWithContext = new Map())).set(target, context)
723 | }
724 | return super.notifies(target)
725 | }
726 | stopNotifies(target) {
727 | // standard variable handling
728 | if (this.listenersWithContext) {
729 | this.listenersWithContext.delete(target)
730 | }
731 | return super.stopNotifies(target)
732 | }
733 | static subscribedInstances: Map<any, any>
734 | init() {
735 | if (!this.subscribedInstances) {
736 | this.subscribedInstances = new Map()
737 | }
738 | this.subscribedInstances.set(this.id, this)
739 | return super.init()
740 | }
741 | cleanup() {
742 | this.subscribedInstances.delete(this.id)
743 | return super.cleanup()
744 | }
745 |
746 | static notifies(target) {
747 | let context = getCurrentContext()
748 | if (context) {
749 | (this.listenersWithContext || (this.listenersWithContext = new Map())).set(target, context)
750 | }
751 | // standard variable handling (don't use alkali's contextual notifies)
752 | return Variable.prototype.notifies.call(this, target)
753 | }
754 | static stopNotifies(target) {
755 | // standard variable handling
756 | if (this.listenersWithContext) {
757 | this.listenersWithContext.delete(target)
758 | }
759 | return Variable.prototype.stopNotifies.call(this, target)
760 | }
761 | static whenUpdatedInContext(waitForIndexing) {
762 | // transitively wait on all sources that need to update to this version
763 | let promises = []
764 | for (let Source of this.sources || []) {
765 | let whenUpdated = Source.whenUpdatedInContext && Source.whenUpdatedInContext()
766 | if (whenUpdated && whenUpdated.then) {
767 | promises.push(whenUpdated)
768 | }
769 | }
770 | let whenReady
771 | if (promises.length > 1) {
772 | whenReady = Promise.all(promises)
773 | } else if (promises.length == 1) {
774 | whenReady = promises[0]
775 | }
776 | if (waitForIndexing) {
777 | let currentContext = getCurrentContext()
778 | let updateContext = (currentContext && currentContext.expectedVersions) ? currentContext : DEFAULT_CONTEXT
779 | return when(whenReady, () => this.whenIndexedAndCommitted)/*{
780 | if (updateContext.expectedVersions && updateContext.expectedVersions[this.name] > this.lastIndexedVersion && this.queue && this.queue.size > 0) {
781 | // if the expected version is behind, wait for processing to finish
782 | return this.requestProcessing(1) // up the priority
783 | }
784 | })*/
785 | }
786 | return whenReady
787 | }
788 | static get instanceIds() {
789 | let instanceIds = instanceIdsMap.getValue(this.name)
790 | if (!instanceIds) {
791 | instanceIdsMap.setValue(this.name, instanceIds = new InstanceIds())
792 | instanceIds.Class = this
793 | }
794 | return instanceIds
795 | }
796 | exclusiveLock(executeWithLock: () => any) {
797 | let promisedResult
798 | if (this.currentLock) {
799 | let context = getCurrentContext()
800 | const executeInContext = () => context ? context.executeWithin(executeWithLock) : executeWithLock()
801 | promisedResult = this.currentLock.then(executeInContext, executeInContext)
802 | } else {
803 | let result = executeWithLock()
804 | if (result && result.then)
805 | promisedResult = result
806 | else
807 | return result
808 | }
809 | let thisLock, sync
810 | const afterExecution = () => {
811 | if (thisLock === this.currentLock) {
812 | this.currentLock = null
813 | }
814 | sync = true
815 | }
816 | thisLock = this.currentLock = promisedResult.then(afterExecution, (error) => {
817 | // Probably need to review if uncaught promise rejections are properly handled
818 | console.error(error)
819 | afterExecution()
820 | })
821 | if (sync) {
822 | this.currentLock = null
823 | }
824 | return promisedResult
825 | }
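// Usage sketch (illustrative): exclusiveLock serializes work on one instance;
// a second call queues behind the current lock's promise and runs within the
// caller's context. Assuming a hypothetical read-modify-write on a Persisted
// instance `task`:
//
//   task.exclusiveLock(async () => {
//     const value = await task.getValue()
//     return task.put(Object.assign({}, value, { count: (value.count || 0) + 1 }))
//   })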
826 |
827 | static tryForQueueEntry(id, action) {
828 | this.lastIndexingId = id
829 | const onQueueError = async (error) => {
830 | let indexRequest = this.db.cache.get(id) || {}
831 | let version = indexRequest.version
832 | if (error.isTemporary) {
833 | let retries = indexRequest.retries = (indexRequest.retries || 0) + 1
834 | this.state = 'retrying index in ' + retries * 1000 + 'ms'
835 | if (retries < (this.maxRetries || 1000)) {
836 | this.isRetrying = true
837 | await delay(retries * 1000)
838 | this.isRetrying = false
839 | //console.info('Retrying index entry', this.name, id, error)
840 | return this.tryForQueueEntry(id, action)
841 | } else {
842 | console.info('Too many retries', this.name, id, retries)
843 | }
844 | }
845 | if (indexRequest && indexRequest.version !== version) return // if at any point it is invalidated, break out, don't log errors from invalidated states
846 | console.warn('Error indexing', this.name, id, error)
847 | if (this.queue && this.queue.delete)
848 | this.queue.delete(id) // give up and delete it
849 | }
850 | try {
851 | let result = action(id)
852 | if (result && result.catch)
853 | return result.catch(error => onQueueError(error))
854 | } catch(error) {
855 | return onQueueError(error)
856 | }
857 | }
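// Note on the retry policy above (derived from the code, not original prose):
// a temporary error on retry n waits n * 1000ms, a linear backoff. With the
// default ceiling of 1000 retries that is 1000 * 1001 / 2 seconds (~5.8 days)
// of cumulative waiting before giving up; set a smaller this.maxRetries to bound it.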
858 |
859 | static queue: Map<any, any>
860 | static async processQueue(queue) {
861 | this.state = 'waiting to process queue'
862 | await this.ready
863 | this.state = 'processing'
864 | if (this.onStateChange) {
865 | this.onStateChange({ processing: true, started: true })
866 | }
867 | let cpuUsage = process.cpuUsage()
868 | let cpuTotalUsage = cpuUsage.user + cpuUsage.system
869 | let lastTime = Date.now()
870 | let concurrencyAdjustment = 1
871 | let niceAdjustment = 2
872 | try {
873 | queue = queue || this.queue
874 | let initialQueueSize = queue.size
875 | //currentlyProcessing.add(this)
876 | if (initialQueueSize > 100 || initialQueueSize == undefined) {
877 | console.log('Indexing', initialQueueSize || '', 'for', this.name)
878 | }
879 | let actionsInProgress = new Set()
880 | this.actionsInProgress = actionsInProgress
881 | let sinceLastStateUpdate = 0
882 | let lastTime = Date.now()
883 | let delayMs = 10
884 | let indexed = 0
885 | do {
886 | if (this.nice > 0)
887 | await delay(this.nice) // short delay for other processing to occur
888 | for (let [ id ] of queue) {
889 | if (queue.isReplaced)
890 | return
891 | sinceLastStateUpdate++
892 | this.state = 'indexing entry ' + id
893 | let now = Date.now()
894 | indexed++
895 | let desiredConcurrentRatio = actionsInProgress.size / Math.min(indexed, this.MAX_CONCURRENCY || DEFAULT_INDEXING_CONCURRENCY)
896 | delayMs = Math.min(Math.max(delayMs, 1) * (desiredConcurrentRatio + Math.sqrt(indexed)) / (Math.sqrt(indexed) + 1), (actionsInProgress.size + 4) * 100)
897 | while (this.isRetrying) {
898 | await delay(1000 + delayMs)
899 | delayMs = (delayMs + 10) * 2
900 | }
901 | this.delayMs = delayMs
902 | lastTime = now + delayMs
903 | let completion = this.forQueueEntry(id)
904 | if (completion && completion.then) {
905 | completion.id = id
906 | actionsInProgress.add(completion)
907 | completion.then(() => actionsInProgress.delete(completion))
908 | }
909 |
910 | if (sinceLastStateUpdate > (this.MAX_CONCURRENCY || DEFAULT_INDEXING_CONCURRENCY)) {
911 | // we have processed enough, commit our changes so far
912 | this.averageConcurrencyLevel = ((this.averageConcurrencyLevel || 0) + actionsInProgress.size) / 2
913 | if (this.resumeFromKey) {// only update if we are actually resuming
914 | for (let last of actionsInProgress) {
915 | this.lastIndexingId = last.id
916 | break
917 | }
918 | this.resumeFromKey = this.lastIndexingId
919 | }
920 | }
921 | await delay(delayMs * desiredConcurrentRatio)
922 | }
923 | this.state = 'awaiting final indexing of ' + actionsInProgress.size
924 | await Promise.all(actionsInProgress) // then wait for all indexing to finish everything
925 | } while (queue.size > 0)
926 | await this.whenValueCommitted
927 | this.lastIndexedVersion = this.highestVersionToIndex
928 | if (initialQueueSize > 100 || initialQueueSize == undefined) {
929 | console.log('Finished indexing', initialQueueSize || '', 'for', this.name)
930 | }
931 | } catch (error) {
932 | console.warn('Error occurred in processing index queue for', this.name, error)
933 | }
934 | this.state = 'processed'
935 | if (this.onStateChange) {
936 | this.onStateChange({ processing: true, started: false })
937 | }
938 | }
939 |
940 | static forQueueEntry(id) {
941 | return this.tryForQueueEntry(id, () =>
942 | when(this.get(id, INDEXING_MODE), () => {
943 | let entry = this.db.cache.get(id)
944 | return when(entry && entry.whenIndexed, () => {
945 | if (this.queue)
946 | this.queue.delete(id)
947 | })
948 | }))
949 | }
950 |
951 | static requestProcessing(nice, queue) {
952 | // Indexing is performed one index at a time, until the indexing on that index is completed.
953 | // This is to prevent too much processing from being consumed by index processing,
954 | // and to allow dependent childStores to fully complete before downstream childStores start,
955 | // avoiding thrashing from repeated changes in values
956 | if (this.whenProcessingThisComplete && !queue) {
957 | // TODO: priority increases need to be transitively applied
958 | this.nice = Math.min(this.nice, nice) // once started, niceness can only go down (and priority up)
959 | } else {
960 | this.nice = nice
961 | let whenUpdatesReadable
962 | this.state = 'pending'
963 | this.whenProcessingThisComplete = Promise.all((this.sources || []).map(Source =>
964 | Source.whenProcessingComplete)).then(() =>
965 | this.processQueue(queue)).then(() => {
966 | this.state = 'ready'
967 | this.whenProcessingThisComplete = null
968 | //for (const sourceName in processingSourceVersions) {
969 | // sourceVersions[sourceName] = processingSourceVersions[sourceName]
970 | //}
971 | /*const event = new IndexingCompletionEvent()
972 | event.sourceVersions = sourceVersions
973 | event.sourceVersions[this.name] = lastIndexedVersion
974 | super.updated(event, this)*/
975 | })
976 | this.whenProcessingThisComplete.queue = queue
977 | //this.whenProcessingThisComplete.version = lastIndexedVersion
978 | }
979 | return this.whenProcessingThisComplete
980 | }
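// Niceness sketch (illustrative; `MyIndex` is hypothetical): lower nice means
// higher priority, and once a queue run has started, its niceness can only be
// lowered (priority raised), never raised:
//
//   MyIndex.requestProcessing(0)  // index as aggressively as possible
//   MyIndex.requestProcessing(50) // no-op while running, since 50 > current nice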
981 |
982 | static openDB(store, options?) {
983 | let db = store.db = this.rootDB.openDB(store.dbName || store.name, Object.assign({
984 | compression: true,
985 | useFloat32: 3, // DECIMAL_ROUND
986 | //encoder: CBOR,
987 | sharedStructuresKey: SHARED_STRUCTURE_KEY,
988 | }, options))
989 | store.rootDB = this.rootDB
990 | return db
991 | }
992 | static openChildDB(store, options?) {
993 | let db = this.openDB(store, options)
994 | let rootStore = store.rootStore = this.rootStore || this
995 | store.otherProcesses = rootStore.otherProcesses
996 | let index
997 | if (!rootStore.childStores) {
998 | rootStore.childStores = []
999 | }
1000 | if (!this.indices) {
1001 | this.indices = []
1002 | }
1003 | this.indices.push(store)
1004 | rootStore.childStores.find((entry, i) => {
1005 | if (entry.name == store.name) {
1006 | index = i
1007 | return true
1008 | }
1009 | })
1010 | if (index > -1) {
1011 | // replace the previously recorded child store, carrying over its saved db version
1012 | store.dbVersion = rootStore.childStores[index].dbVersion
1013 | rootStore.childStores[index] = store
1014 | } else {
1015 | rootStore.childStores.push(store)
1016 | }
1017 | return store.db
1018 | }
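// Usage sketch (illustrative, names hypothetical): a secondary store can be
// hung off a root store's environment; the child shares the root's LMDB file
// and is tracked in rootStore.childStores for db-version bookkeeping:
//
//   const indexDb = TaskStore.openChildDB(TasksByOwnerIndex, { cache: true })
//
// (Options are merged into the defaults above and passed to rootDB.openDB, so
// lmdb-store database options should be accepted here.)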
1019 |
1020 | static removeUnusedDBs() {
1021 | let unusedDBs = new Set()
1022 | for (let key of this.rootDB.getKeys({
1023 | start: Buffer.from([2])
1024 | })) {
1025 | unusedDBs.add(key.toString())
1026 | }
1027 | for (let store of [this, ...(this.childStores || [])]) {
1028 | unusedDBs.delete(store.dbName || store.name)
1029 | }
1030 | console.log('Removing unused dbs', Array.from(unusedDBs))
1031 | }
1032 |
1033 | static async resumeQueue() {
1034 | this.resumeFromKey = this.db.get(INITIALIZING_LAST_KEY)
1035 | if (!this.resumeFromKey) {
1036 | this.state = 'ready'
1037 | this.resumePromise = undefined
1038 | return
1039 | }
1040 | console.debug(this.name + ' Resuming from key ' + this.resumeFromKey)
1041 | let idsToInitiallyIndex = this.getIdsFromKey(this.resumeFromKey)
1042 | let db = this.db
1043 |
1044 | const beforeCommit = () => {
1045 | if (this.resumeFromKey)
1046 | db.put(INITIALIZING_LAST_KEY, this.resumeFromKey, 1)
1047 | }
1048 | db.on('beforecommit', beforeCommit)
1049 | this.state = 'building'
1050 | console.debug('Created queue for initial index build', this.name)
1051 | this.initialIndexCount = 0
1052 | await this.requestProcessing(30, idsToInitiallyIndex.map(id => {
1053 | this.initialIndexCount++
1054 | return [ id ]
1055 | }))
1056 | console.debug('Finished initial index build of', this.name)
1057 | db.off('beforecommit', beforeCommit)
1058 | this.resumeFromKey = null
1059 | await db.remove(INITIALIZING_LAST_KEY)
1060 | this.state = 'ready'
1061 | this.resumePromise = undefined
1062 | }
1063 |
1064 | static resetAll(): any {
1065 | }
1066 | })
1067 |
1068 | const KeyValued = (Base, { versionProperty, valueProperty }) => class extends Base {
1069 |
1070 | static childStores: {
1071 | forValue: Function
1072 | prepareCommit: Function
1073 | lastUpdate: number
1074 | }[]
1075 |
1076 | static get(id, mode?) {
1077 | let context = getCurrentContext()
1078 | let entry = this.db.getEntry(id, mode ? NO_CACHE : 0)
1079 | if (entry) {
1080 | if (context) {
1081 | context.setVersion(entry.version)
1082 | if (context.ifModifiedSince >= entry.version) {
1083 | return NOT_MODIFIED
1084 | }
1085 | }
1086 | } else {
1087 | if (context) {
1088 | let version = getNextVersion()
1089 | context.setVersion(version)
1090 | }
1091 | }
1092 |
1093 | return entry && entry.value
1094 | }
1095 |
1096 | static is(id, value, event) {
1097 | let entry = this.db.getEntry(id, NO_CACHE)
1098 | if (!event) {
1099 | event = entry ? new ReplacedEvent() : new DiscoveredEvent()
1100 | }
1101 | event.triggers = [ DISCOVERED_SOURCE ]
1102 | event.source = { constructor: this, id }
1103 | event.version = getNextVersion()
1104 | if (this.initialized)
1105 | this.updated(event, { id, invalidate: false })
1106 | if (entry) {
1107 | entry.value = value
1108 | } else {
1109 | entry = {
1110 | value,
1111 | }
1112 | }
1113 | entry.version = event.version
1114 | return this.saveValue(id, entry, event.version)
1115 | }
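// Usage sketch (illustrative; `Task` is hypothetical): is() assigns a value
// directly, stamping it with a fresh version and emitting a discovered or
// replaced event before saving:
//
//   await Task.is(taskId, { name: 'write docs', done: false })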
1116 |
1117 | static saveValue(id, entry, version?) {
1118 | this.highestVersionToIndex = Math.max(this.highestVersionToIndex || 0, version)
1119 | let forValueResults = (this.indices && !entry.noIndex) ? this.indices.map(store => store.forValue(id, entry)) : []
1120 | let promises = forValueResults.filter(promise => promise && promise.then)
1121 |
1122 | const readyToCommit = (forValueResults) => {
1123 | if (entry.version !== version)
1124 | return
1125 | entry.abortables = null // indicate that this is no longer in process
1126 | let conditionalVersion = entry.fromVersion
1127 | let committed = this.whenValueCommitted = this.db.ifVersion(id, conditionalVersion, () => {
1128 | // the whole set of writes for this entry and downstream indices are committed one transaction, conditional on the previous version
1129 | for (let result of forValueResults) {
1130 | if (result)
1131 | result.commit()
1132 | }
1133 | let committed
1134 | let value = entry.value
1135 | //console.log('conditional header for writing transform ' + (value ? 'write' : 'delete'), id, this.name, conditionalVersion)
1136 | if (value === undefined) {
1137 | if (conditionalVersion === null) {
1138 | // already an undefined entry, nothing to do (but clear out the transition)
1139 | if (this.db.cache.getEntry(id) == entry && entry.version === version) {
1140 | this.db.cache.delete(id)
1141 | return
1142 | }
1143 | } else {
1144 | this.db.remove(id)
1145 | }
1146 | } else {
1147 | this.db.put(id, value, version)
1148 | }
1149 | })
1150 | this.whenWritten = committed.flushed || committed
1151 |
1152 | return committed.then((successfulWrite) => {
1153 | //if (this.transitions.get(id) == transition && !transition.invalidating)
1154 | // this.transitions.delete(id)
1155 | if (!successfulWrite) {
1156 | this.db.cache.delete(id)
1157 | let entry = this.db.getEntry(id)
1158 | console.debug('unsuccessful write of transform, data changed, updating', id, this.name, version, conditionalVersion, entry && entry.version)
1159 | let event = new ReloadEntryEvent()
1160 | if (entry && entry.version >= version) {
1161 | event.version = entry.version
1162 | } else {
1163 | event.doUpdate = true
1164 | event.version = version
1165 | }
1166 | this.updated(event, { id, constructor: this })
1167 | }
1168 | })
1169 | }
1170 | let whenIndexedAndCommitted
1171 | if (promises.length == 0)
1172 | whenIndexedAndCommitted = readyToCommit(forValueResults)
1173 | else
1174 | whenIndexedAndCommitted = (entry.whenIndexed = Promise.all(forValueResults)).then(readyToCommit)
1175 | entry.committed = whenIndexedAndCommitted
1176 | return whenIndexedAndCommitted
1177 | }
1178 |
1179 | static reads = 0
1180 | static cachedReads = 0
1181 |
1182 | static getInstanceIds(range: IterableOptions) {
1183 | let db = this.db
1184 | range = range || {}
1185 | range.start = range.start || true
1186 | range.values = false
1187 | if (range.waitForAllIds && this.ready) {
1188 | delete range.waitForAllIds
1189 | return when(this.ready, () => this.getInstanceIds(range))
1190 | }
1191 | let iterable = db.getRange(range)
1192 | return iterable
1193 | }
1194 |
1195 | static entries(range) {
1196 | let db = this.db
1197 | return when(this.ready, () => {
1198 | let results = db.getRange(Object.assign({
1199 | start: true,
1200 | versions: true,
1201 | }, range))
1202 | return range && range.asIterable ? results : results.asArray
1203 | })
1204 | }
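// Usage sketch (illustrative): entries() waits for readiness and materializes
// an array by default; pass asIterable to walk a large table lazily:
//
//   const all = await Task.entries({}) // [{ key, value, version }, ...]
//   for (const entry of await Task.entries({ asIterable: true })) {
//     // streamed lazily from the db range
//   }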
1205 |
1206 | /**
1207 | * Iterate through all instance ids, starting from the given key
1208 | **/
1209 | static getIdsFromKey(startKey): number[] {
1210 | //console.log('getInstanceIdsAndVersionsSince', this.name, sinceVersion)
1211 | return this.db.getRange({
1212 | start: startKey,
1213 | snapshot: false,
1214 | values: false,
1215 | })
1216 | }
1217 |
1218 | static dataVersionBuffer: Buffer
1219 | static processKey: Buffer
1220 | static lastIndexedVersion: number
1221 |
1222 | static remove(id, event?) {
1223 | if (id > 0 && typeof id === 'string' || !id) {
1224 | throw new Error('Id should be a number or non-numeric string: ' + id)
1225 | }
1226 |
1227 | return this.updated(event || (event = new DeletedEvent()), { id }).whenWritten
1228 | }
1229 |
1230 | setValue(value) {
1231 | this.constructor.is(this.id, value)
1232 | }
1233 |
1234 | }
1235 |
1236 | export const PersistedBase = KeyValued(MakePersisted(Variable), {
1237 | valueProperty: 'value',
1238 | versionProperty: 'version'
1239 | })
1240 |
1241 | export class Persisted extends PersistedBase {
1242 | db: any
1243 | static dbFolder = 'db'
1244 |
1245 | static clearAllData() {
1246 | }
1247 |
1248 | static set(id, value, event) {
1249 | return this.is(id, value, event)
1250 | }
1251 |
1252 | patch(properties) {
1253 | return this.then((value) => {
1254 | return when(this.put(value = Object.assign({}, value, properties)), () => value)
1255 | })
1256 |
1257 | }
1258 | put(value, event) {
1259 | return this.constructor.is(this.id, value, event)
1260 | }
1261 | static syncVersion = 10
1262 | }
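// Usage sketch (illustrative; `Task` is a hypothetical subclass, and store
// registration/startup is handled elsewhere in this module's base classes):
//
//   class Task extends Persisted {}
//   await Task.set(1, { name: 'write docs' }) // alias for is()
//   await Task.for(1).patch({ done: true })   // read-merge-put
//   const task = Task.get(1)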
1263 |
1264 | export default Persisted
1265 | export const Persistable = MakePersisted(Transform)
1266 | interface PersistedType extends Function {
1267 | otherProcesses: any[]
1268 | instanceSetUpdated(event): any
1269 | updated(event, by): any
1270 | db: any
1271 | indices: any[]
1272 | listeners: Function[]
1273 | }
1274 |
1275 | export class Cached extends KeyValued(MakePersisted(Transform), {
1276 | valueProperty: 'cachedValue',
1277 | versionProperty: 'cachedVersion'
1278 | }) {
1279 | allowDirectJSON: boolean
1280 | static sources: any[]
1281 | static fetchAllIds: () => {}[]
1282 |
1283 | static get(id, mode?) {
1284 | let context = getCurrentContext()
1285 | return when(this.whenUpdatedInContext(), () => {
1286 | let entry = this.db.getEntry(id, mode ? NO_CACHE : 0)
1287 | if (entry) {
1288 | if (!entry.value) { // or only undefined?
1289 | let abortables = entry.abortables
1290 | //console.log('Running transform on invalidated', id, this.name, this.createHeader(entry[VERSION]), oldTransition)
1291 | this.runTransform(id, entry, mode)
1292 | /*if (abortables) {
1293 | // if it is still in progress, we can abort it and replace the result
1294 | //oldTransition.replaceWith = transition.value
1295 | for (let abortable of abortables) {
1296 | abortable()
1297 | }
1298 | }*/
1299 | return entry.value
1300 | }
1301 | if (context) {
1302 | context.setVersion(entry.version)
1303 | if (context.ifModifiedSince >= entry.version) {
1304 | return NOT_MODIFIED
1305 | }
1306 | }
1307 | when(entry.value, (result) => deepFreeze(result, 0))
1308 | return entry.value
1309 | }
1310 | let version = getNextVersion()
1311 | if (context)
1312 | context.setVersion(version)
1313 | entry = { version }
1314 | entry = this.runTransform(id, entry, mode)
1315 | when(entry.value, (result) => {
1316 | if (result !== undefined && !entry.invalidating) {
1317 | deepFreeze(result, 0)
1318 | let event = new DiscoveredEvent()
1319 | event.triggers = [ DISCOVERED_SOURCE ]
1320 | event.source = { constructor: this, id }
1321 | event.version = version
1322 | this.instanceSetUpdated(event)
1323 | this.updated(event, {
1324 | id,
1325 | constructor: this
1326 | })
1327 | }
1328 | })
1329 | return entry.value
1330 | })
1331 | }
1332 | static whenValueCommitted: Promise<any>
1333 | static runTransform(id, entry, mode) {
1334 | let version = entry.version
1335 | if (!entry.abortables)
1336 | entry.abortables = []
1337 | /*entry = {
1338 | version,
1339 | previousValue: entry.previousValue,
1340 | abortables: []
1341 | }*/
1342 | let cache = this.db.cache
1343 | cache.set(id, entry, -1) // enter in cache without LRFU tracking, keeping it in memory, this should be entered into LRFU once it is committed by the lmdb-store caching store logic
1344 | const removeTransition = () => {
1345 | if (cache.get(id) === entry && !entry.invalidating)
1346 | cache.delete(id)
1347 | }
1348 | let hasPromises
1349 | let inputData = this.sources ? this.sources.map(source => {
1350 | let data = source.get(id, AS_SOURCE)
1351 | if (data && data.then) {
1352 | hasPromises = true
1353 | }
1354 | return data
1355 | }) : []
1356 | try {
1357 | entry.value = when(when(hasPromises ? Promise.all(inputData) : inputData, inputData => {
1358 | if (inputData.length > 0 && inputData[0] === undefined && !this.sourceOptional) // if the first source value is undefined (and sources aren't optional), skip the transform
1359 | return
1360 | let context = getCurrentContext()
1361 | let transformContext = context ? context.newContext() : new RequestContext(null, null)
1362 | transformContext.abortables = entry.abortables
1363 | inputData.push(id)
1364 | return this.prototype.transform ? transformContext.executeWithin(() => this.prototype.transform.apply({ id }, inputData)) : inputData
1365 | }), result => {
1366 | if (entry.version != version) {
1367 | if (entry.replaceWith) {
1368 | return entry.replaceWith
1369 | }
1370 | return result
1371 | } // else normal transform path
1372 | entry.value = result
1373 | this.saveValue(id, entry, version)
1374 | return result
1375 | }, (error) => {
1376 | removeTransition()
1377 | if (error.__CANCEL__) {
1378 | return entry.replaceWith
1379 | }
1380 | throw error
1381 | })
1382 | } catch (error) {
1383 | removeTransition()
1384 | throw error
1385 | }
1386 | return entry
1387 | }
1388 |
1389 | getValue() {
1390 | return this.constructor.get(this.id)
1391 | }
1392 | is(value, event) {
1393 | // we skip getEntryData and pretend it wasn't in the cache... not clear if
1394 | // that is how we want is() to behave or not
1395 | this.constructor.is(this.id, value, event)
1396 | return this
1397 | }
1398 |
1399 | static openChildDB(store, options) {
1400 | if (!this.queue) {
1401 | this.queue = new Map()
1402 | }
1403 | return super.openChildDB(store, options)
1404 | }
1405 |
1406 |
1407 |
1408 | static updated(event, by?) {
1409 | let id = by && (typeof by === 'object' ? by.id : by) // if we are getting an update from a source instance
1410 | event = super.updated(event, by)
1411 | if (this.queue) {
1412 | if (by && by.constructor === this || // our own instances can notify us of events, ignore them
1413 | this.otherProcesses && event.sourceProcess &&
1414 | !(id && this.queue.has(id)) && // if it is in our queue, we need to update the version number in our queue
1415 | (this.otherProcesses.includes(event.sourceProcess) || // another process should be able to handle this
1416 | this.otherProcesses.some(otherProcessId => otherProcessId < process.pid))) { // otherwise, defer to the lowest number process to handle it
1417 | // we can skip these (unless they are in our queue, then we need to update)
1418 | return event
1419 | }
1420 | if (id && (event.type === 'discovered')) {
1421 | this.enqueue(id, event)
1422 | }
1423 | }
1424 | return event
1425 | }
1426 |
1427 | static enqueue(id, event, previousEntry?) {
1428 | if (this.resumeFromKey && compareKeys(this.resumeFromKey, id) < 0) // during initialization, we ignore updates because we are going to rebuild
1429 | return
1430 | const version = event.version
1431 | // queue up processing the event
1432 | let indexRequest = this.queue.get(id)
1433 |
1434 | if (indexRequest) {
1435 | // put it at that end so version numbers are in order, but don't alter the previous state or version, that is still what we will be diffing from
1436 | this.queue.delete(id)
1437 | this.queue.set(id, indexRequest)
1438 | indexRequest.version = version
1439 | if (event.triggers)
1440 | for (let trigger of event.triggers)
1441 | indexRequest.triggers.add(trigger)
1442 | } else {
1443 | this.queue.set(id, indexRequest = {
1444 | version: version,
1445 | previousEntry,
1446 | now: Date.now(),
1447 | triggers: event.triggers instanceof Set ? event.triggers : new Set(event.triggers),
1448 | })
1449 | /*if (indexRequest.previousState == INVALIDATED_ENTRY) {
1450 | indexRequest.fromValues = event.fromValues // need to have a shared map to update
1451 | indexRequest.by = by
1452 | }*/
1453 | this.requestProcessing(DEFAULT_INDEXING_DELAY)
1454 | }
1455 | if (!version) {
1456 | throw new Error('missing version')
1457 | }
1458 | indexRequest.deleted = event.type == 'deleted'
1459 | }
1460 |
1461 |
1462 | static async resetAll() {
1463 | console.debug('resetting', this.name)
1464 | let version = this.startVersion = 1
1465 | let allIds = this.fetchAllIds ? await this.fetchAllIds() :
1466 | (this.sources && this.sources[0] && this.sources[0].getInstanceIds) ?
1467 | await this.sources[0].getInstanceIds({
1468 | waitForAllIds: true,
1469 | }) : []
1470 | let committed
1471 | let queued = 0
1472 | console.log('loading ids for', this.name, 'with', allIds.length, 'ids')
1473 | let idCount = 0
1474 | for (let id of allIds) {
1475 | idCount++
1476 | if (this.instancesById.getValue(id)) {
1477 | // instance already in memory
1478 | this.for(id).updated()
1479 | continue
1480 | }
1481 | this.lastVersion = version++ // we give each entry its own version so that downstream childStores have unique versions to go off of
1482 | this.whenWritten = committed = this.db.put(id, 0, -version)
1483 | if (queued++ > 2000) {
1484 | //console.log('writing block of ids')
1485 | await this.whenWritten
1486 | //console.log('wrote block of ids')
1487 | queued = 0
1488 | }
1489 | }
1490 | console.log('loaded ids for', this.name, 'with', idCount, 'ids')
1491 | return committed
1492 | }
1493 |
1494 | static invalidateEntry(id, event, by) {
1495 | let db = this.db
1496 | let written
1497 | if (event && event.sourceProcess) {
1498 | // if it came from another process we can count on it to have written the update
1499 | db.cache.delete(id) // clear from cache
1500 | return
1501 | }
1502 | let version = event.version
1503 | if (this.indices) {
1504 | let entry
1505 | try {
1506 | entry = db.getEntry(id)
1507 | } catch (error) {
1508 | console.error(error)
1509 | }
1510 | if (entry) {
1511 | db.cache.expirer.used(entry, -1) // keep it pinned in memory
1512 | if (!entry.abortables) { // if this entry is in a transform and not committed, don't update fromVersion
1513 | entry.previousValue = entry.value
1514 | entry.fromVersion = entry.version
1515 | entry.abortables = []
1516 | }
1517 | entry.value = null // set as invalidated
1518 | entry.version = version // new version
1519 | // for deleted events, let the transform do the removal
1520 | } else {
1521 | entry = {
1522 | version,
1523 | abortables: [],
1524 | }
1525 | db.cache.set(id, entry, -1) // enter in cache without LRFU tracking, keeping it in memory
1526 | }
1527 | if (event.noIndex)
1528 | entry.noIndex = true
1529 | if (!by || by.invalidate !== false) {
1530 | written = this.forQueueEntry(id).then(() => entry.committed)
1531 | let lastCompletion = this.whenIndexedAndCommitted = (this.whenIndexedAndCommitted ?
1532 | Promise.all([this.whenIndexedAndCommitted, written]) : written).then(() => {
1533 | if (this.whenIndexedAndCommitted == lastCompletion)
1534 | this.whenIndexedAndCommitted = null
1535 | })
1536 | }
1537 | } else {
1538 | if (event && event.type === 'deleted') {
1539 | // completely empty entry for deleted items
1540 | written = db.remove(id)
1541 | } else if (!by || by.invalidate !== false) {
1542 | written = db.put(id, null, version)
1543 | }
1544 | }
1545 | this.whenWritten = written
1546 | if (!event.whenWritten)
1547 | event.whenWritten = written
1548 | }
1549 |
1550 | static async receiveRequest({ id, waitFor }) {
1551 | if (waitFor == 'get') {
1552 | console.log('waiting for entity to commit', this.name, id)
1553 | this.clearEntryCache(id)
1554 | await this.get(id)
1555 | let entry = this.db.getEntry(id)
1556 | if (entry) {
1557 | // wait for get to truly finish and be committed
1558 | await entry.whenIndexed
1559 | await entry.committed
1560 | }
1561 | // return nothing, so we don't create any overhead between processes
1562 | }
1563 | }
1564 |
1565 | static _version: number
1566 | static get version() {
1567 | if (this.sources) {
1568 | return this.sources.reduce((sum, Source) => sum + (Source.version || 0), this._version || 0)
1569 | } else {
1570 | return this._version || 1
1571 | }
1572 | }
1573 | static set version(version) {
1574 | this._version = version
1575 | }
1576 | static returnsAsyncIterables: boolean
1577 |
1578 | static from(...sources: Array<{ notifies: () => any, for: (id: any) => any, returnsAsyncIterables: boolean }>) {
1579 | if (!sources[0]) {
1580 | throw new Error('No source provided')
1581 | }
1582 | let Cached = class extends this {
1583 | get checkSourceVersions() {
1584 | return false
1585 | }
1586 | }
1587 | for (let Source of sources) {
1588 | if (Source.returnsAsyncIterables) {
1589 | this.returnsAsyncIterables = true
1590 | }
1591 | }
1592 | Cached.sources = sources
1593 | return Cached
1594 | }
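// Usage sketch (illustrative; class names hypothetical): from() wires source
// stores into a derived cache, and the subclass transform computes the cached
// value; note that runTransform appends the id as the last transform argument:
//
//   class TaskSummary extends Cached.from(Task) {
//     transform(task, id) {
//       return { id, title: task.name, open: !task.done }
//     }
//   }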
1595 | static derivedFrom(...sources: Array<any>) {
1596 | for (let source of sources) {
1597 | if (source.notifies) {
1598 | if (!this.sources)
1599 | this.sources = []
1600 | this.sources.push(source)
1601 | } else if (typeof source === 'function') {
1602 | this.prototype.transform = source
1603 | } else {
1604 | Object.assign(this, source)
1605 | }
1606 | }
1607 | this.start()
1608 | }
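// Usage sketch (illustrative): derivedFrom mixes sources (anything with a
// notifies method), a transform function, and/or plain option objects, then
// starts the store:
//
//   class TaskSummary extends Cached {}
//   TaskSummary.derivedFrom(Task, function (task, id) {
//     return { id, open: !task.done }
//   })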
1609 |
1610 | static getInstanceIds(range) {
1611 | if (!this.fetchAllIds && this.sources && this.sources[0] && this.sources[0].getInstanceIds) {
1612 | // if we don't know if we have all our ids, our source is a more reliable source of instance ids
1613 | return this.sources[0].getInstanceIds(range)
1614 | }
1615 | return super.getInstanceIds(range)
1616 | }
1617 |
1618 | static updateDBVersion() {
1619 | if (this.indices) {
1620 | console.debug('Setting up indexing for', this.name)
1621 | this.resumeFromKey = true
1622 | this.db.putSync(INITIALIZING_LAST_KEY, true)
1623 | }
1624 | super.updateDBVersion()
1625 | }
1626 |
1627 | static get whenProcessingComplete() {
1628 | return this.sources && Promise.all(this.sources.map(Source => Source.whenProcessingComplete))
1629 | }
1630 | }
1631 | type PermissionCheck = (source: any, session: any, action: string, args: Array<any>) => boolean | string
1632 |
1633 | type Secured = {
1634 | allow(...permissions: Array<PermissionCheck>): any
1635 | }
1636 |
1637 | export function secureAccess<T>(Class: T): T & Secured {
1638 | Class.allow = function(...permissions: Array<PermissionCheck>) {
1639 | let Class = this
1640 | let methodOverrides = {
1641 | for(id) {
1642 | let target = Class.for(id)
1643 | return new Proxy(target, handler)
1644 | },
1645 | stopNotifies(target) {
1646 | // skip permissions on this
1647 | return this.stopNotifies(target)
1648 | },
1649 | isChecked() {
1650 | return true
1651 | }
1652 | }
1653 | let handler = {
1654 | get(target, name) {
1655 | let value = target[name]
1656 | if (methodOverrides[name]) {
1657 | return methodOverrides[name].bind(target)
1658 | }
1659 | if (typeof value === 'function') {
1660 | return function() {
1661 | let context = getCurrentContext()
1662 | // create a new derivative context that includes the session, but won't
1663 | // update the version/timestamp
1664 | return context.newContext().executeWithin(() => {
1665 | let awaitingListener, variable, isAsync = false
1666 | const permitted = when(secureAccess.checkPermissions(permissions, target, name, Array.from(arguments)), (permitted) => {
1667 | if (permitted !== true) {
1668 | throw new AccessError('User does not have required permissions: ' + permitted + ' for ' + Class.name)
1669 | }
1670 | })
1671 | const whenPermitted = () =>
1672 | context.executeWithin(() => value.apply(target, arguments))
1673 | if (permitted && permitted.then) {
1674 | let result
1675 | let whenFinished = permitted.then(() => {
1676 | result = whenPermitted()
1677 | })
1678 | return {
1679 | then: (onFulfilled, onRejected) =>
1680 | whenFinished.then(() => {
1681 | return onFulfilled(result)
1682 | }, onRejected)
1683 | }
1684 | }
1685 | return whenPermitted()
1686 | })
1687 | }
1688 | } else {
1689 | return value
1690 | }
1691 | }
1692 | }
1693 | return new Proxy(this, handler)
1694 | }
1695 | return Class
1696 | }
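// Usage sketch (illustrative; getSession is a hypothetical helper): the
// permission functions handed to allow() are evaluated by the global
// secureAccess.checkPermissions hook, which defaults to allowing everything
// (see below), so a real deployment overrides that hook:
//
//   secureAccess.checkPermissions = (permissions, target, action, args) => {
//     const session = getSession() // hypothetical session lookup
//     for (const check of permissions) {
//       const result = check(target, session, action, args)
//       if (result !== true) return result // a string naming the missing permission
//     }
//     return true
//   }
//   const SecuredTask = secureAccess(Task).allow(
//     (source, session) => session?.isAdmin === true || 'admin role required')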
1697 |
1698 | class DiscoveredEvent extends AddedEvent {
1699 | type
1700 | }
1701 | DiscoveredEvent.prototype.type = 'discovered'
1702 |
1703 | class ReloadEntryEvent extends ReplacedEvent {
1704 | type
1705 | }
1706 | ReloadEntryEvent.prototype.type = 'reload-entry'
1707 |
1708 | export function getCurrentStatus() {
1709 | function estimateSize(size, previousState) {
1710 | return (previousState ? JSON.stringify(previousState).length : 1) + size
1711 | }
1712 | return Array.from(allStores.values()).map(store => ({
1713 | name: store.name,
1714 | indexed: store.initialIndexCount,
1715 | queueSize: store.queue && store.queue.size,
1716 | size: global.skipDBStats ? 0 : store.db.getStats().entryCount,
1717 | state: store.state,
1718 | concurrencyLevel: store.actionsInProgress ? store.actionsInProgress.size : 0,
1719 | //pendingRequests: Array.from(Index.pendingRequests),
1720 | }))
1721 | }
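// Usage sketch (illustrative): the snapshot is convenient for a health
// endpoint; set global.skipDBStats when db.getStats() is too costly per request:
//
//   for (const { name, state, queueSize } of getCurrentStatus())
//     console.log(name, state, 'queued:', queueSize || 0)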
1722 |
1723 | secureAccess.checkPermissions = () => true
1724 |
1725 | let clearOnStart
1726 | let sharedStructureDirectory
1727 | let sharedInstrumenting
1728 | let verboseLogging
1729 | let storesObject = global
1730 | export function configure(options) {
1731 | Persisted.dbFolder = options.dbFolder
1732 | Cached.dbFolder = options.cacheDbFolder || options.dbFolder
1733 | Persistable.dbFolder = options.cacheDbFolder || options.dbFolder
1734 | globalDoesInitialization = options.doesInitialization
1735 | verboseLogging = options.verboseLogging
1736 | clearOnStart = options.clearOnStart
1737 | if (options.storesObject) {
1738 | storesObject = options.storesObject
1739 | }
1740 | if (options.getCurrentContext)
1741 | getCurrentContext = options.getCurrentContext
1742 | if (options.sharedStructureDirectory)
1743 | sharedStructureDirectory = options.sharedStructureDirectory
1744 | if (options.sharedInstrumenting) {
1745 | sharedInstrumenting = true
1746 | console.warn('sharedInstrumenting is turned on!!!!!!!')
1747 | }
1748 | }
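// Usage sketch (illustrative; paths hypothetical): configure() is expected to
// run once at startup, before any store is opened:
//
//   configure({
//     dbFolder: '/var/data/myapp/db',
//     cacheDbFolder: '/var/data/myapp/cache',
//     doesInitialization: true,
//   })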
1749 | export function writeCommonStructures() {
1750 | let wrote = []
1751 | for (let [name, store] of allStores) {
1752 | if (store.writeCommonStructure())
1753 | wrote.push(name)
1754 | }
1755 | return wrote
1756 | }
1757 |
1758 | export class Invalidated {
1759 | constructor(version, processId?) {
1760 | this.version = version
1761 | this.value = processId
1762 | }
1763 | version: number
1764 | value: number
1765 | }
1766 | const delay = ms => new Promise(resolve => ms >= 1 ? setTimeout(resolve, ms) : setImmediate(resolve))
1767 | const primes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199]
1768 | let deepFreeze = process.env.NODE_ENV == 'development' ? (object, depth) => {
1769 | if (depth > 100)
1770 | throw new Error('Max object depth exceeded or circular reference in data')
1771 | if (object && typeof object == 'object') {
1772 | if (object.constructor == Object) {
1773 | for (let key in object) {
1774 | let value = object[key]
1775 | if (typeof value == 'object')
1776 | deepFreeze(value, (depth || 0) + 1)
1777 | }
1778 | } else if (object.constructor == Array) {
1779 | for (let i = 0, l = object.length; i < l; i++) {
1780 | let value = object[i]
1781 | if (typeof value == 'object')
1782 | deepFreeze(value, (depth || 0) + 1)
1783 | }
1784 | }
1785 | }
1786 | return object
1787 | } : (object) => object
1788 | import Index from './KeyIndex.js'
1789 |
--------------------------------------------------------------------------------