├── .gitignore ├── License ├── README.md ├── package.json ├── prettier.config.js ├── ts ├── client-sync-log │ ├── index.test.ts │ ├── index.ts │ └── types.ts ├── custom-auto-pk.test.ts ├── custom-auto-pk.ts ├── fast-sync │ ├── channels.ts │ ├── chunking.test.ts │ ├── chunking.ts │ ├── index.test.ts │ ├── index.ts │ ├── interruptable.test.ts │ ├── interruptable.ts │ ├── types.ts │ ├── utils.test.ts │ └── utils.ts ├── index.test.data.ts ├── index.test.ts ├── index.tests.ts ├── index.ts ├── integration │ ├── continuous-sync.ts │ ├── index.test.ts │ ├── index.tests.ts │ ├── initial-sync.ts │ └── settings.ts ├── logging-middleware │ ├── change-processing.ts │ ├── index.test.ts │ ├── index.ts │ └── operation-processing.ts ├── reconciliation │ ├── default.test.ts │ ├── default.ts │ ├── index.ts │ └── types.ts ├── shared-sync-log │ ├── index.tests.ts │ ├── index.ts │ ├── storex.test.ts │ ├── storex.ts │ └── types.ts ├── types.test.ts ├── types.ts ├── utils.test.ts ├── utils.ts └── utils │ ├── recurring-task.test.ts │ └── recurring-task.ts └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | coverage 2 | .nyc_output 3 | node_modules/ 4 | lib/ 5 | test.dot 6 | yarn-error.log 7 | yarn.lock 8 | *.bak 9 | *-debug.log 10 | -------------------------------------------------------------------------------- /License: -------------------------------------------------------------------------------- 1 | Copyright 2019 World Brain (@ worldbrain.io) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this 
permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | This package provides Sync functionality between multiple instances of any applications built on [Storex](https://github.com/WorldBrain/storex). This includes: 2 | 3 | - Multiple devices that run an application storing all of its data in IndexedDB 4 | - Multiple SQL databases asynchronously sync'ed, like product catalogues in different physical shops 5 | - In the future, offline-first single-user applications storing their data both on devices and in the cloud 6 | 7 | By itself, right now this package cannot provide offline-first for multi-user applications due to the need for access right management. That being said, its code is modular enough to be able to evolve to support such a scenario, opening up the possibility for permissioned decentralized applications. 8 | 9 | # How it works 10 | 11 | 1. When you set up Storex as the storage layer for your application (with IndexedDB as the backend for example) you set it up with the Custom PK and Sync Log [middleware](https://github.com/WorldBrain/storex/blob/master/docs/middleware.md). 
12 | - The Custom PK middleware generates a random ID for each new object instead of an auto-incremented ID to prevent ID conflicts between devices 13 | - The Sync Log middleware intercepts all modifications to the database and also writes them to the Client Sync Log 14 | 2. Once in a while you sync the Client Log with the Shared Log, sending and receiving changes 15 | 3. When new changes are received, the Reconciliation Algorithm is run to determine which changes have to be made to the client database, and executes them 16 | 17 | # Usage 18 | 19 | ``` 20 | import uuid from 'uuid/v1' 21 | import StorageManager, { StorageBackend, StorageRegistry } from "@worldbrain/storex" 22 | import { registerModuleMapCollections, StorageModule } from "@worldbrain/storex-pattern-modules" 23 | 24 | import { CustomAutoPkMiddleware } from '@worldbrain/storex-sync/lib/custom-auto-pk' 25 | import { SyncLoggingMiddleware } from '@worldbrain/storex-sync/lib/logging-middleware' 26 | import { ClientSyncLogStorage } from '@worldbrain/storex-sync/lib/client-sync-log' 27 | import { SharedSyncLog } from '@worldbrain/storex-sync/lib/shared-sync-log' 28 | import { SharedSyncLogStorage } from '@worldbrain/storex-sync/lib/shared-sync-log/storex' 29 | import { reconcileSyncLog } from '@worldbrain/storex-sync/lib/reconciliation' 30 | import { doSync } from '@worldbrain/storex-sync' 31 | 32 | export async function setupClientStorage() { 33 | const storageManager = ... 
// Set up your storage backend, manager, modules and collections here 34 | const clientSyncLog = new ClientSyncLogStorage({storageManager}) 35 | registerModuleMapCollections({ clientSyncLog }) 36 | await storageManager.finishInitialization() 37 | 38 | // Prevent auto-incremented ID clashes by generating UUIDs instead 39 | const pkMiddleware = new CustomAutoPkMiddleware({ pkGenerator: () => uuid() }) 40 | pkMiddleware.setup({ storageRegistry: storageManager.registry, collections: includeCollections }) 41 | 42 | const syncLoggingMiddleware = new SyncLoggingMiddleware({ storageManager, clientSyncLog: modules.clientSyncLog, includeCollections }) 43 | syncLoggingMiddleware._getNow = options.getNow 44 | 45 | storageManager.setMiddleware([ 46 | pkMiddleware, 47 | syncLoggingMiddleware 48 | ]) 49 | 50 | // From now on, all write operations will be logged to the Sync log 51 | await storageManager.collection('user').createObject({ displayName: 'Joe' }) 52 | } 53 | 54 | export async function sync(options : { storageManager : StorageManager, clientSyncLog : ClientSyncLog, sharedSyncLog : SharedSyncLog }) { 55 | await doSync({ 56 | storageManager, clientSyncLog, sharedSyncLog, 57 | 58 | // The default reconciliation algorithm, swappable 59 | reconciler: reconcileSyncLog, 60 | 61 | // For unit test, it may be useful to specify a custom timestamp here 62 | now: '$now', 63 | 64 | // This depends on the user management of your application 65 | userId, 66 | 67 | // This can be created with `sharedSyncLog.createDeviceId({ ... })` 68 | deviceId 69 | }) 70 | } 71 | ``` 72 | 73 | # The shared sync log 74 | 75 | As mentioned above, Sync works by sending and receiving changes from a shared log. Currently, we have working PoCs of doing this through GraphQL to a custom back-end, through Firestore, the local Filesystem and entirely within the same browser for testing purposes. 
However, all that's needed for a different kind of shared log is implementing the [SharedSyncLog interface](./ts/shared-sync-log/types.ts) and passing that into the `doSync()` function as shown above. Example implementations can be found [here](./ts/shared-sync-log/storex.ts) and [here](./ts/shared-sync-log/fs.ts). 76 | 77 | # Deeper understanding 78 | 79 | Since this is a complex piece of software, with the risks that it brings with it, it's highly recommended to dive into the code and get a thorough understanding of it before implementing any of this in your own application. The best point to start would be the [integration tests](./ts/index.test.ts) and drilling down from there. 80 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@worldbrain/storex-sync", 3 | "version": "0.1.1", 4 | "description": "Offline-first syncing between multiple databases using Storex", 5 | "main": "lib/index.js", 6 | "typings": "lib/index", 7 | "scripts": { 8 | "prepare": "tsc", 9 | "prepare:watch": "npm run prepare -- -w", 10 | "test": "mocha --require ts-node/register \"ts/**/*.test.ts\"", 11 | "test:watch": "mocha -r source-map-support/register -r ts-node/register \"ts/**/*.test.ts\" --watch --watch-extensions ts", 12 | "test:coverage": "rm -rf lib ; yarn prepare && nyc --reporter=html --reporter=text mocha 'lib/**/*.test.js'", 13 | "format": "prettier --config prettier.config.js --write '**/*.{ts,js,tsx,jsx,css,md}'" 14 | }, 15 | "husky": { 16 | "hooks": { 17 | "pre-commit": "pretty-quick --staged" 18 | } 19 | }, 20 | "keywords": [ 21 | "storage", 22 | "graph", 23 | "database", 24 | "typescript", 25 | "sync", 26 | "offline-first", 27 | "conflict-resolution" 28 | ], 29 | "author": "Vincent den Boer", 30 | "dependencies": { 31 | "eventemitter2": "^5.0.1", 32 | "immutability-helper": "^3.0.0", 33 | "json-date-parser": "^1.0.1", 34 | "lodash": 
"^4.17.10", 35 | "source-map-support": "0.5.16" 36 | }, 37 | "devDependencies": { 38 | "@types/chai": "^4.0.6", 39 | "@types/events": "^1.2.0", 40 | "@types/graphql": "^14.0.0", 41 | "@types/lodash": "^4.14.123", 42 | "@types/mocha": "^2.2.44", 43 | "@types/node": "^10.12.11", 44 | "@types/simple-peer": "^6.1.6", 45 | "@worldbrain/storex": "^0.4.1", 46 | "@worldbrain/storex-middleware-change-watcher": "^0.1.0", 47 | "@worldbrain/storex-backend-dexie": "^0.3.0", 48 | "@worldbrain/storex-backend-firestore": "^0.1.1", 49 | "@worldbrain/storex-backend-typeorm": "^0.2.0", 50 | "@worldbrain/storex-graphql-schema": "^0.1.0", 51 | "@worldbrain/storex-graphql-client": "^0.1.0", 52 | "@worldbrain/storex-pattern-modules": "^0.3.0", 53 | "apollo-server-express": "^2.4.8", 54 | "chai": "^4.1.2", 55 | "expect": "^24.9.0", 56 | "express": "^4.16.4", 57 | "fake-fs": "^0.5.0", 58 | "fake-indexeddb": "^2.0.4", 59 | "firebase": "^7.15.5", 60 | "firebase-tools": "^7.3.2", 61 | "graphql": "^14.0.0", 62 | "husky": "^3.0.5", 63 | "memory-fs": "^0.4.1", 64 | "mocha": "^4.0.1", 65 | "nyc": "^13.3.0", 66 | "prettier": "^1.19.1", 67 | "pretty-quick": "^2.0.1", 68 | "simple-peer": "^9.4.0", 69 | "simple-signalling": "^0.3.0", 70 | "sinon": "^4.1.2", 71 | "supertest": "^4.0.2", 72 | "tmp": "^0.1.0", 73 | "ts-node": "^7.0.1", 74 | "typed-emitter": "^0.1.0", 75 | "typedoc": "^0.15.0", 76 | "typescript": "^3.7.3", 77 | "wrtc": "^0.4.1" 78 | }, 79 | "peerDependencies": { 80 | "@worldbrain/storex": "^0.4.1", 81 | "@worldbrain/storex-backend-typeorm": "^0.2.0", 82 | "@worldbrain/storex-middleware-change-watcher": "^0.1.0", 83 | "@worldbrain/storex-graphql-schema": "^0.1.0", 84 | "@worldbrain/storex-pattern-modules": "^0.3.0", 85 | "simple-peer": "^9.4.0", 86 | "simple-signalling": "^0.3.0" 87 | }, 88 | "resolutions": { 89 | "**/graphql": "^14.0.0" 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /prettier.config.js: 
-------------------------------------------------------------------------------- 1 | module.exports = { 2 | semi: false, 3 | singleQuote: true, 4 | trailingComma: 'all', 5 | tabWidth: 4, 6 | } 7 | -------------------------------------------------------------------------------- /ts/client-sync-log/index.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import StorageManager, { StorageBackend } from '@worldbrain/storex' 3 | import { DexieStorageBackend } from '@worldbrain/storex-backend-dexie' 4 | import inMemory from '@worldbrain/storex-backend-dexie/lib/in-memory' 5 | import { registerModuleCollections } from '@worldbrain/storex-pattern-modules' 6 | import { ClientSyncLogStorage } from './' 7 | import { ClientSyncLogEntry } from './types' 8 | import { TypeORMStorageBackend } from '@worldbrain/storex-backend-typeorm' 9 | 10 | const TEST_LOG_ENTRIES: (ClientSyncLogEntry & { field: string | null })[] = [ 11 | { 12 | deviceId: 'device-one', 13 | createdOn: 2, 14 | sharedOn: 0, 15 | needsIntegration: 0, 16 | collection: 'user', 17 | pk: '1:1', 18 | operation: 'create', 19 | field: null, 20 | value: { displayName: 'Joe' }, 21 | }, 22 | { 23 | deviceId: 'device-one', 24 | createdOn: 3, 25 | sharedOn: 0, 26 | needsIntegration: 0, 27 | collection: 'user', 28 | pk: '2:1', 29 | operation: 'create', 30 | field: null, 31 | value: { displayName: 'Joe' }, 32 | }, 33 | { 34 | deviceId: 'device-one', 35 | createdOn: 4, 36 | sharedOn: 0, 37 | needsIntegration: 0, 38 | collection: 'user', 39 | pk: '1:2', 40 | operation: 'create', 41 | field: null, 42 | value: { displayName: 'Joe' }, 43 | }, 44 | ] 45 | 46 | interface TestDependencies { 47 | createBackend(): StorageBackend 48 | destroyBackend?(backend: StorageBackend): Promise 49 | } 50 | interface TestSetup { 51 | syncLogStorage: ClientSyncLogStorage 52 | } 53 | 54 | async function setupTest(backend: StorageBackend): Promise { 55 | const storageManager = new 
StorageManager({ backend: backend as any }) 56 | const syncLogStorage = new ClientSyncLogStorage({ storageManager }) 57 | registerModuleCollections(storageManager.registry, syncLogStorage) 58 | await storageManager.finishInitialization() 59 | await backend.migrate() 60 | return { syncLogStorage } 61 | } 62 | 63 | function makeTestFactory(dependencies: TestDependencies) { 64 | return (description: string, test: (setup: TestSetup) => Promise) => { 65 | it(description, async () => { 66 | const backend = dependencies.createBackend() 67 | try { 68 | const setup = await setupTest(backend) 69 | await test(setup) 70 | } finally { 71 | if (dependencies.destroyBackend) { 72 | await dependencies.destroyBackend(backend) 73 | } 74 | } 75 | }) 76 | } 77 | } 78 | 79 | function clientSyncLogTests(dependencies: TestDependencies) { 80 | const it = makeTestFactory(dependencies) 81 | 82 | function normalizeEntries( 83 | entries?: ClientSyncLogEntry[] | null, 84 | ): ClientSyncLogEntry[] | undefined | null { 85 | if (!entries) { 86 | return entries 87 | } 88 | 89 | return entries.map(entry => ({ 90 | ...entry, 91 | field: 'field' in entry && entry.field ? entry.field : null, 92 | needsIntegration: entry.needsIntegration ? 
1 : 0, 93 | })) 94 | } 95 | 96 | it('should store and retrieve entries correctly', async ({ 97 | syncLogStorage, 98 | }) => { 99 | await syncLogStorage.insertEntries([ 100 | TEST_LOG_ENTRIES[0], 101 | TEST_LOG_ENTRIES[2], 102 | ]) 103 | 104 | expect( 105 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(2)), 106 | ).toEqual([{ ...TEST_LOG_ENTRIES[0] }, { ...TEST_LOG_ENTRIES[2] }]) 107 | expect( 108 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(3)), 109 | ).toEqual([{ ...TEST_LOG_ENTRIES[2] }]) 110 | }) 111 | 112 | it('should store and retrieve entries received out-of-order correctly', async ({ 113 | syncLogStorage, 114 | }) => { 115 | await syncLogStorage.insertEntries([ 116 | TEST_LOG_ENTRIES[0], 117 | TEST_LOG_ENTRIES[2], 118 | ]) 119 | await syncLogStorage.insertEntries([TEST_LOG_ENTRIES[1]]) 120 | 121 | expect( 122 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(2)), 123 | ).toEqual([ 124 | { ...TEST_LOG_ENTRIES[0] }, 125 | { ...TEST_LOG_ENTRIES[1] }, 126 | { ...TEST_LOG_ENTRIES[2] }, 127 | ]) 128 | }) 129 | 130 | it('should mark entries as synced', async ({ syncLogStorage }) => { 131 | await syncLogStorage.insertEntries([ 132 | TEST_LOG_ENTRIES[0], 133 | TEST_LOG_ENTRIES[1], 134 | TEST_LOG_ENTRIES[2], 135 | ]) 136 | await syncLogStorage.updateSharedUntil({ until: 3, sharedOn: 6 }) 137 | 138 | expect( 139 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(2)), 140 | ).toEqual([ 141 | { ...TEST_LOG_ENTRIES[0], sharedOn: 6 }, 142 | { ...TEST_LOG_ENTRIES[1], sharedOn: 6 }, 143 | { ...TEST_LOG_ENTRIES[2] }, 144 | ]) 145 | }) 146 | 147 | it('should retrieve unshared entries in order of createdOn', async ({ 148 | syncLogStorage, 149 | }) => { 150 | await syncLogStorage.insertEntries([ 151 | TEST_LOG_ENTRIES[0], 152 | TEST_LOG_ENTRIES[2], 153 | ]) 154 | await syncLogStorage.insertEntries([TEST_LOG_ENTRIES[1]]) 155 | await syncLogStorage.updateSharedUntil({ until: 2, sharedOn: 6 }) 156 | 157 | expect( 158 | 
normalizeEntries(await syncLogStorage.getUnsharedEntries()), 159 | ).toEqual([{ ...TEST_LOG_ENTRIES[1] }, { ...TEST_LOG_ENTRIES[2] }]) 160 | }) 161 | 162 | it('should be able to insert entries received from shared log', async ({ 163 | syncLogStorage, 164 | }) => { 165 | const now = 56 166 | await syncLogStorage.insertReceivedEntries( 167 | TEST_LOG_ENTRIES.slice(0, 1).map(entry => ({ 168 | userId: 'test-user-1', 169 | deviceId: 'u1d1', 170 | createdOn: entry.createdOn, 171 | sharedOn: now - 10, 172 | data: { 173 | operation: entry.operation, 174 | collection: entry.collection, 175 | pk: entry.pk, 176 | field: null, 177 | value: entry['value'], 178 | }, 179 | })), 180 | { now }, 181 | ) 182 | expect( 183 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(1)), 184 | ).toEqual([ 185 | { 186 | createdOn: 2, 187 | deviceId: 'u1d1', 188 | sharedOn: now, 189 | needsIntegration: 1, 190 | collection: 'user', 191 | pk: '1:1', 192 | field: null, 193 | operation: 'create', 194 | value: { displayName: 'Joe' }, 195 | }, 196 | ]) 197 | }) 198 | 199 | it('should be able to mark entries as integrated', async ({ 200 | syncLogStorage, 201 | }) => { 202 | const entries: (ClientSyncLogEntry & { field: null })[] = [ 203 | { 204 | deviceId: 'device-one', 205 | createdOn: 2, 206 | sharedOn: 10, 207 | needsIntegration: 1, 208 | operation: 'create', 209 | collection: 'user', 210 | pk: '1:1', 211 | field: null, 212 | value: { displayName: 'Joe' }, 213 | }, 214 | { 215 | deviceId: 'device-one', 216 | createdOn: 3, 217 | sharedOn: 10, 218 | needsIntegration: 1, 219 | operation: 'create', 220 | collection: 'user', 221 | pk: '1:2', 222 | field: null, 223 | value: { displayName: 'Joe' }, 224 | }, 225 | ] 226 | 227 | await syncLogStorage.insertEntries(entries) 228 | await syncLogStorage.markAsIntegrated( 229 | await syncLogStorage.getEntriesCreatedAfter(1), 230 | ) 231 | expect( 232 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(1)), 233 | ).toEqual([ 234 | { 
...entries[0], needsIntegration: 0 }, 235 | { ...entries[1], needsIntegration: 0 }, 236 | ]) 237 | }) 238 | 239 | it('should be able to delete obsolete sync entries', async ({ 240 | syncLogStorage, 241 | }) => { 242 | const entries: typeof TEST_LOG_ENTRIES = [ 243 | { ...TEST_LOG_ENTRIES[0], sharedOn: 1, needsIntegration: 1 }, 244 | { ...TEST_LOG_ENTRIES[1], sharedOn: 1, needsIntegration: 0 }, 245 | { ...TEST_LOG_ENTRIES[2], sharedOn: 0, needsIntegration: 0 }, 246 | ] 247 | 248 | await syncLogStorage.insertEntries(entries) 249 | await syncLogStorage.deleteObsoleteEntries() 250 | expect( 251 | normalizeEntries(await syncLogStorage.getEntriesCreatedAfter(1)), 252 | ).toEqual([{ ...entries[0] }, { ...entries[2] }]) 253 | }) 254 | 255 | describe('getNextEntriesToIntgrate()', () => { 256 | it('should be able to get all relevant operations that happened to a single object', async ({ 257 | syncLogStorage, 258 | }) => { 259 | const entries: (ClientSyncLogEntry & { 260 | field: string | null 261 | value: any 262 | })[] = [ 263 | { 264 | deviceId: 'device-one', 265 | createdOn: 2, 266 | sharedOn: 10, 267 | needsIntegration: 1, 268 | operation: 'create', 269 | collection: 'user', 270 | pk: '1:1', 271 | field: null, 272 | value: { displayName: 'Joe' }, 273 | }, 274 | { 275 | deviceId: 'device-one', 276 | createdOn: 3, 277 | sharedOn: 10, 278 | needsIntegration: 1, 279 | operation: 'create', 280 | collection: 'user', 281 | pk: '1:2', 282 | field: null, 283 | value: { displayName: 'Joe' }, 284 | }, 285 | { 286 | deviceId: 'device-one', 287 | createdOn: 4, 288 | sharedOn: 10, 289 | needsIntegration: 1, 290 | operation: 'modify', 291 | collection: 'user', 292 | pk: '1:1', 293 | field: 'displayName', 294 | value: 'Jack', 295 | }, 296 | { 297 | deviceId: 'device-one', 298 | createdOn: 5, 299 | sharedOn: 10, 300 | needsIntegration: 1, 301 | operation: 'delete', 302 | field: null, 303 | collection: 'user', 304 | pk: '1:1', 305 | value: null, 306 | }, 307 | ] 308 | 309 | await 
syncLogStorage.insertEntries(entries) 310 | const firstEntries = normalizeEntries( 311 | (await syncLogStorage.getNextEntriesToIntgrate()) as ClientSyncLogEntry[], 312 | ) 313 | expect(firstEntries).toEqual([ 314 | { ...entries[0] }, 315 | { ...entries[2] }, 316 | { ...entries[3] }, 317 | ]) 318 | 319 | await syncLogStorage.markAsIntegrated(firstEntries!) 320 | const secondEntries = normalizeEntries( 321 | (await syncLogStorage.getNextEntriesToIntgrate()) as ClientSyncLogEntry[], 322 | ) 323 | expect(secondEntries).toEqual([{ ...entries[1] }]) 324 | 325 | await syncLogStorage.markAsIntegrated(secondEntries!) 326 | const thirdEntries = normalizeEntries( 327 | (await syncLogStorage.getNextEntriesToIntgrate()) as ClientSyncLogEntry[], 328 | ) 329 | expect(thirdEntries).toEqual(null) 330 | }) 331 | }) 332 | } 333 | 334 | describe('Client sync log with in-memory Dexie IndexedDB backend', () => { 335 | clientSyncLogTests({ 336 | createBackend: () => { 337 | return (new DexieStorageBackend({ 338 | idbImplementation: inMemory(), 339 | dbName: 'unittest', 340 | legacyMemexCompatibility: true, 341 | }) as any) as StorageBackend 342 | }, 343 | }) 344 | }) 345 | 346 | describe('Client sync log with in-memory TypeORM SQLite backend', () => { 347 | clientSyncLogTests({ 348 | createBackend: () => { 349 | return (new TypeORMStorageBackend({ 350 | connectionOptions: { type: 'sqlite', database: ':memory:' }, 351 | }) as any) as StorageBackend 352 | }, 353 | destroyBackend: async (backend: StorageBackend) => { 354 | const connection = ((backend as any) as TypeORMStorageBackend) 355 | .connection! 
356 | if (connection) { 357 | await connection.close() 358 | } 359 | }, 360 | }) 361 | }) 362 | -------------------------------------------------------------------------------- /ts/client-sync-log/index.ts: -------------------------------------------------------------------------------- 1 | const sortBy = require('lodash/sortBy') 2 | import { 3 | StorageModule, 4 | StorageModuleConfig, 5 | } from '@worldbrain/storex-pattern-modules' 6 | import { ClientSyncLogEntry } from './types' 7 | import { SharedSyncLogEntry } from '../shared-sync-log/types' 8 | import { UpdateObjectsBatchOperation } from '@worldbrain/storex' 9 | 10 | export class ClientSyncLogStorage extends StorageModule { 11 | getConfig(): StorageModuleConfig { 12 | return { 13 | collections: { 14 | clientSyncLogEntry: { 15 | version: new Date('2020-08-21'), 16 | fields: { 17 | createdOn: { type: 'timestamp' }, 18 | sharedOn: { type: 'timestamp' }, // when was this sent or received? 19 | deviceId: { type: 'json' }, // what device did this operation happen on? 20 | needsIntegration: { type: 'int' }, 21 | collection: { type: 'string' }, 22 | pk: { type: 'json' }, 23 | field: { type: 'string', optional: true }, 24 | operation: { type: 'string' }, 25 | value: { type: 'json', optional: true }, 26 | }, 27 | indices: [ 28 | { field: ['deviceId', 'createdOn'], pk: true }, 29 | { field: 'createdOn' }, 30 | { field: ['collection', 'pk'] }, 31 | { field: 'sharedOn' }, 32 | { field: 'needsIntegration' }, 33 | { field: ['createdOn', 'sharedOn'] }, 34 | ], 35 | history: [ 36 | { 37 | version: new Date('2019-02-05'), 38 | fields: { 39 | createdOn: { type: 'timestamp' }, 40 | sharedOn: { type: 'timestamp', optional: true }, // when was this sent or received? 41 | deviceId: { type: 'json' }, // what device did this operation happen on? 
42 | needsIntegration: { 43 | type: 'boolean', 44 | optional: true, 45 | }, 46 | collection: { type: 'string' }, 47 | pk: { type: 'json' }, 48 | field: { type: 'string', optional: true }, 49 | operation: { type: 'string' }, 50 | value: { type: 'json', optional: true }, 51 | }, 52 | indices: [ 53 | { field: ['deviceId', 'createdOn'], pk: true }, 54 | { field: 'createdOn' }, 55 | { field: ['collection', 'pk'] }, 56 | ], 57 | }, 58 | { 59 | version: new Date('2020-07-15'), 60 | fields: { 61 | createdOn: { type: 'timestamp' }, 62 | sharedOn: { type: 'timestamp' }, // when was this sent or received? 63 | deviceId: { type: 'json' }, // what device did this operation happen on? 64 | needsIntegration: { type: 'int' }, 65 | collection: { type: 'string' }, 66 | pk: { type: 'json' }, 67 | field: { type: 'string', optional: true }, 68 | operation: { type: 'string' }, 69 | value: { type: 'json', optional: true }, 70 | }, 71 | indices: [ 72 | { field: ['deviceId', 'createdOn'], pk: true }, 73 | { field: 'createdOn' }, 74 | { field: ['collection', 'pk'] }, 75 | { field: 'sharedOn' }, 76 | { field: 'needsIntegration' }, 77 | ], 78 | }, 79 | ], 80 | }, 81 | // clientSyncLogInfo: { 82 | // version: new Date('2019-02-05'), 83 | // fields: { 84 | // receivalStartedWhen: { type: 'datetime' }, 85 | // } 86 | // }, 87 | }, 88 | operations: { 89 | createEntry: { 90 | operation: 'createObject', 91 | collection: 'clientSyncLogEntry', 92 | }, 93 | findEntriesCreatedAfter: { 94 | operation: 'findObjects', 95 | collection: 'clientSyncLogEntry', 96 | args: [{ createdOn: { $gte: '$timestamp:timestamp' } }], 97 | }, 98 | updateSharedUntil: { 99 | operation: 'updateObjects', 100 | collection: 'clientSyncLogEntry', 101 | args: [ 102 | { 103 | createdOn: { $lte: '$until:timestamp' }, 104 | sharedOn: 0, 105 | }, 106 | { sharedOn: '$sharedOn:timestamp' }, 107 | ], 108 | }, 109 | findUnsharedEntries: { 110 | operation: 'findObjects', 111 | collection: 'clientSyncLogEntry', 112 | args: { 113 | sharedOn: 
{ $eq: 0 }, 114 | }, 115 | }, 116 | findUnsharedEntryBatch: { 117 | operation: 'findObjects', 118 | collection: 'clientSyncLogEntry', 119 | args: [ 120 | { sharedOn: { $eq: 0 } }, 121 | { limit: '$limit:number' }, 122 | ], 123 | }, 124 | markAsIntegrated: { 125 | operation: 'executeBatch', 126 | args: ['$batch'], 127 | }, 128 | findFirstUnintegratedEntry: { 129 | operation: 'findObjects', 130 | collection: 'clientSyncLogEntry', 131 | args: [ 132 | { needsIntegration: 1 }, 133 | { order: [['createdOn', 'asc']], limit: 1 }, 134 | ], 135 | }, 136 | findEntriesByObjectPk: { 137 | operation: 'findObjects', 138 | collection: 'clientSyncLogEntry', 139 | args: [ 140 | { collection: '$collection:string', pk: '$pk' }, 141 | { order: [['createdOn', 'asc']] }, 142 | ], 143 | }, 144 | deleteObsoleteEntries: { 145 | operation: 'deleteObjects', 146 | collection: 'clientSyncLogEntry', 147 | args: { 148 | needsIntegration: 0, 149 | sharedOn: { $gt: 0 }, 150 | }, 151 | }, 152 | }, 153 | } 154 | } 155 | 156 | async insertEntries(entries: ClientSyncLogEntry[]) { 157 | for (const entry of entries) { 158 | await this.operation('createEntry', entry) 159 | } 160 | } 161 | 162 | async insertReceivedEntries( 163 | sharedEntries: Array>, 164 | options: { now: number | '$now' }, 165 | ) { 166 | await this.insertEntries( 167 | sharedEntries.map( 168 | (sharedEntry): ClientSyncLogEntry => { 169 | const data = sharedEntry.data 170 | const common = { 171 | createdOn: sharedEntry.createdOn, 172 | sharedOn: 173 | typeof options.now === 'string' 174 | ? 
Date.now() 175 | : options.now, 176 | deviceId: sharedEntry.deviceId, 177 | needsIntegration: 1 as 1, 178 | collection: data.collection, 179 | pk: data.pk, 180 | } 181 | if (data.operation === 'create') { 182 | return { 183 | ...common, 184 | operation: 'create', 185 | value: data.value, 186 | } 187 | } else if (data.operation === 'modify') { 188 | return { 189 | ...common, 190 | operation: 'modify', 191 | field: data.field!, 192 | value: data.value, 193 | } 194 | } else if (data.operation === 'delete') { 195 | return { 196 | ...common, 197 | operation: 'delete', 198 | } 199 | } else { 200 | throw new Error( 201 | `Unknown operation received: ${data.operation}`, 202 | ) 203 | } 204 | }, 205 | ), 206 | ) 207 | } 208 | 209 | async getEntriesCreatedAfter( 210 | timestamp: number, 211 | ): Promise { 212 | return sortBy( 213 | await this.operation('findEntriesCreatedAfter', { timestamp }), 214 | 'createdOn', 215 | ) 216 | } 217 | 218 | async updateSharedUntil({ 219 | until, 220 | sharedOn, 221 | }: { 222 | until: number | '$now' 223 | sharedOn: number | '$now' 224 | }) { 225 | await this.operation('updateSharedUntil', { until, sharedOn }) 226 | } 227 | 228 | async getUnsharedEntries(options?: { 229 | batchSize?: number 230 | }): Promise { 231 | let entries: ClientSyncLogEntry[] 232 | if (options && options.batchSize) { 233 | entries = await this.operation('findUnsharedEntryBatch', { 234 | limit: options.batchSize, 235 | }) 236 | } else { 237 | entries = await this.operation('findUnsharedEntries', {}) 238 | } 239 | return sortBy(entries, 'createdOn') 240 | } 241 | 242 | async markAsIntegrated(entries: ClientSyncLogEntry[]) { 243 | await this.operation('markAsIntegrated', { 244 | batch: this.getMarkAsIntegratedBatchSteps(entries), 245 | }) 246 | } 247 | 248 | getMarkAsIntegratedBatchSteps(entries: ClientSyncLogEntry[]) { 249 | return entries.map( 250 | (entry): UpdateObjectsBatchOperation => ({ 251 | operation: 'updateObjects', 252 | collection: 'clientSyncLogEntry', 253 
| where: { 254 | deviceId: entry.deviceId, 255 | createdOn: entry.createdOn, 256 | }, 257 | updates: { 258 | needsIntegration: 0, 259 | }, 260 | }), 261 | ) 262 | } 263 | 264 | async getNextEntriesToIntgrate(): Promise { 265 | const firstEntryList = await this.operation( 266 | 'findFirstUnintegratedEntry', 267 | {}, 268 | ) 269 | if (!firstEntryList.length) { 270 | return null 271 | } 272 | 273 | const firstEntry = firstEntryList[0] 274 | const entries = await this.operation('findEntriesByObjectPk', { 275 | collection: firstEntry.collection, 276 | pk: firstEntry.pk, 277 | }) 278 | return entries 279 | } 280 | 281 | async deleteObsoleteEntries() { 282 | await this.operation('deleteObsoleteEntries', {}) 283 | } 284 | } 285 | -------------------------------------------------------------------------------- /ts/client-sync-log/types.ts: -------------------------------------------------------------------------------- 1 | export interface ClientSyncLogEntryMetadata { 2 | createdOn: number | '$now' 3 | sharedOn: number 4 | deviceId: number | string 5 | needsIntegration: 0 | 1 6 | } 7 | 8 | export interface ClientSyncLogEntryBase extends ClientSyncLogEntryMetadata { 9 | id?: any 10 | collection: string 11 | pk: any 12 | } 13 | 14 | export interface ClientSyncLogCreationEntry extends ClientSyncLogEntryBase { 15 | operation: 'create' 16 | pk: any 17 | value: any 18 | } 19 | 20 | export type ClientSyncLogModificationEntry = ClientSyncLogEntryBase & { 21 | operation: 'modify' 22 | pk: string | number 23 | } & ({ field: string; value: any } | { value: { [key: string]: any } }) 24 | 25 | export interface ClientSyncLogDeletionEntry extends ClientSyncLogEntryBase { 26 | operation: 'delete' 27 | } 28 | 29 | export type ClientSyncLogEntry = 30 | | ClientSyncLogCreationEntry 31 | | ClientSyncLogModificationEntry 32 | | ClientSyncLogDeletionEntry 33 | -------------------------------------------------------------------------------- /ts/custom-auto-pk.test.ts: 
-------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import StorageManager from '@worldbrain/storex' 3 | import { DexieStorageBackend } from '@worldbrain/storex-backend-dexie' 4 | import inMemory from '@worldbrain/storex-backend-dexie/lib/in-memory' 5 | import { CustomAutoPkMiddleware } from './custom-auto-pk' 6 | 7 | describe('CustomAutoPkMiddleware', () => { 8 | async function setupTest(options: { pkGenerator: () => string }) { 9 | const backend = new DexieStorageBackend({ 10 | idbImplementation: inMemory(), 11 | dbName: 'unittest', 12 | }) 13 | const storageManager = new StorageManager({ backend: backend as any }) 14 | storageManager.registry.registerCollections({ 15 | user: { 16 | version: new Date('2019-01-01'), 17 | fields: { 18 | displayName: { type: 'string' }, 19 | }, 20 | }, 21 | email: { 22 | version: new Date('2019-01-01'), 23 | fields: { 24 | address: { type: 'string' }, 25 | }, 26 | relationships: [{ childOf: 'user' }], 27 | }, 28 | }) 29 | const customPkMiddleware = new CustomAutoPkMiddleware(options) 30 | customPkMiddleware.setup({ 31 | storageRegistry: storageManager.registry, 32 | collections: ['user', 'email'], 33 | }) 34 | storageManager.setMiddleware([customPkMiddleware]) 35 | await storageManager.finishInitialization() 36 | return { storageManager } 37 | } 38 | 39 | it('should be able to set custom auto PKs on simple createObject operations', async () => { 40 | const { storageManager } = await setupTest({ 41 | pkGenerator: () => 'some-pk', 42 | }) 43 | const { object } = await storageManager 44 | .collection('user') 45 | .createObject({ displayName: 'Joe' }) 46 | expect(object.id).toEqual('some-pk') 47 | expect( 48 | await storageManager 49 | .collection('user') 50 | .findOneObject({ id: object.id }), 51 | ).toEqual(object) 52 | }) 53 | 54 | it('should be able to set custom auto PKs on complex createObject operations', async () => { 55 | let counter = 0 56 | const { storageManager 
} = await setupTest({ 57 | pkGenerator: () => `some-pk-${++counter}`, 58 | }) 59 | const { object: user } = await storageManager 60 | .collection('user') 61 | .createObject({ 62 | displayName: 'Joe', 63 | emails: [{ address: 'foo@bla.com' }], 64 | }) 65 | expect(user).toEqual({ 66 | id: 'some-pk-1', 67 | displayName: 'Joe', 68 | emails: [(expect as any).objectContaining({ id: 'some-pk-2' })], 69 | }) 70 | const email = user.emails[0] 71 | expect( 72 | await storageManager 73 | .collection('user') 74 | .findOneObject({ id: user.id }), 75 | ).toEqual({ id: user.id, displayName: 'Joe' }) 76 | expect( 77 | await storageManager 78 | .collection('email') 79 | .findOneObject({ id: email.id }), 80 | ).toEqual({ id: email.id, user: user.id, address: 'foo@bla.com' }) 81 | }) 82 | 83 | it('should not override manually provided PKs', async () => { 84 | let counter = 0 85 | const { storageManager } = await setupTest({ 86 | pkGenerator: () => `some-pk-${++counter}`, 87 | }) 88 | const { object: user } = await storageManager 89 | .collection('user') 90 | .createObject({ 91 | id: 'very manually set', 92 | displayName: 'Joe', 93 | emails: [{ address: 'foo@bla.com' }], 94 | }) 95 | expect(user).toEqual({ 96 | id: 'very manually set', 97 | displayName: 'Joe', 98 | emails: [(expect as any).objectContaining({ id: 'some-pk-1' })], 99 | }) 100 | expect( 101 | await storageManager 102 | .collection('user') 103 | .findOneObject({ id: user.id }), 104 | ).toEqual({ id: user.id, displayName: 'Joe' }) 105 | }) 106 | 107 | it('should be able to set custom auto PKs on batches') 108 | 109 | it('should be able to migrate from normal to custom auto PKs') 110 | }) 111 | -------------------------------------------------------------------------------- /ts/custom-auto-pk.ts: -------------------------------------------------------------------------------- 1 | import update from 'immutability-helper' 2 | import { StorageMiddleware } from '@worldbrain/storex/lib/types/middleware' 3 | import { 
StorageRegistry } from '@worldbrain/storex' 4 | import { 5 | dissectCreateObjectOperation, 6 | convertCreateObjectDissectionToBatch, 7 | reconstructCreatedObjectFromBatchResult, 8 | } from '@worldbrain/storex/lib/utils' 9 | 10 | export type CustomPkGenerator = () => string 11 | 12 | export class CustomAutoPkMiddleware implements StorageMiddleware { 13 | private _collections?: { [name: string]: { pkIndex: string } } 14 | private _pkGenerator: CustomPkGenerator 15 | private _storageRegistry?: StorageRegistry 16 | 17 | constructor({ pkGenerator }: { pkGenerator: CustomPkGenerator }) { 18 | this._pkGenerator = pkGenerator 19 | } 20 | 21 | setup({ 22 | storageRegistry, 23 | collections, 24 | }: { 25 | storageRegistry: StorageRegistry 26 | collections: string[] 27 | }) { 28 | this._storageRegistry = storageRegistry 29 | this._collections = {} 30 | for (const collection of collections) { 31 | const collectionDefinition = storageRegistry.collections[collection] 32 | if (!collectionDefinition) { 33 | throw new Error( 34 | `Tried to set up custom auto pk for non-existing collection '${collection}'`, 35 | ) 36 | } 37 | 38 | const pkIndex = collectionDefinition.pkIndex as string 39 | collectionDefinition.fields = update(collectionDefinition.fields, { 40 | [pkIndex]: { type: { $set: 'string' } }, 41 | }) 42 | this._collections[collection] = { pkIndex } 43 | } 44 | } 45 | 46 | async process({ 47 | next, 48 | operation, 49 | }: { 50 | next: { process: ({ operation }: { operation: any }) => any } 51 | operation: any[] 52 | }) { 53 | if (!this._collections || !this._storageRegistry) { 54 | throw new Error( 55 | `You tried to do a storage operation without calling CustomAutoPkMiddleware.setup() first`, 56 | ) 57 | } 58 | 59 | const mainCollection = operation[1] 60 | if ( 61 | operation[0] !== 'createObject' || 62 | !this._collections[mainCollection] 63 | ) { 64 | return next.process({ operation }) 65 | } 66 | 67 | const object = operation[2] 68 | const operationDissection = 
dissectCreateObjectOperation( 69 | { 70 | collection: mainCollection, 71 | args: object, 72 | }, 73 | this._storageRegistry, 74 | ) 75 | const batch = convertCreateObjectDissectionToBatch(operationDissection) 76 | for (const batchElement of batch) { 77 | const collectionInfo = this._collections[batchElement.collection] 78 | if (!collectionInfo) { 79 | continue 80 | } 81 | 82 | if (!batchElement.args[collectionInfo.pkIndex]) { 83 | batchElement.args[collectionInfo.pkIndex] = this._pkGenerator() 84 | } 85 | } 86 | 87 | const batchResult = await next.process({ 88 | operation: ['executeBatch', batch], 89 | }) 90 | reconstructCreatedObjectFromBatchResult({ 91 | object, 92 | collection: mainCollection, 93 | storageRegistry: this._storageRegistry, 94 | operationDissection, 95 | batchResultInfo: batchResult.info, 96 | }) 97 | return { object } 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /ts/fast-sync/channels.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from 'events' 2 | import TypedEmitter from 'typed-emitter' 3 | import { jsonDateParser } from 'json-date-parser' 4 | import * as SimplePeer from 'simple-peer' 5 | import { 6 | FastSyncBatch, 7 | FastSyncInfo, 8 | FastSyncPackage, 9 | FastSyncChannelEvents, 10 | FastSyncChannel, 11 | } from './types' 12 | import { ResolvablePromise, resolvablePromise, splitWithTail } from './utils' 13 | import { 14 | calculateStringChunkCount, 15 | getStringChunk, 16 | receiveInChucks, 17 | } from './chunking' 18 | 19 | export class ChannelDestroyedError extends Error { 20 | name = 'ChannelDestroyedError' 21 | } 22 | 23 | abstract class FastSyncChannelBase implements FastSyncChannel { 24 | events = new EventEmitter() as TypedEmitter 25 | 26 | packageTimeoutInMilliseconds = 10 * 1000 27 | channelTimeoutInMilliseconds = 180 * 1000 28 | preSend?: (syncPackage: FastSyncPackage) => Promise 29 | postReceive?: (syncPackage: 
FastSyncPackage) => Promise 30 | channelTimeout?: NodeJS.Timer 31 | 32 | abstract _sendPackage(syncPackage: FastSyncPackage): Promise 33 | abstract _receivePackage(): Promise 34 | 35 | abstract destroy(): Promise 36 | 37 | async sendUserPackage(jsonSerializable: any): Promise { 38 | await this._sendPackageSafely({ 39 | type: 'user-package', 40 | package: jsonSerializable, 41 | }) 42 | } 43 | 44 | async receiveUserPackage(options?: { 45 | expectedType?: keyof UserPackageType 46 | }): Promise { 47 | const userPackage = await this._receivePackageSafely() 48 | if (userPackage.type === 'user-package') { 49 | const innerPackage = userPackage.package 50 | if ( 51 | options && 52 | options.expectedType && 53 | innerPackage.type !== options.expectedType 54 | ) { 55 | throw new Error( 56 | `Expected user package with type ${String( 57 | options.expectedType, 58 | )} ` + 59 | `in fast sync WebRTC channel, but got ` + 60 | `user package with type ${innerPackage.type}`, 61 | ) 62 | } 63 | return innerPackage 64 | } else { 65 | throw new Error( 66 | `Expected user package in fast sync WebRTC channel, but got package type ${userPackage.type}`, 67 | ) 68 | } 69 | } 70 | 71 | async *streamObjectBatches(): AsyncIterableIterator<{ 72 | collection: string 73 | objects: any[] 74 | }> { 75 | while (true) { 76 | const syncPackage: FastSyncPackage = await this._receivePackageSafely() 77 | if (syncPackage.type === 'finish') { 78 | break 79 | } 80 | if (syncPackage.type === 'state-change') { 81 | this.events.emit( 82 | syncPackage.state === 'running' ? 
'resumed' : 'paused', 83 | ) 84 | continue 85 | } 86 | 87 | if (syncPackage.type === 'batch') { 88 | yield syncPackage.batch 89 | } else { 90 | throw new Error( 91 | `Expected batch package in fast sync WebRTC channel, but got package type ${syncPackage.type}`, 92 | ) 93 | } 94 | } 95 | } 96 | 97 | async receiveSyncInfo() { 98 | const syncPackage: FastSyncPackage = await this._receivePackageSafely() 99 | if (syncPackage.type !== 'sync-info') { 100 | throw new Error( 101 | `Received package with unexpected type while waiting for initial Sync info: ${syncPackage.type}`, 102 | ) 103 | } 104 | return syncPackage.info 105 | } 106 | 107 | async sendSyncInfo(info: FastSyncInfo) { 108 | await this._sendPackageSafely({ type: 'sync-info', info }) 109 | } 110 | 111 | async sendObjectBatch(batch: FastSyncBatch) { 112 | await this._sendPackageSafely({ type: 'batch', batch }) 113 | } 114 | 115 | async sendStateChange(state: 'paused' | 'running'): Promise { 116 | await this._sendPackageSafely({ type: 'state-change', state }) 117 | } 118 | 119 | async finish() { 120 | this._clearChannelTimeout() 121 | await this._sendPackageSafely({ type: 'finish' }) 122 | } 123 | 124 | async _receivePackageSafely() { 125 | const syncPackage = await this._withPackageStallingDetection(() => 126 | this._receivePackage(), 127 | ) 128 | this._refreshChannelTimeout() 129 | 130 | if (this.postReceive) { 131 | await this.postReceive(syncPackage) 132 | } 133 | 134 | return syncPackage 135 | } 136 | 137 | async _sendPackageSafely(syncPackage: FastSyncPackage) { 138 | if (this.preSend) { 139 | await this.preSend(syncPackage) 140 | } 141 | await this._withPackageStallingDetection(() => 142 | this._sendPackage(syncPackage), 143 | ) 144 | this._refreshChannelTimeout() 145 | } 146 | 147 | async _withPackageStallingDetection(f: () => Promise) { 148 | const stalledTimeout = setTimeout(() => { 149 | this.events.emit('packageStalled') 150 | }, this.packageTimeoutInMilliseconds) 151 | const toReturn = await f() 152 
| clearTimeout(stalledTimeout) 153 | return toReturn 154 | } 155 | 156 | _refreshChannelTimeout() { 157 | this._clearChannelTimeout() 158 | this.channelTimeout = setTimeout(() => { 159 | this.events.emit('channelTimeout') 160 | }, this.channelTimeoutInMilliseconds) as any 161 | } 162 | 163 | _clearChannelTimeout() { 164 | if (this.channelTimeout) { 165 | clearTimeout(this.channelTimeout) 166 | } 167 | } 168 | } 169 | 170 | export class WebRTCFastSyncChannel extends FastSyncChannelBase< 171 | UserPackageType 172 | > { 173 | dataReceived = resolvablePromise() 174 | dataHandler: (data: any) => void 175 | 176 | private destroyed = false 177 | 178 | constructor(private options: { peer: SimplePeer.Instance }) { 179 | super() 180 | 181 | this.dataHandler = (data: any) => { 182 | // This promise gets replaced after each received package 183 | // NOTE: This assumes package are sent and confirmed one by one 184 | this.dataReceived.resolve(data.toString()) 185 | } 186 | this.options.peer.on('data', this.dataHandler) 187 | } 188 | 189 | async destroy() { 190 | if (this.destroyed) { 191 | return 192 | } 193 | 194 | this.options.peer.removeListener('data', this.dataHandler) 195 | await this.options.peer.destroy() 196 | this.destroyed = true 197 | } 198 | 199 | async _sendPackage( 200 | syncPackage: FastSyncPackage, 201 | options?: { noChunking?: boolean }, 202 | ) { 203 | if (this.destroyed) { 204 | throw new ChannelDestroyedError( 205 | 'Cannot send package through destroyed channel', 206 | ) 207 | } 208 | 209 | const sendAndConfirm = async (data: string) => { 210 | this.options.peer.send(data) 211 | 212 | const response = await this._receivePackage({ 213 | noChunking: true, 214 | noConfirm: true, 215 | }) 216 | 217 | if (response.type !== 'confirm') { 218 | console.error(`Invalid confirmation received:`, response) 219 | throw new Error(`Invalid confirmation received`) 220 | } 221 | } 222 | 223 | const serialized = JSON.stringify(syncPackage) 224 | if (options?.noChunking) { 
225 | return sendAndConfirm(serialized) 226 | } 227 | 228 | const chunkSize = 10000 229 | const chunkCount = calculateStringChunkCount(serialized, { chunkSize }) 230 | for (let chunkIndex = 0; chunkIndex < chunkCount; ++chunkIndex) { 231 | const chunkContent = getStringChunk(serialized, chunkIndex, { 232 | chunkSize, 233 | }) 234 | await sendAndConfirm( 235 | `chunk:${chunkIndex}:${chunkCount}:${chunkContent}`, 236 | ) 237 | } 238 | } 239 | 240 | async _receivePackage(options?: { 241 | noChunking?: boolean 242 | noConfirm?: boolean 243 | }): Promise { 244 | if (this.destroyed) { 245 | throw new ChannelDestroyedError( 246 | 'Cannot receive package from destroyed channel', 247 | ) 248 | } 249 | 250 | const receive = async () => { 251 | const data = await this.dataReceived.promise 252 | this.dataReceived = resolvablePromise() 253 | return data 254 | } 255 | const maybeConfirm = async () => { 256 | if (!options?.noConfirm) { 257 | const confirmationPackage: FastSyncPackage = { 258 | type: 'confirm', 259 | } 260 | this.options.peer.send(JSON.stringify(confirmationPackage)) 261 | } 262 | } 263 | const receiveAndMaybeConfirm = async () => { 264 | const data = await receive() 265 | await maybeConfirm() 266 | return data 267 | } 268 | 269 | const serialized = options?.noChunking 270 | ? 
await receiveAndMaybeConfirm() 271 | : await receiveInChucks(receiveAndMaybeConfirm) 272 | 273 | const syncPackage: FastSyncPackage = JSON.parse( 274 | serialized, 275 | jsonDateParser, 276 | ) 277 | return syncPackage 278 | } 279 | } 280 | 281 | interface MemoryFastSyncChannelPeer { 282 | sendPackage(syncPackage: FastSyncPackage): Promise 283 | receivePackage(): Promise 284 | } 285 | interface MemoryFastSyncChannelDependencies { 286 | sender: MemoryFastSyncChannelPeer 287 | receiver: MemoryFastSyncChannelPeer 288 | } 289 | export class MemoryFastSyncChannel< 290 | UserPackageType = any 291 | > extends FastSyncChannelBase { 292 | constructor(private dependencies: MemoryFastSyncChannelDependencies) { 293 | super() 294 | } 295 | 296 | async destroy() {} 297 | 298 | async _sendPackage(syncPackage: FastSyncPackage) { 299 | return this.dependencies.receiver.sendPackage(syncPackage) 300 | } 301 | 302 | async _receivePackage() { 303 | return this.dependencies.sender.receivePackage() 304 | } 305 | } 306 | 307 | function _createMemoryChannelPeer() { 308 | let sendPackagePromise = resolvablePromise() 309 | let receivePackagePromise = resolvablePromise() 310 | 311 | return { 312 | async sendPackage(syncPackage: FastSyncPackage) { 313 | // console.log('sendPackage', syncPackage) 314 | sendPackagePromise.resolve(syncPackage) 315 | await receivePackagePromise.promise 316 | }, 317 | async receivePackage(): Promise { 318 | const syncPackage = await sendPackagePromise.promise 319 | sendPackagePromise = resolvablePromise() 320 | receivePackagePromise.resolve(null) 321 | receivePackagePromise = resolvablePromise() 322 | return syncPackage 323 | }, 324 | } 325 | } 326 | 327 | export function createMemoryChannel() { 328 | const peers: MemoryFastSyncChannelDependencies = { 329 | sender: _createMemoryChannelPeer(), 330 | receiver: _createMemoryChannelPeer(), 331 | } 332 | const senderChannel = new MemoryFastSyncChannel(peers) 333 | const receiverChannel = new MemoryFastSyncChannel({ 334 
| sender: peers.receiver, 335 | receiver: peers.sender, 336 | }) 337 | 338 | return { 339 | senderChannel, 340 | receiverChannel, 341 | } 342 | } 343 | -------------------------------------------------------------------------------- /ts/fast-sync/chunking.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import { 3 | calculateStringChunkCount, 4 | getStringChunk, 5 | receiveInChucks, 6 | } from './chunking' 7 | 8 | describe('Fast sync channel package chunking', () => { 9 | it('should calculate the right chunk count for strings exactly fitting the chunk size', () => { 10 | expect(calculateStringChunkCount('abcdefgh', { chunkSize: 4 })).toEqual( 11 | 2, 12 | ) 13 | }) 14 | 15 | it('should calculate the right chunk count for strings not exactly fitting the chunk size', () => { 16 | expect(calculateStringChunkCount('abcdefgh', { chunkSize: 4 })).toEqual( 17 | 2, 18 | ) 19 | }) 20 | 21 | it('should correctly get chunks for strings exactly fitting the chunk size', () => { 22 | expect(getStringChunk('abcdefgh', 0, { chunkSize: 4 })).toEqual('abcd') 23 | expect(getStringChunk('abcdefgh', 1, { chunkSize: 4 })).toEqual('efgh') 24 | }) 25 | 26 | it('should correctly get chunks for strings not exactly fitting the chunk size', () => { 27 | expect(getStringChunk('abcdef', 0, { chunkSize: 4 })).toEqual('abcd') 28 | expect(getStringChunk('abcdef', 1, { chunkSize: 4 })).toEqual('ef') 29 | }) 30 | 31 | it('should correctly receive data in chunks', async () => { 32 | const chunks = [`chunk:0:3:ab`, `chunk:1:3:cde`, `chunk:2:3:fghij`] 33 | 34 | expect(await receiveInChucks(async () => chunks.shift()!)).toEqual( 35 | 'abcdefghij', 36 | ) 37 | }) 38 | }) 39 | -------------------------------------------------------------------------------- /ts/fast-sync/chunking.ts: -------------------------------------------------------------------------------- 1 | import { splitWithTail } from './utils' 2 | 3 | export function 
calculateStringChunkCount( 4 | s: string, 5 | options: { chunkSize: number }, 6 | ): number { 7 | return Math.ceil(s.length / options.chunkSize) 8 | } 9 | 10 | export function getStringChunk( 11 | s: string, 12 | chunkIndex: number, 13 | options: { chunkSize: number }, 14 | ): string { 15 | return s.substr(chunkIndex * options.chunkSize, options.chunkSize) 16 | } 17 | 18 | export async function receiveInChucks( 19 | receiveChunk: () => Promise, 20 | ): Promise { 21 | let data: string[] = [] 22 | let expectedChunkCount: null | number = null 23 | 24 | while (true) { 25 | const chunk = await receiveChunk() 26 | 27 | const [ 28 | chunkConfirmation, 29 | chunkIndexString, 30 | chunkCountString, 31 | chunkContent, 32 | ] = splitWithTail(chunk, ':', 4) 33 | if (chunkConfirmation !== 'chunk') { 34 | throw new Error(`Invalid WebRTC package received: ${chunk}`) 35 | } 36 | 37 | const chunkIndex = parseInt(chunkIndexString) 38 | if (chunkIndex === NaN) { 39 | throw new Error( 40 | `Received WebRTC package with invalid chunk index: ${chunkIndexString}`, 41 | ) 42 | } 43 | 44 | if (chunkIndex !== data.length) { 45 | throw new Error( 46 | `Received WebRTC package chunk index ${chunkIndexString}, ` + 47 | `but was expecting chunk index ${data.length}`, 48 | ) 49 | } 50 | 51 | const chunkCount = parseInt(chunkCountString) 52 | if (chunkCount === NaN) { 53 | throw new Error( 54 | `Received WebRTC package with invalid chunk size: ${chunkIndexString}`, 55 | ) 56 | } 57 | 58 | if (expectedChunkCount) { 59 | if (chunkCount !== expectedChunkCount) { 60 | throw new Error( 61 | `Received WebRTC packge with chunk count ${chunkCount}, ` + 62 | `but we received a previous package with chunk count ${expectedChunkCount}`, 63 | ) 64 | } 65 | } else { 66 | expectedChunkCount = chunkCount 67 | } 68 | 69 | data.push(chunkContent) 70 | if (data.length === expectedChunkCount) { 71 | break 72 | } 73 | } 74 | 75 | return data.join('') 76 | } 77 | 
-------------------------------------------------------------------------------- /ts/fast-sync/index.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from 'events' 2 | import TypedEmitter from 'typed-emitter' 3 | import StorageManager from '@worldbrain/storex' 4 | import { 5 | FastSyncInfo, 6 | FastSyncProgress, 7 | FastSyncChannel, 8 | FastSyncRole, 9 | FastSyncOrder, 10 | flippedRole, 11 | } from './types' 12 | import Interruptable from './interruptable' 13 | import { getFastSyncInfo } from './utils' 14 | import { ExecuteReconciliationOperation } from '..' 15 | 16 | export interface FastSyncOptions { 17 | storageManager: StorageManager 18 | channel: FastSyncChannel 19 | collections: string[] 20 | batchSize?: number 21 | preSendProcessor?: FastSyncPreSendProcessor 22 | postReceiveProcessor?: FastSyncPreSendProcessor 23 | executeReconciliationOperation: ExecuteReconciliationOperation 24 | } 25 | 26 | export type FastSyncPreSendProcessor = ( 27 | params: FastSyncPreSendProcessorParams, 28 | ) => Promise<{ object: any | null }> 29 | export interface FastSyncPreSendProcessorParams { 30 | collection: string 31 | object: any 32 | } 33 | 34 | export interface FastSyncEvents { 35 | prepared: (event: { syncInfo: FastSyncInfo; role: FastSyncRole }) => void 36 | progress: (event: { 37 | progress: FastSyncProgress 38 | role: FastSyncRole 39 | }) => void 40 | paused: () => void 41 | resumed: () => void 42 | channelTimeout: () => void 43 | packageStalled: () => void 44 | roleSwitch: (event: { before: FastSyncRole; after: FastSyncRole }) => void 45 | error: (event: { error: string }) => void 46 | } 47 | 48 | export class FastSync { 49 | public events: TypedEmitter< 50 | FastSyncEvents 51 | > = new EventEmitter() as TypedEmitter 52 | processNonFatalError?: (event: { source: 'create-object', error: Error }) => ({ fatal: boolean }) 53 | 54 | private totalObjectsProcessed: number 55 | private interruptable: 
Interruptable | null = null 56 | private _state: 57 | | 'pristine' 58 | | 'running' 59 | | 'done' 60 | | 'paused' 61 | | 'cancelled' 62 | | 'error' = 'pristine' 63 | 64 | constructor(private options: FastSyncOptions) { 65 | this.totalObjectsProcessed = 0 66 | } 67 | 68 | get state() { 69 | return this._state 70 | } 71 | 72 | async execute(options: { 73 | role: FastSyncRole 74 | bothWays?: FastSyncOrder 75 | fastSyncInfo?: FastSyncInfo 76 | }) { 77 | const initialRole: FastSyncRole = options.bothWays 78 | ? options.bothWays === 'receive-first' 79 | ? 'receiver' 80 | : 'sender' 81 | : options.role 82 | const subsequentRole: FastSyncRole | null = options.bothWays 83 | ? flippedRole(initialRole) 84 | : null 85 | 86 | const execute = async ( 87 | role: FastSyncRole, 88 | fastSyncInfo?: FastSyncInfo, 89 | ) => { 90 | this.totalObjectsProcessed = 0 91 | this.setupTimeoutListeners(this.options.channel) 92 | if (role === 'sender') { 93 | await this.send({ role, fastSyncInfo }) 94 | } else { 95 | await this.receive({ role }) 96 | } 97 | } 98 | 99 | await execute(initialRole, options.fastSyncInfo) 100 | if (subsequentRole) { 101 | this.events.emit('roleSwitch', { 102 | before: initialRole, 103 | after: subsequentRole, 104 | }) 105 | await execute(subsequentRole) 106 | } 107 | } 108 | 109 | async send(options: { role: FastSyncRole; fastSyncInfo?: FastSyncInfo }) { 110 | const { channel } = this.options 111 | 112 | const interruptable = (this.interruptable = new Interruptable()) 113 | this._state = 'running' 114 | try { 115 | const syncInfo = 116 | options.fastSyncInfo || 117 | (await getFastSyncInfo(this.options.storageManager, { 118 | collections: this.options.collections, 119 | })) 120 | this.events.emit('prepared', { syncInfo, role: options.role }) 121 | await channel.sendSyncInfo(syncInfo) 122 | 123 | try { 124 | await interruptable.execute(async () => { 125 | this.events.emit('progress', { 126 | role: options.role, 127 | progress: { 128 | ...syncInfo, 129 | 
totalObjectsProcessed: this.totalObjectsProcessed, 130 | }, 131 | }) 132 | }) 133 | 134 | await interruptable.forOfLoop( 135 | this.options.collections, 136 | async collection => { 137 | await this.sendObjecsInCollection(collection, { 138 | channel, 139 | role: options.role, 140 | syncInfo, 141 | }) 142 | }, 143 | ) 144 | this._state = 'done' 145 | } finally { 146 | await channel.finish() 147 | } 148 | } catch (e) { 149 | this._state = 'error' 150 | this.events.emit('error', { error: `${e}` }) 151 | throw e 152 | } finally { 153 | this.interruptable = null 154 | } 155 | } 156 | 157 | private setupTimeoutListeners(channel: FastSyncChannel) { 158 | channel.events.on('channelTimeout', () => this.events.emit('channelTimeout')) 159 | channel.events.on('packageStalled', () => this.events.emit('packageStalled')) 160 | } 161 | 162 | private async sendObjecsInCollection( 163 | collection: string, 164 | options: { 165 | channel: FastSyncChannel 166 | role: FastSyncRole 167 | syncInfo: FastSyncInfo 168 | }, 169 | ) { 170 | const batchStream = streamObjectBatches( 171 | this.options.storageManager, 172 | collection, 173 | { batchSize: this.options.batchSize || 100 }, 174 | ) 175 | await this.interruptable!.forOfLoop(batchStream, async objects => { 176 | const processedObjects = await this._preproccesObjects({ 177 | collection, 178 | objects, 179 | }) 180 | if (processedObjects.length) { 181 | await options.channel.sendObjectBatch({ 182 | collection, 183 | objects: processedObjects, 184 | }) 185 | } 186 | this.totalObjectsProcessed += objects.length 187 | this.events.emit('progress', { 188 | role: options.role, 189 | progress: { 190 | ...options.syncInfo, 191 | totalObjectsProcessed: this.totalObjectsProcessed, 192 | }, 193 | }) 194 | }) 195 | } 196 | 197 | async _preproccesObjects(params: { collection: string; objects: any[] }) { 198 | const preSendProcessor = this.options.preSendProcessor 199 | if (!preSendProcessor) { 200 | return params.objects 201 | } 202 | 203 | const 
processedObjects = ( 204 | await Promise.all( 205 | params.objects.map( 206 | async object => 207 | ( 208 | await preSendProcessor({ 209 | collection: params.collection, 210 | object, 211 | }) 212 | ).object, 213 | ), 214 | ) 215 | ).filter(object => !!object) 216 | return processedObjects 217 | } 218 | 219 | async receive(options: { role: FastSyncRole }) { 220 | this._state = 'running' 221 | const stateChangeHandler = (state: 'paused' | 'resumed') => () => { 222 | this._state = state === 'paused' ? 'paused' : 'running' 223 | this.events.emit(state) 224 | } 225 | 226 | this.options.channel.events.on('paused', stateChangeHandler('paused')) 227 | this.options.channel.events.on('resumed', stateChangeHandler('resumed')) 228 | try { 229 | const syncInfo = await this.options.channel.receiveSyncInfo() 230 | this.events.emit('prepared', { syncInfo, role: options.role }) 231 | 232 | // console.log('recv: entering loop') 233 | this.events.emit('progress', { 234 | role: options.role, 235 | progress: { 236 | ...syncInfo, 237 | totalObjectsProcessed: this.totalObjectsProcessed, 238 | }, 239 | }) 240 | for await (const objectBatch of this.options.channel.streamObjectBatches()) { 241 | // console.log('recv: start iter') 242 | for (const object of objectBatch.objects) { 243 | try { 244 | await this.options.executeReconciliationOperation( 245 | 'createObject', 246 | objectBatch.collection, 247 | object, 248 | ) 249 | } catch (e) { 250 | const { fatal } = this.processNonFatalError?.({ source: 'create-object', error: e }) ?? 
{ fatal: true } 251 | if (fatal) { 252 | throw e 253 | } 254 | } 255 | } 256 | this.totalObjectsProcessed += objectBatch.objects.length 257 | this.events.emit('progress', { 258 | role: options.role, 259 | progress: { 260 | ...syncInfo, 261 | totalObjectsProcessed: this.totalObjectsProcessed, 262 | }, 263 | }) 264 | // console.log('recv: end iter') 265 | } 266 | this._state = 'done' 267 | } catch (e) { 268 | this._state = 'error' 269 | throw e 270 | } 271 | } 272 | 273 | async pause() { 274 | if (this.interruptable) { 275 | this._state = 'paused' 276 | this.events.emit('paused') 277 | await this.interruptable.pause() 278 | await this.options.channel.sendStateChange('paused') 279 | } 280 | } 281 | 282 | async resume() { 283 | if (this.interruptable) { 284 | this._state = 'running' 285 | this.events.emit('resumed') 286 | await this.options.channel.sendStateChange('running') 287 | await this.interruptable.resume() 288 | } 289 | } 290 | 291 | async cancel() { 292 | if (this.interruptable) { 293 | this._state = 'cancelled' 294 | await this.interruptable.cancel() 295 | } 296 | } 297 | 298 | async abort() { 299 | await this.cancel() 300 | } 301 | } 302 | 303 | async function* streamObjectBatches( 304 | storageManager: StorageManager, 305 | collection: string, 306 | options: { batchSize: number }, 307 | ): AsyncIterableIterator { 308 | // const pkIndex = storageManager.registry.collections[collection] 309 | // if (typeof pkIndex !== 'string') { 310 | // throw new Error(`Only simple PK indices are supported for now (colllection: ${collection})`) 311 | // } 312 | 313 | if (storageManager.backend.supports('streamObjects')) { 314 | let objects: any[] = [] 315 | 316 | const objectStream = await storageManager.operation( 317 | 'streamObjects', 318 | collection, 319 | ) 320 | for await (const object of objectStream) { 321 | objects.push(object) 322 | if (objects.length >= options.batchSize) { 323 | yield objects 324 | objects = [] 325 | } 326 | } 327 | if (objects.length > 0) { 
328 | yield objects 329 | } 330 | } else { 331 | for (const object of await storageManager 332 | .collection(collection) 333 | .findObjects({})) { 334 | yield [object] 335 | } 336 | } 337 | } 338 | -------------------------------------------------------------------------------- /ts/fast-sync/interruptable.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import Interruptable from './interruptable' 3 | 4 | describe('Interruptable', () => { 5 | it('should not execute steps if canceled', async () => { 6 | const calls: number[] = [] 7 | const step = async () => { 8 | calls.push(calls.length) 9 | } 10 | 11 | const interruptable = new Interruptable() 12 | await interruptable.execute(step) 13 | expect(calls).toEqual([0]) 14 | await interruptable.cancel() 15 | await interruptable.execute(step) 16 | expect(calls).toEqual([0]) 17 | }) 18 | 19 | it('should allow for pausable execute steps', async () => { 20 | const calls: number[] = [] 21 | const step = async () => { 22 | calls.push(calls.length) 23 | } 24 | 25 | const interruptable = new Interruptable() 26 | await interruptable.execute(step) 27 | expect(calls).toEqual([0]) 28 | 29 | await interruptable.pause() 30 | const promise = interruptable.execute(step) 31 | expect(calls).toEqual([0]) 32 | 33 | await interruptable.resume() 34 | await promise 35 | expect(calls).toEqual([0, 1]) 36 | await interruptable.execute(step) 37 | expect(calls).toEqual([0, 1, 2]) 38 | }) 39 | 40 | it('should allow for canceling while loops', async () => { 41 | const interruptable = new Interruptable() 42 | await interruptable.whileLoop( 43 | async () => true, 44 | async () => { 45 | interruptable.cancel() 46 | }, 47 | ) 48 | }) 49 | 50 | it('should allow for pausing while loops', async () => { 51 | const loops: number[] = [] 52 | 53 | const interruptable = new Interruptable() 54 | const promise = interruptable.whileLoop( 55 | async () => loops.length < 2, 56 | async () => { 
57 | loops.push(loops.length) 58 | await interruptable.pause() 59 | }, 60 | ) 61 | 62 | await new Promise(resolve => setTimeout(resolve, 200)) 63 | expect(loops).toEqual([0]) 64 | await interruptable.resume() 65 | await promise 66 | expect(loops).toEqual([0, 1]) 67 | }) 68 | 69 | it('should allow for canceling for ... of loops', async () => { 70 | const loops: number[] = [] 71 | 72 | const interruptable = new Interruptable() 73 | const promise = interruptable.forOfLoop([1, 2], async item => { 74 | loops.push(item) 75 | interruptable.cancel() 76 | }) 77 | 78 | await promise 79 | expect(loops).toEqual([1]) 80 | }) 81 | 82 | it('should allow for pausing for ... of loops', async () => { 83 | const loops: number[] = [] 84 | 85 | const interruptable = new Interruptable() 86 | await interruptable.pause() 87 | const promise = interruptable.forOfLoop([1, 2], async item => { 88 | loops.push(item) 89 | }) 90 | 91 | expect(loops).toEqual([]) 92 | await interruptable.resume() 93 | await promise 94 | expect(loops).toEqual([1, 2]) 95 | }) 96 | }) 97 | -------------------------------------------------------------------------------- /ts/fast-sync/interruptable.ts: -------------------------------------------------------------------------------- 1 | import { ResolvablePromise, resolvablePromise } from './utils' 2 | 3 | export default class Interruptable { 4 | cancelled: boolean = false 5 | private pausePromise: ResolvablePromise | null = null // only set if paused, resolved when pause ends 6 | 7 | get paused(): boolean { 8 | return !!this.pausePromise 9 | } 10 | 11 | async cancel() { 12 | this.cancelled = true 13 | } 14 | 15 | async pause() { 16 | if (this.paused || this.cancelled) { 17 | return 18 | } 19 | 20 | this.pausePromise = resolvablePromise() 21 | } 22 | 23 | async resume() { 24 | if (this.pausePromise) { 25 | this.pausePromise.resolve() 26 | this.pausePromise = null 27 | } 28 | } 29 | 30 | async whileLoop( 31 | condition: () => Promise, 32 | body: () => Promise, 33 | ) { 34 
| if (!this.cancelled) { 35 | while (await condition()) { 36 | if (await this._shouldCancelAfterWaitingForPause()) { 37 | break 38 | } 39 | 40 | await body() 41 | } 42 | } 43 | } 44 | 45 | async forOfLoop( 46 | iterable: Iterable | AsyncIterable, 47 | body: (item: T) => Promise, 48 | ) { 49 | if (this.cancelled) { 50 | return 51 | } 52 | 53 | if (iterable[Symbol.asyncIterator]) { 54 | for await (const item of iterable) { 55 | if (await this._shouldCancelAfterWaitingForPause()) { 56 | break 57 | } 58 | 59 | await body(item) 60 | 61 | if (await this._shouldCancelAfterWaitingForPause()) { 62 | break 63 | } 64 | } 65 | } else { 66 | for (const item of iterable as Iterable) { 67 | if (await this._shouldCancelAfterWaitingForPause()) { 68 | break 69 | } 70 | 71 | await body(item) 72 | } 73 | } 74 | } 75 | 76 | async execute(f: () => Promise) { 77 | if (await this._shouldCancelAfterWaitingForPause()) { 78 | return 79 | } 80 | 81 | return f() 82 | } 83 | 84 | async _shouldCancelAfterWaitingForPause() { 85 | if (this.pausePromise) { 86 | await this.pausePromise.promise 87 | } 88 | return this.cancelled 89 | } 90 | } 91 | -------------------------------------------------------------------------------- /ts/fast-sync/types.ts: -------------------------------------------------------------------------------- 1 | import TypedEmitter from 'typed-emitter' 2 | 3 | export type FastSyncRole = 'sender' | 'receiver' 4 | export const flippedRole = (role: FastSyncRole): FastSyncRole => 5 | role === 'sender' ? 
'receiver' : 'sender' 6 | export type FastSyncOrder = 'receive-first' | 'send-first' 7 | export type FastSyncPackage = 8 | | { type: 'batch'; batch: any } 9 | | { type: 'confirm' } 10 | | { type: 'state-change'; state: 'paused' | 'running' } 11 | | { type: 'sync-info'; info: FastSyncInfo } 12 | | { type: 'finish' } 13 | | { type: 'user-package'; package: UserPackageType } 14 | export interface FastSyncChannelEvents { 15 | packageStalled: () => void 16 | channelTimeout: () => void 17 | paused: () => void 18 | resumed: () => void 19 | } 20 | export interface FastSyncChannel { 21 | packageTimeoutInMilliseconds: number 22 | channelTimeoutInMilliseconds: number 23 | preSend?: (syncPackage: FastSyncPackage) => Promise 24 | postReceive?: (syncPackage: FastSyncPackage) => Promise 25 | 26 | events: TypedEmitter 27 | 28 | sendUserPackage(jsonSerializable: UserPackageType): Promise 29 | receiveUserPackage(options?: { 30 | expectedType?: keyof UserPackageType 31 | }): Promise 32 | 33 | sendSyncInfo(syncInfo: FastSyncInfo): Promise 34 | receiveSyncInfo(): Promise 35 | 36 | streamObjectBatches(): AsyncIterableIterator 37 | sendObjectBatch(batch: FastSyncBatch): Promise 38 | 39 | sendStateChange(state: 'paused' | 'running'): Promise 40 | 41 | finish(): Promise 42 | destroy(): Promise 43 | } 44 | export interface FastSyncInfo { 45 | objectCount: number 46 | collectionCount: number 47 | } 48 | export interface FastSyncProgress extends FastSyncInfo { 49 | totalObjectsProcessed: number 50 | } 51 | export interface FastSyncBatch { 52 | collection: string 53 | objects: any[] 54 | } 55 | -------------------------------------------------------------------------------- /ts/fast-sync/utils.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import { splitWithTail } from './utils' 3 | 4 | describe('splitWithTail', () => { 5 | it('should work for strings that have less delimiters than the limit without a trailing 
delimiter', () => { 6 | expect(splitWithTail('a:bc', ':', 3)).toEqual(['a', 'bc']) 7 | }) 8 | it('should work for strings that have more delimiters than the limit without a trailing delimiter', () => { 9 | expect(splitWithTail('a:bc:de:fg:hi:jk', ':', 3)).toEqual([ 10 | 'a', 11 | 'bc', 12 | 'de:fg:hi:jk', 13 | ]) 14 | }) 15 | it('should work for strings that have less delimiters than the limit with a trailing delimiter', () => { 16 | expect(splitWithTail('a:', ':', 3)).toEqual(['a', '']) 17 | }) 18 | it('should work for strings that have more delimiters than the limit with a trailing delimiter', () => { 19 | expect(splitWithTail('a:bc:de:fg:hi:jk:', ':', 3)).toEqual([ 20 | 'a', 21 | 'bc', 22 | 'de:fg:hi:jk:', 23 | ]) 24 | }) 25 | it('should work for strings that have the delimiter as the first char', () => { 26 | expect(splitWithTail(':bc:de:fg:hi:jk', ':', 3)).toEqual([ 27 | '', 28 | 'bc', 29 | 'de:fg:hi:jk', 30 | ]) 31 | }) 32 | }) 33 | -------------------------------------------------------------------------------- /ts/fast-sync/utils.ts: -------------------------------------------------------------------------------- 1 | import StorageManager from '@worldbrain/storex' 2 | import { FastSyncInfo } from './types' 3 | 4 | export type ResolvablePromise = { 5 | promise: Promise 6 | resolve: (value: ReturnType) => void 7 | } 8 | 9 | export function resolvablePromise(): ResolvablePromise { 10 | let resolve: (value: ReturnType) => void 11 | const promise = new Promise(resolvePromise => { 12 | resolve = resolvePromise 13 | }) 14 | return { resolve: resolve!, promise } 15 | } 16 | 17 | export async function getFastSyncInfo( 18 | storageManager: StorageManager, 19 | options: { collections: string[] }, 20 | ): Promise { 21 | let collectionCount = 0 22 | let objectCount = 0 23 | for (const collectionName of options.collections) { 24 | collectionCount += 1 25 | objectCount += await storageManager 26 | .collection(collectionName) 27 | .countObjects({}) 28 | } 29 | return { 
collectionCount, objectCount } 30 | } 31 | 32 | export function splitWithTail( 33 | s: string, 34 | delimiter: string, 35 | limit: number, 36 | ): Array { 37 | if (delimiter.length > 1) { 38 | throw new Error(`splitWithTail() doesn't support multi-char delimiters`) 39 | } 40 | 41 | const result: string[] = [] 42 | 43 | let prevIndex: number | null = null 44 | let delimitersExhausted = false 45 | while (result.length < limit - 1) { 46 | const nextIndex = s.indexOf( 47 | delimiter, 48 | prevIndex === null ? 0 : prevIndex + 1, 49 | ) 50 | const nextSlice = s.substring( 51 | prevIndex === null ? 0 : prevIndex + 1, 52 | nextIndex !== -1 ? nextIndex : undefined, 53 | ) 54 | result.push(nextSlice) 55 | 56 | if (nextIndex === -1) { 57 | delimitersExhausted = true 58 | break 59 | } 60 | 61 | prevIndex = nextIndex 62 | } 63 | if (!delimitersExhausted) { 64 | result.push(s.substr(prevIndex === null ? 0 : prevIndex + 1)) 65 | } 66 | 67 | return result 68 | } 69 | -------------------------------------------------------------------------------- /ts/index.test.data.ts: -------------------------------------------------------------------------------- 1 | export const TEST_DATA = { 2 | test1: { 3 | key: 'one', 4 | label: 'Foo', 5 | createdWhen: new Date('2019-09-09 01:23:45'), 6 | }, 7 | test2: { 8 | key: 'two', 9 | label: 'Bar', 10 | createdWhen: new Date('2019-09-10 01:23:45'), 11 | }, 12 | test3: { 13 | key: 'three', 14 | label: 'spam', 15 | createdWhen: new Date('2019-09-11 01:23:45'), 16 | }, 17 | } 18 | -------------------------------------------------------------------------------- /ts/index.tests.ts: -------------------------------------------------------------------------------- 1 | import StorageManager, { StorageBackend } from '@worldbrain/storex' 2 | import { RegistryCollections } from '@worldbrain/storex/lib/registry' 3 | import { DexieStorageBackend } from '@worldbrain/storex-backend-dexie' 4 | import inMemory from '@worldbrain/storex-backend-dexie/lib/in-memory' 5 | 
import { ClientSyncLogStorage } from './client-sync-log' 6 | import { registerModuleMapCollections } from '@worldbrain/storex-pattern-modules' 7 | import { CustomAutoPkMiddleware } from './custom-auto-pk' 8 | import { SyncLoggingMiddleware } from './logging-middleware' 9 | import { StorageMiddleware } from '@worldbrain/storex/lib/types/middleware' 10 | 11 | export type TestDependencyInjector< 12 | TestDependencies, 13 | TestRunnerOptions = never 14 | > = ( 15 | body: (dependencies: TestDependencies) => Promise, 16 | options?: TestRunnerOptions, 17 | ) => Promise 18 | 19 | export function makeTestFactory( 20 | withTestDependencies: TestDependencyInjector< 21 | TestDependencies, 22 | TestRunnerOptions 23 | >, 24 | ) { 25 | type TestFunction = (dependencies: TestDependencies) => Promise 26 | 27 | return async function wrappedIt( 28 | description: string, 29 | test: TestFunction, 30 | options?: TestRunnerOptions, 31 | ) { 32 | it(description, async () => { 33 | await withTestDependencies( 34 | async (dependencies: TestDependencies) => { 35 | await test(dependencies) 36 | }, 37 | options, 38 | ) 39 | }) 40 | } 41 | } 42 | 43 | export async function setupSyncTestClient(options: { 44 | getNow: () => number | '$now' 45 | createClientStorageBackend?: () => StorageBackend 46 | withModificationMerging?: boolean 47 | pkGenerator?: () => string 48 | collections?: RegistryCollections 49 | dontFinishInitialization?: boolean 50 | withCompoundPks?: boolean 51 | }) { 52 | const backend = options.createClientStorageBackend 53 | ? options.createClientStorageBackend() 54 | : ((new DexieStorageBackend({ 55 | dbName: 'test', 56 | idbImplementation: inMemory(), 57 | }) as any) as StorageBackend) 58 | const storageManager = new StorageManager({ backend }) 59 | storageManager.registry.registerCollections( 60 | options.collections || { 61 | user: { 62 | version: new Date('2019-01-01'), 63 | fields: options.withCompoundPks 64 | ? 
{ 65 | firstName: { type: 'string' }, 66 | lastName: { type: 'string' }, 67 | test: { type: 'string', optional: true }, 68 | } 69 | : { displayName: { type: 'string' } }, 70 | indices: options.withCompoundPks 71 | ? [ 72 | { field: ['firstName', 'lastName'], pk: true }, 73 | { field: 'lastName' }, 74 | ] 75 | : undefined, 76 | }, 77 | email: { 78 | version: new Date('2019-01-01'), 79 | fields: { 80 | address: { type: 'string' }, 81 | }, 82 | relationships: options.withCompoundPks 83 | ? [] 84 | : [{ childOf: 'user' }], 85 | }, 86 | }, 87 | ) 88 | const clientSyncLog = new ClientSyncLogStorage({ storageManager }) 89 | registerModuleMapCollections(storageManager.registry, { clientSyncLog }) 90 | 91 | const includeCollections = options.collections 92 | ? Object.keys(options.collections) 93 | : ['user', 'email'] 94 | 95 | const middleware: StorageMiddleware[] = [] 96 | if ( 97 | options.pkGenerator && 98 | (options.collections || !options.withCompoundPks) 99 | ) { 100 | const pkMiddleware = new CustomAutoPkMiddleware({ 101 | pkGenerator: options.pkGenerator, 102 | }) 103 | pkMiddleware.setup({ 104 | storageRegistry: storageManager.registry, 105 | collections: includeCollections, 106 | }) 107 | middleware.push(pkMiddleware) 108 | } 109 | 110 | const syncLoggingMiddleware = new SyncLoggingMiddleware({ 111 | storageManager, 112 | clientSyncLog, 113 | includeCollections, 114 | mergeModifications: options.withModificationMerging, 115 | }) 116 | syncLoggingMiddleware._getNow = async () => options.getNow() 117 | middleware.push(syncLoggingMiddleware) 118 | 119 | storageManager.setMiddleware(middleware) 120 | 121 | const deviceId: number | string = null as any 122 | 123 | if (!options.dontFinishInitialization) { 124 | await storageManager.finishInitialization() 125 | await storageManager.backend.migrate() 126 | } 127 | 128 | return { 129 | storageManager, 130 | syncLoggingMiddleware, 131 | clientSyncLog, 132 | deviceId, 133 | objects: {}, 134 | } 135 | } 136 | 137 | export 
function linearTimestampGenerator(options: { 138 | start: number 139 | step?: number 140 | }) { 141 | let now = options.start 142 | return () => { 143 | const oldNow = now 144 | now += options.step || 1 145 | return oldNow 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /ts/index.ts: -------------------------------------------------------------------------------- 1 | import Omit from 'lodash/omit' 2 | import TypedEmitter from 'typed-emitter' 3 | import { jsonDateParser } from 'json-date-parser' 4 | import last from 'lodash/last' 5 | import StorageManager, { OperationBatch } from '@worldbrain/storex' 6 | import { ClientSyncLogStorage } from './client-sync-log' 7 | import { SharedSyncLog } from './shared-sync-log' 8 | import { ReconcilerFunction } from './reconciliation' 9 | import { 10 | SharedSyncLogEntryData, 11 | SharedSyncLogEntry, 12 | } from './shared-sync-log/types' 13 | import { ClientSyncLogEntry } from './client-sync-log/types' 14 | 15 | export interface SyncSerializer { 16 | serializeSharedSyncLogEntryData: ( 17 | data: SharedSyncLogEntryData, 18 | ) => Promise 19 | deserializeSharedSyncLogEntryData: ( 20 | serialized: string, 21 | ) => Promise 22 | } 23 | 24 | export type SyncEvents = TypedEmitter 25 | export interface SyncEventMap { 26 | sendingSharedEntries: (event: { 27 | entries: Omit[] 28 | deviceId: number | string 29 | }) => void 30 | receivedSharedEntries: (event: { 31 | entries: SharedSyncLogEntry<'deserialized-data'>[] 32 | deviceId: number | string 33 | }) => void 34 | reconcilingEntries: (event: { 35 | entries: ClientSyncLogEntry[] 36 | deviceId: number | string 37 | }) => void 38 | reconciledEntries: (event: { 39 | entries: ClientSyncLogEntry[] 40 | deviceId: number | string 41 | reconciliation: any[] 42 | }) => void 43 | } 44 | export const SYNC_EVENTS: { [Key in keyof SyncEventMap]: {} } = { 45 | sendingSharedEntries: {}, 46 | receivedSharedEntries: {}, 47 | reconcilingEntries: {}, 48 
/**
 * Upload all not-yet-shared client sync log entries to the shared sync log,
 * batch by batch, marking them as shared afterwards.
 *
 * Returns { finished: true } once no unshared entries remain, or
 * { finished: false } after one batch when `args.singleBatch` is set.
 * Throws when a single-entry batch still exceeds `uploadBatchByteLimit`.
 */
export async function shareLogEntries(
    args: CommonSyncOptions & { extraSentInfo?: any },
): Promise<{ finished: boolean }> {
    // Pre-send hook defaults to a pass-through; a custom hook can drop an
    // entry from the upload by returning { entry: null }.
    const preSend: SyncPreSendProcessor = args.preSend || (async (args) => args)
    const serializeEntryData = args.serializer
        ? args.serializer.serializeSharedSyncLogEntryData
        : async (data: SharedSyncLogEntryData) => JSON.stringify(data)

    // When a batch exceeds the byte limit we retry with a smaller batch size,
    // which applies to the next iteration only.
    let temporaryBatchSize: number | null = null
    while (true) {
        const batchSize = temporaryBatchSize || args.uploadBatchSize
        temporaryBatchSize = null

        const entries = await args.clientSyncLog.getUnsharedEntries({
            batchSize,
        })
        if (!entries.length) {
            return { finished: true }
        }

        // Run the pre-send hook on every entry and filter out dropped ones.
        const processedEntries = (
            await Promise.all(
                entries.map(async (entry) => (await preSend({ entry })).entry),
            )
        ).filter((entry) => !!entry) as ClientSyncLogEntry[]

        const sharedLogEntries = await Promise.all(
            processedEntries.map(async (entry) => ({
                createdOn: entry.createdOn,
                data: await serializeEntryData({
                    operation: entry.operation,
                    collection: entry.collection,
                    pk: entry.pk,
                    // 'field'/'value' only exist on some entry variants,
                    // hence the index access.
                    field: entry['field'] || null,
                    value: entry['value'] || null,
                }),
            })),
        )

        if (args.uploadBatchByteLimit) {
            // NOTE(review): rough estimate — serialized payload length plus
            // ~100 bytes of per-entry overhead; not an exact wire size.
            const estimatedBatchSizeBytes = sharedLogEntries.reduce(
                (acc, entry) => acc + entry.data.length + 100,
                0,
            )
            const limitExceeded =
                estimatedBatchSizeBytes > args.uploadBatchByteLimit
            if (limitExceeded) {
                if (batchSize) {
                    if (batchSize < 2) {
                        throw new Error(
                            `Sync batch size exceeds limit during upload, but cannot make it smaller`,
                        )
                    }

                    // Halve the batch size and retry this iteration.
                    temporaryBatchSize = Math.floor(batchSize / 2)
                } else {
                    // No batch size configured: fall back to a small one.
                    temporaryBatchSize = 16
                }
                continue
            }
        }

        if (args.syncEvents) {
            args.syncEvents.emit('sendingSharedEntries', {
                entries: sharedLogEntries,
                deviceId: args.deviceId,
            })
        }
        await args.sharedSyncLog.writeEntries(sharedLogEntries, args)
        // Mark everything up to the newest fetched entry as shared. Uses the
        // raw (unfiltered) entries so entries dropped by the pre-send hook
        // are not fetched again on the next pass.
        await args.clientSyncLog.updateSharedUntil({
            until: last(entries)!.createdOn,
            sharedOn: args.now,
        })

        if (args.singleBatch) {
            // Caller asked for at most one batch; report that more may remain.
            return { finished: false }
        }
    }
}
args.sharedSyncLog.markAsSeen(logUpdate, { 233 | userId: args.userId, 234 | deviceId: args.deviceId, 235 | now: args.now, 236 | }) 237 | 238 | if (!continueSync('receive', args)) { 239 | return { finished: false } 240 | } 241 | } 242 | } 243 | 244 | export async function writeReconcilation(args: { 245 | storageManager: StorageManager 246 | clientSyncLog: ClientSyncLogStorage 247 | entries: ClientSyncLogEntry[] 248 | reconciliation: OperationBatch 249 | executeReconciliationOperation?: ExecuteReconciliationOperation 250 | }) { 251 | const executeReconciliationOperation = 252 | args.executeReconciliationOperation ?? 253 | ((name, ...operation) => 254 | args.storageManager.backend.operation(name, ...operation)) 255 | 256 | const batchSteps = [ 257 | ...args.reconciliation, 258 | ...args.clientSyncLog.getMarkAsIntegratedBatchSteps(args.entries), 259 | ] 260 | for (const [stepIndex, step] of Object.entries(batchSteps)) { 261 | step.placeholder = `step-${stepIndex}` 262 | } 263 | await executeReconciliationOperation('executeBatch', batchSteps) 264 | } 265 | 266 | export async function reconcileStorage( 267 | options: SyncOptions, 268 | ): Promise<{ finished: boolean }> { 269 | while (true) { 270 | const entries = await options.clientSyncLog.getNextEntriesToIntgrate() 271 | if (!entries) { 272 | return { finished: true } 273 | } 274 | 275 | let reconciliation = await options.reconciler(entries, { 276 | storageRegistry: options.storageManager.registry, 277 | }) 278 | if (options.reconciliationProcessor) { 279 | reconciliation = await options.reconciliationProcessor( 280 | reconciliation, 281 | ) 282 | } 283 | 284 | if (options.syncEvents) { 285 | options.syncEvents.emit('reconciledEntries', { 286 | entries, 287 | reconciliation, 288 | deviceId: options.deviceId, 289 | }) 290 | } 291 | 292 | await writeReconcilation({ 293 | storageManager: options.storageManager, 294 | clientSyncLog: options.clientSyncLog, 295 | entries, 296 | reconciliation, 297 | 
executeReconciliationOperation: 298 | options.executeReconciliationOperation, 299 | }) 300 | 301 | if (!continueSync('integrate', options)) { 302 | return { finished: false } 303 | } 304 | } 305 | } 306 | 307 | export async function doSync(options: SyncOptions): Promise { 308 | if (options.stages?.receive ?? true) { 309 | const { finished: receiveFinished } = await receiveLogEntries(options) 310 | if (!receiveFinished || !continueSync('share', options)) { 311 | return { finished: false } 312 | } 313 | } 314 | 315 | if (options.stages?.share ?? true) { 316 | const { finished: shareFinished } = await shareLogEntries(options) 317 | if (!shareFinished || !continueSync('integrate', options)) { 318 | return { finished: false } 319 | } 320 | } 321 | 322 | if (options.stages?.reconcile ?? true) { 323 | const { finished: reconciliationFinished } = await reconcileStorage( 324 | options, 325 | ) 326 | if (!reconciliationFinished) { 327 | return { finished: false } 328 | } 329 | } 330 | 331 | if (options.cleanupAfterReconcile) { 332 | await options.clientSyncLog.deleteObsoleteEntries() 333 | } 334 | 335 | return { finished: true } 336 | } 337 | 338 | function continueSync( 339 | stage: SyncStage, 340 | options: Pick, 341 | ): boolean { 342 | return options.continueSync 343 | ? 
options.continueSync({ stage }) 344 | : !options.singleBatch 345 | } 346 | -------------------------------------------------------------------------------- /ts/integration/continuous-sync.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from 'events' 2 | import StorageManager from '@worldbrain/storex' 3 | import { SharedSyncLog } from '../shared-sync-log' 4 | import { reconcileSyncLog } from '../reconciliation' 5 | import { 6 | doSync, 7 | SyncPreSendProcessor, 8 | SyncSerializer, 9 | SyncEvents, 10 | SyncPostReceiveProcessor, 11 | SyncOptions, 12 | SyncReturnValue, 13 | ExecuteReconciliationOperation, 14 | } from '../' 15 | import { ClientSyncLogStorage } from '../client-sync-log' 16 | import { RecurringTask } from '../utils/recurring-task' 17 | import { SyncSettingsStore } from './settings' 18 | import TypedEventEmitter from 'typed-emitter' 19 | 20 | export interface ContinuousSyncDependencies { 21 | auth: { getUserId(): Promise } 22 | storageManager: StorageManager 23 | clientSyncLog: ClientSyncLogStorage 24 | getSharedSyncLog: () => Promise 25 | settingStore: SyncSettingsStore 26 | frequencyInMs?: number 27 | uploadBatchSize?: number 28 | uploadBatchByteLimit?: number 29 | downloadBatchSize?: number 30 | singleBatch?: boolean 31 | debug?: boolean 32 | toggleSyncLogging: ((enabled: true, deviceId: string | number) => void) & 33 | ((enabled: false) => void) 34 | executeReconciliationOperation?: ExecuteReconciliationOperation 35 | } 36 | export interface ContinuousSyncEvents { 37 | syncStarted(): void 38 | syncFinished(event: { hasChanges: boolean; error?: Error }): void 39 | } 40 | 41 | export class ContinuousSync { 42 | public events = new EventEmitter() as TypedEventEmitter< 43 | ContinuousSyncEvents 44 | > 45 | public recurringIncrementalSyncTask?: RecurringTask< 46 | Partial, 47 | SyncReturnValue | void 48 | > 49 | public deviceId?: number | string 50 | public enabled = false 51 | public debug: 
boolean 52 | public runningSync: Promise | null = null 53 | 54 | constructor(private dependencies: ContinuousSyncDependencies) { 55 | this.debug = !!dependencies.debug 56 | } 57 | 58 | async setup() { 59 | const enabled = await this.dependencies.settingStore.retrieveSetting( 60 | 'continuousSyncEnabled', 61 | ) 62 | if (!enabled) { 63 | return 64 | } 65 | 66 | this.deviceId = (await this.dependencies.settingStore.retrieveSetting( 67 | 'deviceId', 68 | )) as string | number 69 | await this.setupContinuousSync() 70 | } 71 | 72 | async tearDown() { 73 | if (this.recurringIncrementalSyncTask) { 74 | this.recurringIncrementalSyncTask.stop() 75 | } 76 | } 77 | 78 | setupRecurringTask() { 79 | if (this.dependencies.frequencyInMs) { 80 | this.recurringIncrementalSyncTask = new RecurringTask( 81 | async ( 82 | options?: Partial & { debug?: boolean }, 83 | ) => { 84 | return this.maybeDoIncrementalSync(options) 85 | }, 86 | { 87 | intervalInMs: this.dependencies.frequencyInMs, 88 | onError: () => { }, 89 | }, 90 | ) 91 | } 92 | } 93 | 94 | async initDevice() { 95 | const userId = await this.dependencies.auth.getUserId() 96 | if (!userId) { 97 | throw new Error( 98 | `Cannot generate Sync device ID without being logged in`, 99 | ) 100 | } 101 | 102 | const existingDeviceId = await this.dependencies.settingStore.retrieveSetting( 103 | 'deviceId', 104 | ) 105 | if (existingDeviceId) { 106 | this.deviceId = existingDeviceId as number | string 107 | return 108 | } 109 | 110 | const sharedSyncLog = await this.dependencies.getSharedSyncLog() 111 | const newDeviceId = await sharedSyncLog.createDeviceId({ 112 | userId, 113 | sharedUntil: Date.now(), 114 | }) 115 | await this.dependencies.settingStore.storeSetting( 116 | 'deviceId', 117 | newDeviceId, 118 | ) 119 | this.deviceId = newDeviceId 120 | } 121 | 122 | async enableContinuousSync() { 123 | await this.dependencies.settingStore.storeSetting( 124 | 'continuousSyncEnabled', 125 | true, 126 | ) 127 | await 
this.setupContinuousSync() 128 | } 129 | 130 | async setupContinuousSync() { 131 | if (!this.deviceId) { 132 | throw new Error(`Cannot set up continuous Sync without device id`) 133 | } 134 | 135 | this.dependencies.toggleSyncLogging(true, this.deviceId) 136 | this.enabled = true 137 | this.setupRecurringTask() 138 | } 139 | 140 | async forceIncrementalSync( 141 | options?: { debug?: boolean } & Partial, 142 | ): Promise { 143 | if (this.enabled) { 144 | if (this.recurringIncrementalSyncTask) { 145 | return this.recurringIncrementalSyncTask.forceRun(options) 146 | } else { 147 | return this.doIncrementalSync(options) 148 | } 149 | } 150 | } 151 | 152 | async maybeDoIncrementalSync(options?: { debug?: boolean }) { 153 | if (this.enabled) { 154 | return this.doIncrementalSync(options) 155 | } 156 | } 157 | 158 | async doIncrementalSync( 159 | options?: Partial & { 160 | debug?: boolean 161 | prettifier?: (object: any) => string 162 | }, 163 | ) { 164 | options = options || {} 165 | if (this.runningSync) { 166 | return 167 | } 168 | 169 | let resolveRunningSync: () => void 170 | this.runningSync = new Promise( 171 | resolve => (resolveRunningSync = resolve), 172 | ) 173 | try { 174 | this.events.emit('syncStarted') 175 | const syncOptions = { 176 | ...(await this.getSyncOptions()), 177 | ...options, 178 | } 179 | if (!syncOptions.syncEvents) { 180 | syncOptions.syncEvents = new EventEmitter() as SyncEvents 181 | } 182 | if (options?.debug) { 183 | const originalEmit = syncOptions.syncEvents.emit.bind( 184 | syncOptions.syncEvents, 185 | ) 186 | syncOptions.syncEvents.emit = ((name: string, event: any) => { 187 | console.log( 188 | `SYNC EVENT '${name}':`, 189 | options?.prettifier ? 
options.prettifier(event) : event, 190 | ) 191 | return originalEmit(name as any, event) 192 | }) as any 193 | } 194 | 195 | let hasChanges = false 196 | syncOptions.syncEvents.addListener( 197 | 'reconciledEntries', 198 | () => (hasChanges = true), 199 | ) 200 | try { 201 | const syncResult = await doSync(syncOptions) 202 | this.events.emit('syncFinished', { hasChanges }) 203 | return syncResult 204 | } finally { 205 | syncOptions.syncEvents.removeAllListeners('reconciledEntries') 206 | } 207 | } catch (error) { 208 | console.error(error) 209 | this.events.emit('syncFinished', { hasChanges: false, error }) 210 | } finally { 211 | this.runningSync = null 212 | resolveRunningSync!() 213 | } 214 | } 215 | 216 | async getSyncOptions(): Promise { 217 | const { auth } = this.dependencies 218 | const userId = await auth.getUserId() 219 | if (!userId) { 220 | throw new Error(`Cannot Sync without authenticated user`) 221 | } 222 | if (!this.deviceId) { 223 | throw new Error(`Cannot Sync without device ID`) 224 | } 225 | 226 | return { 227 | clientSyncLog: this.dependencies.clientSyncLog, 228 | sharedSyncLog: await this.dependencies.getSharedSyncLog(), 229 | storageManager: this.dependencies.storageManager, 230 | reconciler: reconcileSyncLog, 231 | now: Date.now(), 232 | userId, 233 | deviceId: this.deviceId, 234 | uploadBatchSize: this.dependencies.uploadBatchSize, 235 | uploadBatchByteLimit: this.dependencies.uploadBatchByteLimit, 236 | downloadBatchSize: this.dependencies.downloadBatchSize, 237 | singleBatch: this.dependencies.singleBatch, 238 | serializer: this.getSerializer() || undefined, 239 | preSend: this.getPreSendProcessor() || undefined, 240 | postReceive: this.getPostReceiveProcessor() || undefined, 241 | executeReconciliationOperation: this.dependencies.executeReconciliationOperation, 242 | } 243 | } 244 | 245 | getPreSendProcessor(): SyncPreSendProcessor | void { } 246 | 247 | getPostReceiveProcessor(): SyncPostReceiveProcessor | void { } 248 | 249 | 
getSerializer(): SyncSerializer | void { } 250 | 251 | _debugLog(...args: any[]) { 252 | if (this.debug) { 253 | console['log']('Initial Sync -', ...args) 254 | } 255 | } 256 | } 257 | -------------------------------------------------------------------------------- /ts/integration/index.test.ts: -------------------------------------------------------------------------------- 1 | const wrtc = require('wrtc') 2 | import expect from 'expect' 3 | import { RegistryCollections } from '@worldbrain/storex/lib/registry' 4 | import { setupSyncTestClient, linearTimestampGenerator } from '../index.tests' 5 | import { TEST_DATA } from '../index.test.data' 6 | import { InitialSync } from './initial-sync' 7 | import { ContinuousSync, ContinuousSyncDependencies } from './continuous-sync' 8 | import { 9 | createMemorySharedSyncLog, 10 | lazyMemorySignalTransportFactory, 11 | } from './index.tests' 12 | import { registerModuleMapCollections } from '@worldbrain/storex-pattern-modules' 13 | import { FastSyncEvents } from '../fast-sync' 14 | import { PromiseContentType } from '../types.test' 15 | 16 | describe('Integration helpers', () => { 17 | async function setupTest(options: { 18 | collections: RegistryCollections 19 | continuousSyncDependenciesProcessor?: ( 20 | deps: ContinuousSyncDependencies, 21 | options: { clientIndex: number }, 22 | ) => ContinuousSyncDependencies 23 | }) { 24 | const getNow = linearTimestampGenerator({ start: 1 }) 25 | const clients = [ 26 | await setupSyncTestClient({ 27 | getNow, 28 | collections: options.collections, 29 | dontFinishInitialization: true, 30 | }), 31 | await setupSyncTestClient({ 32 | getNow, 33 | collections: options.collections, 34 | dontFinishInitialization: true, 35 | }), 36 | ] 37 | const signalTransportFactory = lazyMemorySignalTransportFactory() 38 | const sharedSyncLog = await createMemorySharedSyncLog() 39 | const integration = clients.map((client, index) => { 40 | const settings = {} 41 | 42 | const initialSync = new 
InitialSync({ 43 | storageManager: client.storageManager, 44 | signalTransportFactory, 45 | syncedCollections: Object.keys(options.collections), 46 | executeReconciliationOperation: (...args) => 47 | client.storageManager.operation(...args), 48 | batchSize: 1, 49 | }) 50 | initialSync.wrtc = wrtc 51 | 52 | const continuousSyncDeps: ContinuousSyncDependencies = { 53 | auth: { getUserId: async () => 456 }, 54 | storageManager: client.storageManager, 55 | clientSyncLog: client.clientSyncLog, 56 | getSharedSyncLog: async () => sharedSyncLog, 57 | settingStore: { 58 | storeSetting: async (key, value) => { 59 | settings[key] = value 60 | }, 61 | retrieveSetting: async key => settings[key], 62 | }, 63 | toggleSyncLogging: client.syncLoggingMiddleware.toggle.bind( 64 | client.syncLoggingMiddleware, 65 | ), 66 | } 67 | const continuousSync = new ContinuousSync( 68 | options.continuousSyncDependenciesProcessor 69 | ? options.continuousSyncDependenciesProcessor( 70 | continuousSyncDeps, 71 | { clientIndex: index }, 72 | ) 73 | : continuousSyncDeps, 74 | ) 75 | 76 | return { 77 | settings, 78 | initialSync, 79 | continuousSync, 80 | } 81 | }) 82 | 83 | for (const clientIndex of [0, 1]) { 84 | await clients[clientIndex].storageManager.finishInitialization() 85 | } 86 | 87 | const doInitialSync = async (options: { 88 | source: { initialSync: InitialSync } 89 | target: { initialSync: InitialSync } 90 | }) => { 91 | const { 92 | initialMessage, 93 | } = await options.source.initialSync.requestInitialSync() 94 | await options.target.initialSync.answerInitialSync({ 95 | initialMessage, 96 | }) 97 | for (const client of [options.source, options.target]) { 98 | await client.initialSync.waitForInitialSync() 99 | } 100 | } 101 | 102 | return { clients, integration, doInitialSync } 103 | } 104 | 105 | async function testTwoWaySync(options: { 106 | insertData: ( 107 | clients: Array< 108 | PromiseContentType> 109 | >, 110 | ) => Promise 111 | validateSenderRoleSwitch: 
FastSyncEvents['roleSwitch'] 112 | expectNoData?: boolean 113 | }) { 114 | const { clients, integration, doInitialSync } = await setupTest({ 115 | collections: { 116 | test: { 117 | version: new Date(), 118 | fields: { 119 | key: { type: 'string' }, 120 | label: { type: 'string' }, 121 | createWhen: { type: 'datetime' }, 122 | }, 123 | indices: [{ field: 'key', pk: true }], 124 | }, 125 | }, 126 | }) 127 | 128 | await options.insertData(clients) 129 | 130 | integration[0].initialSync.events.once('roleSwitch', event => { 131 | options.validateSenderRoleSwitch(event) 132 | }) 133 | 134 | await doInitialSync({ 135 | source: integration[0], 136 | target: integration[1], 137 | }) 138 | 139 | expect({ 140 | device: 'two', 141 | objects: await clients[1].storageManager 142 | .collection('test') 143 | .findObjects({}, { order: [['createdWhen', 'asc']] }), 144 | }).toEqual({ 145 | device: 'two', 146 | objects: options.expectNoData 147 | ? [] 148 | : [ 149 | expect.objectContaining(TEST_DATA.test1), 150 | expect.objectContaining(TEST_DATA.test2), 151 | expect.objectContaining(TEST_DATA.test3), 152 | ], 153 | }) 154 | 155 | expect({ 156 | device: 'one', 157 | objects: await clients[0].storageManager 158 | .collection('test') 159 | .findObjects({}, { order: [['createdWhen', 'asc']] }), 160 | }).toEqual({ 161 | device: 'one', 162 | objects: options.expectNoData 163 | ? 
[] 164 | : [ 165 | (expect as any).objectContaining(TEST_DATA.test1), 166 | (expect as any).objectContaining(TEST_DATA.test2), 167 | (expect as any).objectContaining(TEST_DATA.test3), 168 | ], 169 | }) 170 | } 171 | 172 | it('should do a successful two way initial sync through integration classes with the receiver having less data', async () => { 173 | await testTwoWaySync({ 174 | async insertData(clients) { 175 | await clients[0].storageManager 176 | .collection('test') 177 | .createObject(TEST_DATA.test1) 178 | await clients[0].storageManager 179 | .collection('test') 180 | .createObject(TEST_DATA.test2) 181 | await clients[1].storageManager 182 | .collection('test') 183 | .createObject(TEST_DATA.test3) 184 | }, 185 | validateSenderRoleSwitch(event) { 186 | expect(event).toEqual({ 187 | before: 'receiver', 188 | after: 'sender', 189 | }) 190 | }, 191 | }) 192 | }) 193 | 194 | it('should do a successful two way initial sync through integration classes with the sender having less data', async () => { 195 | await testTwoWaySync({ 196 | async insertData(clients) { 197 | await clients[0].storageManager 198 | .collection('test') 199 | .createObject(TEST_DATA.test1) 200 | await clients[1].storageManager 201 | .collection('test') 202 | .createObject(TEST_DATA.test2) 203 | await clients[1].storageManager 204 | .collection('test') 205 | .createObject(TEST_DATA.test3) 206 | }, 207 | validateSenderRoleSwitch(event) { 208 | expect(event).toEqual({ 209 | before: 'sender', 210 | after: 'receiver', 211 | }) 212 | }, 213 | }) 214 | }) 215 | 216 | it('should do a successful two way initial sync through integration classes without any data', async () => { 217 | await testTwoWaySync({ 218 | async insertData(clients) {}, 219 | validateSenderRoleSwitch(event) { 220 | expect(event).toEqual({ 221 | before: 'receiver', 222 | after: 'sender', 223 | }) 224 | }, 225 | expectNoData: true, 226 | }) 227 | }) 228 | 229 | it('should not crash if trying to abort the sync without notifying the 
other side', async () => { 230 | const { clients, integration } = await setupTest({ 231 | collections: { 232 | test: { 233 | version: new Date(), 234 | fields: { 235 | key: { type: 'string' }, 236 | label: { type: 'string' }, 237 | createWhen: { type: 'datetime' }, 238 | }, 239 | indices: [{ field: 'key', pk: true }], 240 | }, 241 | }, 242 | }) 243 | 244 | await clients[0].storageManager 245 | .collection('test') 246 | .createObject(TEST_DATA.test1) 247 | await clients[0].storageManager 248 | .collection('test') 249 | .createObject(TEST_DATA.test2) 250 | await clients[0].storageManager 251 | .collection('test') 252 | .createObject(TEST_DATA.test3) 253 | 254 | integration[0].initialSync.events.on('progress', ({ progress }) => { 255 | if (progress.totalObjectsProcessed === 1) { 256 | integration[0].initialSync.abortInitialSync() 257 | } 258 | }) 259 | 260 | const { 261 | initialMessage, 262 | } = await integration[0].initialSync.requestInitialSync() 263 | await integration[1].initialSync.answerInitialSync({ 264 | initialMessage, 265 | }) 266 | await integration[0].initialSync.waitForInitialSync() 267 | 268 | expect( 269 | await clients[1].storageManager 270 | .collection('test') 271 | .findObjects({}, { order: [['createdWhen', 'asc']] }), 272 | ).toEqual([(expect as any).objectContaining(TEST_DATA.test1)]) 273 | }) 274 | 275 | it('should do a continuous sync with a small batch size and a upload batch byte limit being exceeded', async () => { 276 | const { clients, integration } = await setupTest({ 277 | collections: { 278 | test: { 279 | version: new Date(), 280 | fields: { 281 | key: { type: 'string' }, 282 | label: { type: 'string' }, 283 | createWhen: { type: 'datetime' }, 284 | }, 285 | indices: [{ field: 'key', pk: true }], 286 | }, 287 | }, 288 | continuousSyncDependenciesProcessor: ( 289 | dependencies, 290 | ): ContinuousSyncDependencies => ({ 291 | ...dependencies, 292 | uploadBatchSize: 2, 293 | uploadBatchByteLimit: 300, 294 | }), 295 | }) 296 | 297 | 
await integration[0].continuousSync.initDevice() 298 | await integration[0].continuousSync.enableContinuousSync() 299 | 300 | await integration[1].continuousSync.initDevice() 301 | await integration[1].continuousSync.enableContinuousSync() 302 | 303 | await clients[0].storageManager 304 | .collection('test') 305 | .createObject(TEST_DATA.test1) 306 | await clients[0].storageManager 307 | .collection('test') 308 | .createObject(TEST_DATA.test2) 309 | await clients[0].storageManager 310 | .collection('test') 311 | .createObject(TEST_DATA.test3) 312 | 313 | await integration[0].continuousSync.forceIncrementalSync() 314 | await integration[1].continuousSync.forceIncrementalSync() 315 | 316 | expect( 317 | await clients[1].storageManager 318 | .collection('test') 319 | .findObjects({}, { order: [['createdWhen', 'asc']] }), 320 | ).toEqual([ 321 | (expect as any).objectContaining(TEST_DATA.test1), 322 | (expect as any).objectContaining(TEST_DATA.test2), 323 | (expect as any).objectContaining(TEST_DATA.test3), 324 | ]) 325 | }) 326 | 327 | it('should throw an error when it cannot satisfy a batch byte limit', async () => { 328 | const { clients, integration } = await setupTest({ 329 | collections: { 330 | test: { 331 | version: new Date(), 332 | fields: { 333 | key: { type: 'string' }, 334 | label: { type: 'string' }, 335 | createWhen: { type: 'datetime' }, 336 | }, 337 | indices: [{ field: 'key', pk: true }], 338 | }, 339 | }, 340 | continuousSyncDependenciesProcessor: ( 341 | dependencies, 342 | ): ContinuousSyncDependencies => ({ 343 | ...dependencies, 344 | uploadBatchSize: 2, 345 | uploadBatchByteLimit: 100, 346 | }), 347 | }) 348 | 349 | const events: any[] = [] 350 | integration[0].continuousSync.events.on('syncFinished', event => 351 | events.push(event), 352 | ) 353 | 354 | await integration[0].continuousSync.initDevice() 355 | await integration[0].continuousSync.enableContinuousSync() 356 | 357 | await integration[1].continuousSync.initDevice() 358 | await 
integration[1].continuousSync.enableContinuousSync() 359 | 360 | await clients[0].storageManager 361 | .collection('test') 362 | .createObject(TEST_DATA.test1) 363 | await clients[0].storageManager 364 | .collection('test') 365 | .createObject(TEST_DATA.test2) 366 | await clients[0].storageManager 367 | .collection('test') 368 | .createObject(TEST_DATA.test3) 369 | 370 | await integration[0].continuousSync.forceIncrementalSync() 371 | expect(events).toEqual([ 372 | { 373 | hasChanges: false, 374 | error: new Error( 375 | 'Sync batch size exceeds limit during upload, but cannot make it smaller', 376 | ), 377 | }, 378 | ]) 379 | }) 380 | }) 381 | -------------------------------------------------------------------------------- /ts/integration/index.tests.ts: -------------------------------------------------------------------------------- 1 | import { MemorySignalTransportManager } from 'simple-signalling/lib/memory' 2 | import StorageManager from '@worldbrain/storex' 3 | import { DexieStorageBackend } from '@worldbrain/storex-backend-dexie' 4 | import inMemory from '@worldbrain/storex-backend-dexie/lib/in-memory' 5 | import { SharedSyncLogStorage } from '../shared-sync-log/storex' 6 | import { registerModuleMapCollections } from '@worldbrain/storex-pattern-modules' 7 | 8 | export function lazyMemorySignalTransportFactory() { 9 | let manager: MemorySignalTransportManager 10 | return () => { 11 | if (!manager) { 12 | manager = new MemorySignalTransportManager() 13 | } 14 | 15 | return manager.createTransport() 16 | } 17 | } 18 | 19 | export async function createMemorySharedSyncLog() { 20 | const sharedStorageManager = new StorageManager({ 21 | backend: new DexieStorageBackend({ 22 | dbName: 'shared', 23 | idbImplementation: inMemory(), 24 | }), 25 | }) 26 | const sharedSyncLog = new SharedSyncLogStorage({ 27 | storageManager: sharedStorageManager, 28 | autoPkType: 'int', 29 | }) 30 | registerModuleMapCollections(sharedStorageManager.registry, { 31 | sharedSyncLog, 32 | 
}) 33 | await sharedStorageManager.finishInitialization() 34 | return sharedSyncLog 35 | } 36 | -------------------------------------------------------------------------------- /ts/integration/initial-sync.ts: -------------------------------------------------------------------------------- 1 | import pick from 'lodash/pick' 2 | import Peer from 'simple-peer' 3 | import { SignalTransport, SignalChannel } from 'simple-signalling/lib/types' 4 | import { 5 | signalSimplePeer, 6 | SimplePeerSignallingEvents, 7 | } from 'simple-signalling/lib/simple-peer' 8 | import { 9 | FastSyncEvents, 10 | FastSyncPreSendProcessor, 11 | FastSync, 12 | } from '../fast-sync' 13 | import { WebRTCFastSyncChannel } from '../fast-sync/channels' 14 | import TypedEmitter from 'typed-emitter' 15 | import StorageManager from '@worldbrain/storex' 16 | import { 17 | FastSyncChannel, 18 | FastSyncRole, 19 | FastSyncOrder, 20 | FastSyncInfo, 21 | } from '../fast-sync/types' 22 | import { resolvablePromise, getFastSyncInfo } from '../fast-sync/utils' 23 | import { EventEmitter } from 'events' 24 | import { ExecuteReconciliationOperation } from '..' 

/**
 * Everything needed to track one in-flight initial (fast) sync session:
 * the signalling channel used for WebRTC setup, the data channel, the
 * FastSync engine itself and a promise that settles when the sync ends.
 */
export type InitialSyncInfo = {
    signalChannel: SignalChannel
    // NOTE(review): generic arguments below were lost in extraction and
    // restored from usage — confirm against upstream source.
    events: TypedEmitter<InitialSyncEvents>
    finishPromise: Promise<void>
    role: FastSyncRole
    fastSyncChannel: FastSyncChannel
    fastSync: FastSync
}

// Union of low-level fast-sync and signalling events plus the lifecycle
// events this class emits itself (all with empty payloads).
export type InitialSyncEvents = FastSyncEvents &
    SimplePeerSignallingEvents & {
        connecting: {}
        releasingSignalChannel: {}
        connected: {}
        preSyncSuccess: {}
        finished: {}
        setupContinuingWithoutICE: {}
    }

export interface InitialSyncDependencies {
    // Used by the receiving side to write incoming objects into storage.
    executeReconciliationOperation: ExecuteReconciliationOperation
    storageManager: StorageManager
    signalTransportFactory: SignalTransportFactory
    // Only these collections take part in the initial sync.
    syncedCollections: string[]
    // NOTE(review): return-type generic restored as string[] — confirm;
    // the result is handed to simple-peer's config.iceServers.
    getIceServers?: () => Promise<string[]>
    batchSize?: number
    debug?: boolean
}

export type SignalTransportFactory = () => SignalTransport

/**
 * Orchestrates a one-off full sync between two devices over WebRTC:
 * one side calls `requestInitialSync()` and transmits the resulting
 * initial message out-of-band; the other side feeds it to
 * `answerInitialSync()`. Signalling happens over a SignalTransport,
 * after which data flows through a WebRTC data channel.
 */
export class InitialSync {
    events = new EventEmitter() as TypedEmitter<InitialSyncEvents>

    public debug: boolean
    public wrtc?: any // Possibility for tests to inject wrtc library

    // Present while a sync session is active; cleared by cleanup.
    private fastSyncInfo?: InitialSyncInfo

    constructor(protected dependencies: InitialSyncDependencies) {
        this.debug = !!dependencies.debug
        // Wrap emit() so every event is also debug-logged before dispatch.
        const origEmit = this.events.emit.bind(this.events) as any
        this.events.emit = ((eventName: string, event: any) => {
            this._debugLog(`Event '${eventName}':`, event)
            return origEmit(eventName, event)
        }) as any
    }

    /**
     * Start a sync as the 'sender' role. Returns the initial signalling
     * message that must be delivered to the other device out-of-band.
     */
    async requestInitialSync(options?: {
        preserveChannel?: boolean
    }): Promise<{ initialMessage: string }> {
        const role = 'sender'
        const {
            signalTransport,
            initialMessage,
        } = await this._createSignalTransport(role)
        this.fastSyncInfo = await this._setupInitialSync({
            role,
            signalTransport,
            initialMessage,
            deviceId: 'first',
            ...(options || {}),
        })

        return { initialMessage }
    }

    /**
     * Join a sync started by another device, as the 'receiver' role,
     * using the initial message produced by that device.
     */
    async answerInitialSync(options: {
        initialMessage: string
        preserveChannel?: boolean
    }): Promise<void> {
        const role = 'receiver'
        const { signalTransport } = await this._createSignalTransport(role)
        this.fastSyncInfo = await this._setupInitialSync({
            role,
            signalTransport,
            deviceId: 'second',
            ...options,
        })
    }

    /** Resolves once the WebRTC connection for the current session is up. */
    async waitForInitialSyncConnected() {
        if (!this.fastSyncInfo) {
            throw new Error(
                'Cannot wait for initial sync connection if it has not been started, or already finished',
            )
        }

        const connected = resolvablePromise()
        const handler = () => {
            connected.resolve()
        }
        this.fastSyncInfo.events.on('connected', handler)
        await connected.promise
        this.fastSyncInfo.events.removeListener('connected', handler)
    }

    /** Resolves when the whole sync finishes; no-op if none is running. */
    async waitForInitialSync(): Promise<void> {
        if (this.fastSyncInfo) {
            await this.fastSyncInfo.finishPromise
        }
    }

    /** Hard-stop the running sync without notifying the peer, then clean up. */
    async abortInitialSync(): Promise<void> {
        if (!this.fastSyncInfo) {
            return
        }

        await this.fastSyncInfo.fastSync.abort()
        await this.cleanupInitialSync()
    }

    /**
     * Tear down the current session. Events are muted first so late
     * emissions don't leak, and channel destruction is capped at 1s.
     */
    async cleanupInitialSync() {
        if (!this.fastSyncInfo) {
            return
        }

        const info = this.fastSyncInfo
        delete this.fastSyncInfo
        info.events.emit = () => false
        await Promise.race([
            new Promise(resolve => setTimeout(resolve, 1000)),
            info.fastSyncChannel.destroy(),
        ])
    }

    /** Cooperatively cancel the running sync (peer is notified). */
    async cancelInitialSync() {
        if (!this.fastSyncInfo) {
            return
        }

        await this.fastSyncInfo.fastSync.cancel()
    }

    // Overloads: only the 'sender' role allocates a channel and thus
    // produces an initialMessage.
    _createSignalTransport(
        role: 'sender',
    ): Promise<{ signalTransport: SignalTransport; initialMessage: string }>
    _createSignalTransport(
        role: 'receiver',
    ): Promise<{ signalTransport: SignalTransport }>
    async _createSignalTransport(
        role: FastSyncRole,
    ): Promise<{
        signalTransport: SignalTransport
        initialMessage: string | undefined
    }> {
        const signalTransport: SignalTransport = this.dependencies.signalTransportFactory()
        return {
            signalTransport,
            initialMessage:
                role === 'sender'
                    ? (await signalTransport.allocateChannel()).initialMessage
                    : undefined,
        }
    }

    /**
     * Core session setup: opens the signalling channel, builds the WebRTC
     * fast-sync channel, wires FastSync events through this.events, and
     * kicks off the async run (connect → preSync → negotiate order →
     * execute → cleanup) captured in finishPromise.
     */
    async _setupInitialSync(options: {
        role: FastSyncRole
        signalTransport: SignalTransport
        initialMessage: string
        deviceId: 'first' | 'second'
        preserveChannel?: boolean
    }): Promise<InitialSyncInfo> {
        // Only one session at a time; drop any previous one.
        await this.cleanupInitialSync()

        const signalChannel = await options.signalTransport.openChannel(
            pick(options, 'initialMessage', 'deviceId'),
        )

        const fastSyncChannel = await this.createFastSyncChannel({
            role: options.role,
            signalChannel,
        })
        const fastSync = new FastSync({
            storageManager: this.dependencies.storageManager,
            channel: fastSyncChannel.channel,
            collections: this.dependencies.syncedCollections,
            preSendProcessor: this.getPreSendProcessor() || undefined,
            batchSize: this.dependencies.batchSize,
            executeReconciliationOperation: this.dependencies
                .executeReconciliationOperation,
        })
        // Re-route all FastSync events through this instance's emitter.
        fastSync.events.emit = ((eventName: any, event: any) => {
            return this.events.emit(eventName, event)
        }) as any

        const buildInfo = (): InitialSyncInfo => {
            return {
                role: options.role,
                signalChannel,
                finishPromise,
                events: fastSync.events,
                fastSync,
                fastSyncChannel: fastSyncChannel.channel,
            }
        }

        const finishPromise: Promise<void> = (async () => {
            this.events.emit('connecting', {})
            await fastSyncChannel.setup()
            this.events.emit('connected', {})

            await this.preSync(buildInfo())
            this.events.emit('preSyncSuccess', {})
            const fastSyncInfo = await getFastSyncInfo(
                this.dependencies.storageManager,
                { collections: this.dependencies.syncedCollections },
            )
            const syncOrder = await this.negotiateSyncOrder({
                role: options.role,
                channel: fastSyncChannel.channel,
                fastSyncInfo,
            })
            try {
                await fastSync.execute({
                    role: options.role,
                    bothWays: syncOrder,
                    fastSyncInfo,
                })
            } catch (e) {
                // A destroyed channel means the peer (or we) tore down the
                // session mid-sync; treat that as a normal end.
                if (e.name !== 'ChannelDestroyedError') {
                    throw e
                }
            }
            this.events.emit('finished', {})

            if (!options.preserveChannel) {
                await this.cleanupInitialSync()
            }
        })()

        return buildInfo()
    }

    /**
     * Both sides exchange their object counts; the side with less data
     * sends first so the bulk of the transfer flows in one direction
     * last. The sender breaks ties ('>=' vs '>') so both peers agree.
     */
    async negotiateSyncOrder(params: {
        role: FastSyncRole
        channel: FastSyncChannel
        fastSyncInfo: FastSyncInfo
    }): Promise<FastSyncOrder> {
        const { channel } = params

        const localStorageSize = params.fastSyncInfo.objectCount
        if (params.role === 'sender') {
            await channel.sendUserPackage({
                type: 'storage-size',
                size: localStorageSize,
            })
            const remoteStorageSize = (
                await channel.receiveUserPackage({
                    expectedType: 'storage-size',
                })
            ).size
            return localStorageSize >= remoteStorageSize
                ? 'receive-first'
                : 'send-first'
        } else {
            const remoteStorageSize = (
                await channel.receiveUserPackage({
                    expectedType: 'storage-size',
                })
            ).size
            await channel.sendUserPackage({
                type: 'storage-size',
                size: localStorageSize,
            })
            return localStorageSize > remoteStorageSize
                ? 'receive-first'
                : 'send-first'
        }
    }

    // Extension points for subclasses; default to no-ops.
    getPreSendProcessor(): FastSyncPreSendProcessor | void { }
    async preSync(options: InitialSyncInfo) { }

    /**
     * Create the simple-peer instance, best-effort fetching ICE servers
     * first; failures are logged and signalled, never fatal.
     * NOTE(review): return generic restored as Peer.Instance — confirm.
     */
    async getPeer(options: { initiator: boolean }): Promise<Peer.Instance> {
        let iceServers
        try {
            iceServers = await this.dependencies.getIceServers?.()
        } catch (e) {
            console.warn('An error occurred while trying to get ICE servers, ignoring...')
            console.warn(e)
            this.events.emit('setupContinuingWithoutICE', {e})
        }
        return new Peer({
            initiator: options.initiator,
            wrtc: this.wrtc,
            ...(iceServers
                ? {
                      config: {
                          iceServers,
                      },
                  }
                : {}),
        })
    }

    /**
     * Wrap a peer in a WebRTCFastSyncChannel. setup() performs the
     * signalling handshake and then releases the signal channel.
     */
    async createFastSyncChannel(options: {
        role: FastSyncRole
        signalChannel: SignalChannel
    }) {
        const peer = await this.getPeer({
            initiator: options.role === 'receiver',
        })
        const channel: FastSyncChannel = new WebRTCFastSyncChannel({
            peer,
        })
        return {
            channel,
            setup: async () => {
                await options.signalChannel.connect()
                await signalSimplePeer({
                    signalChannel: options.signalChannel,
                    simplePeer: peer,
                    reporter: (eventName, event) =>
                        (this.events as any).emit(eventName, event),
                })
                this.events.emit('releasingSignalChannel', {})
                await options.signalChannel.release()
            },
        }
    }

    // Console logging gated on the debug flag.
    _debugLog(...args: any[]) {
        if (this.debug) {
            console['log']('Initial Sync -', ...args)
        }
    }
}
-------------------------------------------------------------------------------- /ts/integration/settings.ts: --------------------------------------------------------------------------------
// Minimal key-value store the sync integration uses for its own settings.
// NOTE(review): Promise generics restored from usage — confirm upstream.
export interface SyncSettingsStore {
    retrieveSetting(
        key: SyncSetting
    ): Promise<SyncSettingValue>
    storeSetting(
        key: SyncSetting,
        value: SyncSettingValue,
    ): Promise<void>
}
export type SyncSetting
= 11 | | 'continuousSyncEnabled' 12 | | 'deviceId' 13 | | 'lastSyncTimestamp' 14 | export type SyncSettingValue = boolean | number | string | null 15 | -------------------------------------------------------------------------------- /ts/logging-middleware/change-processing.ts: -------------------------------------------------------------------------------- 1 | import { StorageRegistry } from '@worldbrain/storex' 2 | import { getObjectWithoutPk } from '@worldbrain/storex/lib/utils' 3 | import { StorageOperationChangeInfo } from '@worldbrain/storex-middleware-change-watcher/lib/types' 4 | import { 5 | ClientSyncLogEntry, 6 | ClientSyncLogEntryMetadata, 7 | } from '../client-sync-log/types' 8 | 9 | export async function convertChangeInfoToClientSyncLogEntries( 10 | info: StorageOperationChangeInfo<'pre'>, 11 | options: { 12 | createMetadata: () => Promise 13 | storageRegistry: StorageRegistry 14 | }, 15 | ) { 16 | const entries: ClientSyncLogEntry[] = [] 17 | const addEntry = (entry: ClientSyncLogEntry) => { 18 | entries.push(entry) 19 | } 20 | 21 | for (const change of info.changes) { 22 | if (change.type === 'create') { 23 | addEntry({ 24 | operation: 'create', 25 | ...(await options.createMetadata()), 26 | collection: change.collection, 27 | pk: change.pk, 28 | value: getObjectWithoutPk( 29 | change.values, 30 | change.collection, 31 | options.storageRegistry, 32 | ), 33 | }) 34 | } else if (change.type === 'modify') { 35 | for (const pk of change.pks) { 36 | addEntry({ 37 | operation: 'modify', 38 | ...(await options.createMetadata()), 39 | collection: change.collection, 40 | pk: pk as number | string, 41 | value: change.updates, 42 | }) 43 | } 44 | } else if (change.type === 'delete') { 45 | for (const pk of change.pks) { 46 | addEntry({ 47 | operation: 'delete', 48 | ...(await options.createMetadata()), 49 | collection: change.collection, 50 | pk: pk as number | string, 51 | }) 52 | } 53 | } 54 | } 55 | return entries 56 | } 57 | 
-------------------------------------------------------------------------------- /ts/logging-middleware/index.ts: --------------------------------------------------------------------------------
import StorageManager from '@worldbrain/storex'
import {
    StorageMiddleware,
    StorageMiddlewareContext,
} from '@worldbrain/storex/lib/types/middleware'
import { ChangeWatchMiddleware } from '@worldbrain/storex-middleware-change-watcher'
import { StorageOperationChangeInfo } from '@worldbrain/storex-middleware-change-watcher/lib/types'
import { ClientSyncLogStorage } from '../client-sync-log'
import { ClientSyncLogEntry } from '../client-sync-log/types'
import {
    OperationProcessorMap,
    DEFAULT_OPERATION_PROCESSORS,
} from './operation-processing'
import { convertChangeInfoToClientSyncLogEntries } from './change-processing'

/**
 * Hook that can inspect and optionally rewrite the change info derived
 * from an operation before it is turned into sync log entries. Returning
 * nothing keeps the original change info.
 */
export type SyncChangeInfoPreprocessor = (
    changeInfo: StorageOperationChangeInfo<'pre'>,
) => Promise<StorageOperationChangeInfo<'pre'> | void>

/**
 * Storex middleware that records every watched write operation as
 * clientSyncLogEntry objects, atomically, in the same batch as the
 * operation itself. Disabled by default; requires a device ID once
 * enabled so entries can be attributed to this device.
 */
export class SyncLoggingMiddleware implements StorageMiddleware {
    public changeInfoPreprocessor: SyncChangeInfoPreprocessor | null = null

    private operationProcessors: OperationProcessorMap = DEFAULT_OPERATION_PROCESSORS
    private includeCollections: Set<string>
    private enabled = false
    // null means "no device ID set"; note 0 and '' are treated as valid IDs.
    private deviceId: string | number | null = null
    // Last timestamp handed out by _getNow(), to guarantee uniqueness.
    private lastSeenNow = 0

    constructor(
        private options: {
            clientSyncLog: ClientSyncLogStorage
            storageManager: StorageManager
            includeCollections: string[]
            mergeModifications?: boolean
        },
    ) {
        this.includeCollections = new Set(options.includeCollections)
    }

    toggle(enabled: false): void
    toggle(enabled: true, deviceId: number | string): void
    toggle(enabled: boolean, deviceId?: number | string) {
        this.enabled = enabled
        // Fix: use ?? instead of || so falsy-but-valid device IDs (e.g. 0)
        // are not silently discarded, which previously made logging throw.
        this.deviceId = deviceId ?? null
    }

    enable(deviceId: string | number) {
        this.enabled = true
        this.deviceId = deviceId
    }

    disable() {
        this.enabled = false
    }

    /**
     * Middleware entry point. If no change info was computed upstream,
     * wrap the operation in a ChangeWatchMiddleware pass to obtain it,
     * then delegate to processWithChangeInfo().
     */
    async process(context: StorageMiddlewareContext) {
        if (typeof context.extraData.changeInfo === 'undefined') {
            const changeWatcher = new ChangeWatchMiddleware({
                storageManager: this.options.storageManager,
                shouldWatchCollection: collection =>
                    this.includeCollections.has(collection),
            })
            return changeWatcher.process({
                operation: context.operation,
                extraData: context.extraData,
                next: {
                    process: async incoming => {
                        const extraData = {
                            ...context.extraData,
                            ...incoming.extraData,
                        }
                        return this.processWithChangeInfo({
                            operation: incoming.operation,
                            next: context.next,
                            extraData,
                        })
                    },
                },
            })
        } else {
            return this.processWithChangeInfo(context)
        }
    }

    /**
     * Turn the operation's change info into sync log entries and execute
     * both the operation and the entry inserts as one batch. Operations
     * that are not loggable (no changes, unknown type, disabled) pass
     * straight through to the next middleware.
     */
    async processWithChangeInfo({
        next,
        operation,
        extraData,
    }: StorageMiddlewareContext) {
        if (!this.enabled) {
            return next.process({ operation })
        }
        // Explicit null check (not truthiness) so 0/'' remain valid IDs.
        if (this.deviceId === null) {
            throw new Error(
                `Cannot log sync operations without setting a device ID first`,
            )
        }
        let changeInfo: StorageOperationChangeInfo<'pre'> = extraData.changeInfo
        if (typeof changeInfo === 'undefined') {
            throw new Error(
                `Sync logging middleware didn't receive any change info`,
            )
        }
        if (!changeInfo.changes.length) {
            return next.process({ operation })
        }

        const operationType = operation[0] as string
        const operationProcessor = this.operationProcessors[operationType]
        if (!operationProcessor) {
            return next.process({ operation })
        }

        if (this.changeInfoPreprocessor) {
            const modifiedChangeInfo = await this.changeInfoPreprocessor(
                changeInfo,
            )
            if (modifiedChangeInfo) {
                changeInfo = modifiedChangeInfo
            }
            // The preprocessor may have filtered out everything.
            if (!changeInfo.changes.length) {
                return next.process({ operation })
            }
        }

        const logEntries = await convertChangeInfoToClientSyncLogEntries(
            changeInfo,
            {
                createMetadata: async () => ({
                    createdOn: await this._getNow(),
                    sharedOn: 0,
                    deviceId: this.deviceId!,
                    needsIntegration: 0,
                }),
                storageRegistry: this.options.storageManager.registry,
            },
        )

        // Append one createObject per log entry to the original operation's
        // batch so data and log are committed atomically.
        const executeAndLog = async (
            originalOperation: any | any[],
            logEntries: ClientSyncLogEntry[],
        ) => {
            const batch =
                originalOperation instanceof Array
                    ? originalOperation
                    : [originalOperation]

            let operationIndex = -1
            for (const logEntry of logEntries) {
                operationIndex += 1

                batch.push({
                    placeholder: `logEntry-${operationIndex}`,
                    operation: 'createObject',
                    collection: 'clientSyncLogEntry',
                    args: logEntry,
                })
            }

            const result = await next.process({
                operation: ['executeBatch', batch],
            })
            return result
        }

        return operationProcessor({
            operation,
            changeInfo,
            logEntries,
            // loggedOperation,
            executeAndLog,
            mergeModifications: this.options.mergeModifications,
        })
    }

    /**
     * Monotonic wall-clock read: spins until Date.now() advances past the
     * last value handed out, so entry timestamps are strictly increasing.
     */
    async _getNow(): Promise<number> {
        let now = Date.now()
        while (now === this.lastSeenNow) {
            now = Date.now()
        }
        this.lastSeenNow = now
        return now
    }
}
-------------------------------------------------------------------------------- /ts/logging-middleware/operation-processing.ts: --------------------------------------------------------------------------------
import { ClientSyncLogEntry } from '../client-sync-log/types'
import { BatchOperation } from '@worldbrain/storex'
import { StorageOperationChangeInfo } from '@worldbrain/storex-middleware-change-watcher/lib/types'

// Executes a batch of storage operations together with the log entries
// describing them (implemented by SyncLoggingMiddleware.executeAndLog).
export type ExecuteAndLog = (
    batchOperations: BatchOperation[],
| logEntries: ClientSyncLogEntry[], 8 | ) => Promise 9 | 10 | export interface OperationProcessorArgs { 11 | operation: any[] 12 | changeInfo: StorageOperationChangeInfo<'pre'> 13 | logEntries: ClientSyncLogEntry[] 14 | executeAndLog: ExecuteAndLog 15 | mergeModifications?: boolean 16 | } 17 | export type OperationProcessor = (args: OperationProcessorArgs) => Promise 18 | export type OperationProcessorMap = { [operation: string]: OperationProcessor } 19 | export const DEFAULT_OPERATION_PROCESSORS: OperationProcessorMap = { 20 | createObject: _processCreateObject, 21 | executeBatch: _processExecuteBatch, 22 | } 23 | 24 | /** 25 | * Creates 26 | */ 27 | async function _processCreateObject(args: OperationProcessorArgs) { 28 | const change = args.changeInfo.changes[0] 29 | if (change.type !== 'create') { 30 | throw new Error( 31 | `Tried to log createObject operation, but didn't get the right change info`, 32 | ) 33 | } 34 | 35 | const result = await args.executeAndLog( 36 | [ 37 | { 38 | placeholder: 'object', 39 | operation: 'createObject', 40 | collection: change.collection, 41 | args: args.operation[2], 42 | }, 43 | ], 44 | args.logEntries, 45 | ) 46 | const object = result.info.object.object 47 | return { object } 48 | } 49 | 50 | /** 51 | * Batch 52 | */ 53 | async function _processExecuteBatch(args: OperationProcessorArgs) { 54 | return args.executeAndLog(args.operation[1], args.logEntries) 55 | } 56 | -------------------------------------------------------------------------------- /ts/reconciliation/default.ts: -------------------------------------------------------------------------------- 1 | import sortBy from 'lodash/sortBy' 2 | import { StorageRegistry, OperationBatch } from '@worldbrain/storex' 3 | import { 4 | ClientSyncLogEntry, 5 | ClientSyncLogCreationEntry, 6 | ClientSyncLogDeletionEntry, 7 | ClientSyncLogModificationEntry, 8 | } from '../client-sync-log/types' 9 | import { setObjectPk } from '../utils' 10 | import { ReconcilerFunction, 
DoubleCreateBehaviour } from './types'

// Per-collection, per-pk accumulated view of what the log says should
// happen to each object. PKs are keyed by their JSON serialization so
// compound keys work as map keys.
type Modifications = { [collection: string]: CollectionModifications }
type CollectionModifications = { [pk: string]: ObjectModifications }
interface ObjectModifications {
    // What we believe is currently stored: 'deleted' means it existed and
    // a delete was already applied locally.
    actualState: 'present' | 'absent' | 'deleted'
    // The operation reconciliation decides to emit for this object.
    action: 'ignore' | 'create' | 'update' | 'delete' | 'recreate'
    createdOn?: number | '$now'
    fields: { [field: string]: FieldModification }
}
type FieldModification = {
    createdOn: number | '$now'
    // syncedOn: number | null
    value: any
}

/**
 * Fold a set of client sync log entries (oldest first, by createdOn) into
 * a minimal batch of storage operations. Each entry mutates the
 * per-object modification state via one of the _process* helpers, and
 * the accumulated states are then translated into an OperationBatch by
 * _processModifications (defined below, outside this excerpt).
 */
export const reconcileSyncLog: ReconcilerFunction = (
    logEntries: ClientSyncLogEntry[],
    options: {
        storageRegistry: StorageRegistry
        doubleCreateBehaviour?: DoubleCreateBehaviour
        debug?: boolean
    },
): OperationBatch => {
    const modificationsByObject: Modifications = {}
    for (const logEntry of sortBy(logEntries, 'createdOn')) {
        // Get-or-create the per-collection map in one expression.
        const collectionModifications = (modificationsByObject[
            logEntry.collection
        ] = modificationsByObject[logEntry.collection] || {})
        const pkAsJson = JSON.stringify(logEntry.pk)
        const objectModifications = collectionModifications[pkAsJson]

        // 'new' = not yet integrated locally, 'old' = already applied.
        const readableLogEntryState = logEntry.needsIntegration ? 'new' : 'old'
        if (options.debug) {
            console.log(
                `before %s (%s): %o`,
                logEntry.operation,
                readableLogEntryState,
                collectionModifications[pkAsJson],
            )
        }
        if (logEntry.operation === 'modify') {
            _processModificationEntry({
                objectModifications,
                logEntry,
                collectionModifications,
                pkAsJson,
            })
        } else if (logEntry.operation === 'delete') {
            _processDeletionEntry({
                objectModifications,
                logEntry,
                collectionModifications,
                pkAsJson,
            })
        } else if (logEntry.operation === 'create') {
            _processCreationEntry({
                objectModifications,
                logEntry,
                collectionModifications,
                pkAsJson,
                doubleCreateBehaviour: options.doubleCreateBehaviour,
            })
        }
        if (options.debug) {
            console.log(
                `after %s (%s): %o`,
                logEntry.operation,
                readableLogEntryState,
                collectionModifications[pkAsJson],
            )
        }
    }

    // Second pass: emit one (or more) operations per touched object.
    const operations: OperationBatch = []
    for (const [collection, collectionModifications] of Object.entries(
        modificationsByObject,
    )) {
        for (const [pkAsJson, objectModifications] of Object.entries(
            collectionModifications,
        )) {
            const pk = JSON.parse(pkAsJson)
            operations.push(
                ...(_processModifications({
                    objectModifications,
                    collection,
                    pk,
                    storageRegistry: options.storageRegistry,
                }) || []),
            )
        }
    }
    return operations
}

/**
 * Apply a 'create' log entry to the accumulated object state. First
 * sight of an object records its fields; a create on an already-seen
 * object either merges (doubleCreateBehaviour === 'merge') or throws.
 */
export function _processCreationEntry({
    objectModifications,
    logEntry,
    collectionModifications,
    pkAsJson,
    doubleCreateBehaviour,
}: {
    objectModifications: ObjectModifications
    logEntry: ClientSyncLogCreationEntry
    collectionModifications: CollectionModifications
    pkAsJson: any
    doubleCreateBehaviour?: DoubleCreateBehaviour
}) {
    if (!objectModifications) {
        const fields = {}
        for (const [key, value] of
Object.entries(logEntry.value)) { 121 | fields[key] = { 122 | value, 123 | createdOn: logEntry.createdOn, 124 | syncedOn: logEntry.sharedOn, 125 | } 126 | } 127 | if (logEntry.needsIntegration) { 128 | collectionModifications[pkAsJson] = { 129 | actualState: 'absent', 130 | action: 'create', 131 | createdOn: logEntry.createdOn, 132 | fields, 133 | } 134 | } else { 135 | collectionModifications[pkAsJson] = { 136 | actualState: 'present', 137 | action: 'ignore', 138 | createdOn: logEntry.createdOn, 139 | fields, 140 | } 141 | } 142 | } else { 143 | if (objectModifications.action === 'create') { 144 | if (doubleCreateBehaviour !== 'merge') { 145 | throw new Error( 146 | `Detected double create in collection '${ 147 | logEntry.collection 148 | }', pk '${JSON.stringify(logEntry.pk)}'`, 149 | ) 150 | } 151 | } 152 | 153 | const fields = objectModifications.fields 154 | for (const [key, value] of Object.entries(logEntry.value)) { 155 | fields[key] = { 156 | value, 157 | createdOn: logEntry.createdOn, 158 | // syncedOn: logEntry.sharedOn, 159 | } 160 | } 161 | 162 | // console.log(objectModifications) 163 | if ( 164 | objectModifications.action === 'delete' || 165 | objectModifications.actualState === 'deleted' 166 | ) { 167 | objectModifications.action = 'recreate' 168 | } else if (objectModifications.actualState === 'present') { 169 | if (logEntry.needsIntegration) { 170 | if (objectModifications.actualState === 'present') { 171 | objectModifications.action = 'update' 172 | } else { 173 | objectModifications.action = 'create' 174 | } 175 | } 176 | } else { 177 | objectModifications.actualState = 'present' 178 | objectModifications.action = logEntry.needsIntegration 179 | ? 
'create' 180 | : 'ignore' 181 | } 182 | objectModifications.createdOn = logEntry.createdOn 183 | } 184 | } 185 | 186 | export function _processDeletionEntry({ 187 | objectModifications, 188 | logEntry, 189 | collectionModifications, 190 | pkAsJson, 191 | }: { 192 | objectModifications: ObjectModifications 193 | logEntry: ClientSyncLogDeletionEntry 194 | collectionModifications: CollectionModifications 195 | pkAsJson: any 196 | }) { 197 | // const wouldBeCreated = objectModifications 198 | // ? objectModifications.actualState === 'absent' && 199 | // objectModifications.desiredState === 'present' 200 | // : false 201 | 202 | if (objectModifications) { 203 | if (!logEntry.needsIntegration) { 204 | collectionModifications[pkAsJson] = { 205 | actualState: 'deleted', 206 | action: 'ignore', 207 | fields: {}, 208 | } 209 | } else if (objectModifications.action === 'create') { 210 | collectionModifications[pkAsJson] = { 211 | actualState: 'absent', 212 | action: 'ignore', 213 | fields: {}, 214 | } 215 | } else if (objectModifications.action === 'ignore') { 216 | collectionModifications[pkAsJson] = { 217 | actualState: 'present', 218 | action: 'delete', 219 | fields: {}, 220 | } 221 | } else { 222 | collectionModifications[pkAsJson] = { 223 | actualState: 'present', 224 | action: 'delete', 225 | fields: {}, 226 | } 227 | } 228 | } else { 229 | collectionModifications[pkAsJson] = { 230 | actualState: logEntry.needsIntegration ? 'present' : 'absent', 231 | action: logEntry.needsIntegration ? 
'delete' : 'ignore', 232 | fields: {}, 233 | } 234 | } 235 | } 236 | 237 | export function _processModificationEntry({ 238 | objectModifications, 239 | logEntry, 240 | collectionModifications, 241 | pkAsJson, 242 | }: { 243 | objectModifications: ObjectModifications 244 | logEntry: ClientSyncLogModificationEntry 245 | collectionModifications: CollectionModifications 246 | pkAsJson: any 247 | }) { 248 | const updateField = ( 249 | objectModifications: ObjectModifications, 250 | fieldName: string, 251 | value: any, 252 | createdOn: number, 253 | ) => { 254 | if (objectModifications.fields[fieldName]) { 255 | if ( 256 | logEntry.createdOn > 257 | objectModifications.fields[fieldName].createdOn 258 | ) { 259 | objectModifications.fields[fieldName].value = value 260 | } 261 | } else { 262 | objectModifications.fields[fieldName] = { 263 | createdOn, 264 | value, 265 | } 266 | } 267 | } 268 | const updateFields = (objectModifications: ObjectModifications) => { 269 | if ('field' in logEntry && logEntry.field) { 270 | // old format, single field per entry 271 | updateField( 272 | objectModifications, 273 | logEntry.field, 274 | logEntry.value, 275 | logEntry.createdOn as number, 276 | ) 277 | } else { 278 | for (const [fieldName, value] of Object.entries(logEntry.value)) { 279 | updateField( 280 | objectModifications, 281 | fieldName, 282 | value, 283 | logEntry.createdOn as number, 284 | ) 285 | } 286 | } 287 | } 288 | 289 | if (!objectModifications) { 290 | collectionModifications[pkAsJson] = { 291 | actualState: 'present', 292 | action: 'update', 293 | fields: {}, 294 | } 295 | updateFields(collectionModifications[pkAsJson]) 296 | return 297 | } 298 | 299 | updateFields(objectModifications) 300 | 301 | if ( 302 | objectModifications.actualState === 'present' && 303 | objectModifications.action === 'ignore' 304 | ) { 305 | objectModifications.action = 'update' 306 | } 307 | } 308 | 309 | export function _processModifications({ 310 | objectModifications, 311 | collection, 
312 | pk, 313 | storageRegistry, 314 | }: { 315 | objectModifications: ObjectModifications 316 | collection: string 317 | pk: any 318 | storageRegistry: StorageRegistry 319 | }): OperationBatch { 320 | const pkFields = setObjectPk({}, pk, collection, storageRegistry) 321 | 322 | const operations: OperationBatch = [] 323 | if ( 324 | objectModifications.action === 'delete' || 325 | objectModifications.action === 'recreate' 326 | ) { 327 | if (objectModifications.actualState !== 'deleted') { 328 | operations.push({ 329 | operation: 'deleteObjects', 330 | collection, 331 | where: pkFields, 332 | }) 333 | } 334 | } 335 | 336 | if ( 337 | objectModifications.action === 'create' || 338 | objectModifications.action === 'recreate' 339 | ) { 340 | const object = {} 341 | for (const [key, fieldModification] of Object.entries( 342 | objectModifications.fields, 343 | )) { 344 | object[key] = fieldModification.value 345 | } 346 | operations.push({ 347 | operation: 'createObject', 348 | collection, 349 | args: { ...pkFields, ...object }, 350 | }) 351 | } else if (objectModifications.action === 'update') { 352 | for (const [fieldName, fieldModification] of Object.entries( 353 | objectModifications.fields, 354 | )) { 355 | if (Object.keys(pkFields).includes(fieldName)) { 356 | continue 357 | } 358 | 359 | operations.push({ 360 | operation: 'updateObjects', 361 | collection, 362 | where: pkFields, 363 | updates: { [fieldName]: fieldModification.value }, 364 | }) 365 | } 366 | return operations 367 | } 368 | 369 | return operations 370 | } 371 | -------------------------------------------------------------------------------- /ts/reconciliation/index.ts: -------------------------------------------------------------------------------- 1 | export * from './types' 2 | export { reconcileSyncLog } from './default' 3 | -------------------------------------------------------------------------------- /ts/reconciliation/types.ts: 
-------------------------------------------------------------------------------- 1 | import { StorageRegistry, OperationBatch } from '@worldbrain/storex' 2 | import { ClientSyncLogEntry } from '../client-sync-log/types' 3 | 4 | export type ReconcilerFunction = ( 5 | logEntries: ClientSyncLogEntry[], 6 | options: { 7 | storageRegistry: StorageRegistry 8 | doubleCreateBehaviour?: DoubleCreateBehaviour 9 | debug?: boolean 10 | }, 11 | ) => Promise | OperationBatch 12 | export type DoubleCreateBehaviour = 'error' | 'merge' 13 | // export interface ExecutableOperation { 14 | // operation: string 15 | // collection: string 16 | // args: any 17 | // } 18 | -------------------------------------------------------------------------------- /ts/shared-sync-log/index.tests.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import { SharedSyncLog, SharedSyncLogEntry } from './types' 3 | import { Omit } from '../types' 4 | 5 | export async function runTests(options: { 6 | createLog: () => Promise 7 | cleanUp?: () => Promise 8 | }) { 9 | it('should work', async () => { 10 | const sharedSyncLog = await options.createLog() 11 | const userId = 1 12 | const firstDeviceId = await sharedSyncLog.createDeviceId({ 13 | userId, 14 | sharedUntil: 2, 15 | }) 16 | const secondDeviceId = await sharedSyncLog.createDeviceId({ 17 | userId, 18 | sharedUntil: 2, 19 | }) 20 | 21 | const entries: Omit[] = [ 22 | { userId, deviceId: firstDeviceId, createdOn: 2, data: 'joe-1' }, 23 | { userId, deviceId: firstDeviceId, createdOn: 6, data: 'joe-2' }, 24 | ] 25 | 26 | await sharedSyncLog.writeEntries(entries, { 27 | userId, 28 | deviceId: firstDeviceId, 29 | now: 8, 30 | }) 31 | const logUpdate = await sharedSyncLog.getUnsyncedEntries({ 32 | userId, 33 | deviceId: secondDeviceId, 34 | }) 35 | expect(logUpdate).toEqual({ 36 | entries: [ 37 | (expect as any).objectContaining({ 38 | ...entries[0], 39 | userId: 1, 40 | deviceId: 
firstDeviceId, 41 | }), 42 | (expect as any).objectContaining({ 43 | ...entries[1], 44 | userId: 1, 45 | deviceId: firstDeviceId, 46 | }), 47 | ], 48 | memo: expect.any(Object), 49 | }) 50 | await sharedSyncLog.markAsSeen(logUpdate, { 51 | userId, 52 | deviceId: secondDeviceId, 53 | now: 10, 54 | }) 55 | expect( 56 | await sharedSyncLog.getUnsyncedEntries({ 57 | userId, 58 | deviceId: secondDeviceId, 59 | }), 60 | ).toEqual({ entries: [], memo: expect.any(Object) }) 61 | }) 62 | 63 | it('should work correctly even if entries from the past are added', async () => { 64 | const sharedSyncLog = await options.createLog() 65 | const userId = 1 66 | const firstDeviceId = await sharedSyncLog.createDeviceId({ 67 | userId, 68 | sharedUntil: 2, 69 | }) 70 | const secondDeviceId = await sharedSyncLog.createDeviceId({ 71 | userId, 72 | sharedUntil: 2, 73 | }) 74 | 75 | expect( 76 | await sharedSyncLog.getUnsyncedEntries({ 77 | userId, 78 | deviceId: secondDeviceId, 79 | }), 80 | ).toEqual({ entries: [], memo: expect.any(Object) }) 81 | 82 | const entries: Omit[] = [ 83 | { deviceId: firstDeviceId, createdOn: 4, data: 'joe-2' }, 84 | { deviceId: firstDeviceId, createdOn: 6, data: 'joe-3' }, 85 | ] 86 | await sharedSyncLog.writeEntries(entries, { 87 | userId, 88 | deviceId: firstDeviceId, 89 | now: 8, 90 | }) 91 | 92 | const firstLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 93 | userId, 94 | deviceId: secondDeviceId, 95 | }) 96 | await sharedSyncLog.markAsSeen(firstLogUpdate, { 97 | userId, 98 | deviceId: secondDeviceId, 99 | }) 100 | 101 | const newEntries: Omit[] = [ 102 | { deviceId: firstDeviceId, createdOn: 1, data: 'joe-1' }, 103 | ] 104 | await sharedSyncLog.writeEntries(newEntries, { 105 | userId, 106 | deviceId: firstDeviceId, 107 | now: 10, 108 | }) 109 | 110 | const secondLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 111 | userId, 112 | deviceId: secondDeviceId, 113 | }) 114 | expect(secondLogUpdate).toEqual({ 115 | entries: [ 116 | (expect as 
any).objectContaining({ 117 | ...newEntries[0], 118 | userId: 1, 119 | deviceId: firstDeviceId, 120 | }), 121 | ], 122 | memo: expect.any(Object), 123 | }) 124 | 125 | await sharedSyncLog.markAsSeen(secondLogUpdate, { 126 | userId, 127 | deviceId: secondDeviceId, 128 | }) 129 | 130 | expect( 131 | await sharedSyncLog.getUnsyncedEntries({ 132 | userId, 133 | deviceId: secondDeviceId, 134 | }), 135 | ).toEqual({ entries: [], memo: expect.any(Object) }) 136 | }) 137 | 138 | it('should not include its own entries when retrieving unseen entries', async () => { 139 | const sharedSyncLog = await options.createLog() 140 | const userId = 1 141 | const firstDeviceId = await sharedSyncLog.createDeviceId({ 142 | userId, 143 | sharedUntil: 2, 144 | }) 145 | 146 | expect( 147 | await sharedSyncLog.getUnsyncedEntries({ 148 | userId, 149 | deviceId: firstDeviceId, 150 | }), 151 | ).toEqual({ entries: [], memo: expect.any(Object) }) 152 | 153 | const entries: Omit[] = [ 154 | { deviceId: firstDeviceId, createdOn: 4, data: 'joe-2' }, 155 | { deviceId: firstDeviceId, createdOn: 6, data: 'joe-3' }, 156 | ] 157 | await sharedSyncLog.writeEntries(entries, { 158 | userId, 159 | deviceId: firstDeviceId, 160 | now: 8, 161 | }) 162 | 163 | const unseenEntries = await sharedSyncLog.getUnsyncedEntries({ 164 | userId, 165 | deviceId: firstDeviceId, 166 | }) 167 | expect(unseenEntries).toEqual({ entries: [], memo: expect.any(Object) }) 168 | }) 169 | 170 | it(`should keep giving me old entries as long as I don't mark retrieved entries as seen`, async () => { 171 | const sharedSyncLog = await options.createLog() 172 | const userId = 1 173 | const firstDeviceId = await sharedSyncLog.createDeviceId({ 174 | userId, 175 | sharedUntil: 2, 176 | }) 177 | const secondDeviceId = await sharedSyncLog.createDeviceId({ 178 | userId, 179 | sharedUntil: 2, 180 | }) 181 | 182 | const entries: Omit[] = [ 183 | { userId, deviceId: firstDeviceId, createdOn: 2, data: 'joe-1' }, 184 | { userId, deviceId: 
firstDeviceId, createdOn: 6, data: 'joe-2' }, 185 | ] 186 | 187 | await sharedSyncLog.writeEntries(entries, { 188 | userId, 189 | deviceId: firstDeviceId, 190 | now: 8, 191 | }) 192 | const firstLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 193 | userId, 194 | deviceId: secondDeviceId, 195 | }) 196 | expect(firstLogUpdate).toEqual({ 197 | entries: [ 198 | (expect as any).objectContaining({ 199 | ...entries[0], 200 | userId: 1, 201 | deviceId: firstDeviceId, 202 | }), 203 | (expect as any).objectContaining({ 204 | ...entries[1], 205 | userId: 1, 206 | deviceId: firstDeviceId, 207 | }), 208 | ], 209 | memo: expect.any(Object), 210 | }) 211 | 212 | const secondLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 213 | userId, 214 | deviceId: secondDeviceId, 215 | }) 216 | expect(secondLogUpdate).toEqual({ 217 | entries: [ 218 | (expect as any).objectContaining({ 219 | ...entries[0], 220 | userId: 1, 221 | deviceId: firstDeviceId, 222 | }), 223 | (expect as any).objectContaining({ 224 | ...entries[1], 225 | userId: 1, 226 | deviceId: firstDeviceId, 227 | }), 228 | ], 229 | memo: expect.any(Object), 230 | }) 231 | }) 232 | 233 | it(`should retrieve entries added between when a device fetches new entries and marks them as read`, async () => { 234 | const sharedSyncLog = await options.createLog() 235 | const userId = 1 236 | const firstDeviceId = await sharedSyncLog.createDeviceId({ 237 | userId, 238 | sharedUntil: 2, 239 | }) 240 | const secondDeviceId = await sharedSyncLog.createDeviceId({ 241 | userId, 242 | sharedUntil: 2, 243 | }) 244 | 245 | const firstBatch: Omit[] = [ 246 | { userId, deviceId: firstDeviceId, createdOn: 2, data: 'joe-1' }, 247 | { userId, deviceId: firstDeviceId, createdOn: 6, data: 'joe-2' }, 248 | ] 249 | await sharedSyncLog.writeEntries(firstBatch, { 250 | userId, 251 | deviceId: firstDeviceId, 252 | now: 8, 253 | }) 254 | const firstLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 255 | userId, 256 | deviceId: secondDeviceId, 257 | 
}) 258 | expect(firstLogUpdate).toEqual({ 259 | entries: [ 260 | (expect as any).objectContaining({ 261 | ...firstBatch[0], 262 | userId: 1, 263 | deviceId: firstDeviceId, 264 | }), 265 | (expect as any).objectContaining({ 266 | ...firstBatch[1], 267 | userId: 1, 268 | deviceId: firstDeviceId, 269 | }), 270 | ], 271 | memo: expect.any(Object), 272 | }) 273 | 274 | const secondBatch: Omit[] = [ 275 | { userId, deviceId: firstDeviceId, createdOn: 8, data: 'joe-3' }, 276 | { userId, deviceId: firstDeviceId, createdOn: 10, data: 'joe-4' }, 277 | ] 278 | await sharedSyncLog.writeEntries(secondBatch, { 279 | userId, 280 | deviceId: firstDeviceId, 281 | now: 10, 282 | }) 283 | await sharedSyncLog.markAsSeen(firstLogUpdate, { 284 | userId, 285 | deviceId: secondDeviceId, 286 | }) 287 | 288 | const secondLogUpdate = await sharedSyncLog.getUnsyncedEntries({ 289 | userId, 290 | deviceId: secondDeviceId, 291 | }) 292 | expect(secondLogUpdate).toEqual({ 293 | entries: [ 294 | (expect as any).objectContaining({ 295 | ...secondBatch[0], 296 | userId: 1, 297 | deviceId: firstDeviceId, 298 | }), 299 | (expect as any).objectContaining({ 300 | ...secondBatch[1], 301 | userId: 1, 302 | deviceId: firstDeviceId, 303 | }), 304 | ], 305 | memo: { lastBatchTime: 10 }, 306 | }) 307 | }) 308 | } 309 | -------------------------------------------------------------------------------- /ts/shared-sync-log/index.ts: -------------------------------------------------------------------------------- 1 | export { SharedSyncLog } from './types' 2 | -------------------------------------------------------------------------------- /ts/shared-sync-log/storex.test.ts: -------------------------------------------------------------------------------- 1 | import { setupStorexTest } from '@worldbrain/storex-pattern-modules/lib/index.tests' 2 | import { runTests } from './index.tests' 3 | import { SharedSyncLogStorage } from './storex' 4 | 5 | describe('SharedSyncLogStorage', () => { 6 | async function createLog() 
{ 7 | return (await setupStorexTest<{ sharedSyncLog: SharedSyncLogStorage }>({ 8 | collections: {}, 9 | modules: { 10 | sharedSyncLog: ({ storageManager }) => 11 | new SharedSyncLogStorage({ 12 | storageManager, 13 | autoPkType: 'int', 14 | }), 15 | }, 16 | })).modules.sharedSyncLog 17 | } 18 | 19 | runTests({ createLog }) 20 | }) 21 | -------------------------------------------------------------------------------- /ts/shared-sync-log/storex.ts: -------------------------------------------------------------------------------- 1 | import flatten from 'lodash/flatten' 2 | import sortBy from 'lodash/sortBy' 3 | import omit from 'lodash/omit' 4 | import { 5 | StorageModule, 6 | StorageModuleConfig, 7 | StorageModuleConstructorArgs, 8 | StorageModuleDebugConfig, 9 | } from '@worldbrain/storex-pattern-modules' 10 | import { 11 | SharedSyncLog, 12 | SharedSyncLogEntry, 13 | createSharedSyncLogConfig, 14 | SharedSyncLogUpdate, 15 | } from './types' 16 | import { Omit } from '../types' 17 | 18 | interface SharedSyncLogEntryBatch { 19 | userId: string | number 20 | deviceId: string | number 21 | sharedOn: number 22 | data: string 23 | } 24 | 25 | export class SharedSyncLogStorage extends StorageModule 26 | implements SharedSyncLog { 27 | constructor( 28 | private options: StorageModuleConstructorArgs & { 29 | autoPkType: 'string' | 'int' 30 | excludeTimestampChecks?: boolean 31 | }, 32 | ) { 33 | super(options) 34 | } 35 | 36 | getConfig: () => StorageModuleConfig = () => 37 | createSharedSyncLogConfig({ 38 | autoPkType: this.options.autoPkType, 39 | collections: { 40 | sharedSyncLogEntryBatch: { 41 | version: new Date('2019-02-05'), 42 | fields: { 43 | userId: { type: this.options.autoPkType }, 44 | deviceId: { type: this.options.autoPkType }, 45 | sharedOn: { type: 'timestamp' }, // when was this entry uploaded 46 | data: { type: 'string' }, 47 | }, 48 | groupBy: [{ key: 'userId', subcollectionName: 'entries' }], 49 | }, 50 | // sharedSyncLogSeenEntry: { 51 | // version: 
new Date('2019-02-05'), 52 | // fields: { 53 | // userId: { type: this.options.autoPkType }, 54 | // creatorDeviceId: { type: this.options.autoPkType }, 55 | // retrieverDeviceId: { type: this.options.autoPkType }, 56 | // createdOn: { type: 'timestamp' }, 57 | // }, 58 | // groupBy: [{ key: 'userId', subcollectionName: 'entries' }], 59 | // }, 60 | }, 61 | operations: { 62 | createDeviceInfo: { 63 | operation: 'createObject', 64 | collection: 'sharedSyncLogDeviceInfo', 65 | args: { 66 | userId: '$userId:pk', 67 | }, 68 | }, 69 | getDeviceInfo: { 70 | operation: 'findObject', 71 | collection: 'sharedSyncLogDeviceInfo', 72 | args: { userId: '$userId:pk', id: '$deviceId:pk' }, 73 | }, 74 | updateSharedUntil: { 75 | operation: 'updateObjects', 76 | collection: 'sharedSyncLogDeviceInfo', 77 | args: [ 78 | { userId: '$userId:pk', id: '$deviceId:pk' }, 79 | { sharedUntil: '$sharedUntil:timestamp' }, 80 | ], 81 | }, 82 | createLogEntryBatch: { 83 | operation: 'createObject', 84 | collection: 'sharedSyncLogEntryBatch', 85 | }, 86 | findUnseenSyncEntries: { 87 | operation: 'findObjects', 88 | collection: 'sharedSyncLogEntryBatch', 89 | args: [ 90 | { 91 | userId: '$userId', 92 | sharedOn: { $gt: '$after:timestamp' }, 93 | }, 94 | ], 95 | }, 96 | // insertSeenEntries: { 97 | // operation: 'executeBatch', 98 | // args: ['$operations'], 99 | // }, 100 | // retrieveSeenEntries: { 101 | // operation: 'findObjects', 102 | // collection: 'sharedSyncLogSeenEntry', 103 | // args: { 104 | // userId: '$userId:pk', 105 | // retrieverDeviceId: '$deviceId:pk', 106 | // }, 107 | // }, 108 | }, 109 | accessRules: { 110 | ownership: { 111 | sharedSyncLogDeviceInfo: { 112 | field: 'userId', 113 | access: ['list', 'read', 'create', 'update', 'delete'], 114 | }, 115 | sharedSyncLogEntryBatch: { 116 | field: 'userId', 117 | access: ['list', 'read', 'create', 'delete'], 118 | }, 119 | // sharedSyncLogSeenEntry: { 120 | // field: 'userId', 121 | // access: ['list', 'read', 'create', 'delete'], 
122 | // }, 123 | }, 124 | // validation: { 125 | // sharedSyncLogDeviceInfo: !this.options 126 | // .excludeTimestampChecks 127 | // ? [ 128 | // { 129 | // field: 'sharedUntil', 130 | // rule: { eq: ['$value', '$context.now'] }, 131 | // }, 132 | // ] 133 | // : [], 134 | // }, 135 | }, 136 | }) 137 | 138 | async createDeviceId(options: { 139 | userId: number | string 140 | sharedUntil?: number | null 141 | }): Promise { 142 | if (typeof options.sharedUntil === 'undefined') { 143 | options.sharedUntil = 0 144 | } 145 | return (await this.operation('createDeviceInfo', options)).object.id 146 | } 147 | 148 | async getDeviceInfo(options: { 149 | userId: number | string 150 | deviceId: number | string 151 | }): Promise<{ sharedUntil: number | null } | null> { 152 | const deviceInfo: { sharedUntil: number | null } = await this.operation( 153 | 'getDeviceInfo', 154 | options, 155 | ) 156 | if (!deviceInfo) { 157 | return null 158 | } 159 | 160 | if (!deviceInfo.sharedUntil) { 161 | deviceInfo.sharedUntil = null 162 | } 163 | 164 | return deviceInfo 165 | } 166 | 167 | async writeEntries( 168 | entries: Omit[], 169 | options: { 170 | userId: number | string 171 | deviceId: string | number 172 | now: number | '$now' 173 | extraSentInfo?: any 174 | }, 175 | ): Promise { 176 | if (!entries.length) { 177 | return 178 | } 179 | 180 | const batch: SharedSyncLogEntryBatch = { 181 | data: JSON.stringify({ entries, extraInfo: options.extraSentInfo }), 182 | userId: options.userId, 183 | deviceId: options.deviceId, 184 | sharedOn: (options && options.now) || ('$now' as any), 185 | } 186 | await this.operation('createLogEntryBatch', batch) 187 | } 188 | 189 | async getUnsyncedEntries(options: { 190 | userId: string | number 191 | deviceId: string | number 192 | }): Promise { 193 | const deviceInfo: { sharedUntil: number } = await this.operation( 194 | 'getDeviceInfo', 195 | options, 196 | ) 197 | if (!deviceInfo) { 198 | throw new Error(`No such device: ${options.deviceId}`) 199 | 
} 200 | 201 | const entryBatches: Array = await this.operation( 202 | 'findUnseenSyncEntries', 203 | { 204 | userId: options.userId, 205 | after: deviceInfo.sharedUntil || 0, 206 | }, 207 | ) 208 | 209 | const lastBatch = entryBatches.length 210 | ? entryBatches[entryBatches.length - 1] 211 | : null 212 | const lastBatchTime = lastBatch && lastBatch.sharedOn 213 | 214 | const entries = flatten( 215 | entryBatches 216 | .filter(batch => batch.deviceId !== options.deviceId) 217 | .map((batch): SharedSyncLogEntry[] => { 218 | const batchData = JSON.parse(batch.data) 219 | return batchData.entries.map( 220 | (entry: SharedSyncLogEntry) => ({ 221 | ...entry, 222 | sharedOn: batch.sharedOn, 223 | deviceId: batch.deviceId, 224 | userId: options.userId, 225 | extraInfo: batchData.extraInfo, 226 | }), 227 | ) 228 | }), 229 | ) as SharedSyncLogEntry[] 230 | 231 | return { 232 | entries: sortBy(entries, 'createdOn'), 233 | memo: { lastBatchTime }, 234 | } 235 | } 236 | 237 | async markAsSeen( 238 | update: SharedSyncLogUpdate, 239 | options: { 240 | userId: string | number 241 | deviceId: string | number 242 | now?: number | '$now' 243 | }, 244 | ): Promise { 245 | const sharedUntil = update.entries.length 246 | ? update.memo.lastBatchTime 247 | : options.now ?? 
Date.now() 248 | 249 | await this.operation('updateSharedUntil', { 250 | userId: options.userId, 251 | deviceId: options.deviceId, 252 | sharedUntil, 253 | }) 254 | } 255 | } 256 | -------------------------------------------------------------------------------- /ts/shared-sync-log/types.ts: -------------------------------------------------------------------------------- 1 | import { 2 | StorageModuleConfig, 3 | StorageOperationDefinitions, 4 | AccessRules, 5 | } from '@worldbrain/storex-pattern-modules' 6 | import { CollectionDefinitionMap } from '@worldbrain/storex' 7 | import { Omit } from '../types' 8 | import { 9 | ClientSyncLogEntry, 10 | ClientSyncLogModificationEntry, 11 | } from '../client-sync-log/types' 12 | 13 | export interface SharedSyncLog { 14 | createDeviceId(options: { 15 | userId: number | string 16 | sharedUntil: number | null 17 | }): Promise 18 | getDeviceInfo(options: { 19 | userId: number | string 20 | deviceId: number | string 21 | }): Promise<{ sharedUntil: number | null } | null> 22 | 23 | writeEntries( 24 | entries: Omit[], 25 | options: { 26 | userId: number | string 27 | deviceId: string | number 28 | now?: number | '$now' 29 | extraSentInfo?: any 30 | }, 31 | ): Promise 32 | getUnsyncedEntries(options: { 33 | userId: string | number 34 | deviceId: string | number 35 | batchSize?: number 36 | }): Promise 37 | markAsSeen( 38 | update: Pick, 39 | options: { 40 | userId: string | number 41 | deviceId: string | number 42 | now?: number | '$now' 43 | }, 44 | ): Promise 45 | } 46 | export interface SharedSyncLogUpdate { 47 | entries: SharedSyncLogEntry[] 48 | memo?: any 49 | } 50 | 51 | interface SharedSyncLogEntryBase { 52 | userId: number | string 53 | deviceId: number | string 54 | createdOn: number | '$now' 55 | sharedOn: number 56 | extraInfo?: any 57 | } 58 | 59 | export type SharedSyncLogEntry< 60 | SerializedData extends 61 | | 'serialized-data' 62 | | 'deserialized-data' = 'serialized-data' 63 | > = SharedSyncLogEntryBase & 64 | 
(SerializedData extends 'serialized-data' 65 | ? { data: string } 66 | : { data: SharedSyncLogEntryData }) 67 | 68 | export interface SharedSyncLogEntryData { 69 | operation: ClientSyncLogEntry['operation'] 70 | collection: ClientSyncLogEntry['collection'] 71 | pk: ClientSyncLogEntry['pk'] 72 | field: string | null 73 | value: ClientSyncLogModificationEntry['value'] | null 74 | } 75 | 76 | export function createSharedSyncLogConfig(options: { 77 | autoPkType: 'int' | 'string' 78 | collections?: CollectionDefinitionMap 79 | operations?: StorageOperationDefinitions 80 | accessRules?: AccessRules 81 | }): StorageModuleConfig { 82 | return { 83 | operations: options.operations, 84 | collections: { 85 | sharedSyncLogEntry: { 86 | version: new Date('2019-02-05'), 87 | fields: { 88 | userId: { type: options.autoPkType }, 89 | deviceId: { type: options.autoPkType }, 90 | createdOn: { type: 'timestamp' }, // when was this entry created on a device 91 | sharedOn: { type: 'timestamp' }, // when was this entry uploaded 92 | data: { type: 'string' }, 93 | }, 94 | groupBy: [{ key: 'userId', subcollectionName: 'entries' }], 95 | }, 96 | sharedSyncLogDeviceInfo: { 97 | version: new Date('2019-02-05'), 98 | fields: { 99 | userId: { type: options.autoPkType }, 100 | sharedUntil: { type: 'timestamp', optional: true }, 101 | }, 102 | groupBy: [{ key: 'userId', subcollectionName: 'devices' }], 103 | }, 104 | ...(options.collections || {}), 105 | }, 106 | methods: { 107 | createDeviceId: { 108 | type: 'mutation', 109 | args: { 110 | userId: options.autoPkType, 111 | sharedUntil: 'float', 112 | }, 113 | returns: options.autoPkType, 114 | }, 115 | writeEntries: { 116 | type: 'mutation', 117 | args: { 118 | entries: { 119 | type: { array: { collection: 'sharedSyncLogEntry' } }, 120 | positional: true, 121 | }, 122 | }, 123 | returns: 'void', 124 | }, 125 | getUnsyncedEntries: { 126 | type: 'query', 127 | args: { 128 | deviceId: { type: options.autoPkType }, 129 | }, 130 | returns: { array: 
{ collection: 'sharedSyncLogEntry' } }, 131 | }, 132 | markAsSeen: { 133 | type: 'mutation', 134 | args: { 135 | entries: { 136 | type: { 137 | array: { 138 | object: { 139 | createdOn: 'float', 140 | deviceId: options.autoPkType, 141 | }, 142 | singular: 'entry', 143 | }, 144 | }, 145 | }, 146 | deviceId: { type: options.autoPkType }, 147 | }, 148 | returns: 'void', 149 | }, 150 | }, 151 | accessRules: options.accessRules, 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /ts/types.test.ts: -------------------------------------------------------------------------------- 1 | export type PromiseContentType = T extends Promise ? U : T 2 | -------------------------------------------------------------------------------- /ts/types.ts: -------------------------------------------------------------------------------- 1 | export type Omit = Pick< 2 | T, 3 | ({ [P in keyof T]: P } & 4 | { [P in K]: never } & { [x: string]: never })[keyof T] 5 | > 6 | -------------------------------------------------------------------------------- /ts/utils.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import StorageManager, { CollectionDefinitionMap } from '@worldbrain/storex' 3 | import { getObjectPk, getObjectWithoutPk, setObjectPk } from './utils' 4 | 5 | describe('Primary key utils', () => { 6 | async function setupTest(config: { collections: CollectionDefinitionMap }) { 7 | const backend = { 8 | configure: () => null, 9 | operation: async (...args: any[]) => ({ args }), 10 | } as any 11 | const storageManager = new StorageManager({ backend }) 12 | storageManager.registry.registerCollections(config.collections) 13 | return { storageManager } 14 | } 15 | 16 | describe('getObjectPk()', () => { 17 | it('should work for an object with a single field pk', async () => { 18 | const { storageManager } = await setupTest({ 19 | collections: { 20 | user: { 21 | 
version: new Date('2019-02-19'), 22 | fields: { 23 | displayName: { type: 'string' }, 24 | }, 25 | }, 26 | }, 27 | }) 28 | expect( 29 | getObjectPk( 30 | { id: 1, displayName: 'Joe' }, 31 | 'user', 32 | storageManager.registry, 33 | ), 34 | ).toEqual(1) 35 | }) 36 | 37 | it('should work for an object with a compound pk', async () => { 38 | const { storageManager } = await setupTest({ 39 | collections: { 40 | user: { 41 | version: new Date('2019-02-19'), 42 | fields: { 43 | firstName: { type: 'string' }, 44 | lastName: { type: 'string' }, 45 | email: { type: 'string' }, 46 | }, 47 | pkIndex: ['firstName', 'lastName'], 48 | }, 49 | }, 50 | }) 51 | expect( 52 | getObjectPk( 53 | { firstName: 'Joe', lastName: 'Doe', email: 'bla@bla.com' }, 54 | 'user', 55 | storageManager.registry, 56 | ), 57 | ).toEqual(['Joe', 'Doe']) 58 | }) 59 | }) 60 | 61 | describe('getObjectWithoutPk()', () => { 62 | it('should work for an object with a single field pk', async () => { 63 | const { storageManager } = await setupTest({ 64 | collections: { 65 | user: { 66 | version: new Date('2019-02-19'), 67 | fields: { 68 | displayName: { type: 'string' }, 69 | }, 70 | }, 71 | }, 72 | }) 73 | expect( 74 | getObjectWithoutPk( 75 | { id: 1, displayName: 'Joe' }, 76 | 'user', 77 | storageManager.registry, 78 | ), 79 | ).toEqual({ displayName: 'Joe' }) 80 | }) 81 | 82 | it('should work for an object with a compound pk', async () => { 83 | const { storageManager } = await setupTest({ 84 | collections: { 85 | user: { 86 | version: new Date('2019-02-19'), 87 | fields: { 88 | firstName: { type: 'string' }, 89 | lastName: { type: 'string' }, 90 | email: { type: 'string' }, 91 | }, 92 | pkIndex: ['firstName', 'lastName'], 93 | }, 94 | }, 95 | }) 96 | expect( 97 | getObjectWithoutPk( 98 | { firstName: 'Joe', lastName: 'Doe', email: 'bla@bla.com' }, 99 | 'user', 100 | storageManager.registry, 101 | ), 102 | ).toEqual({ email: 'bla@bla.com' }) 103 | }) 104 | }) 105 | 106 | describe('setObjectPk()', () => { 
107 | it('should work for an object with a single field pk', async () => { 108 | const { storageManager } = await setupTest({ 109 | collections: { 110 | user: { 111 | version: new Date('2019-02-19'), 112 | fields: { 113 | displayName: { type: 'string' }, 114 | }, 115 | }, 116 | }, 117 | }) 118 | 119 | const object = { displayName: 'Joe' } 120 | const returned = setObjectPk( 121 | object, 122 | 2, 123 | 'user', 124 | storageManager.registry, 125 | ) 126 | expect(object).toEqual({ id: 2, displayName: 'Joe' }) 127 | expect(returned).toEqual(object) 128 | }) 129 | 130 | it('should work for an object with a compound pk', async () => { 131 | const { storageManager } = await setupTest({ 132 | collections: { 133 | user: { 134 | version: new Date('2019-02-19'), 135 | fields: { 136 | firstName: { type: 'string' }, 137 | lastName: { type: 'string' }, 138 | email: { type: 'string' }, 139 | }, 140 | pkIndex: ['firstName', 'lastName'], 141 | }, 142 | }, 143 | }) 144 | 145 | const object = { email: 'joe@doe.com' } 146 | const returned = setObjectPk( 147 | object, 148 | ['Joe', 'Doe'], 149 | 'user', 150 | storageManager.registry, 151 | ) 152 | expect(object).toEqual({ 153 | firstName: 'Joe', 154 | lastName: 'Doe', 155 | email: 'joe@doe.com', 156 | }) 157 | expect(returned).toEqual(object) 158 | }) 159 | }) 160 | }) 161 | -------------------------------------------------------------------------------- /ts/utils.ts: -------------------------------------------------------------------------------- 1 | import { StorageRegistry } from '@worldbrain/storex' 2 | 3 | export function getObjectPk( 4 | object: any, 5 | collection: string, 6 | registry: StorageRegistry, 7 | ) { 8 | const pkIndex = registry.collections[collection].pkIndex 9 | if (typeof pkIndex === 'string') { 10 | return object[pkIndex] 11 | } 12 | 13 | const pk = [] 14 | for (const indexField of pkIndex as string[]) { 15 | if (typeof indexField === 'string') { 16 | pk.push(object[indexField]) 17 | } else { 18 | throw new 
Error( 19 | `getObject() called with relationship as pk, which is not supported yet.`, 20 | ) 21 | } 22 | } 23 | return pk 24 | } 25 | 26 | export function getObjectWithoutPk( 27 | object: any, 28 | collection: string, 29 | registry: StorageRegistry, 30 | ) { 31 | object = { ...object } 32 | 33 | const pkIndex = registry.collections[collection].pkIndex 34 | if (typeof pkIndex === 'string') { 35 | delete object[pkIndex] 36 | return object 37 | } 38 | 39 | for (const indexField of pkIndex as string[]) { 40 | if (typeof indexField === 'string') { 41 | delete object[indexField] 42 | } else { 43 | throw new Error( 44 | `getObject() called with relationship as pk, which is not supported yet.`, 45 | ) 46 | } 47 | } 48 | return object 49 | } 50 | 51 | export function setObjectPk( 52 | object: any, 53 | pk: number | string | number[] | string[], 54 | collection: string, 55 | registry: StorageRegistry, 56 | ) { 57 | const collectionDefinition = registry.collections[collection] 58 | if (!collectionDefinition) { 59 | throw new Error( 60 | `Could not find collection definition for '${collection}'`, 61 | ) 62 | } 63 | 64 | const pkIndex = collectionDefinition.pkIndex 65 | if (typeof pkIndex === 'string') { 66 | object[pkIndex] = pk 67 | return object 68 | } 69 | 70 | let indexFieldIdx = 0 71 | for (const indexField of pkIndex as string[]) { 72 | if (typeof indexField === 'string') { 73 | object[indexField] = pk[indexFieldIdx++] 74 | } else { 75 | throw new Error( 76 | `setObjectPk() called with relationship as pk, which is not supported yet.`, 77 | ) 78 | } 79 | } 80 | 81 | return object 82 | } 83 | -------------------------------------------------------------------------------- /ts/utils/recurring-task.test.ts: -------------------------------------------------------------------------------- 1 | import expect from 'expect' 2 | import { RecurringTask } from './recurring-task' 3 | 4 | function sleepPromise(miliseconds: number) { 5 | return new Promise(resolve => { 6 | 
setTimeout(resolve, miliseconds) 7 | }) 8 | } 9 | 10 | function createTestTask(f?: () => Promise<void>) { 11 | const runs: number[] = [] 12 | return { 13 | runs, 14 | task: async () => { 15 | runs.push(Date.now()) 16 | if (f) { 17 | await f() 18 | } 19 | }, 20 | } 21 | } 22 | 23 | function createTestSetTimeout() { 24 | const calls: Array<{ f: () => void; miliseconds: number }> = [] 25 | return { 26 | calls, 27 | setTimeout: (f: () => void, miliseconds: number) => { 28 | calls.push({ f, miliseconds }) 29 | return setTimeout(f, miliseconds) 30 | }, 31 | } 32 | } 33 | 34 | interface TestOptions { 35 | intervalInMs: number 36 | task?: () => Promise<void> 37 | } 38 | function setupTest(options: TestOptions) { 39 | const intervalInMs = options.intervalInMs 40 | const testTask = createTestTask(options.task) 41 | const testSetTimeout = createTestSetTimeout() 42 | const nowBefore = Date.now() 43 | const errors: Error[] = [] 44 | const recurring = new RecurringTask(testTask.task, { 45 | intervalInMs, 46 | setTimeout: testSetTimeout.setTimeout, 47 | onError: err => errors.push(err), 48 | }) 49 | 50 | return { 51 | intervalInMs, 52 | testTask, 53 | testSetTimeout, 54 | nowBefore, 55 | recurringTask: recurring, 56 | errors, 57 | } 58 | } 59 | 60 | async function runTest( 61 | options: TestOptions, 62 | f: (setup: ReturnType<typeof setupTest>) => Promise<void>, 63 | ) { 64 | const setup = setupTest(options) 65 | try { 66 | await f(setup) 67 | } finally { 68 | setup.recurringTask.stop() 69 | } 70 | } 71 | 72 | describe('Recurring task', () => { 73 | it('should not run the task when constructed', async () => { 74 | await runTest({ intervalInMs: 1000 }, async setup => { 75 | expect(setup.testTask.runs).toEqual([]) 76 | expect(setup.errors).toEqual([]) 77 | }) 78 | }) 79 | 80 | it('should schedule the task when constructed', async () => { 81 | await runTest({ intervalInMs: 1000 }, async setup => { 82 | expect(setup.testSetTimeout.calls).toEqual([ 83 | { f: expect.any(Function), miliseconds: setup.intervalInMs }, 84 |
]) 85 | expect( 86 | setup.recurringTask.aproximateNextRun! - 87 | (setup.nowBefore + setup.intervalInMs), 88 | ).toBeLessThan(50) 89 | expect(setup.errors).toEqual([]) 90 | }) 91 | }) 92 | 93 | it('should trigger the task on the desired interval', async () => { 94 | await runTest({ intervalInMs: 300 }, async setup => { 95 | await sleepPromise(setup.intervalInMs + 10) 96 | setup.recurringTask.stop() 97 | expect(setup.testTask.runs.length).toEqual(1) 98 | const firstRun = setup.testTask.runs[0] 99 | expect( 100 | firstRun - (setup.nowBefore + setup.intervalInMs), 101 | ).toBeLessThan(50) 102 | expect(setup.errors).toEqual([]) 103 | }) 104 | }) 105 | 106 | it('should reschedule the task when triggered by timout', async () => { 107 | await runTest({ intervalInMs: 300 }, async setup => { 108 | await sleepPromise(setup.intervalInMs + 10) 109 | expect( 110 | setup.recurringTask.aproximateNextRun! - 111 | (setup.nowBefore + setup.intervalInMs * 2), 112 | ).toBeLessThan(50) 113 | setup.recurringTask.stop() 114 | expect(setup.testTask.runs.length).toEqual(1) 115 | expect(setup.errors).toEqual([]) 116 | }) 117 | }) 118 | 119 | it('should reschedule the task when triggered by timout fails', async () => { 120 | await runTest( 121 | { 122 | intervalInMs: 300, 123 | task: async () => { 124 | throw new Error('Boooh!') 125 | }, 126 | }, 127 | async setup => { 128 | await sleepPromise(setup.intervalInMs + 10) 129 | expect( 130 | setup.recurringTask.aproximateNextRun! - 131 | (setup.nowBefore + setup.intervalInMs * 2), 132 | ).toBeLessThan(50) 133 | setup.recurringTask.stop() 134 | expect(setup.testTask.runs.length).toEqual(1) 135 | expect(setup.errors).toEqual([new Error('Boooh!')]) 136 | }, 137 | ) 138 | }) 139 | 140 | it('should reschedule the task only after task is done', async () => { 141 | await runTest( 142 | { intervalInMs: 300, task: () => sleepPromise(500) }, 143 | async setup => { 144 | await sleepPromise(350) 145 | expect( 146 | setup.recurringTask.aproximateNextRun! 
- 147 | (setup.nowBefore + setup.intervalInMs), 148 | ).toBeLessThan(350) 149 | setup.recurringTask.stop() 150 | expect(setup.testTask.runs.length).toEqual(1) 151 | expect(setup.testSetTimeout.calls).toEqual([ 152 | { 153 | f: expect.any(Function), 154 | miliseconds: setup.intervalInMs, 155 | }, 156 | { 157 | f: expect.any(Function), 158 | miliseconds: setup.intervalInMs, 159 | }, 160 | ]) 161 | expect(setup.errors).toEqual([]) 162 | }, 163 | ) 164 | }) 165 | 166 | it('should be able to force the task to be triggered', async () => { 167 | await runTest({ intervalInMs: 300 }, async setup => { 168 | await setup.recurringTask.forceRun() 169 | setup.recurringTask.stop() 170 | expect(setup.testTask.runs.length).toEqual(1) 171 | expect(setup.errors).toEqual([]) 172 | }) 173 | }) 174 | 175 | it('should reschedule the task after forcing', async () => { 176 | await runTest({ intervalInMs: 300 }, async setup => { 177 | await setup.recurringTask.forceRun() 178 | expect( 179 | setup.recurringTask.aproximateNextRun! - 180 | (setup.nowBefore + setup.intervalInMs), 181 | ).toBeLessThan(50) 182 | setup.recurringTask.stop() 183 | expect(setup.testSetTimeout.calls).toEqual([ 184 | { f: expect.any(Function), miliseconds: setup.intervalInMs }, 185 | { f: expect.any(Function), miliseconds: setup.intervalInMs }, 186 | ]) 187 | expect(setup.errors).toEqual([]) 188 | }) 189 | }) 190 | 191 | it('should reschedule the task if it fails when forcing', async () => { 192 | await runTest( 193 | { 194 | intervalInMs: 300, 195 | task: async () => { 196 | throw new Error('Boooh!') 197 | }, 198 | }, 199 | async setup => { 200 | await (expect( 201 | setup.recurringTask.forceRun(), 202 | ) as any).rejects.toThrow('Boooh!') 203 | expect( 204 | setup.recurringTask.aproximateNextRun! 
- 205 | (setup.nowBefore + setup.intervalInMs), 206 | ).toBeLessThan(50) 207 | setup.recurringTask.stop() 208 | 209 | expect(setup.testSetTimeout.calls).toEqual([ 210 | { 211 | f: expect.any(Function), 212 | miliseconds: setup.intervalInMs, 213 | }, 214 | { 215 | f: expect.any(Function), 216 | miliseconds: setup.intervalInMs, 217 | }, 218 | ]) 219 | expect(setup.errors).toEqual([new Error('Boooh!')]) 220 | }, 221 | ) 222 | }) 223 | }) 224 | -------------------------------------------------------------------------------- /ts/utils/recurring-task.ts: -------------------------------------------------------------------------------- 1 | export class RecurringTask<TaskOptions = any> { 2 | // taskRunning = false // TODO: Write tests before introducing this feature 3 | aproximateNextRun: number | null = null 4 | private timeoutId: ReturnType<typeof setTimeout> | null = null 5 | 6 | constructor( 7 | private task: (options?: TaskOptions) => Promise<void>, 8 | private options: { 9 | intervalInMs: number 10 | onError: (error: Error) => void 11 | setTimeout?: ( 12 | f: () => void, 13 | miliseconds: number, 14 | ) => ReturnType<typeof setTimeout> 15 | clearTimeout?: (timeoutId: ReturnType<typeof setTimeout>) => void 16 | }, 17 | ) { 18 | this.schedule() 19 | } 20 | 21 | stop() { 22 | this.clearTimeout() 23 | this.aproximateNextRun = null 24 | } 25 | 26 | async forceRun(options?: TaskOptions) { 27 | this.clearTimeout() 28 | try { 29 | const result = await this.run(options) 30 | return result 31 | } catch (e) { 32 | this.options.onError(e) 33 | throw e 34 | } 35 | } 36 | 37 | private schedule() { 38 | if (this.timeoutId) { 39 | this.clearTimeout() 40 | } 41 | 42 | const { intervalInMs } = this.options 43 | const now = Date.now() 44 | this.aproximateNextRun = now + intervalInMs 45 | this.timeoutId = (this.options.setTimeout || setTimeout)(async () => { 46 | try { 47 | await this.run() 48 | } catch (e) { 49 | this.options.onError(e) 50 | } 51 | }, intervalInMs) 52 | } 53 | 54 | private async run(options?: TaskOptions) { 55 | // this.taskRunning = true 56 | try {
57 | return this.task(options) 58 | } finally { 59 | this.schedule() 60 | // this.taskRunning = false 61 | } 62 | } 63 | 64 | private clearTimeout() { 65 | if (this.timeoutId) { 66 | ;(this.options.clearTimeout || clearTimeout)(this.timeoutId) 67 | this.timeoutId = null 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "3.7", 3 | "compilerOptions": { 4 | "target": "es5", 5 | "module": "commonjs", 6 | "moduleResolution": "node", 7 | "strict": true, 8 | "esModuleInterop": true, 9 | "allowSyntheticDefaultImports": true, 10 | "removeComments": true, 11 | "lib": ["es2017", "dom", "esnext.asynciterable"], 12 | "noLib": false, 13 | "downlevelIteration": true, 14 | "preserveConstEnums": true, 15 | "declaration": true, 16 | "sourceMap": true, 17 | "suppressImplicitAnyIndexErrors": true, 18 | "typeRoots": ["./node_modules/@types"], 19 | "outDir": "lib" 20 | }, 21 | "filesGlob": ["./ts/**/*.ts", "!./node_modules/**/*.ts"] 22 | } 23 | --------------------------------------------------------------------------------