├── .gitignore
├── img
├── sign.png
├── lockup.png
├── brandmark.png
├── brandmark.svg
├── lockup.svg
└── sign.svg
├── .babelrc
├── .mocharc.yaml
├── src
├── uuid.js
├── common.js
└── automerge.js
├── backend
├── util.js
├── index.js
├── backend.js
└── sync.js
├── tsconfig.json
├── webpack.config.js
├── frontend
├── constants.js
├── numbers.js
├── counter.js
├── observable.js
├── text.js
├── table.js
├── proxies.js
├── apply_patch.js
└── index.js
├── test
├── uuid_test.js
├── helpers.js
├── columnar_test.js
├── table_test.js
├── observable_test.js
├── fuzz_test.js
├── wasm.js
└── proxies_test.js
├── LICENSE
├── karma.conf.js
├── README.md
├── package.json
├── .github
└── workflows
│ └── automerge-ci.yml
├── karma.sauce.js
├── .eslintrc.json
└── @types
└── automerge
└── index.d.ts
/.gitignore:
--------------------------------------------------------------------------------
1 | /coverage
2 | /dist
3 | /node_modules
4 | .nyc_output
5 | .vscode
6 |
--------------------------------------------------------------------------------
/img/sign.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/automerge/automerge-classic/HEAD/img/sign.png
--------------------------------------------------------------------------------
/img/lockup.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/automerge/automerge-classic/HEAD/img/lockup.png
--------------------------------------------------------------------------------
/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | [
4 | "@babel/preset-env"
5 | ]
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/img/brandmark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/automerge/automerge-classic/HEAD/img/brandmark.png
--------------------------------------------------------------------------------
/.mocharc.yaml:
--------------------------------------------------------------------------------
1 | use_strict: true
2 | require:
3 | - ts-node/register
4 | - tsconfig-paths/register
5 | watch-files:
6 | - 'src/*.js'
7 | - 'frontend/*.js'
8 | - 'backend/*.js'
9 | - 'test/*.js'
10 | - 'test/*.ts'
11 | spec:
12 | - 'test/*test*.js'
13 | - 'test/*test*.ts'
14 |
--------------------------------------------------------------------------------
/src/uuid.js:
--------------------------------------------------------------------------------
const { v4: randomUuid } = require('uuid')

/**
 * UUID generation with a pluggable factory: by default a random v4 UUID
 * with the hyphens stripped, but applications (and tests) may install
 * their own generator via `setFactory()` and restore it via `reset()`.
 */
function defaultFactory() {
  return randomUuid().replace(/-/g, '')
}

// Currently-active generator; swapped out by setFactory()/reset()
let currentFactory = defaultFactory

/** Returns a fresh identifier produced by the active factory. */
function makeUuid() {
  return currentFactory()
}

makeUuid.setFactory = newFactory => { currentFactory = newFactory }
makeUuid.reset = () => { currentFactory = defaultFactory }

module.exports = makeUuid
17 |
--------------------------------------------------------------------------------
/backend/util.js:
--------------------------------------------------------------------------------
/**
 * Returns the internal state object of `backend`.
 * Throws if the backend has been marked `frozen`, i.e. this document handle
 * is stale because a newer state has since been produced from it.
 */
function backendState(backend) {
  if (!backend.frozen) return backend.state
  throw new Error(
    'Attempting to use an outdated Automerge document that has already been updated. ' +
    'Please use the latest document state, or call Automerge.clone() if you really ' +
    'need to use this old document state.'
  )
}
11 |
// Public surface of backend/util.js: only the stale-handle guard
module.exports = {
  backendState
}
15 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowJs": false,
4 | "baseUrl": ".",
5 | "esModuleInterop": true,
6 | "lib": ["dom", "esnext.asynciterable", "es2017", "es2016", "es2015"],
7 | "module": "commonjs",
8 | "moduleResolution": "node",
9 | "paths": { "automerge": ["*"]},
10 | "rootDir": "",
11 | "target": "es2016",
12 | "typeRoots": ["./@types", "./node_modules/@types"]
13 | },
14 | "exclude": ["dist/**/*"]
15 | }
16 |
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
const path = require('path')

// Builds dist/automerge.js from src/automerge.js as a development-mode UMD
// bundle exposing the global `Automerge`, loadable via <script>, CommonJS
// or AMD. Source maps are emitted alongside the bundle.
module.exports = {
  entry: './src/automerge.js',
  mode: 'development',
  output: {
    filename: 'automerge.js',
    library: 'Automerge',
    libraryTarget: 'umd',
    path: path.resolve(__dirname, 'dist'),
    // https://github.com/webpack/webpack/issues/6525
    globalObject: 'this',
    // https://github.com/webpack/webpack/issues/11660
    chunkLoading: false,
  },
  devtool: 'source-map',
  module: {rules: []},
  target: "browserslist:web"
}
20 |
--------------------------------------------------------------------------------
/backend/index.js:
--------------------------------------------------------------------------------
// Aggregates the backend API into one module: document lifecycle and change
// management come from ./backend, sync-protocol helpers from ./sync.
const { init, clone, free, applyChanges, applyLocalChange, save, load, loadChanges, getPatch, getHeads, getAllChanges, getChanges, getChangesAdded, getChangeByHash, getMissingDeps } = require("./backend")
const { receiveSyncMessage, generateSyncMessage, encodeSyncMessage, decodeSyncMessage, encodeSyncState, decodeSyncState, initSyncState } = require('./sync')

module.exports = {
  init, clone, free, applyChanges, applyLocalChange, save, load, loadChanges, getPatch,
  getHeads, getAllChanges, getChanges, getChangesAdded, getChangeByHash, getMissingDeps,
  receiveSyncMessage, generateSyncMessage, encodeSyncMessage, decodeSyncMessage, encodeSyncState, decodeSyncState, initSyncState
}
9 |
--------------------------------------------------------------------------------
/img/brandmark.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/frontend/constants.js:
--------------------------------------------------------------------------------
// Symbol keys used to stash Automerge metadata on documents and their
// objects without colliding with user-defined string properties.

// Properties of the document root object
const OPTIONS = Symbol('_options') // object containing options passed to init()
const CACHE = Symbol('_cache') // map from objectId to immutable object
const STATE = Symbol('_state') // object containing metadata about current state (e.g. sequence numbers)

// Properties of all Automerge objects
const OBJECT_ID = Symbol('_objectId') // the object ID of the current object (string)
const CONFLICTS = Symbol('_conflicts') // map or list (depending on object type) of conflicts
const CHANGE = Symbol('_change') // the context object on proxy objects used in change callback
const ELEM_IDS = Symbol('_elemIds') // list containing the element ID of each list element

module.exports = {
  OPTIONS, CACHE, STATE, OBJECT_ID, CONFLICTS, CHANGE, ELEM_IDS
}
15 |
--------------------------------------------------------------------------------
/test/uuid_test.js:
--------------------------------------------------------------------------------
// Tests for the pluggable UUID generator (src/uuid.js): the default
// implementation must produce unique values, and setFactory()/reset()
// must install and remove a custom generator.
const assert = require('assert')
const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')

const uuid = Automerge.uuid

describe('uuid', () => {
  // Restore the default factory so a custom one cannot leak into other tests
  afterEach(() => {
    uuid.reset()
  })

  describe('default implementation', () => {
    it('generates unique values', () => {
      assert.notEqual(uuid(), uuid())
    })
  })

  describe('custom implementation', () => {
    let counter

    function customUuid() {
      return `custom-uuid-${counter++}`
    }

    before(() => uuid.setFactory(customUuid))
    beforeEach(() => counter = 0)

    it('invokes the custom factory', () => {
      assert.equal(uuid(), 'custom-uuid-0')
      assert.equal(uuid(), 'custom-uuid-1')
    })
  })
})
33 |
--------------------------------------------------------------------------------
/frontend/numbers.js:
--------------------------------------------------------------------------------
1 | // Convenience classes to allow users to strictly specify the number type they want
2 |
/**
 * Wrapper marking a value as a signed integer when stored in an Automerge
 * document. Accepts any safe integer (MIN_SAFE_INTEGER..MAX_SAFE_INTEGER);
 * the instance is frozen so the wrapped value cannot be mutated.
 * @throws {RangeError} if `value` is not a safe integer.
 */
class Int {
  constructor(value) {
    if (!(Number.isInteger(value) && value <= Number.MAX_SAFE_INTEGER && value >= Number.MIN_SAFE_INTEGER)) {
      // Fixed copy-paste bug: the message previously said "uint" here too
      throw new RangeError(`Value ${value} cannot be an integer`)
    }
    this.value = value
    Object.freeze(this)
  }
}
12 |
/**
 * Wrapper marking a value as an unsigned integer when stored in an
 * Automerge document. Accepts integers in 0..MAX_SAFE_INTEGER; the
 * instance is frozen so the wrapped value cannot be mutated.
 * @throws {RangeError} if `value` is not a non-negative safe integer.
 */
class Uint {
  constructor(value) {
    const valid = Number.isInteger(value) && value >= 0 && value <= Number.MAX_SAFE_INTEGER
    if (!valid) {
      throw new RangeError(`Value ${value} cannot be a uint`)
    }
    this.value = value
    Object.freeze(this)
  }
}
22 |
/**
 * Wrapper marking a value as a 64-bit float when stored in an Automerge
 * document. Any number is accepted; note that falsy numbers (NaN, -0) are
 * normalized to 0.0 by the `|| 0.0` below. The instance is frozen.
 * @throws {RangeError} if `value` is not a number.
 */
class Float64 {
  constructor(value) {
    if (typeof value !== 'number') {
      throw new RangeError(`Value ${value} cannot be a float64`)
    }
    this.value = value || 0.0
    Object.freeze(this)
  }
}
32 |
33 | module.exports = { Int, Uint, Float64 }
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2017-present Martin Kleppmann, Ink & Switch LLC, and the Automerge contributors
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in all
11 | copies or substantial portions of the Software.
12 |
13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
19 | SOFTWARE.
20 |
--------------------------------------------------------------------------------
/test/helpers.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const { Encoder } = require('../backend/encoding')
3 |
/**
 * Assertion that succeeds if `actual` deepStrictEquals at least one of the
 * subsequent arguments (we don't care which one). Rethrows immediately on
 * non-assertion errors; otherwise fails only after every candidate misses.
 */
function assertEqualsOneOf(actual, ...expected) {
  assert(expected.length > 0)
  const lastIndex = expected.length - 1
  for (const [index, candidate] of expected.entries()) {
    try {
      assert.deepStrictEqual(actual, candidate)
      return // a match was found, so the assertion succeeds
    } catch (e) {
      if (!e.name.match(/^AssertionError/) || index === lastIndex) throw e
    }
  }
}
17 |
/**
 * Asserts that the byte array maintained by `encoder` contains the same byte
 * sequence as the array `bytes`. `encoder` may be an `Encoder` instance (its
 * buffer is compared) or a raw byte array; `detail` is an optional message
 * prefix for failures.
 */
function checkEncoded(encoder, bytes, detail) {
  const actual = (encoder instanceof Encoder) ? encoder.buffer : encoder
  const expected = new Uint8Array(bytes)
  const prefix = detail ? `${detail}: ` : ''
  const message = prefix + `${actual} expected to equal ${expected}`
  assert(actual.byteLength === expected.byteLength, message)
  for (let i = 0; i < actual.byteLength; i++) {
    assert(actual[i] === expected[i], message)
  }
}
31 |
32 | module.exports = { assertEqualsOneOf, checkEncoded }
33 |
--------------------------------------------------------------------------------
/karma.conf.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const webpack = require('webpack')
3 | const webpackConfig = require('./webpack.config.js')
4 |
5 | // Karma-Webpack needs these gone
6 | delete webpackConfig.entry
7 | delete webpackConfig.output.filename
8 |
9 | // Don't mix dist/
10 | webpackConfig.output.path = path.join(webpackConfig.output.path, 'test')
11 |
12 | // You're importing *a lot* of Node-specific code so the bundle is huge...
13 | webpackConfig.plugins = [
14 | new webpack.DefinePlugin({
15 | 'process.env.TEST_DIST': JSON.stringify(process.env.TEST_DIST) || '1',
16 | 'process.env.NODE_DEBUG': false,
17 | }),
18 | ...(webpackConfig.plugins || []),
19 | ]
20 |
21 | module.exports = function(config) {
22 | config.set({
23 | frameworks: ['webpack', 'mocha', 'karma-typescript'],
24 | files: [
25 | { pattern: 'test/*test*.js', watched: false },
26 | { pattern: 'test/*test*.ts' },
27 | ],
28 | preprocessors: {
29 | 'test/*test*.js': ['webpack'],
30 | 'test/*test*.ts': ['karma-typescript'],
31 | },
32 | webpack: webpackConfig,
33 | browsers: ['Chrome', 'Firefox', 'Safari'],
34 | singleRun: true,
35 | // Webpack can handle Typescript via ts-loader
36 | karmaTypescriptConfig: {
37 | tsconfig: './tsconfig.json',
38 | bundlerOptions: {
39 | resolve: {
40 | alias: { automerge: './src/automerge.js' }
41 | }
42 | },
43 | compilerOptions: {
44 | allowJs: true,
45 | sourceMap: true,
46 | }
47 | }
48 | })
49 | }
50 |
--------------------------------------------------------------------------------
/src/common.js:
--------------------------------------------------------------------------------
/** Returns true if `obj` is a non-null object (arrays included). */
function isObject(obj) {
  return obj !== null && typeof obj === 'object'
}

/**
 * Returns a shallow copy of the object `obj`, or an empty object if `obj`
 * is not an object. Faster than `Object.assign({}, obj)`.
 * https://jsperf.com/cloning-large-objects/1
 */
function copyObject(obj) {
  if (!isObject(obj)) return {}
  const copy = {}
  for (const key of Object.keys(obj)) copy[key] = obj[key]
  return copy
}
17 |
/**
 * Takes a string in the form that is used to identify operations (a counter
 * concatenated with an actor ID, separated by an `@` sign) and returns an
 * object `{counter, actorId}`. Throws a RangeError on malformed input.
 */
function parseOpId(opId) {
  const match = /^(\d+)@(.*)$/.exec(opId || '')
  if (match === null) {
    throw new RangeError(`Not a valid opId: ${opId}`)
  }
  const [, counter, actorId] = match
  return {counter: parseInt(counter, 10), actorId}
}
29 |
/**
 * Returns true if the two byte arrays contain the same data, false if not.
 * Both arguments must be Uint8Arrays; anything else raises a TypeError.
 */
function equalBytes(array1, array2) {
  const bothTyped = (array1 instanceof Uint8Array) && (array2 instanceof Uint8Array)
  if (!bothTyped) {
    throw new TypeError('equalBytes can only compare Uint8Arrays')
  }
  if (array1.byteLength !== array2.byteLength) return false
  return array1.every((byte, i) => byte === array2[i])
}
43 |
/**
 * Creates an array containing the value `null` repeated `length` times.
 * Uses Array.prototype.fill instead of a manual index loop.
 */
function createArrayOfNulls(length) {
  return new Array(length).fill(null)
}
52 |
// Utility helpers shared by the frontend and backend halves of the library
module.exports = {
  isObject, copyObject, parseOpId, equalBytes, createArrayOfNulls
}
56 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | ## Deprecation Notice
4 |
5 | Automerge now has a shiny new implementation at https://github.com/automerge/automerge. This repository is the original pure javascript implementation. All development effort has shifted to the new implementation which is written in Rust and so can easily be ported to other platforms.
6 |
7 | ## Original Readme
8 |
9 | 💬 [Join the Automerge Slack community](https://join.slack.com/t/automerge/shared_invite/zt-e4p3760n-kKh7r3KRH1YwwNfiZM8ktw)
10 |
11 | [](https://github.com/automerge/automerge/actions/workflows/automerge-ci.yml)
12 | [](https://app.saucelabs.com/open_sauce/user/automerge/builds)
13 |
14 | Automerge is a library of data structures for building collaborative applications in JavaScript.
15 |
16 | Please see [automerge.org](http://automerge.org/) for documentation.
17 |
18 | For a set of extensible examples in TypeScript, see [automerge-repo](https://github.com/automerge/automerge-repo)
19 |
20 | ## Setup
21 |
22 | If you're using npm, `npm install automerge`. If you're using yarn, `yarn add automerge`. Then you
23 | can import it with `require('automerge')` as in [the example below](#usage) (or
24 | `import * as Automerge from 'automerge'` if using ES2015 or TypeScript).
25 |
26 | Otherwise, clone this repository, and then you can use the following commands:
27 |
28 | - `yarn install` — installs dependencies.
29 | - `yarn test` — runs the test suite in Node.
30 | - `yarn run browsertest` — runs the test suite in web browsers.
31 | - `yarn build` — creates a bundled JS file `dist/automerge.js` for web browsers. It includes the
32 |   dependencies and is set up so that you can load it through a script tag.
33 |
34 | ## Meta
35 |
36 | Copyright 2017–2021, the Automerge contributors. Released under the terms of the
37 | MIT license (see `LICENSE`).
38 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "automerge",
3 | "version": "1.0.1-preview.7",
4 | "description": "Data structures for building collaborative applications",
5 | "main": "src/automerge.js",
6 | "browser": "dist/automerge.js",
7 | "types": "@types/automerge/index.d.ts",
8 | "scripts": {
9 | "browsertest": "karma start",
10 | "coverage": "nyc --reporter=html --reporter=text mocha",
11 | "test": "mocha",
12 | "testwasm": "mocha --file test/wasm.js",
13 | "build": "webpack && copyfiles --flat @types/automerge/index.d.ts dist",
14 | "prepublishOnly": "npm run-script build",
15 | "lint": "eslint ."
16 | },
17 | "author": "",
18 | "repository": {
19 | "type": "git",
20 | "url": "git+ssh://git@github.com/automerge/automerge.git"
21 | },
22 | "bugs": {
23 | "url": "https://github.com/automerge/automerge/issues"
24 | },
25 | "homepage": "https://github.com/automerge/automerge",
26 | "license": "MIT",
27 | "files": [
28 | "/src/**",
29 | "/frontend/**",
30 | "/backend/**",
31 | "/test/**",
32 | "/@types/**",
33 | "/dist/**",
34 | "/img/**",
35 | "/*.md",
36 | "/LICENSE",
37 | "/.babelrc",
38 | "/.eslintrc.json",
39 | "/.mocharc.yaml",
40 | "/karma.*.js",
41 | "/tsconfig.json",
42 | "/webpack.config.js"
43 | ],
44 | "dependencies": {
45 | "fast-sha256": "^1.3.0",
46 | "pako": "^2.0.3",
47 | "uuid": "^3.4.0"
48 | },
49 | "devDependencies": {
50 | "@types/mocha": "^8.2.1",
51 | "@types/node": "^14.14.31",
52 | "copyfiles": "^2.4.1",
53 | "eslint": "^7.24.0",
54 | "eslint-plugin-compat": "^3.9.0",
55 | "karma": "^6.1.1",
56 | "karma-chrome-launcher": "^3.1.0",
57 | "karma-firefox-launcher": "^2.1.0",
58 | "karma-mocha": "^2.0.1",
59 | "karma-safari-launcher": "^1.0.0",
60 | "karma-sauce-launcher": "^4.3.5",
61 | "karma-typescript": "^5.4.0",
62 | "karma-webpack": "^5.0.0",
63 | "mocha": "^8.3.0",
64 | "nyc": "^15.1.0",
65 | "sinon": "^9.2.4",
66 | "ts-node": "^9.1.1",
67 | "tsconfig-paths": "^3.9.0",
68 | "typescript": "^4.1.5",
69 | "watchify": "^4.0.0",
70 | "webpack": "^5.24.0",
71 | "webpack-cli": "^4.5.0"
72 | },
73 | "resolutions": {
74 | "karma-sauce-launcher/selenium-webdriver": "4.0.0-alpha.7"
75 | },
76 | "browserslist": {
77 | "production": [
78 | "defaults",
79 | "not IE 11",
80 | "maintained node versions"
81 | ],
82 | "web": [
83 | "defaults",
84 | "not IE 11"
85 | ]
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/.github/workflows/automerge-ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 | on: [push, pull_request]
3 |
4 | jobs:
5 | node-build:
6 | runs-on: ubuntu-latest
7 | strategy:
8 | matrix:
9 | node-version: [12.x, 14.x, 16.x]
10 | steps:
11 | - name: Check out repo
12 | uses: actions/checkout@v2
13 | - name: Use Node.js ${{ matrix.node-version }}
14 | uses: actions/setup-node@v2
15 | with:
16 | node-version: ${{ matrix.node-version }}
17 | cache: 'yarn'
18 | - name: Install dependencies
19 | run: yarn
20 | - name: ESLint
21 | run: yarn lint
22 | - name: Test suite
23 | run: yarn test
24 | - name: Bundle
25 | run: yarn build
26 | - name: Test suite using bundle
27 | run: TEST_DIST=1 yarn test
28 | - name: Load bundled code
29 | run: node -e "const Automerge = require(\"./dist/automerge\")"
30 |
31 | # browsertest:
32 | # runs-on: ubuntu-latest
33 | # # Don't run this job when triggered from a forked repository, since the secrets
34 | # # (Sauce Labs credentials) are not available in that context
35 | # if: ${{ github.repository == 'automerge/automerge' }}
36 | # steps:
37 | # - uses: actions/checkout@v2
38 | # - name: Use Node.js
39 | # uses: actions/setup-node@v2
40 | # with:
41 | # node-version: 16.x
42 | # cache: 'yarn'
43 | # - name: Install dependencies
44 | # run: yarn
45 | # - name: Bundle
46 | # run: yarn build
47 | # - name: Sauce Connect
48 | # uses: saucelabs/sauce-connect-action@v1
49 | # with:
50 | # username: ${{ secrets.SAUCE_USERNAME }}
51 | # accessKey: ${{ secrets.SAUCE_ACCESS_KEY }}
52 | # tunnelIdentifier: github-action-tunnel
53 | # scVersion: 4.7.0
54 | # - name: Run browser tests
55 | # run: node_modules/.bin/karma start karma.sauce.js
56 | # env:
57 | # SAUCE_USERNAME: ${{secrets.SAUCE_USERNAME}}
58 | # SAUCE_ACCESS_KEY: ${{secrets.SAUCE_ACCESS_KEY}}
59 |
60 | npm-publish:
61 | name: npm-publish
62 | if: ${{ github.repository == 'automerge/automerge' && github.ref == 'refs/heads/main' }}
63 | # needs: [ node-build, browsertest ]
64 | needs: [ node-build ]
65 | runs-on: ubuntu-latest
66 | steps:
67 | - name: Check out repo
68 | uses: actions/checkout@v2
69 | - name: Use Node.js
70 | uses: actions/setup-node@v2
71 | with:
72 | node-version: 16
73 | - name: Install dependencies
74 | run: yarn install
75 | - name: npm publish if version has been updated
76 | uses: JS-DevTools/npm-publish@v1
77 | with:
78 | token: ${{ secrets.NPM_AUTH_TOKEN }}
79 | check-version: true
80 |
--------------------------------------------------------------------------------
/karma.sauce.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const webpack = require('webpack')
3 | const webpackConfig = require("./webpack.config.js")
4 |
5 | // Karma-Webpack needs these gone
6 | delete webpackConfig.entry
7 | delete webpackConfig.output.filename
8 |
9 | // Don't mix dist/
10 | webpackConfig.output.path = path.join(webpackConfig.output.path, 'test')
11 |
12 | // You're importing *a lot* of Node-specific code so the bundle is huge...
13 | webpackConfig.plugins = [
14 | new webpack.DefinePlugin({
15 | 'process.env.TEST_DIST': JSON.stringify(process.env.TEST_DIST) || '1',
16 | 'process.env.NODE_DEBUG': false,
17 | }),
18 | ...(webpackConfig.plugins || []),
19 | ]
20 |
21 | module.exports = function(config) {
22 | if (!process.env.SAUCE_USERNAME || !process.env.SAUCE_ACCESS_KEY) {
23 | console.log('Make sure the SAUCE_USERNAME and SAUCE_ACCESS_KEY environment variables are set.') // eslint-disable-line
24 | process.exit(1)
25 | }
26 |
27 | // Browsers to run on Sauce Labs
28 | // Check out https://saucelabs.com/platforms for all browser/OS combos
29 | const customLaunchers = {
30 | sl_chrome: {
31 | base: 'SauceLabs',
32 | browserName: 'chrome',
33 | platform: 'Windows 10',
34 | version: 'latest'
35 | },
36 | sl_firefox: {
37 | base: 'SauceLabs',
38 | browserName: 'firefox',
39 | platform: 'Windows 10',
40 | version: 'latest'
41 | },
42 | sl_edge: {
43 | base: 'SauceLabs',
44 | browserName: 'MicrosoftEdge',
45 | platform: 'Windows 10',
46 | version: 'latest'
47 | },
48 | sl_safari_mac: {
49 | base: 'SauceLabs',
50 | browserName: 'safari',
51 | platform: 'macOS 10.15',
52 | version: 'latest'
53 | }
54 | }
55 |
56 | config.set({
57 | frameworks: ['webpack', 'mocha', 'karma-typescript'],
58 | files: [
59 | { pattern: 'test/*test*.js', watched: false },
60 | { pattern: 'test/*test*.ts' },
61 | ],
62 | preprocessors: {
63 | 'test/*test*.js': ['webpack'],
64 | 'test/*test*.ts': ['karma-typescript'],
65 | },
66 | webpack: webpackConfig,
67 | karmaTypescriptConfig: {
68 | tsconfig: './tsconfig.json',
69 | bundlerOptions: {
70 | resolve: {
71 | alias: { automerge: './src/automerge.js' }
72 | }
73 | },
74 | compilerOptions: {
75 | allowJs: true,
76 | sourceMap: true,
77 | }
78 | },
79 | port: 9876,
80 | captureTimeout: 120000,
81 | sauceLabs: {
82 | testName: 'Automerge unit tests',
83 | startConnect: false, // Sauce Connect is started in GitHub action
84 | tunnelIdentifier: 'github-action-tunnel'
85 | },
86 | customLaunchers,
87 | browsers: Object.keys(customLaunchers),
88 | reporters: ['progress', 'saucelabs'],
89 | singleRun: true
90 | })
91 | }
92 |
--------------------------------------------------------------------------------
/frontend/counter.js:
--------------------------------------------------------------------------------
/**
 * A convergent counter CRDT: an integer value that is only ever mutated by
 * increment and decrement operations. Because integer addition commutes,
 * concurrent updates trivially converge.
 */
class Counter {
  constructor(value) {
    // A missing/undefined initial value starts the counter at zero
    this.value = value || 0
    Object.freeze(this)
  }

  /**
   * A peculiar JavaScript language feature from its early days: because this
   * `valueOf()` returns a number, numeric operators such as `x + 1` or
   * `x < 4` work directly on a Counter `x`, and so does string coercion via
   * concatenation (`x + ''`).
   * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf
   */
  valueOf() {
    return this.value
  }

  /**
   * Renders the counter value as a decimal string, e.g. when joining into a
   * string (`['value: ', x].join('')`) or interpolating (`value: ${x}`).
   */
  toString() {
    return this.valueOf().toString()
  }

  /**
   * Serializes the counter as a plain integer, so a JSON serialization of an
   * Automerge document represents the counter simply as a number.
   */
  toJSON() {
    return this.value
  }
}
41 |
/**
 * Counter variant handed out when a counter is accessed inside a change
 * callback; mutations are recorded in the change context.
 */
class WriteableCounter extends Counter {
  /**
   * Increases the value of the counter by `delta` (defaulting to 1 when
   * `delta` is not a number), records the operation in the change context,
   * and returns the updated value.
   */
  increment(delta) {
    const amount = typeof delta === 'number' ? delta : 1
    this.context.increment(this.path, this.key, amount)
    this.value += amount
    return this.value
  }

  /**
   * Decreases the value of the counter by `delta` (defaulting to 1),
   * implemented as a negative increment.
   */
  decrement(delta) {
    return this.increment(typeof delta === 'number' ? -delta : -1)
  }
}
66 |
/**
 * Builds a `WriteableCounter` for use in a change callback. `context` is the
 * proxy context that keeps track of the mutations, `objectId` is the ID of
 * the object containing the counter, and `key` is the property name (key in
 * map, or index in list) where the counter is located. The prototype is
 * attached directly so the (freezing) Counter constructor never runs.
 */
function getWriteableCounter(value, context, path, objectId, key) {
  const counter = Object.create(WriteableCounter.prototype)
  Object.assign(counter, { value, context, path, objectId, key })
  return counter
}
83 |
84 | module.exports = { Counter, getWriteableCounter }
85 |
--------------------------------------------------------------------------------
/img/lockup.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/img/sign.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/test/columnar_test.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const { checkEncoded } = require('./helpers')
3 | const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
4 | const { encodeChange, decodeChange } = require('../backend/columnar')
5 |
6 | describe('change encoding', () => {
7 | it('should encode text edits', () => {
8 | const change1 = {actor: 'aaaa', seq: 1, startOp: 1, time: 9, message: '', deps: [], ops: [
9 | {action: 'makeText', obj: '_root', key: 'text', insert: false, pred: []},
10 | {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'h', pred: []},
11 | {action: 'del', obj: '1@aaaa', elemId: '2@aaaa', insert: false, pred: ['2@aaaa']},
12 | {action: 'set', obj: '1@aaaa', elemId: '_head', insert: true, value: 'H', pred: []},
13 | {action: 'set', obj: '1@aaaa', elemId: '4@aaaa', insert: true, value: 'i', pred: []}
14 | ]}
15 | checkEncoded(encodeChange(change1), [
16 | 0x85, 0x6f, 0x4a, 0x83, // magic bytes
17 | 0xe2, 0xbd, 0xfb, 0xf5, // checksum
18 | 1, 94, 0, 2, 0xaa, 0xaa, // chunkType: change, length, deps, actor 'aaaa'
19 | 1, 1, 9, 0, 0, // seq, startOp, time, message, actor list
20 | 12, 0x01, 4, 0x02, 4, // column count, objActor, objCtr
21 | 0x11, 8, 0x13, 7, 0x15, 8, // keyActor, keyCtr, keyStr
22 | 0x34, 4, 0x42, 6, // insert, action
23 | 0x56, 6, 0x57, 3, // valLen, valRaw
24 | 0x70, 6, 0x71, 2, 0x73, 2, // predNum, predActor, predCtr
25 | 0, 1, 4, 0, // objActor column: null, 0, 0, 0, 0
26 | 0, 1, 4, 1, // objCtr column: null, 1, 1, 1, 1
27 | 0, 2, 0x7f, 0, 0, 1, 0x7f, 0, // keyActor column: null, null, 0, null, 0
28 | 0, 1, 0x7c, 0, 2, 0x7e, 4, // keyCtr column: null, 0, 2, 0, 4
29 | 0x7f, 4, 0x74, 0x65, 0x78, 0x74, 0, 4, // keyStr column: 'text', null, null, null, null
30 | 1, 1, 1, 2, // insert column: false, true, false, true, true
31 | 0x7d, 4, 1, 3, 2, 1, // action column: makeText, set, del, set, set
32 | 0x7d, 0, 0x16, 0, 2, 0x16, // valLen column: 0, 0x16, 0, 0x16, 0x16
33 | 0x68, 0x48, 0x69, // valRaw column: 'h', 'H', 'i'
34 | 2, 0, 0x7f, 1, 2, 0, // predNum column: 0, 0, 1, 0, 0
35 | 0x7f, 0, // predActor column: 0
36 | 0x7f, 2 // predCtr column: 2
37 | ])
38 | const decoded = decodeChange(encodeChange(change1))
39 | assert.deepStrictEqual(decoded, Object.assign({hash: decoded.hash}, change1))
40 | })
41 |
42 | it('should require strict ordering of preds', () => {
43 | const change = new Uint8Array([
44 | 133, 111, 74, 131, 31, 229, 112, 44, 1, 105, 1, 58, 30, 190, 100, 253, 180, 180, 66, 49, 126,
45 | 81, 142, 10, 3, 35, 140, 189, 231, 34, 145, 57, 66, 23, 224, 149, 64, 97, 88, 140, 168, 194,
46 | 229, 4, 244, 209, 58, 138, 67, 140, 1, 152, 236, 250, 2, 0, 1, 4, 55, 234, 66, 242, 8, 21, 11,
47 | 52, 1, 66, 2, 86, 3, 87, 10, 112, 2, 113, 3, 115, 4, 127, 9, 99, 111, 109, 109, 111, 110, 86,
48 | 97, 114, 1, 127, 1, 127, 166, 1, 52, 48, 57, 49, 52, 57, 52, 53, 56, 50, 127, 2, 126, 0, 1,
49 | 126, 139, 1, 0
50 | ])
51 | assert.throws(() => { decodeChange(change) }, /operation IDs are not in ascending order/)
52 | })
53 |
  // A change may carry extra bytes after the encoded columns; Automerge must
  // preserve such trailing bytes both through decode/re-encode of a single
  // change and through a full document save/load round trip.
  describe('with trailing bytes', () => {
    let change = new Uint8Array([
      0x85, 0x6f, 0x4a, 0x83, // magic bytes
      0xb2, 0x98, 0x9e, 0xa9, // checksum
      1, 61, 0, 2, 0x12, 0x34, // chunkType: change, length, deps, actor '1234'
      1, 1, 252, 250, 220, 255, 5, // seq, startOp, time
      14, 73, 110, 105, 116, 105, 97, 108, 105, 122, 97, 116, 105, 111, 110, // message: 'Initialization'
      0, 6, // actor list, column count
      0x15, 3, 0x34, 1, 0x42, 2, // keyStr, insert, action
      0x56, 2, 0x57, 1, 0x70, 2, // valLen, valRaw, predNum
      0x7f, 1, 0x78, // keyStr: 'x'
      1, // insert: false
      0x7f, 1, // action: set
      0x7f, 19, // valLen: 1 byte of type uint
      1, // valRaw: 1
      0x7f, 0, // predNum: 0
      0, 1, 2, 3, 4, 5, 6, 7, 8, 9 // 10 trailing bytes
    ])

    it('should allow decoding and re-encoding', () => {
      // NOTE: This calls the JavaScript encoding and decoding functions, even when the WebAssembly
      // backend is loaded. Should the wasm backend export its own functions for testing?
      checkEncoded(change, encodeChange(decodeChange(change)))
    })

    it('should be preserved in document encoding', () => {
      const [doc] = Automerge.applyChanges(Automerge.init(), [change])
      const [reconstructed] = Automerge.getAllChanges(Automerge.load(Automerge.save(doc)))
      checkEncoded(change, reconstructed)
    })
  })
85 | })
86 |
--------------------------------------------------------------------------------
/frontend/observable.js:
--------------------------------------------------------------------------------
1 | const { OBJECT_ID, CONFLICTS } = require('./constants')
2 |
/**
 * Allows an application to register a callback when a particular object in
 * a document changes.
 *
 * NOTE: This API is experimental and may change without warning in minor releases.
 */
class Observable {
  constructor() {
    this.observers = {} // map from objectId to array of observers for that object
  }

  /**
   * Called by an Automerge document when `patch` is applied. `before` is the
   * state of the document before the patch, and `after` is the state after
   * applying it. `local` is true if the update is a result of locally calling
   * `Automerge.change()`, and false otherwise. `changes` is an array of
   * changes that were applied to the document (as Uint8Arrays).
   */
  patchCallback(patch, before, after, local, changes) {
    this._objectUpdate(patch.diffs, before, after, local, changes)
  }

  /**
   * Recursively walks a patch and calls the callbacks for all objects that
   * appear in the patch. `diff` is the subtree of the patch currently being
   * visited, and `before`/`after` are the corresponding subtrees of the
   * document state before and after the patch.
   */
  _objectUpdate(diff, before, after, local, changes) {
    if (!diff.objectId) return
    // Notify any observers registered directly on this object.
    if (this.observers[diff.objectId]) {
      for (let callback of this.observers[diff.objectId]) {
        callback(diff, before, after, local, changes)
      }
    }

    // Recurse into child objects. For maps we look up the old/new child
    // values in the CONFLICTS metadata, keyed by property name and opId.
    if (diff.type === 'map' && diff.props) {
      for (const propName of Object.keys(diff.props)) {
        for (const opId of Object.keys(diff.props[propName])) {
          this._objectUpdate(diff.props[propName][opId],
            before && before[CONFLICTS] && before[CONFLICTS][propName] && before[CONFLICTS][propName][opId],
            after && after[CONFLICTS] && after[CONFLICTS][propName] && after[CONFLICTS][propName][opId],
            local, changes)
        }
      }

    } else if (diff.type === 'table' && diff.props) {
      // For tables, props are keyed by row ID; rows are looked up via byId().
      for (const rowId of Object.keys(diff.props)) {
        for (const opId of Object.keys(diff.props[rowId])) {
          this._objectUpdate(diff.props[rowId][opId],
            before && before.byId(rowId),
            after && after.byId(rowId),
            local, changes)
        }
      }

    } else if (diff.type === 'list' && diff.edits) {
      // Edit indexes refer to positions in the *after* state. `offset` tracks
      // the net number of elements inserted (negative) or removed (positive)
      // so far, so that `edit.index + offset` is the matching index in the
      // *before* state when processing an update.
      let offset = 0
      for (const edit of diff.edits) {
        if (edit.action === 'insert') {
          offset -= 1
          // A freshly inserted object has no `before` state.
          this._objectUpdate(edit.value, undefined,
            after && after[CONFLICTS] && after[CONFLICTS][edit.index] && after[CONFLICTS][edit.index][edit.elemId],
            local, changes)
        } else if (edit.action === 'multi-insert') {
          offset -= edit.values.length
        } else if (edit.action === 'update') {
          this._objectUpdate(edit.value,
            before && before[CONFLICTS] && before[CONFLICTS][edit.index + offset] &&
              before[CONFLICTS][edit.index + offset][edit.opId],
            after && after[CONFLICTS] && after[CONFLICTS][edit.index] && after[CONFLICTS][edit.index][edit.opId],
            local, changes)
        } else if (edit.action === 'remove') {
          offset += edit.count
        }
      }

    } else if (diff.type === 'text' && diff.edits) {
      // Same index-offset bookkeeping as for lists, but Text elements are
      // accessed with .get() rather than through CONFLICTS.
      let offset = 0
      for (const edit of diff.edits) {
        if (edit.action === 'insert') {
          offset -= 1
          this._objectUpdate(edit.value, undefined, after && after.get(edit.index), local, changes)
        } else if (edit.action === 'multi-insert') {
          offset -= edit.values.length
        } else if (edit.action === 'update') {
          this._objectUpdate(edit.value,
            before && before.get(edit.index + offset),
            after && after.get(edit.index),
            local, changes)
        } else if (edit.action === 'remove') {
          offset += edit.count
        }
      }
    }
  }

  /**
   * Call this to register a callback that will get called whenever a particular
   * object in a document changes. The callback is passed five arguments: the
   * part of the patch describing the update to that object, the old state of
   * the object, the new state of the object, a boolean that is true if the
   * change is the result of calling `Automerge.change()` locally, and the array
   * of binary changes applied to the document.
   *
   * Throws a TypeError if `object` does not carry an Automerge objectId, i.e.
   * if it is not (yet) part of a document.
   */
  observe(object, callback) {
    const objectId = object[OBJECT_ID]
    if (!objectId) throw new TypeError('The observed object must be part of an Automerge document')
    if (!this.observers[objectId]) this.observers[objectId] = []
    this.observers[objectId].push(callback)
  }
}

module.exports = { Observable }
115 |
--------------------------------------------------------------------------------
/frontend/text.js:
--------------------------------------------------------------------------------
1 | const { OBJECT_ID } = require('./constants')
2 | const { isObject } = require('../src/common')
3 |
/**
 * An editable sequence of characters (and optionally inline objects),
 * supporting fine-grained concurrent editing. Elements are stored in
 * `this.elems` as objects of the form `{value, elemId}`.
 */
class Text {
  constructor (text) {
    // NOTE: the constructor returns the result of instantiateText(), so
    // `new Text(...)` yields an object created via Object.create(), not
    // `this`. This keeps construction consistent with internal instantiation.
    if (typeof text === 'string') {
      const elems = [...text].map(value => ({value}))
      return instantiateText(undefined, elems) // eslint-disable-line
    } else if (Array.isArray(text)) {
      const elems = text.map(value => ({value}))
      return instantiateText(undefined, elems) // eslint-disable-line
    } else if (text === undefined) {
      return instantiateText(undefined, []) // eslint-disable-line
    } else {
      throw new TypeError(`Unsupported initial value for Text: ${text}`)
    }
  }

  // Number of elements (characters plus inline objects) in the text.
  get length () {
    return this.elems.length
  }

  /**
   * Returns the value at position `index`. Inside a change block (when
   * `this.context` is set), object values are returned as context-managed
   * instances so that mutations are tracked.
   */
  get (index) {
    const value = this.elems[index].value
    if (this.context && isObject(value)) {
      const objectId = value[OBJECT_ID]
      const path = this.path.concat([{key: index, objectId}])
      return this.context.instantiateObject(path, objectId)
    } else {
      return value
    }
  }

  // Returns the unique element ID of the element at position `index`.
  getElemId (index) {
    return this.elems[index].elemId
  }

  /**
   * Iterates over the text elements character by character, including any
   * inline objects.
   */
  [Symbol.iterator] () {
    let elems = this.elems, index = -1
    return {
      next () {
        index += 1
        if (index < elems.length) {
          return {done: false, value: elems[index].value}
        } else {
          return {done: true}
        }
      }
    }
  }

  /**
   * Returns the content of the Text object as a simple string, ignoring any
   * non-character elements.
   */
  toString() {
    // Concatting to a string is faster than creating an array and then
    // .join()ing for small (<100KB) arrays.
    // https://jsperf.com/join-vs-loop-w-type-test
    let str = ''
    for (const elem of this.elems) {
      if (typeof elem.value === 'string') str += elem.value
    }
    return str
  }

  /**
   * Returns the content of the Text object as a sequence of strings,
   * interleaved with non-character elements.
   *
   * For example, the value ['a', 'b', {x: 3}, 'c', 'd'] has spans:
   * => ['ab', {x: 3}, 'cd']
   */
  toSpans() {
    let spans = []
    let chars = ''
    for (const elem of this.elems) {
      if (typeof elem.value === 'string') {
        chars += elem.value
      } else {
        // Flush the accumulated run of characters before the object element.
        if (chars.length > 0) {
          spans.push(chars)
          chars = ''
        }
        spans.push(elem.value)
      }
    }
    // Flush any trailing run of characters.
    if (chars.length > 0) {
      spans.push(chars)
    }
    return spans
  }

  /**
   * Returns the content of the Text object as a simple string, so that the
   * JSON serialization of an Automerge document represents text nicely.
   */
  toJSON() {
    return this.toString()
  }

  /**
   * Returns a writeable instance of this object. This instance is returned when
   * the text object is accessed within a change callback. `context` is the
   * proxy context that keeps track of the mutations.
   */
  getWriteable(context, path) {
    if (!this[OBJECT_ID]) {
      throw new RangeError('getWriteable() requires the objectId to be set')
    }

    const instance = instantiateText(this[OBJECT_ID], this.elems)
    instance.context = context
    instance.path = path
    return instance
  }

  /**
   * Updates the list item at position `index` to a new value `value`.
   * Only permitted inside a change block, or on a Text object that is not
   * yet part of a document (no objectId).
   */
  set (index, value) {
    if (this.context) {
      this.context.setListIndex(this.path, index, value)
    } else if (!this[OBJECT_ID]) {
      this.elems[index].value = value
    } else {
      throw new TypeError('Automerge.Text object cannot be modified outside of a change block')
    }
    return this
  }

  /**
   * Inserts new list items `values` starting at position `index`.
   * Only permitted inside a change block, or on a Text object that is not
   * yet part of a document (no objectId).
   */
  insertAt(index, ...values) {
    if (this.context) {
      this.context.splice(this.path, index, 0, values)
    } else if (!this[OBJECT_ID]) {
      this.elems.splice(index, 0, ...values.map(value => ({value})))
    } else {
      throw new TypeError('Automerge.Text object cannot be modified outside of a change block')
    }
    return this
  }

  /**
   * Deletes `numDelete` list items starting at position `index`.
   * if `numDelete` is not given, one item is deleted.
   */
  deleteAt(index, numDelete = 1) {
    if (this.context) {
      this.context.splice(this.path, index, numDelete, [])
    } else if (!this[OBJECT_ID]) {
      this.elems.splice(index, numDelete)
    } else {
      throw new TypeError('Automerge.Text object cannot be modified outside of a change block')
    }
    return this
  }
}
165 |
// Read-only methods are delegated to the JavaScript built-in Array: each one
// materializes the text as a plain array (via the iterator) and invokes the
// corresponding Array method on it.
const DELEGATED_ARRAY_METHODS = [
  'concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
  'indexOf', 'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
  'slice', 'some', 'toLocaleString'
]
DELEGATED_ARRAY_METHODS.forEach(methodName => {
  Text.prototype[methodName] = function (...args) {
    return [...this][methodName](...args)
  }
})
175 |
/**
 * Creates a Text instance without running the constructor, attaching the
 * given `objectId` and element array directly to a bare object that shares
 * the Text prototype.
 */
function instantiateText(objectId, elems) {
  const text = Object.create(Text.prototype)
  text[OBJECT_ID] = objectId
  text.elems = elems
  return text
}

module.exports = { Text, instantiateText }
184 |
--------------------------------------------------------------------------------
/src/automerge.js:
--------------------------------------------------------------------------------
1 | const uuid = require('./uuid')
2 | const Frontend = require('../frontend')
3 | const { OPTIONS } = require('../frontend/constants')
4 | const { encodeChange, decodeChange } = require('../backend/columnar')
5 | const { isObject } = require('./common')
6 | let backend = require('../backend') // mutable: can be overridden with setDefaultBackend()
7 |
8 | /**
9 | * Automerge.* API
10 | * The functions in this file constitute the publicly facing Automerge API which combines
11 | * the features of the Frontend (a document interface) and the backend (CRDT operations)
12 | */
13 |
/**
 * Creates an empty Automerge document. `options` may be an actorId string,
 * an options object, or undefined; anything else raises a TypeError.
 */
function init(options) {
  let opts
  if (typeof options === 'string') {
    opts = {actorId: options}
  } else if (options === undefined) {
    opts = {}
  } else if (isObject(options)) {
    opts = options
  } else {
    throw new TypeError(`Unsupported options for init(): ${options}`)
  }
  // The currently selected backend is injected so the frontend can use it.
  return Frontend.init(Object.assign({backend}, opts))
}
24 |
/**
 * Returns a new document object initialized, in a single change titled
 * 'Initialization', with the properties of `initialState`. `options` is
 * passed through to init().
 */
function from(initialState, options) {
  return change(init(options), {message: 'Initialization'},
    doc => Object.assign(doc, initialState))
}
32 |
/**
 * Runs `callback` against a mutable view of `doc` and returns the updated
 * document, discarding the change request that Frontend.change also returns.
 */
function change(doc, options, callback) {
  const result = Frontend.change(doc, options, callback)
  return result[0]
}
37 |
/**
 * Records a change that contains no operations, and returns the updated
 * document (the change request returned by the frontend is discarded).
 */
function emptyChange(doc, options) {
  const result = Frontend.emptyChange(doc, options)
  return result[0]
}
42 |
/**
 * Returns an independent copy of `doc`, built by cloning the backend state
 * and replaying its full patch into a freshly initialized document.
 */
function clone(doc, options = {}) {
  const clonedState = backend.clone(Frontend.getBackendState(doc, 'clone'))
  const patch = backend.getPatch(clonedState)
  return applyPatch(init(options), patch, clonedState, [], options)
}
47 |
/**
 * Releases the backend resources held by `doc` (relevant for backends that
 * manage their own memory, such as the wasm backend).
 */
function free(doc) {
  const state = Frontend.getBackendState(doc, 'free')
  backend.free(state)
}
51 |
/**
 * Deserializes a document from the binary `data` produced by save(), by
 * loading the backend state and applying its full patch to a new document.
 */
function load(data, options = {}) {
  const loadedState = backend.load(data)
  const patch = backend.getPatch(loadedState)
  return applyPatch(init(options), patch, loadedState, [data], options)
}
56 |
/**
 * Serializes `doc` into its binary representation via the backend.
 */
function save(doc) {
  const state = Frontend.getBackendState(doc, 'save')
  return backend.save(state)
}
60 |
/**
 * Applies to `localDoc` all changes present in `remoteDoc` but not yet in
 * `localDoc`, and returns the updated local document.
 */
function merge(localDoc, remoteDoc) {
  const local = Frontend.getBackendState(localDoc, 'merge')
  const remote = Frontend.getBackendState(remoteDoc, 'merge', 'second')
  const added = backend.getChangesAdded(local, remote)
  return applyChanges(localDoc, added)[0]
}
68 |
/**
 * Returns the binary changes that are in `newDoc` but not in `oldDoc`,
 * computed relative to the heads of `oldDoc`.
 */
function getChanges(oldDoc, newDoc) {
  const before = Frontend.getBackendState(oldDoc, 'getChanges')
  const after = Frontend.getBackendState(newDoc, 'getChanges', 'second')
  return backend.getChanges(after, backend.getHeads(before))
}
74 |
/**
 * Returns every binary change in the history of `doc`.
 */
function getAllChanges(doc) {
  const state = Frontend.getBackendState(doc, 'getAllChanges')
  return backend.getAllChanges(state)
}
78 |
/**
 * Internal helper: applies `patch` to the frontend state of `doc`, then
 * invokes the patchCallback (taken from `options` if present, otherwise from
 * the document's own options) with the patch, the before/after documents,
 * local=false, and the binary `changes`. Returns the new document.
 */
function applyPatch(doc, patch, backendState, changes, options) {
  const updated = Frontend.applyPatch(doc, patch, backendState)
  const callback = options.patchCallback || doc[OPTIONS].patchCallback
  if (callback) callback(patch, doc, updated, false, changes)
  return updated
}
87 |
/**
 * Applies the given binary `changes` to `doc`. Returns a two-element array
 * of the updated document and the patch that was applied.
 */
function applyChanges(doc, changes, options = {}) {
  const before = Frontend.getBackendState(doc, 'applyChanges')
  const [after, patch] = backend.applyChanges(before, changes)
  return [applyPatch(doc, patch, after, changes, options), patch]
}
93 |
/**
 * Recursively compares two values for deep equality: non-objects are
 * compared with strict equality, and objects are equal when they have the
 * same sorted key set and deeply equal values for each key.
 */
function equals(val1, val2) {
  if (!isObject(val1) || !isObject(val2)) return val1 === val2
  const keys1 = Object.keys(val1).sort()
  const keys2 = Object.keys(val2).sort()
  if (keys1.length !== keys2.length) return false
  return keys1.every((key, i) => key === keys2[i] && equals(val1[key], val2[key]))
}
104 |
/**
 * Returns the history of `doc` as an array of objects with lazily computed
 * `change` (the decoded change) and `snapshot` (the document state as of
 * that change) properties, one per change in the document's history.
 */
function getHistory(doc) {
  const actor = Frontend.getActorId(doc)
  const binaryChanges = getAllChanges(doc)
  return binaryChanges.map((binaryChange, index) => ({
    get change () {
      return decodeChange(binaryChange)
    },
    get snapshot () {
      // Rebuild the document from the first (index + 1) changes on demand.
      const state = backend.loadChanges(backend.init(), binaryChanges.slice(0, index + 1))
      return Frontend.applyPatch(init(actor), backend.getPatch(state), state)
    }
  }))
}
119 |
/**
 * Delegates to the backend to compute the next sync message for a peer,
 * given our document and the current `syncState` for that peer.
 */
function generateSyncMessage(doc, syncState) {
  const state = Frontend.getBackendState(doc, 'generateSyncMessage')
  return backend.generateSyncMessage(state, syncState)
}
124 |
/**
 * Processes a sync `message` received from a peer. Returns a three-element
 * array of the (possibly updated) document, the new sync state for that
 * peer, and the patch that was applied (undefined/null if none).
 */
function receiveSyncMessage(doc, oldSyncState, message) {
  const before = Frontend.getBackendState(doc, 'receiveSyncMessage')
  const [after, syncState, patch] = backend.receiveSyncMessage(before, oldSyncState, message)
  if (!patch) return [doc, syncState, patch]

  // The patchCallback is passed as argument all changes that are applied.
  // We only decode them out of the sync message when a callback is present.
  const changes = doc[OPTIONS].patchCallback
    ? backend.decodeSyncMessage(message).changes
    : null
  return [applyPatch(doc, patch, after, changes, {}), syncState, patch]
}
138 |
/**
 * Returns a fresh per-peer sync state for use with generateSyncMessage()
 * and receiveSyncMessage().
 */
function initSyncState() {
  return backend.initSyncState()
}
142 |
/**
 * Replaces the default backend implementation with a different one.
 * This allows you to switch to using the Rust/WebAssembly implementation.
 */
function setDefaultBackend(backendImplementation) {
  backend = backendImplementation
}
150 |
// Public API of the module. `Backend` is exposed as a getter because the
// default backend can be swapped out at runtime via setDefaultBackend().
module.exports = {
  init, from, change, emptyChange, clone, free,
  load, save, merge, getChanges, getAllChanges, applyChanges,
  encodeChange, decodeChange, equals, getHistory, uuid,
  Frontend, setDefaultBackend, generateSyncMessage, receiveSyncMessage, initSyncState,
  get Backend() { return backend }
}

// Re-export selected Frontend functions and datatypes at the top level.
for (let name of ['getObjectId', 'getObjectById', 'getActorId',
     'setActorId', 'getConflicts', 'getLastLocalChange',
     'Text', 'Table', 'Counter', 'Observable', 'Int', 'Uint', 'Float64']) {
  module.exports[name] = Frontend[name]
}
164 |
--------------------------------------------------------------------------------
/test/table_test.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
3 | const Frontend = Automerge.Frontend
4 | const uuid = require('../src/uuid')
5 | const { assertEqualsOneOf } = require('./helpers')
6 |
7 | // Example data
8 | const DDIA = {
9 | authors: ['Kleppmann, Martin'],
10 | title: 'Designing Data-Intensive Applications',
11 | isbn: '1449373321'
12 | }
13 | const RSDP = {
14 | authors: ['Cachin, Christian', 'Guerraoui, Rachid', 'Rodrigues, Luís'],
15 | title: 'Introduction to Reliable and Secure Distributed Programming',
16 | isbn: '3-642-15259-7'
17 | }
18 |
19 | describe('Automerge.Table', () => {
20 | describe('Frontend', () => {
21 | it('should generate ops to create a table', () => {
22 | const actor = uuid()
23 | const [, change] = Frontend.change(Frontend.init(actor), doc => {
24 | doc.books = new Automerge.Table()
25 | })
26 | assert.deepStrictEqual(change, {
27 | actor, seq: 1, time: change.time, message: '', startOp: 1, deps: [], ops: [
28 | {obj: '_root', action: 'makeTable', key: 'books', insert: false, pred: []}
29 | ]
30 | })
31 | })
32 |
33 | it('should generate ops to insert a row', () => {
34 | const actor = uuid()
35 | const [doc1] = Frontend.change(Frontend.init(actor), doc => {
36 | doc.books = new Automerge.Table()
37 | })
38 | let rowId
39 | const [doc2, change2] = Frontend.change(doc1, doc => {
40 | rowId = doc.books.add({authors: 'Kleppmann, Martin', title: 'Designing Data-Intensive Applications'})
41 | })
42 | const books = Frontend.getObjectId(doc2.books)
43 | const rowObjID = Frontend.getObjectId(doc2.books.entries[rowId])
44 | assert.deepStrictEqual(change2, {
45 | actor, seq: 2, time: change2.time, message: '', startOp: 2, deps: [], ops: [
46 | {obj: books, action: 'makeMap', key: rowId, insert: false, pred: []},
47 | {obj: rowObjID, action: 'set', key: 'authors', insert: false, value: 'Kleppmann, Martin', pred: []},
48 | {obj: rowObjID, action: 'set', key: 'title', insert: false, value: 'Designing Data-Intensive Applications', pred: []}
49 | ]
50 | })
51 | })
52 | })
53 |
54 | describe('with one row', () => {
55 | let s1, rowId, rowWithId
56 |
57 | beforeEach(() => {
58 | s1 = Automerge.change(Automerge.init({freeze: true}), doc => {
59 | doc.books = new Automerge.Table()
60 | rowId = doc.books.add(DDIA)
61 | })
62 | rowWithId = Object.assign({id: rowId}, DDIA)
63 | })
64 |
65 | it('should look up a row by ID', () => {
66 | const row = s1.books.byId(rowId)
67 | assert.deepStrictEqual(row, rowWithId)
68 | })
69 |
70 | it('should return the row count', () => {
71 | assert.strictEqual(s1.books.count, 1)
72 | })
73 |
74 | it('should return a list of row IDs', () => {
75 | assert.deepStrictEqual(s1.books.ids, [rowId])
76 | })
77 |
78 | it('should allow iterating over rows', () => {
79 | assert.deepStrictEqual([...s1.books], [rowWithId])
80 | })
81 |
82 | it('should support standard array methods', () => {
83 | assert.deepStrictEqual(s1.books.filter(book => book.isbn === '1449373321'), [rowWithId])
84 | assert.deepStrictEqual(s1.books.filter(book => book.isbn === '9781449373320'), [])
85 | assert.deepStrictEqual(s1.books.find(book => book.isbn === '1449373321'), rowWithId)
86 | assert.strictEqual(s1.books.find(book => book.isbn === '9781449373320'), undefined)
87 | assert.deepStrictEqual(s1.books.map(book => book.title), ['Designing Data-Intensive Applications'])
88 | })
89 |
90 | it('should be immutable', () => {
91 | assert.strictEqual(s1.books.add, undefined)
92 | assert.throws(() => s1.books.remove(rowId), /can only be modified in a change function/)
93 | })
94 |
95 | it('should save and reload', () => {
96 | // FIXME - the bug is in parseAllOpIds()
97 | // maps and tables with a string key that has an `@` gets
98 | // improperly encoded as an opId
99 | const s2 = Automerge.load(Automerge.save(s1))
100 | assert.deepStrictEqual(s2.books.byId(rowId), rowWithId)
101 | })
102 |
103 | it('should allow a row to be updated', () => {
104 | const s2 = Automerge.change(s1, doc => {
105 | doc.books.byId(rowId).isbn = '9781449373320'
106 | })
107 | assert.deepStrictEqual(s2.books.byId(rowId), {
108 | id: rowId,
109 | authors: ['Kleppmann, Martin'],
110 | title: 'Designing Data-Intensive Applications',
111 | isbn: '9781449373320'
112 | })
113 | })
114 |
115 | it('should allow a row to be removed', () => {
116 | const s2 = Automerge.change(s1, doc => {
117 | doc.books.remove(rowId)
118 | })
119 | assert.strictEqual(s2.books.count, 0)
120 | assert.deepStrictEqual([...s2.books], [])
121 | })
122 |
123 | it('should not allow a row ID to be specified', () => {
124 | assert.throws(() => {
125 | Automerge.change(s1, doc => {
126 | doc.books.add(Object.assign({id: 'beafbfde-8e44-4a5f-b679-786e2ebba03f'}, RSDP))
127 | })
128 | }, /A table row must not have an "id" property/)
129 | })
130 |
131 | it('should not allow a row ID to be modified', () => {
132 | assert.throws(() => {
133 | Automerge.change(s1, doc => {
134 | doc.books.byId(rowId).id = 'beafbfde-8e44-4a5f-b679-786e2ebba03f'
135 | })
136 | }, /Object property "id" cannot be modified/)
137 | })
138 | })
139 |
140 | it('should allow concurrent row insertion', () => {
141 | const a0 = Automerge.change(Automerge.init(), doc => {
142 | doc.books = new Automerge.Table()
143 | })
144 | const b0 = Automerge.merge(Automerge.init(), a0)
145 |
146 | let ddia, rsdp
147 | const a1 = Automerge.change(a0, doc => { ddia = doc.books.add(DDIA) })
148 | const b1 = Automerge.change(b0, doc => { rsdp = doc.books.add(RSDP) })
149 | const a2 = Automerge.merge(a1, b1)
150 | assert.deepStrictEqual(a2.books.byId(ddia), Object.assign({id: ddia}, DDIA))
151 | assert.deepStrictEqual(a2.books.byId(rsdp), Object.assign({id: rsdp}, RSDP))
152 | assert.strictEqual(a2.books.count, 2)
153 | assertEqualsOneOf(a2.books.ids, [ddia, rsdp], [rsdp, ddia])
154 | })
155 |
156 | it('should allow row creation, update, and deletion in the same change', () => {
157 | const doc = Automerge.change(Automerge.init(), doc => {
158 | doc.table = new Automerge.Table()
159 | const id = doc.table.add({})
160 | doc.table.byId(id).x = 3
161 | doc.table.remove(id)
162 | })
163 | assert.strictEqual(doc.table.count, 0)
164 | })
165 |
166 | it('should allow rows to be sorted in various ways', () => {
167 | let ddia, rsdp
168 | const s = Automerge.change(Automerge.init(), doc => {
169 | doc.books = new Automerge.Table()
170 | ddia = doc.books.add(DDIA)
171 | rsdp = doc.books.add(RSDP)
172 | })
173 | const ddiaWithId = Object.assign({id: ddia}, DDIA)
174 | const rsdpWithId = Object.assign({id: rsdp}, RSDP)
175 | assert.deepStrictEqual(s.books.sort('title'), [ddiaWithId, rsdpWithId])
176 | assert.deepStrictEqual(s.books.sort(['authors', 'title']), [rsdpWithId, ddiaWithId])
177 | assert.deepStrictEqual(s.books.sort(row1 => ((row1.isbn === '1449373321') ? -1 : +1)), [ddiaWithId, rsdpWithId])
178 | })
179 |
180 | it('should allow serialization to JSON', () => {
181 | let ddia
182 | const s = Automerge.change(Automerge.init(), doc => {
183 | doc.books = new Automerge.Table()
184 | ddia = doc.books.add(DDIA)
185 | })
186 | const ddiaWithId = Object.assign({id: ddia}, DDIA)
187 | assert.deepStrictEqual(JSON.parse(JSON.stringify(s)), {books: {[ddia]: ddiaWithId}})
188 | })
189 | })
190 |
--------------------------------------------------------------------------------
/test/observable_test.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
3 |
// Tests for Automerge.Observable: registering callbacks on the root object
// and on nested objects (text, list elements, table rows), callback behavior
// for local vs. remote changes, and error/multi-observer handling.
describe('Automerge.Observable', () => {
  it('allows registering a callback on the root object', () => {
    let observable = new Automerge.Observable(), callbackChanges
    let doc = Automerge.init({observable}), actor = Automerge.getActorId(doc)
    observable.observe(doc, (diff, before, after, local, changes) => {
      callbackChanges = changes
      assert.deepStrictEqual(diff, {
        objectId: '_root', type: 'map', props: {bird: {[`1@${actor}`]: {type: 'value', value: 'Goldfinch'}}}
      })
      assert.deepStrictEqual(before, {})
      assert.deepStrictEqual(after, {bird: 'Goldfinch'})
      assert.strictEqual(local, true)
      assert.strictEqual(changes.length, 1)
    })
    doc = Automerge.change(doc, doc => doc.bird = 'Goldfinch')
    assert.strictEqual(callbackChanges.length, 1)
    assert.ok(callbackChanges[0] instanceof Uint8Array)
    assert.strictEqual(callbackChanges[0], Automerge.getLastLocalChange(doc))
  })

  it('allows registering a callback on a text object', () => {
    let observable = new Automerge.Observable(), callbackCalled = false
    let doc = Automerge.from({text: new Automerge.Text()}, {observable})
    let actor = Automerge.getActorId(doc)
    observable.observe(doc.text, (diff, before, after, local) => {
      callbackCalled = true
      assert.deepStrictEqual(diff, {
        objectId: `1@${actor}`, type: 'text', edits: [
          {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['a', 'b', 'c']}
        ]
      })
      assert.deepStrictEqual(before.toString(), '')
      assert.deepStrictEqual(after.toString(), 'abc')
      assert.deepStrictEqual(local, true)
    })
    doc = Automerge.change(doc, doc => doc.text.insertAt(0, 'a', 'b', 'c'))
    assert.strictEqual(callbackCalled, true)
  })

  it('should call the callback when applying remote changes', () => {
    let observable = new Automerge.Observable(), callbackChanges
    let local = Automerge.from({text: new Automerge.Text()}, {observable})
    let remote = Automerge.init()
    const localId = Automerge.getActorId(local), remoteId = Automerge.getActorId(remote)
    observable.observe(local.text, (diff, before, after, local, changes) => {
      callbackChanges = changes
      assert.deepStrictEqual(diff, {
        objectId: `1@${localId}`, type: 'text', edits: [
          {action: 'insert', index: 0, elemId: `2@${remoteId}`, opId: `2@${remoteId}`, value: {type: 'value', value: 'a'}}
        ]
      })
      assert.deepStrictEqual(before.toString(), '')
      assert.deepStrictEqual(after.toString(), 'a')
      // For changes received from another actor, `local` must be false.
      assert.deepStrictEqual(local, false)
    })
    ;[remote] = Automerge.applyChanges(remote, Automerge.getAllChanges(local))
    remote = Automerge.change(remote, doc => doc.text.insertAt(0, 'a'))
    const allChanges = Automerge.getAllChanges(remote)
    ;[local] = Automerge.applyChanges(local, allChanges)
    assert.strictEqual(callbackChanges, allChanges)
  })

  it('should observe objects nested inside list elements', () => {
    let observable = new Automerge.Observable(), callbackCalled = false
    let doc = Automerge.from({todos: [{title: 'Buy milk', done: false}]}, {observable})
    const actor = Automerge.getActorId(doc)
    observable.observe(doc.todos[0], (diff, before, after, local) => {
      callbackCalled = true
      assert.deepStrictEqual(diff, {
        objectId: `2@${actor}`, type: 'map', props: {done: {[`5@${actor}`]: {type: 'value', value: true}}}
      })
      assert.deepStrictEqual(before, {title: 'Buy milk', done: false})
      assert.deepStrictEqual(after, {title: 'Buy milk', done: true})
      assert.strictEqual(local, true)
    })
    doc = Automerge.change(doc, doc => doc.todos[0].done = true)
    assert.strictEqual(callbackCalled, true)
  })

  it('should provide before and after states if list indexes changed', () => {
    let observable = new Automerge.Observable(), callbackCalled = false
    let doc = Automerge.from({todos: [{title: 'Buy milk', done: false}]}, {observable})
    const actor = Automerge.getActorId(doc)
    observable.observe(doc.todos[0], (diff, before, after, local) => {
      callbackCalled = true
      assert.deepStrictEqual(diff, {
        objectId: `2@${actor}`, type: 'map', props: {done: {[`8@${actor}`]: {type: 'value', value: true}}}
      })
      assert.deepStrictEqual(before, {title: 'Buy milk', done: false})
      assert.deepStrictEqual(after, {title: 'Buy milk', done: true})
      assert.strictEqual(local, true)
    })
    // The unshift moves the observed object from index 0 to index 1; the
    // observer must still see the correct before/after row states.
    doc = Automerge.change(doc, doc => {
      doc.todos.unshift({title: 'Water plants', done: false})
      doc.todos[1].done = true
    })
    assert.strictEqual(callbackCalled, true)
  })

  it('should observe rows inside tables', () => {
    let observable = new Automerge.Observable(), callbackCalled = false
    let doc = Automerge.init({observable}), actor = Automerge.getActorId(doc), rowId
    doc = Automerge.change(doc, doc => {
      doc.todos = new Automerge.Table()
      rowId = doc.todos.add({title: 'Buy milk', done: false})
    })
    observable.observe(doc.todos.byId(rowId), (diff, before, after, local) => {
      callbackCalled = true
      assert.deepStrictEqual(diff, {
        objectId: `2@${actor}`, type: 'map', props: {done: {[`5@${actor}`]: {type: 'value', value: true}}}
      })
      assert.deepStrictEqual(before, {id: rowId, title: 'Buy milk', done: false})
      assert.deepStrictEqual(after, {id: rowId, title: 'Buy milk', done: true})
      assert.strictEqual(local, true)
    })
    doc = Automerge.change(doc, doc => doc.todos.byId(rowId).done = true)
    assert.strictEqual(callbackCalled, true)
  })

  it('should observe nested objects inside text', () => {
    let observable = new Automerge.Observable(), callbackCalled = false
    let doc = Automerge.init({observable}), actor = Automerge.getActorId(doc)
    doc = Automerge.change(doc, doc => {
      doc.text = new Automerge.Text()
      doc.text.insertAt(0, 'a', 'b', {start: 'bold'}, 'c', {end: 'bold'})
    })
    observable.observe(doc.text.get(2), (diff, before, after, local) => {
      callbackCalled = true
      assert.deepStrictEqual(diff, {
        objectId: `4@${actor}`, type: 'map', props: {start: {[`9@${actor}`]: {type: 'value', value: 'italic'}}}
      })
      assert.deepStrictEqual(before, {start: 'bold'})
      assert.deepStrictEqual(after, {start: 'italic'})
      assert.strictEqual(local, true)
    })
    doc = Automerge.change(doc, doc => doc.text.get(2).start = 'italic')
    assert.strictEqual(callbackCalled, true)
  })

  it('should not allow observers on non-document objects', () => {
    let observable = new Automerge.Observable()
    let doc = Automerge.init({observable})
    assert.throws(() => {
      Automerge.change(doc, doc => {
        const text = new Automerge.Text()
        doc.text = text
        observable.observe(text, () => {})
      })
    }, /The observed object must be part of an Automerge document/)
  })

  it('should allow multiple observers', () => {
    let observable = new Automerge.Observable(), called1 = false, called2 = false
    let doc = Automerge.init({observable})
    observable.observe(doc, () => { called1 = true })
    observable.observe(doc, () => { called2 = true })
    Automerge.change(doc, doc => doc.foo = 'bar')
    assert.strictEqual(called1, true)
    assert.strictEqual(called2, true)
  })
})
165 |
--------------------------------------------------------------------------------
/backend/backend.js:
--------------------------------------------------------------------------------
1 | const { encodeChange } = require('./columnar')
2 | const { BackendDoc } = require('./new')
3 | const { backendState } = require('./util')
4 |
/**
 * Creates and returns a fresh, empty backend node state: a new BackendDoc
 * with no changes applied, and therefore no head hashes.
 */
function init() {
  const state = new BackendDoc()
  return {state, heads: []}
}
11 |
/**
 * Returns an independent copy of the node state `backend`. The internal
 * BackendDoc is deep-cloned, while the (immutable) heads array is shared.
 */
function clone(backend) {
  const state = backendState(backend).clone()
  return {state, heads: backend.heads}
}
15 |
/**
 * Releases the resources held by the node state `backend`. Afterwards the
 * backend must not be used again: it is marked frozen and its internal state
 * reference is dropped so it can be garbage-collected.
 */
function free(backend) {
  backend.frozen = true
  backend.state = null
}
20 |
/**
 * Applies a list of `changes` from remote nodes to the node state `backend`.
 * Returns a two-element array `[state, patch]` where `state` is the updated
 * node state, and `patch` describes the modifications that need to be made
 * to the document objects to reflect these changes.
 */
function applyChanges(backend, changes) {
  const state = backendState(backend)
  const patch = state.applyChanges(changes)
  // The underlying state has been mutated, so the old backend wrapper must not
  // be reused; marking it frozen presumably lets backendState() detect stale
  // references (see backend/util.js — confirm)
  backend.frozen = true
  return [{state, heads: state.heads}, patch]
}
33 |
/**
 * Returns the hash of the change with index `index` (i.e. sequence number
 * `index + 1`) made by the actor `actorId`. If the hash graph has not yet been
 * computed, it is computed on demand and the lookup is retried. Throws a
 * RangeError if the change is still unknown after that.
 */
function hashByActor(state, actorId, index) {
  let hashes = state.hashesByActor[actorId]
  if (hashes && hashes[index]) return hashes[index]

  if (!state.haveHashGraph) {
    state.computeHashGraph()
    hashes = state.hashesByActor[actorId]
    if (hashes && hashes[index]) return hashes[index]
  }

  throw new RangeError(`Unknown change: actorId = ${actorId}, seq = ${index + 1}`)
}
46 |
/**
 * Takes a single change request `change` made by the local user, and applies
 * it to the node state `backend`. Returns a three-element array
 * `[backend, patch, binaryChange]` where `backend` is the updated node state,
 * `patch` confirms the modifications to the document objects, and
 * `binaryChange` is a binary-encoded form of the change submitted.
 * Throws a RangeError if the change has already been applied.
 */
function applyLocalChange(backend, change) {
  const state = backendState(backend)
  // A change is a duplicate if its seq is not greater than the clock value we
  // hold for this actor (0 if the actor is unknown). Note the parentheses:
  // the previous form `change.seq <= state.clock[change.actor] || 0` parsed as
  // `(change.seq <= state.clock[...]) || 0`, which only behaved correctly by
  // accident (the `|| 0` branch made an undefined clock entry fall through).
  if (change.seq <= (state.clock[change.actor] || 0)) {
    throw new RangeError('Change request has already been applied')
  }

  // Add the local actor's last change hash to deps. We do this because when
  // frontend and backend are on separate threads, the frontend may fire off
  // several local changes in sequence before getting a response from the
  // backend; since the binary encoding and hashing is done by the backend, the
  // frontend does not know the hash of its own last change in this case.
  // Rather than handle this situation as a special case, we say that the
  // frontend only specifies other actors' deps in the changes it generates,
  // and the dependency on the local actor's last change is always added here
  // in the backend.
  //
  // Strictly speaking, we should check whether the local actor's last change is
  // indirectly reachable through a different actor's change; in that case, it is not
  // necessary to add this dependency. However, it doesn't do any harm either (only
  // using a few extra bytes of storage).
  if (change.seq > 1) {
    const lastHash = hashByActor(state, change.actor, change.seq - 2)
    // Defensive check: hashByActor() throws rather than returning a falsy
    // value, so this branch should be unreachable
    if (!lastHash) {
      throw new RangeError(`Cannot find hash of localChange before seq=${change.seq}`)
    }
    let deps = {[lastHash]: true}
    for (let hash of change.deps) deps[hash] = true
    change.deps = Object.keys(deps).sort()
  }

  const binaryChange = encodeChange(change)
  const patch = state.applyChanges([binaryChange], true)
  backend.frozen = true

  // On the patch we send out, omit the last local change hash
  const lastHash = hashByActor(state, change.actor, change.seq - 1)
  patch.deps = patch.deps.filter(head => head !== lastHash)
  return [{state, heads: state.heads}, patch, binaryChange]
}
92 |
/**
 * Returns the state of the document serialised to an Uint8Array. The result
 * can later be passed to `load()` to reconstruct the document.
 */
function save(backend) {
  return backendState(backend).save()
}
99 |
/**
 * Loads the document and/or changes contained in the Uint8Array `data`
 * (as produced by `save()`), and returns a backend initialised with this
 * state.
 */
function load(data) {
  const state = new BackendDoc(data)
  return {state, heads: state.heads}
}
108 |
/**
 * Applies a list of `changes` to the node state `backend`, and returns the updated
 * state with those changes incorporated. Unlike `applyChanges()`, this function
 * does not produce a patch describing the incremental modifications, making it
 * a little faster when loading a document from disk. When all the changes have
 * been loaded, you can use `getPatch()` to construct the latest document state.
 */
function loadChanges(backend, changes) {
  const state = backendState(backend)
  state.applyChanges(changes)
  // The underlying state was mutated in place, so the backend passed in must
  // not be used again; it is marked frozen like in applyChanges()
  backend.frozen = true
  return {state, heads: state.heads}
}
122 |
/**
 * Returns a patch that, when applied to an empty document, constructs the
 * document tree in the state described by the node state `backend`.
 * Typically used after `loadChanges()`, which does not produce patches.
 */
function getPatch(backend) {
  return backendState(backend).getPatch()
}
130 |
/**
 * Returns an array of hashes of the current "head" changes (i.e. those changes
 * that no other change depends on). Hashes are hexadecimal strings.
 */
function getHeads(backend) {
  return backend.heads
}
138 |
/**
 * Returns the full history of changes that have been applied to a document.
 * Equivalent to `getChanges(backend, [])`.
 */
function getAllChanges(backend) {
  return getChanges(backend, [])
}
145 |
/**
 * Returns all changes that are newer than or concurrent to the changes
 * identified by the hashes in `haveDeps`. If `haveDeps` is an empty array, all
 * changes are returned. Throws an exception if any of the given hashes is unknown.
 */
function getChanges(backend, haveDeps) {
  // Guard against the common mistake of passing a single hash instead of an array
  if (!Array.isArray(haveDeps)) {
    throw new TypeError('Pass an array of hashes to Backend.getChanges()')
  }
  return backendState(backend).getChanges(haveDeps)
}
157 |
/**
 * Returns all changes that are present in `backend2` but not in `backend1`.
 * Intended for use in situations where the two backends are for different actors.
 * To get the changes added between an older and a newer document state of the same
 * actor, use `getChanges()` instead. `getChangesAdded()` throws an exception if
 * one of the backend states is frozen (i.e. if it is not the latest state of that
 * backend instance; this distinction matters when the backend is mutable).
 */
function getChangesAdded(backend1, backend2) {
  // Note the argument order: the query runs on backend2's state, diffed against backend1
  return backendState(backend2).getChangesAdded(backendState(backend1))
}
169 |
/**
 * If the backend has applied a change with the given `hash` (given as a
 * hexadecimal string), returns that change (as a byte array). Returns undefined
 * if no change with that hash has been applied. A change with missing
 * dependencies does not count as having been applied.
 */
function getChangeByHash(backend, hash) {
  return backendState(backend).getChangeByHash(hash)
}
179 |
/**
 * Returns the hashes of any missing dependencies, i.e. where we have applied a
 * change that has a dependency on a change we have not seen.
 *
 * If the argument `heads` is given (an array of hexadecimal strings representing
 * hashes as returned by `getHeads()`), this function also ensures that all of
 * those hashes resolve to either a change that has been applied to the document,
 * or that has been enqueued for later application once missing dependencies have
 * arrived. Any missing heads hashes are included in the returned array.
 */
function getMissingDeps(backend, heads = []) {
  return backendState(backend).getMissingDeps(heads)
}
193 |
// Public API of the backend: lifecycle (init/clone/free/save/load),
// change application, and history/dependency queries
module.exports = {
  init, clone, free, applyChanges, applyLocalChange, save, load, loadChanges, getPatch,
  getHeads, getAllChanges, getChanges, getChangesAdded, getChangeByHash, getMissingDeps
}
198 |
--------------------------------------------------------------------------------
/test/fuzz_test.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Miniature implementation of a subset of Automerge, which is used below as definition of the
3 | * expected behaviour during fuzz testing. Supports the following:
4 | * - only map, list, and primitive datatypes (no table, text, counter, or date objects)
5 | * - no undo/redo
6 | * - no conflicts on concurrent updates to the same field (uses last-writer-wins instead)
7 | * - no API for creating new changes (you need to create change objects yourself)
8 | * - no buffering of changes that are missing their causal dependencies
9 | * - no saving or loading in serialised form
10 | * - relies on object mutation (no immutability)
11 | */
class Micromerge {
  constructor() {
    this.byActor = {} // map from actorId to array of changes
    this.byObjId = {_root: {}} // objects, keyed by the ID of the operation that created the object
    this.metadata = {_root: {}} // map from objID to object with CRDT metadata for each object field
  }

  // The root object of the document (the user-visible document state)
  get root() {
    return this.byObjId._root
  }

  /**
   * Updates the document state by applying the change object `change`, in the format documented here:
   * https://github.com/automerge/automerge/blob/performance/BINARY_FORMAT.md#json-representation-of-changes
   * Throws a RangeError if the change is out of sequence for its actor, or if
   * one of its causal dependencies has not yet been applied.
   */
  applyChange(change) {
    // Check that the change's dependencies are met
    const lastSeq = this.byActor[change.actor] ? this.byActor[change.actor].length : 0
    if (change.seq !== lastSeq + 1) {
      throw new RangeError(`Expected sequence number ${lastSeq + 1}, got ${change.seq}`)
    }
    for (let [actor, dep] of Object.entries(change.deps || {})) {
      if (!this.byActor[actor] || this.byActor[actor].length < dep) {
        throw new RangeError(`Missing dependency: change ${dep} by actor ${actor}`)
      }
    }

    if (!this.byActor[change.actor]) this.byActor[change.actor] = []
    this.byActor[change.actor].push(change)

    // Assign each op an opId of the form `counter@actor`, counting up from startOp
    change.ops.forEach((op, index) => {
      this.applyOp(Object.assign({opId: `${change.startOp + index}@${change.actor}`}, op))
    })
  }

  /**
   * Updates the document state with one of the operations from a change.
   */
  applyOp(op) {
    if (!this.metadata[op.obj]) throw new RangeError(`Object does not exist: ${op.obj}`)
    if (op.action === 'makeMap') {
      this.byObjId[op.opId] = {}
      this.metadata[op.opId] = {}
    } else if (op.action === 'makeList') {
      this.byObjId[op.opId] = []
      this.metadata[op.opId] = []
    } else if (op.action !== 'set' && op.action !== 'del') {
      throw new RangeError(`Unsupported operation type: ${op.action}`)
    }

    if (Array.isArray(this.metadata[op.obj])) {
      if (op.insert) this.applyListInsert(op); else this.applyListUpdate(op)
    } else if (!this.metadata[op.obj][op.key] || this.compareOpIds(this.metadata[op.obj][op.key], op.opId)) {
      // Map key assignment: apply only if this op's ID is greater than that of
      // the last op that wrote this key (last-writer-wins by opId ordering)
      this.metadata[op.obj][op.key] = op.opId
      if (op.action === 'del') {
        delete this.byObjId[op.obj][op.key]
      } else if (op.action.startsWith('make')) {
        // Link the newly created child object into its parent
        this.byObjId[op.obj][op.key] = this.byObjId[op.opId]
      } else {
        this.byObjId[op.obj][op.key] = op.value
      }
    }
  }

  /**
   * Applies a list insertion operation.
   */
  applyListInsert(op) {
    const meta = this.metadata[op.obj]
    const value = op.action.startsWith('make') ? this.byObjId[op.opId] : op.value
    // Find the reference element after which to insert ('_head' means the start)
    let {index, visible} =
      (op.key === '_head') ? {index: -1, visible: 0} : this.findListElement(op.obj, op.key)
    if (index >= 0 && !meta[index].deleted) visible++
    index++
    // Skip over any elements whose insertion op has a greater ID than this op,
    // so that concurrent insertions at the same position are ordered
    // consistently on all nodes (greatest opId first)
    while (index < meta.length && this.compareOpIds(op.opId, meta[index].elemId)) {
      if (!meta[index].deleted) visible++
      index++
    }
    meta.splice(index, 0, {elemId: op.opId, valueId: op.opId, deleted: false})
    this.byObjId[op.obj].splice(visible, 0, value)
  }

  /**
   * Applies a list element update (setting the value of a list element, or deleting a list element).
   */
  applyListUpdate(op) {
    const {index, visible} = this.findListElement(op.obj, op.key)
    const meta = this.metadata[op.obj][index]
    if (op.action === 'del') {
      // Deletion is idempotent: only remove the visible element once
      if (!meta.deleted) this.byObjId[op.obj].splice(visible, 1)
      meta.deleted = true
    } else if (this.compareOpIds(meta.valueId, op.opId)) {
      // Last-writer-wins on the element value, like for map keys
      if (!meta.deleted) {
        this.byObjId[op.obj][visible] = op.action.startsWith('make') ? this.byObjId[op.opId] : op.value
      }
      meta.valueId = op.opId
    }
  }

  /**
   * Searches for the list element with ID `elemId` in the object with ID `objId`. Returns an object
   * `{index, visible}` where `index` is the index of the element in the metadata array, and
   * `visible` is the number of non-deleted elements that precede the specified element.
   */
  findListElement(objectId, elemId) {
    let index = 0, visible = 0, meta = this.metadata[objectId]
    while (index < meta.length && meta[index].elemId !== elemId) {
      if (!meta[index].deleted) visible++
      index++
    }
    if (index === meta.length) throw new RangeError(`List element not found: ${elemId}`)
    return {index, visible}
  }

  /**
   * Compares two operation IDs in the form `counter@actor`. Returns true if `id1` has a lower counter
   * than `id2`, or if the counter values are the same and `id1` has an actorId that sorts
   * lexicographically before the actorId of `id2`.
   */
  compareOpIds(id1, id2) {
    const regex = /^([0-9]+)@(.*)$/
    const match1 = regex.exec(id1), match2 = regex.exec(id2)
    const counter1 = parseInt(match1[1], 10), counter2 = parseInt(match2[1], 10)
    return (counter1 < counter2) || (counter1 === counter2 && match1[2] < match2[2])
  }
}
138 |
139 |
/* TESTS */

const assert = require('assert')

// change1: set a title, create a list under 'tags', and insert 'foo' at its head
const change1 = {actor: '1234', seq: 1, deps: {}, startOp: 1, ops: [
  {action: 'set', obj: '_root', key: 'title', insert: false, value: 'Hello'},
  {action: 'makeList', obj: '_root', key: 'tags', insert: false},
  {action: 'set', obj: '2@1234', key: '_head', insert: true, value: 'foo'}
]}

// change2: update the title, insert 'bar' after 'foo' (elemId 3@1234), then delete 'foo'
const change2 = {actor: '1234', seq: 2, deps: {}, startOp: 4, ops: [
  {action: 'set', obj: '_root', key: 'title', insert: false, value: 'Hello 1'},
  {action: 'set', obj: '2@1234', key: '3@1234', insert: true, value: 'bar'},
  {action: 'del', obj: '2@1234', key: '3@1234', insert: false}
]}

// change3: concurrent with change2 (same startOp, different actor). Per the
// assertions below, its title update wins the conflict and its insertion
// 'baz' is ordered before change2's 'bar'
const change3 = {actor: 'abcd', seq: 1, deps: {'1234': 1}, startOp: 4, ops: [
  {action: 'set', obj: '_root', key: 'title', insert: false, value: 'Hello 2'},
  {action: 'set', obj: '2@1234', key: '3@1234', insert: true, value: 'baz'}
]}

// Applying the two concurrent changes in either order must converge to the same state
let doc1 = new Micromerge(), doc2 = new Micromerge()
for (let c of [change1, change2, change3]) doc1.applyChange(c)
for (let c of [change1, change3, change2]) doc2.applyChange(c)
assert.deepStrictEqual(doc1.root, {title: 'Hello 2', tags: ['baz', 'bar']})
assert.deepStrictEqual(doc2.root, {title: 'Hello 2', tags: ['baz', 'bar']})

// change4: create a list under 'todos' with two consecutively inserted elements
const change4 = {actor: '2345', seq: 1, deps: {}, startOp: 1, ops: [
  {action: 'makeList', obj: '_root', key: 'todos', insert: false},
  {action: 'set', obj: '1@2345', key: '_head', insert: true, value: 'Task 1'},
  {action: 'set', obj: '1@2345', key: '2@2345', insert: true, value: 'Task 2'}
]}

let doc3 = new Micromerge()
doc3.applyChange(change4)
assert.deepStrictEqual(doc3.root, {todos: ['Task 1', 'Task 2']})

// change5: delete 'Task 1' and insert 'Task 3' after 'Task 2'
const change5 = {actor: '2345', seq: 2, deps: {}, startOp: 4, ops: [
  {action: 'del', obj: '1@2345', key: '2@2345', insert: false},
  {action: 'set', obj: '1@2345', key: '3@2345', insert: true, value: 'Task 3'}
]}
doc3.applyChange(change5)
assert.deepStrictEqual(doc3.root, {todos: ['Task 2', 'Task 3']})

// change6: delete 'Task 2', overwrite 'Task 3' with 'Task 3b' (insert: false
// means update in place), and insert 'Task 4' after it
const change6 = {actor: '2345', seq: 3, deps: {}, startOp: 6, ops: [
  {action: 'del', obj: '1@2345', key: '3@2345', insert: false},
  {action: 'set', obj: '1@2345', key: '5@2345', insert: false, value: 'Task 3b'},
  {action: 'set', obj: '1@2345', key: '5@2345', insert: true, value: 'Task 4'}
]}
doc3.applyChange(change6)
assert.deepStrictEqual(doc3.root, {todos: ['Task 3b', 'Task 4']})
191 |
--------------------------------------------------------------------------------
/frontend/table.js:
--------------------------------------------------------------------------------
1 | const { OBJECT_ID, CONFLICTS } = require('./constants')
2 | const { isObject, copyObject } = require('../src/common')
3 |
/**
 * Comparator for sorting table rows by the given list of column names.
 * Compares `row1` and `row2` column by column: the first column (in
 * `properties` order) on which the rows differ decides the result. Two
 * numbers are compared numerically; any other pair of values is compared by
 * its string representation. Returns a negative number, zero, or a positive
 * number, suitable for Array.prototype.sort().
 */
function compareRows(properties, row1, row2) {
  for (let prop of properties) {
    if (row1[prop] === row2[prop]) continue

    if (typeof row1[prop] === 'number' && typeof row2[prop] === 'number') {
      return row1[prop] - row2[prop]
    } else {
      // Explicit String() conversion, rather than `'' + value` concatenation
      const prop1 = String(row1[prop]), prop2 = String(row2[prop])
      if (prop1 === prop2) continue
      if (prop1 < prop2) return -1; else return +1
    }
  }
  return 0
}
18 |
19 |
/**
 * A relational-style unordered collection of records (rows). Each row is an
 * object that maps column names to values. The set of rows is represented by
 * a map from UUID to row object.
 */
class Table {
  /**
   * This constructor is used by application code when creating a new Table
   * object within a change callback.
   */
  constructor() {
    // A freshly constructed table is frozen; it only becomes writeable once it
    // is part of a document and accessed through getWriteable()
    this.entries = Object.freeze({})
    this.opIds = Object.freeze({})
    Object.freeze(this)
  }

  /**
   * Looks up a row in the table by its unique ID.
   */
  byId(id) {
    return this.entries[id]
  }

  /**
   * Returns an array containing the unique IDs of all rows in the table, in no
   * particular order.
   */
  get ids() {
    // Only keys whose entry is a row object with a matching `id` property
    // count as rows; this filters out any non-row values in `entries`
    return Object.keys(this.entries).filter(key => {
      const entry = this.entries[key]
      return isObject(entry) && entry.id === key
    })
  }

  /**
   * Returns the number of rows in the table.
   */
  get count() {
    return this.ids.length
  }

  /**
   * Returns an array containing all of the rows in the table, in no particular
   * order.
   */
  get rows() {
    return this.ids.map(id => this.byId(id))
  }

  /**
   * The standard JavaScript `filter()` method, which passes each row to the
   * callback function and returns all rows for which it returns true.
   */
  filter(callback, thisArg) {
    return this.rows.filter(callback, thisArg)
  }

  /**
   * The standard JavaScript `find()` method, which passes each row to the
   * callback function and returns the first row for which it returns true.
   */
  find(callback, thisArg) {
    return this.rows.find(callback, thisArg)
  }

  /**
   * The standard JavaScript `map()` method, which passes each row to the
   * callback function and returns a list of its return values.
   */
  map(callback, thisArg) {
    return this.rows.map(callback, thisArg)
  }

  /**
   * Returns the list of rows, sorted by one of the following:
   * - If a function argument is given, it compares rows as per
   *   https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/sort#Description
   * - If a string argument is given, it is interpreted as a column name and
   *   rows are sorted according to that column.
   * - If an array of strings is given, it is interpreted as a list of column
   *   names, and rows are sorted lexicographically by those columns.
   * - If no argument is given, it sorts by row ID by default.
   */
  sort(arg) {
    if (typeof arg === 'function') {
      return this.rows.sort(arg)
    } else if (typeof arg === 'string') {
      return this.rows.sort((row1, row2) => compareRows([arg], row1, row2))
    } else if (Array.isArray(arg)) {
      return this.rows.sort((row1, row2) => compareRows(arg, row1, row2))
    } else if (arg === undefined) {
      return this.rows.sort((row1, row2) => compareRows(['id'], row1, row2))
    } else {
      throw new TypeError(`Unsupported sorting argument: ${arg}`)
    }
  }

  /**
   * When iterating over a table, you get all rows in the table, in no
   * particular order.
   */
  [Symbol.iterator] () {
    // Snapshot the rows once; mutations during iteration are not reflected
    let rows = this.rows, index = -1
    return {
      next () {
        index += 1
        if (index < rows.length) {
          return {done: false, value: rows[index]}
        } else {
          return {done: true}
        }
      }
    }
  }

  /**
   * Returns a shallow clone of this object. This clone is used while applying
   * a patch to the table, and `freeze()` is called on it when we have finished
   * applying the patch.
   */
  _clone() {
    if (!this[OBJECT_ID]) {
      throw new RangeError('clone() requires the objectId to be set')
    }
    return instantiateTable(this[OBJECT_ID], copyObject(this.entries), copyObject(this.opIds))
  }

  /**
   * Sets the entry with key `id` to `value`. `opId` is the ID of the operation
   * performing this assignment. This method is for internal use only; it is
   * not part of the public API of Automerge.Table.
   */
  _set(id, value, opId) {
    if (Object.isFrozen(this.entries)) {
      throw new Error('A table can only be modified in a change function')
    }
    if (isObject(value) && !Array.isArray(value)) {
      // The `id` property is defined non-writable (defineProperty default),
      // so application code cannot reassign a row's primary key
      Object.defineProperty(value, 'id', {value: id, enumerable: true})
    }
    this.entries[id] = value
    this.opIds[id] = opId
  }

  /**
   * Removes the row with unique ID `id` from the table.
   */
  remove(id) {
    if (Object.isFrozen(this.entries)) {
      throw new Error('A table can only be modified in a change function')
    }
    delete this.entries[id]
    delete this.opIds[id]
  }

  /**
   * Makes this object immutable. This is called after a change has been made.
   */
  _freeze() {
    Object.freeze(this.entries)
    Object.freeze(this.opIds)
    Object.freeze(this)
  }

  /**
   * Returns a writeable instance of this table. This instance is returned when
   * the table is accessed within a change callback. `context` is the proxy
   * context that keeps track of the mutations.
   */
  getWriteable(context, path) {
    if (!this[OBJECT_ID]) {
      throw new RangeError('getWriteable() requires the objectId to be set')
    }

    // The writeable instance shares entries/opIds with this table; mutations
    // are routed through `context` rather than applied to these maps directly
    const instance = Object.create(WriteableTable.prototype)
    instance[OBJECT_ID] = this[OBJECT_ID]
    instance.context = context
    instance.entries = this.entries
    instance.opIds = this.opIds
    instance.path = path
    return instance
  }

  /**
   * Returns an object containing the table entries, indexed by objectID,
   * for serializing an Automerge document to JSON.
   */
  toJSON() {
    const rows = {}
    for (let id of this.ids) rows[id] = this.byId(id)
    return rows
  }
}
212 |
/**
 * An instance of this class is used when a table is accessed within a change
 * callback.
 */
class WriteableTable extends Table {
  /**
   * Returns a proxied version of the row with ID `id`. This row object can be
   * modified within a change callback. Returns undefined if there is no row
   * with the given ID.
   */
  byId(id) {
    if (isObject(this.entries[id]) && this.entries[id].id === id) {
      const objectId = this.entries[id][OBJECT_ID]
      const path = this.path.concat([{key: id, objectId}])
      // 'id' is passed as a readonly property, so the proxy rejects attempts
      // to modify the row's primary key
      return this.context.instantiateObject(path, objectId, ['id'])
    }
  }

  /**
   * Adds a new row to the table. The row is given as a map from
   * column name to value. Returns the objectId of the new row.
   */
  add(row) {
    return this.context.addTableRow(this.path, row)
  }

  /**
   * Removes the row with ID `id` from the table. Throws an exception if the row
   * does not exist in the table.
   */
  remove(id) {
    if (isObject(this.entries[id]) && this.entries[id].id === id) {
      this.context.deleteTableRow(this.path, id, this.opIds[id])
    } else {
      throw new RangeError(`There is no row with ID ${id} in this table`)
    }
  }
}
250 |
/**
 * This function is used to instantiate a Table object in the context of
 * applying a patch (see apply_patch.js). `entries` and `opIds` default to
 * empty maps if not given.
 */
function instantiateTable(objectId, entries, opIds) {
  if (!objectId) {
    throw new RangeError('instantiateTable requires an objectId to be given')
  }
  const instance = Object.create(Table.prototype)
  instance.entries = entries || {}
  instance.opIds = opIds || {}
  instance[OBJECT_ID] = objectId
  instance[CONFLICTS] = Object.freeze({})
  return instance
}
266 |
267 | module.exports = { Table, instantiateTable }
268 |
--------------------------------------------------------------------------------
/frontend/proxies.js:
--------------------------------------------------------------------------------
1 | const { OBJECT_ID, CHANGE, STATE } = require('./constants')
2 | const { isObject, createArrayOfNulls } = require('../src/common')
3 | const { Text } = require('./text')
4 | const { Table } = require('./table')
5 |
/**
 * Interprets `key` as a list index: numeric strings (as produced by proxy
 * property access) are converted to numbers, and any value that is not a
 * non-negative finite number is rejected. Returns the index as a number.
 * Throws a TypeError if `key` is not a number, and a RangeError if it is
 * negative, NaN, or infinite.
 */
function parseListIndex(key) {
  if (typeof key === 'string' && /^[0-9]+$/.test(key)) key = parseInt(key, 10)
  if (typeof key !== 'number') {
    throw new TypeError('A list index must be a number, but you passed ' + JSON.stringify(key))
  }
  // Number.isFinite(key) is false for NaN, Infinity and -Infinity, replacing
  // the coercing global isNaN() and the explicit +/-Infinity comparisons
  if (key < 0 || !Number.isFinite(key)) {
    throw new RangeError('A list index must be positive, but you passed ' + key)
  }
  return key
}
16 |
/**
 * Returns an object of array-like methods for the list with objectId `listId`
 * at `path` within the document. Mutating methods are routed through
 * `context`, which records the corresponding Automerge operations; read-only
 * methods delegate to the JavaScript built-in Array implementations.
 */
function listMethods(context, listId, path) {
  const methods = {
    deleteAt(index, numDelete) {
      context.splice(path, parseListIndex(index), numDelete || 1, [])
      return this
    },

    fill(value, start, end) {
      let list = context.getObject(listId)
      for (let index = parseListIndex(start || 0); index < parseListIndex(end || list.length); index++) {
        context.setListIndex(path, index, value)
      }
      return this
    },

    indexOf(o, start = 0) {
      // If the argument is an Automerge object, search by its objectId, since
      // a proxied object is not reference-equal to the stored list element
      const id = isObject(o) ? o[OBJECT_ID] : undefined
      if (id) {
        const list = context.getObject(listId)
        for (let index = start; index < list.length; index++) {
          if (list[index][OBJECT_ID] === id) {
            return index
          }
        }
        return -1
      } else {
        return context.getObject(listId).indexOf(o, start)
      }
    },

    insertAt(index, ...values) {
      context.splice(path, parseListIndex(index), 0, values)
      return this
    },

    pop() {
      let list = context.getObject(listId)
      // Strict equality instead of the previous loose `== 0` comparison
      if (list.length === 0) return
      const last = context.getObjectField(path, listId, list.length - 1)
      context.splice(path, list.length - 1, 1, [])
      return last
    },

    push(...values) {
      let list = context.getObject(listId)
      context.splice(path, list.length, 0, values)
      // need to getObject() again because the list object above may be immutable
      return context.getObject(listId).length
    },

    shift() {
      let list = context.getObject(listId)
      // Strict equality instead of the previous loose `== 0` comparison
      if (list.length === 0) return
      const first = context.getObjectField(path, listId, 0)
      context.splice(path, 0, 1, [])
      return first
    },

    splice(start, deleteCount, ...values) {
      let list = context.getObject(listId)
      start = parseListIndex(start)
      // As with Array.prototype.splice, an omitted or too-large deleteCount
      // means "delete everything from start to the end of the list"
      if (deleteCount === undefined || deleteCount > list.length - start) {
        deleteCount = list.length - start
      }
      const deleted = []
      for (let n = 0; n < deleteCount; n++) {
        deleted.push(context.getObjectField(path, listId, start + n))
      }
      context.splice(path, start, deleteCount, values)
      return deleted
    },

    unshift(...values) {
      context.splice(path, 0, 0, values)
      return context.getObject(listId).length
    }
  }

  for (let iterator of ['entries', 'keys', 'values']) {
    let list = context.getObject(listId)
    methods[iterator] = () => list[iterator]()
  }

  // Read-only methods that can delegate to the JavaScript built-in implementations
  for (let method of ['concat', 'every', 'filter', 'find', 'findIndex', 'forEach', 'includes',
                      'join', 'lastIndexOf', 'map', 'reduce', 'reduceRight',
                      'slice', 'some', 'toLocaleString', 'toString']) {
    methods[method] = (...args) => {
      const list = context.getObject(listId)
        .map((item, index) => context.getObjectField(path, listId, index))
      return list[method](...args)
    }
  }

  return methods
}
113 |
/**
 * Proxy handler for Automerge map objects accessed within a change callback.
 * The proxy `target` carries the mutation `context`, the map's `objectId`,
 * the `path` from the document root, and optionally a `readonly` list of
 * property names that must not be modified.
 */
const MapHandler = {
  get (target, key) {
    const { context, objectId, path } = target
    if (key === OBJECT_ID) return objectId
    if (key === CHANGE) return context
    if (key === STATE) return {actorId: context.actorId}
    return context.getObjectField(path, objectId, key)
  },

  set (target, key, value) {
    const { context, path, readonly } = target
    if (Array.isArray(readonly) && readonly.indexOf(key) >= 0) {
      throw new RangeError(`Object property "${key}" cannot be modified`)
    }
    context.setMapKey(path, key, value)
    return true
  },

  deleteProperty (target, key) {
    const { context, path, readonly } = target
    if (Array.isArray(readonly) && readonly.indexOf(key) >= 0) {
      throw new RangeError(`Object property "${key}" cannot be modified`)
    }
    context.deleteMapKey(path, key)
    return true
  },

  has (target, key) {
    const { context, objectId } = target
    return [OBJECT_ID, CHANGE].includes(key) || (key in context.getObject(objectId))
  },

  getOwnPropertyDescriptor (target, key) {
    const { context, objectId, path } = target
    const object = context.getObject(objectId)
    if (key in object) {
      return {
        configurable: true, enumerable: true,
        // Bug fix: getObjectField takes (path, objectId, key) — the `path`
        // argument was previously omitted here, unlike every other call site,
        // so the descriptor's value was looked up with shifted arguments
        value: context.getObjectField(path, objectId, key)
      }
    }
  },

  ownKeys (target) {
    const { context, objectId } = target
    return Object.keys(context.getObject(objectId))
  }
}
162 |
/**
 * Proxy handler for list proxies. The proxy target is the array
 * `[context, objectId, path]`: the mutation context, the ID of the list
 * object, and the path from the document root.
 */
const ListHandler = {
  get (target, key) {
    const [context, objectId, path] = target
    if (key === Symbol.iterator) return context.getObject(objectId)[Symbol.iterator]
    if (key === OBJECT_ID) return objectId
    if (key === CHANGE) return context
    if (key === 'length') return context.getObject(objectId).length
    // Numeric string keys are list indexes
    if (typeof key === 'string' && /^[0-9]+$/.test(key)) {
      return context.getObjectField(path, objectId, parseListIndex(key))
    }
    // Everything else is looked up among the array-like methods
    return listMethods(context, objectId, path)[key]
  },

  set (target, key, value) {
    const [context, objectId, path] = target
    if (key === 'length') {
      if (typeof value !== 'number') {
        throw new RangeError("Invalid array length")
      }
      const length = context.getObject(objectId).length
      if (length > value) {
        // Shrinking: delete the elements past the new length
        context.splice(path, value, length - value, [])
      } else {
        // Growing: pad with nulls up to the new length
        context.splice(path, length, 0, createArrayOfNulls(value - length))
      }
    } else {
      context.setListIndex(path, parseListIndex(key), value)
    }
    return true
  },

  deleteProperty (target, key) {
    const [context, /* objectId */, path] = target
    context.splice(path, parseListIndex(key), 1, [])
    return true
  },

  has (target, key) {
    const [context, objectId, /* path */] = target
    if (typeof key === 'string' && /^[0-9]+$/.test(key)) {
      return parseListIndex(key) < context.getObject(objectId).length
    }
    return ['length', OBJECT_ID, CHANGE].includes(key)
  },

  getOwnPropertyDescriptor (target, key) {
    const [context, objectId, path] = target
    const object = context.getObject(objectId)

    if (key === 'length') return {writable: true, value: object.length}
    if (key === OBJECT_ID) return {configurable: false, enumerable: false, value: objectId}

    if (typeof key === 'string' && /^[0-9]+$/.test(key)) {
      const index = parseListIndex(key)
      if (index < object.length) return {
        configurable: true, enumerable: true,
        // Fix: pass `path` first, matching the getObjectField(path, objectId, key)
        // signature used by the `get` trap above (it was previously omitted)
        value: context.getObjectField(path, objectId, index)
      }
    }
  },

  ownKeys (target) {
    const [context, objectId, /* path */] = target
    const object = context.getObject(objectId)
    let keys = ['length']
    for (let key of Object.keys(object)) keys.push(key)
    return keys
  }
}
232 |
/**
 * Wraps the map with ID `objectId` in a proxy that routes all reads and
 * writes through `context`. `readonly` lists property names that must
 * not be modified.
 */
function mapProxy(context, objectId, path, readonly) {
  const target = {context, objectId, path, readonly}
  return new Proxy(target, MapHandler)
}
236 |
/**
 * Wraps the list with ID `objectId` in a proxy that routes all reads and
 * writes through `context`.
 */
function listProxy(context, objectId, path) {
  const target = [context, objectId, path]
  return new Proxy(target, ListHandler)
}
240 |
/**
 * Instantiates a proxy object for the given `objectId`.
 * This function is added as a method to the context object by rootObjectProxy().
 * When it is called, `this` is the context object.
 * `readonly` is a list of map property names that cannot be modified.
 */
function instantiateProxy(path, objectId, readonly) {
  const object = this.getObject(objectId)
  // Arrays become list proxies; Text and Table manage their own writable
  // wrappers; everything else is treated as a map
  if (Array.isArray(object)) return listProxy(this, objectId, path)
  if (object instanceof Text || object instanceof Table) {
    return object.getWriteable(this, path)
  }
  return mapProxy(this, objectId, path, readonly)
}
257 |
/**
 * Returns a proxy for the root object of the document managed by `context`.
 * The root object always has objectId '_root' and an empty path.
 */
function rootObjectProxy(context) {
  // Expose instantiateProxy() on the context so that nested objects can be
  // wrapped in proxies on demand as they are accessed through this one
  context.instantiateObject = instantiateProxy
  return mapProxy(context, '_root', [])
}

module.exports = { rootObjectProxy }
264 |
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "env": {
3 | "browser": true,
4 | "commonjs": true,
    "es6": true,
6 | "node": true,
7 | "mocha": true
8 | },
9 | "ignorePatterns": "dist/**",
10 | "extends": ["eslint:recommended", "plugin:compat/recommended"],
11 | "parserOptions": {
12 | "ecmaVersion": 2015
13 | },
14 | "rules": {
15 | "accessor-pairs": "error",
16 | "array-bracket-newline": "off",
17 | "array-bracket-spacing": "off",
18 | "array-callback-return": "error",
19 | "array-element-newline": "off",
20 | "arrow-body-style": "error",
21 | "arrow-parens": "off",
22 | "arrow-spacing": [
23 | "error",
24 | {
25 | "after": true,
26 | "before": true
27 | }
28 | ],
29 | "block-scoped-var": "error",
30 | "block-spacing": "error",
31 | "brace-style": [
32 | "error",
33 | "1tbs", { "allowSingleLine": true }
34 | ],
35 | "camelcase": "off",
36 | "capitalized-comments": "off",
37 | "class-methods-use-this": "off",
38 | "comma-dangle": "off",
39 | "comma-spacing": [
40 | "error",
41 | {
42 | "after": true,
43 | "before": false
44 | }
45 | ],
46 | "comma-style": [
47 | "error",
48 | "last"
49 | ],
50 | "complexity": "off",
51 | "computed-property-spacing": [
52 | "off",
53 | "never"
54 | ],
55 | "consistent-return": "off",
56 | "consistent-this": "error",
57 | "curly": "off",
58 | "default-case": "off",
59 | "default-case-last": "error",
60 | "default-param-last": "error",
61 | "dot-location": [
62 | "error",
63 | "property"
64 | ],
65 | "dot-notation": "error",
66 | "eol-last": "error",
67 | "eqeqeq": "off",
68 | "func-call-spacing": "off",
69 | "func-name-matching": "error",
70 | "func-names": "off",
71 | "func-style": [
72 | "error",
73 | "declaration"
74 | ],
75 | "function-paren-newline": "off",
76 | "generator-star-spacing": "error",
77 | "grouped-accessor-pairs": "error",
78 | "guard-for-in": "error",
79 | "id-denylist": "error",
80 | "id-length": "off",
81 | "id-match": "error",
82 | "implicit-arrow-linebreak": "off",
83 | "indent": "off",
84 | "init-declarations": "off",
85 | "jsx-quotes": "error",
86 | "key-spacing": "off",
87 | "keyword-spacing": [
88 | "error",
89 | {
90 | "after": true,
91 | "before": true
92 | }
93 | ],
94 | "line-comment-position": "off",
95 | "linebreak-style": "off",
96 | "lines-around-comment": "off",
97 | "lines-between-class-members": "error",
98 | "max-classes-per-file": "off",
99 | "max-depth": "off",
100 | "max-len": "off",
101 | "max-lines": "off",
102 | "max-lines-per-function": "off",
103 | "max-nested-callbacks": "error",
104 | "max-params": "off",
105 | "max-statements": "off",
106 | "max-statements-per-line": "off",
107 | "multiline-comment-style": [
108 | "error",
109 | "separate-lines"
110 | ],
111 | "new-parens": "error",
112 | "newline-per-chained-call": "off",
113 | "no-alert": "error",
114 | "no-array-constructor": "error",
115 | "no-await-in-loop": "error",
116 | "no-bitwise": "off",
117 | "no-caller": "error",
118 | "no-confusing-arrow": "error",
119 | "no-console": "error",
120 | "no-constant-condition": [
121 | "error",
122 | {
123 | "checkLoops": false
124 | }
125 | ],
126 | "no-constructor-return": "error",
127 | "no-continue": "off",
128 | "no-div-regex": "error",
129 | "no-duplicate-imports": "error",
130 | "no-else-return": "off",
131 | "no-empty-function": "off",
132 | "no-eq-null": "error",
133 | "no-eval": "error",
134 | "no-extend-native": "error",
135 | "no-extra-bind": "error",
136 | "no-extra-label": "error",
137 | "no-extra-parens": "off",
138 | "no-floating-decimal": "error",
139 | "no-implicit-coercion": "off",
140 | "no-implicit-globals": "error",
141 | "no-implied-eval": "error",
142 | "no-inline-comments": "off",
143 | "no-invalid-this": "error",
144 | "no-iterator": "error",
145 | "no-label-var": "error",
146 | "no-labels": "error",
147 | "no-lone-blocks": "error",
148 | "no-lonely-if": "off",
149 | "no-loop-func": "off",
150 | "no-loss-of-precision": "error",
151 | "no-magic-numbers": "off",
152 | "no-mixed-operators": "off",
153 | "no-multi-assign": "error",
154 | "no-multi-spaces": "off",
155 | "no-multi-str": "error",
156 | "no-multiple-empty-lines": "error",
157 | "no-negated-condition": "off",
158 | "no-nested-ternary": "off",
159 | "no-new": "error",
160 | "no-new-func": "error",
161 | "no-new-object": "error",
162 | "no-new-wrappers": "error",
163 | "no-nonoctal-decimal-escape": "error",
164 | "no-octal-escape": "error",
165 | "no-param-reassign": "off",
166 | "no-plusplus": "off",
167 | "no-promise-executor-return": "error",
168 | "no-proto": "error",
169 | "no-restricted-exports": "error",
170 | "no-restricted-globals": "error",
171 | "no-restricted-imports": "error",
172 | "no-restricted-properties": "error",
173 | "no-restricted-syntax": "error",
174 | "no-return-assign": "off",
175 | "no-return-await": "error",
176 | "no-script-url": "error",
177 | "no-self-compare": "error",
178 | "no-sequences": "error",
179 | "no-shadow": "off",
180 | "no-tabs": "error",
181 | "no-template-curly-in-string": "error",
182 | "no-ternary": "off",
183 | "no-throw-literal": "error",
184 | "no-trailing-spaces": "error",
185 | "no-undef-init": "off",
186 | "no-undefined": "off",
187 | "no-underscore-dangle": "off",
188 | "no-unmodified-loop-condition": "error",
189 | "no-unneeded-ternary": "error",
190 | "no-unreachable-loop": "error",
191 | "no-unsafe-optional-chaining": "error",
192 | "no-unused-expressions": "error",
193 | "no-unused-vars": ["error", { "args": "after-used" }],
194 | "no-use-before-define": "off",
195 | "no-useless-backreference": "error",
196 | "no-useless-call": "error",
197 | "no-useless-computed-key": "error",
198 | "no-useless-concat": "error",
199 | "no-useless-constructor": "error",
200 | "no-useless-rename": "error",
201 | "no-useless-return": "error",
202 | "no-var": "error",
203 | "no-void": "error",
204 | "no-warning-comments": "off",
205 | "no-whitespace-before-property": "off",
206 | "nonblock-statement-body-position": "error",
207 | "object-curly-newline": "error",
208 | "object-curly-spacing": "off",
209 | "object-property-newline": "off",
210 | "object-shorthand": "error",
211 | "one-var": "off",
212 | "one-var-declaration-per-line": "off",
213 | "operator-assignment": "off",
214 | "operator-linebreak": "error",
215 | "padded-blocks": "off",
216 | "padding-line-between-statements": "error",
217 | "prefer-arrow-callback": "off",
218 | "prefer-const": "off",
219 | "prefer-destructuring": "off",
220 | "prefer-exponentiation-operator": "error",
221 | "prefer-named-capture-group": "off",
222 | "prefer-numeric-literals": "error",
223 | "prefer-object-spread": "off",
224 | "prefer-promise-reject-errors": "error",
225 | "prefer-regex-literals": "error",
226 | "prefer-rest-params": "error",
227 | "prefer-spread": "error",
228 | "prefer-template": "off",
229 | "quote-props": "off",
230 | "quotes": "off",
231 | "radix": "error",
232 | "require-atomic-updates": "error",
233 | "require-await": "error",
234 | "require-unicode-regexp": "off",
235 | "rest-spread-spacing": "error",
236 | "semi": "off",
237 | "semi-spacing": [
238 | "error",
239 | {
240 | "after": true,
241 | "before": false
242 | }
243 | ],
244 | "semi-style": [
245 | "error",
246 | "first"
247 | ],
248 | "sort-imports": "error",
249 | "sort-keys": "off",
250 | "sort-vars": "off",
251 | "space-before-blocks": "error",
252 | "space-before-function-paren": "off",
253 | "space-in-parens": [
254 | "error",
255 | "never"
256 | ],
257 | "space-infix-ops": "error",
258 | "space-unary-ops": "error",
259 | "spaced-comment": [
260 | "error",
261 | "always"
262 | ],
263 | "strict": [
264 | "error",
265 | "never"
266 | ],
267 | "switch-colon-spacing": "error",
268 | "symbol-description": "error",
269 | "template-curly-spacing": [
270 | "error",
271 | "never"
272 | ],
273 | "template-tag-spacing": "error",
274 | "unicode-bom": [
275 | "error",
276 | "never"
277 | ],
278 | "vars-on-top": "error",
279 | "wrap-iife": "error",
280 | "wrap-regex": "off",
281 | "yield-star-spacing": "error",
282 | "yoda": [
283 | "error",
284 | "never"
285 | ]
286 | }
287 | }
288 |
--------------------------------------------------------------------------------
/frontend/apply_patch.js:
--------------------------------------------------------------------------------
1 | const { isObject, copyObject, parseOpId } = require('../src/common')
2 | const { OBJECT_ID, CONFLICTS, ELEM_IDS } = require('./constants')
3 | const { instantiateText } = require('./text')
4 | const { instantiateTable } = require('./table')
5 | const { Counter } = require('./counter')
6 |
/**
 * Reconstructs the value from the patch object `patch`. `object` is the
 * existing value at this position (if any), and `updated` is the cache of
 * objects modified during the current patch application.
 */
function getValue(patch, object, updated) {
  if (patch.objectId) {
    // A mismatched objectId means the patch is replacing the existing object
    // with a brand-new one, so the old object must not be reused
    const existing = (object && object[OBJECT_ID] === patch.objectId) ? object : undefined
    return interpretPatch(patch, existing, updated)
  }

  switch (patch.datatype) {
    case 'timestamp':
      // Timestamp: value is milliseconds since 1970 epoch
      return new Date(patch.value)
    case 'counter':
      return new Counter(patch.value)
    default:
      // Primitive value (int, uint, float64, string, boolean, or null)
      return patch.value
  }
}
28 |
/**
 * Compares two strings, interpreted as Lamport timestamps of the form
 * 'counter@actorId'. Returns 1 if ts1 is greater, or -1 if ts2 is greater.
 * A string that does not match the pattern is treated as counter 0 with
 * the whole string as the actorId.
 */
function lamportCompare(ts1, ts2) {
  const regex = /^(\d+)@(.*)$/
  const parse = ts => regex.test(ts) ? parseOpId(ts) : {counter: 0, actorId: ts}
  const a = parse(ts1), b = parse(ts2)
  // Order by counter first, breaking ties by actorId
  if (a.counter !== b.counter) return (a.counter < b.counter) ? -1 : 1
  if (a.actorId !== b.actorId) return (a.actorId < b.actorId) ? -1 : 1
  return 0
}
43 |
/**
 * `props` is an object of the form:
 * `{key1: {opId1: {...}, opId2: {...}}, key2: {opId3: {...}}}`
 * where the outer object is a mapping from property names to inner objects,
 * and the inner objects are a mapping from operation ID to sub-patch.
 * This function interprets that structure and updates the objects `object` and
 * `conflicts` to reflect it. For each key, the greatest opId (by Lamport TS
 * order) is chosen as the default resolution; that op's value is assigned
 * to `object[key]`. Moreover, all the opIds and values are packed into a
 * conflicts object of the form `{opId1: value1, opId2: value2}` and assigned
 * to `conflicts[key]`. If there is no conflict, the conflicts object contains
 * just a single opId-value mapping.
 */
function applyProperties(props, object, conflicts, updated) {
  if (!props) return

  for (const key of Object.keys(props)) {
    // Sort ascending by Lamport timestamp, then reverse, so that opIds[0]
    // is the greatest opId, i.e. the default conflict resolution
    const opIds = Object.keys(props[key]).sort(lamportCompare).reverse()
    const values = {}
    for (const opId of opIds) {
      const subpatch = props[key][opId]
      // Reuse the previously-materialised value for this opId where available
      const oldValue = (conflicts[key] && conflicts[key][opId]) ? conflicts[key][opId] : undefined
      values[opId] = getValue(subpatch, oldValue, updated)
    }

    if (opIds.length === 0) {
      // No remaining ops for this key: the property was deleted
      delete object[key]
      delete conflicts[key]
    } else {
      object[key] = values[opIds[0]]
      conflicts[key] = values
    }
  }
}
80 |
/**
 * Creates a writable copy of an immutable map object. If `originalObject`
 * is undefined, creates an empty object with ID `objectId`.
 */
function cloneMapObject(originalObject, objectId) {
  const conflicts = copyObject(originalObject ? originalObject[CONFLICTS] : undefined)
  const object = copyObject(originalObject)
  // Attach metadata as non-enumerable properties so it is invisible to
  // ordinary key iteration
  Object.defineProperty(object, OBJECT_ID, {value: objectId})
  Object.defineProperty(object, CONFLICTS, {value: conflicts})
  return object
}
92 |
/**
 * Updates the map object `obj` according to the modifications described in
 * `patch`, or creates a new object if `obj` is undefined. Mutates `updated`
 * to map the objectId to the new object, and returns the new object.
 */
function updateMapObject(patch, obj, updated) {
  const objectId = patch.objectId
  // Clone lazily: only the first patch touching this object pays for the copy
  if (!updated[objectId]) updated[objectId] = cloneMapObject(obj, objectId)

  const object = updated[objectId]
  applyProperties(patch.props, object, object[CONFLICTS], updated)
  return object
}
108 |
/**
 * Updates the table object `obj` according to the modifications described in
 * `patch`, or creates a new object if `obj` is undefined. Mutates `updated`
 * to map the objectId to the new object, and returns the new object.
 */
function updateTableObject(patch, obj, updated) {
  const objectId = patch.objectId
  if (!updated[objectId]) {
    updated[objectId] = obj ? obj._clone() : instantiateTable(objectId)
  }
  const table = updated[objectId]

  const props = patch.props || {}
  for (const rowId of Object.keys(props)) {
    const opIds = Object.keys(props[rowId])
    if (opIds.length === 0) {
      // No ops left for this row: it was deleted
      table.remove(rowId)
    } else if (opIds.length === 1) {
      const subpatch = props[rowId][opIds[0]]
      table._set(rowId, getValue(subpatch, table.byId(rowId), updated), opIds[0])
    } else {
      throw new RangeError('Conflicts are not supported on properties of a table')
    }
  }
  return table
}
136 |
/**
 * Creates a writable copy of an immutable list object. If `originalList` is
 * undefined, creates an empty list with ID `objectId`.
 */
function cloneListObject(originalList, objectId) {
  // slice() produces a shallow clone of each backing array; missing inputs
  // become fresh empty arrays
  const listCopy = originalList ? originalList.slice() : []
  const conflictsCopy = (originalList && originalList[CONFLICTS]) ? originalList[CONFLICTS].slice() : []
  const elemIdsCopy = (originalList && originalList[ELEM_IDS]) ? originalList[ELEM_IDS].slice() : []
  // Metadata lives on non-enumerable properties so it does not appear
  // during ordinary iteration over the list
  Object.defineProperty(listCopy, OBJECT_ID, {value: objectId})
  Object.defineProperty(listCopy, CONFLICTS, {value: conflictsCopy})
  Object.defineProperty(listCopy, ELEM_IDS, {value: elemIdsCopy})
  return listCopy
}
150 |
/**
 * Updates the list object `obj` according to the modifications described in
 * `patch`, or creates a new object if `obj` is undefined. Mutates `updated`
 * to map the objectId to the new object, and returns the new object.
 * The three parallel arrays (values, conflicts, element IDs) are kept in
 * lockstep: every splice is applied to all of them.
 */
function updateListObject(patch, obj, updated) {
  const objectId = patch.objectId
  if (!updated[objectId]) {
    updated[objectId] = cloneListObject(obj, objectId)
  }

  const list = updated[objectId], conflicts = list[CONFLICTS], elemIds = list[ELEM_IDS]
  for (let i = 0; i < patch.edits.length; i++) {
    const edit = patch.edits[i]

    if (edit.action === 'insert' || edit.action === 'update') {
      // Reuse the previously-materialised value for this opId where available
      const oldValue = conflicts[edit.index] && conflicts[edit.index][edit.opId]
      let lastValue = getValue(edit.value, oldValue, updated)
      let values = {[edit.opId]: lastValue}

      // Successive updates for the same index are an indication of a conflict on that list element.
      // Edits are sorted in increasing order by Lamport timestamp, so the last value (with the
      // greatest timestamp) is the default resolution of the conflict.
      // NOTE: this loop advances the outer index `i`, consuming those edits.
      while (i < patch.edits.length - 1 && patch.edits[i + 1].index === edit.index &&
             patch.edits[i + 1].action === 'update') {
        i++
        const conflict = patch.edits[i]
        const oldValue2 = conflicts[conflict.index] && conflicts[conflict.index][conflict.opId]
        lastValue = getValue(conflict.value, oldValue2, updated)
        values[conflict.opId] = lastValue
      }

      if (edit.action === 'insert') {
        // New element: splice into all three parallel arrays
        list.splice(edit.index, 0, lastValue)
        conflicts.splice(edit.index, 0, values)
        elemIds.splice(edit.index, 0, edit.elemId)
      } else {
        // Update in place; the elemId of an existing element never changes
        list[edit.index] = lastValue
        conflicts[edit.index] = values
      }

    } else if (edit.action === 'multi-insert') {
      // A run of consecutive insertions by the same actor, with consecutive
      // opId counters starting at edit.elemId
      const startElemId = parseOpId(edit.elemId), newElems = [], newValues = [], newConflicts = []
      const datatype = edit.datatype
      edit.values.forEach((value, index) => {
        const elemId = `${startElemId.counter + index}@${startElemId.actorId}`
        value = getValue({ value, datatype }, undefined, updated)
        newValues.push(value)
        newConflicts.push({[elemId]: {value, datatype, type: 'value'}})
        newElems.push(elemId)
      })
      list.splice(edit.index, 0, ...newValues)
      conflicts.splice(edit.index, 0, ...newConflicts)
      elemIds.splice(edit.index, 0, ...newElems)

    } else if (edit.action === 'remove') {
      // Delete `count` elements starting at `index` from all three arrays
      list.splice(edit.index, edit.count)
      conflicts.splice(edit.index, edit.count)
      elemIds.splice(edit.index, edit.count)
    }
  }
  return list
}
214 |
/**
 * Updates the text object `obj` according to the modifications described in
 * `patch`, or creates a new object if `obj` is undefined. Mutates `updated`
 * to map the objectId to the new object, and returns the new object.
 */
function updateTextObject(patch, obj, updated) {
  const objectId = patch.objectId
  let elems
  if (updated[objectId]) {
    // Continue mutating the element array of an already-updated Text object
    elems = updated[objectId].elems
  } else {
    elems = obj ? obj.elems.slice() : []
  }

  for (const edit of patch.edits) {
    switch (edit.action) {
      case 'insert': {
        const value = getValue(edit.value, undefined, updated)
        elems.splice(edit.index, 0, {elemId: edit.elemId, pred: [edit.opId], value})
        break
      }
      case 'multi-insert': {
        // Consecutive insertions by one actor: elemIds count up from edit.elemId
        const startElemId = parseOpId(edit.elemId)
        const datatype = edit.datatype
        const newElems = edit.values.map((rawValue, index) => {
          const value = getValue({ datatype, value: rawValue }, undefined, updated)
          const elemId = `${startElemId.counter + index}@${startElemId.actorId}`
          return {elemId, pred: [elemId], value}
        })
        elems.splice(edit.index, 0, ...newElems)
        break
      }
      case 'update': {
        // The elemId of an existing element never changes
        const elemId = elems[edit.index].elemId
        const value = getValue(edit.value, elems[edit.index].value, updated)
        elems[edit.index] = {elemId, pred: [edit.opId], value}
        break
      }
      case 'remove':
        elems.splice(edit.index, edit.count)
        break
    }
  }

  updated[objectId] = instantiateText(objectId, elems)
  return updated[objectId]
}
260 |
/**
 * Applies the patch object `patch` to the read-only document object `obj`.
 * Clones a writable copy of `obj` and places it in `updated` (indexed by
 * objectId), if that has not already been done. Returns the updated object.
 * Throws a TypeError if the patch's object type is not recognised.
 */
function interpretPatch(patch, obj, updated) {
  // Short-circuit: return the original object if it already exists, the patch
  // carries no modifications, and the object is not already being rebuilt
  const noProps = !patch.props || Object.keys(patch.props).length === 0
  const noEdits = !patch.edits || patch.edits.length === 0
  if (isObject(obj) && noProps && noEdits && !updated[patch.objectId]) {
    return obj
  }

  switch (patch.type) {
    case 'map':   return updateMapObject(patch, obj, updated)
    case 'table': return updateTableObject(patch, obj, updated)
    case 'list':  return updateListObject(patch, obj, updated)
    case 'text':  return updateTextObject(patch, obj, updated)
    default:
      throw new TypeError(`Unknown object type: ${patch.type}`)
  }
}
285 |
286 | /**
287 | * Creates a writable copy of the immutable document root object `root`.
288 | */
289 | function cloneRootObject(root) {
290 | if (root[OBJECT_ID] !== '_root') {
291 | throw new RangeError(`Not the root object: ${root[OBJECT_ID]}`)
292 | }
293 | return cloneMapObject(root, '_root')
294 | }
295 |
296 | module.exports = {
297 | interpretPatch, cloneRootObject
298 | }
299 |
--------------------------------------------------------------------------------
/test/wasm.js:
--------------------------------------------------------------------------------
1 | /* eslint-disable no-unused-vars */
2 | // This file is used for running the test suite against an alternative backend
3 | // implementation, such as the WebAssembly version compiled from Rust.
4 | // It needs to be loaded before the test suite files, which can be done with
5 | // `mocha --file test/wasm.js` (shortcut: `yarn testwasm`).
6 | // You need to set the environment variable WASM_BACKEND_PATH to the path where
7 | // the alternative backend module can be found; typically this is something
8 | // like `../automerge-rs/automerge-backend-wasm`.
9 | // Since this file relies on an environment variable and filesystem paths, it
10 | // currently only works in Node, not in a browser.
11 |
// Fail fast with a clear message rather than a confusing module-resolution
// error further down
if (!process.env.WASM_BACKEND_PATH) {
  throw new RangeError('Please set environment variable WASM_BACKEND_PATH to the path of the WebAssembly backend')
}

const assert = require('assert')
// TEST_DIST=1 runs the tests against the bundled build instead of the sources
const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
const jsBackend = require('../backend')
const Frontend = require('../frontend')
const { decodeChange } = require('../backend/columnar')
const uuid = require('../src/uuid')

const path = require('path')
// Load the alternative backend from the configured path and make it the
// default for the rest of the test suite
const wasmBackend = require(path.resolve(process.env.WASM_BACKEND_PATH))
Automerge.setDefaultBackend(wasmBackend)

// Run the same interop suite in both directions: changes produced by one
// backend must be applied correctly by the other
describe('JavaScript-WebAssembly interoperability', () => {
  describe('from JS to Wasm', () => {
    interopTests(jsBackend, wasmBackend)
  })

  describe('from Wasm to JS', () => {
    interopTests(wasmBackend, jsBackend)
  })
})
36 |
37 | function interopTests(sourceBackend, destBackend) {
38 | let source, dest
39 | beforeEach(() => {
40 | source = sourceBackend.init()
41 | dest = destBackend.init()
42 | })
43 |
44 | it('should set a key in a map', () => {
45 | const actor = uuid()
46 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
47 | actor, seq: 1, time: 0, startOp: 1, deps: [], ops: [
48 | {action: 'set', obj: '_root', key: 'bird', value: 'magpie', pred: []}
49 | ]
50 | })
51 | const [dest1, patch] = destBackend.applyChanges(dest, [change1])
52 | assert.deepStrictEqual(patch, {
53 | clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 1, pendingChanges: 0,
54 | diffs: {objectId: '_root', type: 'map', props: {
55 | bird: {[`1@${actor}`]: {type: 'value', value: 'magpie'}}
56 | }}
57 | })
58 | })
59 |
60 | it('should delete a key from a map', () => {
61 | const actor = uuid()
62 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
63 | actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
64 | {action: 'set', obj: '_root', key: 'bird', value: 'magpie', pred: []}
65 | ]
66 | })
67 | const [source2, p2, change2] = sourceBackend.applyLocalChange(source1, {
68 | actor, seq: 2, startOp: 2, time: 0, deps: [], ops: [
69 | {action: 'del', obj: '_root', key: 'bird', pred: [`1@${actor}`]}
70 | ]
71 | })
72 | const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
73 | const [dest2, patch2] = destBackend.applyChanges(dest1, [change2])
74 | assert.deepStrictEqual(patch2, {
75 | clock: {[actor]: 2}, deps: [decodeChange(change2).hash], maxOp: 2, pendingChanges: 0,
76 | diffs: {objectId: '_root', type: 'map', props: {bird: {}}}
77 | })
78 | })
79 |
80 | it('should create nested maps', () => {
81 | const actor = uuid()
82 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
83 | actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
84 | {action: 'makeMap', obj: '_root', key: 'birds', pred: []},
85 | {action: 'set', obj: `1@${actor}`, key: 'wrens', datatype: 'int', value: 3, pred: []}
86 | ]
87 | })
88 | const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
89 | assert.deepStrictEqual(patch1, {
90 | clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 2, pendingChanges: 0,
91 | diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
92 | objectId: `1@${actor}`, type: 'map', props: {wrens: {[`2@${actor}`]: {type: 'value', datatype: 'int', value: 3}}}
93 | }}}}
94 | })
95 | })
96 |
97 | it('should create lists', () => {
98 | const actor = uuid()
99 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
100 | actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
101 | {action: 'makeList', obj: '_root', key: 'birds', pred: []},
102 | {action: 'set', obj: `1@${actor}`, elemId: '_head', insert: true, value: 'chaffinch', pred: []}
103 | ]
104 | })
105 | const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
106 | assert.deepStrictEqual(patch1, {
107 | clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 2, pendingChanges: 0,
108 | diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
109 | objectId: `1@${actor}`, type: 'list', edits: [
110 | {action: 'insert', index: 0, elemId: `2@${actor}`, opId: `2@${actor}`,
111 | value: {type: 'value', value: 'chaffinch'}}
112 | ]
113 | }}}}
114 | })
115 | })
116 |
117 | it('should delete list elements', () => {
118 | const actor = uuid()
119 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
120 | actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
121 | {action: 'makeList', obj: '_root', key: 'birds', pred: []},
122 | {action: 'set', obj: `1@${actor}`, elemId: '_head', insert: true, value: 'chaffinch', pred: []}
123 | ]
124 | })
125 | const [source2, p2, change2] = sourceBackend.applyLocalChange(source1, {
126 | actor, seq: 2, startOp: 3, time: 0, deps: [], ops: [
127 | {action: 'del', obj: `1@${actor}`, elemId: `2@${actor}`, pred: [`2@${actor}`]}
128 | ]
129 | })
130 | const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
131 | const [dest2, patch2] = destBackend.applyChanges(dest1, [change2])
132 | assert.deepStrictEqual(patch2, {
133 | clock: {[actor]: 2}, deps: [decodeChange(change2).hash], maxOp: 3, pendingChanges: 0,
134 | diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
135 | objectId: `1@${actor}`, type: 'list',
136 | edits: [{action: 'remove', index: 0, count: 1}]
137 | }}}}
138 | })
139 | })
140 |
141 | it('should support Text objects', () => {
142 | const actor = uuid()
143 | const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
144 | actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
145 | {action: 'makeText', obj: '_root', key: 'text', pred: []},
146 | {action: 'set', obj: `1@${actor}`, elemId: '_head', insert: true, value: 'a', pred: []},
147 | {action: 'set', obj: `1@${actor}`, elemId: `2@${actor}`, insert: true, value: 'b', pred: []},
148 | {action: 'set', obj: `1@${actor}`, elemId: `3@${actor}`, insert: true, value: 'c', pred: []}
149 | ]
150 | })
151 | const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
152 | assert.deepStrictEqual(patch1, {
153 | clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 4, pendingChanges: 0,
154 | diffs: {objectId: '_root', type: 'map', props: {text: {[`1@${actor}`]: {
155 | objectId: `1@${actor}`, type: 'text', edits: [
156 | {action: 'multi-insert', index: 0, elemId: `2@${actor}`, values: ['a', 'b', 'c']},
157 | ],
158 | }}}}
159 | })
160 | })
161 |
// Verifies that a Table object, a row (map) inside it, and the row's
// properties all appear correctly in the patch produced by the destination
// backend after applying the change.
it('should support Table objects', () => {
  const actor = uuid(), rowId = uuid()
  // One change: create the table (op 1), add one row keyed by rowId (op 2),
  // and set two properties on that row (ops 3-4).
  const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
    actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
      {action: 'makeTable', obj: '_root', key: 'birds', insert: false, pred: []},
      {action: 'makeMap', obj: `1@${actor}`, key: rowId, insert: false, pred: []},
      {action: 'set', obj: `2@${actor}`, key: 'species', insert: false, value: 'Chaffinch', pred: []},
      {action: 'set', obj: `2@${actor}`, key: 'colour', insert: false, value: 'brown', pred: []}
    ]
  })
  const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
  // The patch nests the row's map diff inside the table diff, keyed by rowId.
  assert.deepStrictEqual(patch1, {
    clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 4, pendingChanges: 0,
    diffs: {objectId: '_root', type: 'map', props: {birds: {[`1@${actor}`]: {
      objectId: `1@${actor}`, type: 'table', props: {[rowId]: {[`2@${actor}`]: {
        objectId: `2@${actor}`, type: 'map', props: {
          species: {[`3@${actor}`]: {type: 'value', value: 'Chaffinch'}},
          colour: {[`4@${actor}`]: {type: 'value', value: 'brown'}}
        }
      }}}
    }}}}
  })
})
185 |
// Verifies that a Counter value and a subsequent increment are both applied:
// the destination's patch reports the summed counter value (1 + 2 = 3) under
// the opId of the original 'set' operation.
it('should support Counter objects', () => {
  const actor = uuid()
  // First change: create the counter with initial value 1 (op 1).
  const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
    actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
      {action: 'set', obj: '_root', key: 'counter', value: 1, datatype: 'counter', pred: []}
    ]
  })
  // Second change: increment the counter by 2, citing op 1 as predecessor.
  const [source2, p2, change2] = sourceBackend.applyLocalChange(source1, {
    actor, seq: 2, startOp: 2, time: 0, deps: [], ops: [
      {action: 'inc', obj: '_root', key: 'counter', value: 2, pred: [`1@${actor}`]}
    ]
  })
  const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
  const [dest2, patch2] = destBackend.applyChanges(dest1, [change2])
  assert.deepStrictEqual(patch2, {
    clock: {[actor]: 2}, deps: [decodeChange(change2).hash], maxOp: 2, pendingChanges: 0,
    diffs: {objectId: '_root', type: 'map', props: {
      counter: {[`1@${actor}`]: {type: 'value', value: 3, datatype: 'counter'}}
    }}
  })
})
207 |
// Verifies that a timestamp value (Date encoded as milliseconds since epoch)
// survives the round trip through both backends with its 'timestamp' datatype
// annotation intact.
it('should support Date objects', () => {
  const actor = uuid(), now = new Date()
  const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
    actor, seq: 1, startOp: 1, time: 0, deps: [], ops: [
      {action: 'set', obj: '_root', key: 'now', value: now.getTime(), datatype: 'timestamp', pred: []}
    ]
  })
  const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
  assert.deepStrictEqual(patch1, {
    clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 1, pendingChanges: 0,
    diffs: {objectId: '_root', type: 'map', props: {
      now: {[`1@${actor}`]: {type: 'value', value: now.getTime(), datatype: 'timestamp'}}
    }}
  })
})
223 |
// Verifies that a change containing a highly compressible value (1024 x 'a')
// is DEFLATE-compressed on the wire (encoded change well under 100 bytes) and
// still decodes to the full value on the destination backend.
it('should support DEFLATE-compressed changes', () => {
  let longString = '', actor = uuid()
  for (let i = 0; i < 1024; i++) longString += 'a'
  const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
    actor, seq: 1, time: 0, startOp: 1, deps: [], ops: [
      {action: 'set', obj: '_root', key: 'longString', value: longString, pred: []}
    ]
  })
  // The encoded change must be much smaller than the 1024-byte payload.
  assert.ok(change1.byteLength < 100)
  const [dest1, patch1] = destBackend.applyChanges(dest, [change1])
  assert.deepStrictEqual(patch1, {
    clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 1, pendingChanges: 0,
    diffs: {objectId: '_root', type: 'map', props: {
      longString: {[`1@${actor}`]: {type: 'value', value: longString}}
    }}
  })
})
241 |
// Tests for round-tripping a whole document: save() on one backend
// implementation, load() on the other, then compare via getPatch().
describe('save() and load()', () => {
  it('should work for a simple document', () => {
    const actor = uuid()
    const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
      actor, seq: 1, time: 0, startOp: 1, deps: [], ops: [
        {action: 'set', obj: '_root', key: 'bird', value: 'magpie', pred: []}
      ]
    })
    // Serialize with one backend, deserialize with the other, and check that
    // the reconstructed document produces the expected full patch.
    const dest1 = destBackend.load(sourceBackend.save(source1))
    const patch = destBackend.getPatch(dest1)
    assert.deepStrictEqual(patch, {
      clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 1, pendingChanges: 0,
      diffs: {objectId: '_root', type: 'map', props: {
        bird: {[`1@${actor}`]: {type: 'value', value: 'magpie'}}
      }}
    })
  })

  it('should allow DEFLATE-compressed columns', () => {
    let longString = '', actor = uuid()
    for (let i = 0; i < 1024; i++) longString += 'a'
    const [source1, p1, change1] = sourceBackend.applyLocalChange(source, {
      actor, seq: 1, time: 0, startOp: 1, deps: [], ops: [
        {action: 'set', obj: '_root', key: 'longString', value: longString, pred: []}
      ]
    })
    // The saved document must compress the repetitive column data.
    const compressedDoc = sourceBackend.save(source1)
    assert.ok(compressedDoc.byteLength < 200)
    const patch = destBackend.getPatch(destBackend.load(compressedDoc))
    assert.deepStrictEqual(patch, {
      clock: {[actor]: 1}, deps: [decodeChange(change1).hash], maxOp: 1, pendingChanges: 0,
      diffs: {objectId: '_root', type: 'map', props: {
        longString: {[`1@${actor}`]: {type: 'value', value: longString}}
      }}
    })
  })

  // TODO need more tests for save() and load()
})
281 | }
282 |
--------------------------------------------------------------------------------
/@types/automerge/index.d.ts:
--------------------------------------------------------------------------------
1 | declare module 'automerge' {
2 | /**
3 | * The return type of `Automerge.init()`, `Automerge.change()`, etc. where `T` is the
4 | * original type. It is a recursively frozen version of the original type.
5 | */
6 | type Doc = FreezeObject
7 |
8 | type ChangeFn = (doc: T) => void
9 |
10 | // Automerge.* functions
11 |
12 | function init(options?: InitOptions): Doc
13 | function from(initialState: T | Doc, options?: InitOptions): Doc
14 | function clone(doc: Doc, options?: InitOptions): Doc
15 | function free(doc: Doc): void
16 |
17 | type InitOptions =
18 | | string // = actorId
19 | | {
20 | actorId?: string
21 | deferActorId?: boolean
22 | freeze?: boolean
23 | patchCallback?: PatchCallback
24 | observable?: Observable
25 | }
26 |
27 | type ChangeOptions =
28 | | string // = message
29 | | {
30 | message?: string
31 | time?: number
32 | patchCallback?: PatchCallback
33 | }
34 |
35 | type PatchCallback = (patch: Patch, before: T, after: T, local: boolean, changes: BinaryChange[]) => void
36 | type ObserverCallback = (diff: MapDiff | ListDiff | ValueDiff, before: T, after: T, local: boolean, changes: BinaryChange[]) => void
37 |
38 | class Observable {
39 | observe(object: T, callback: ObserverCallback): void
40 | }
41 |
42 | function merge(localdoc: Doc, remotedoc: Doc): Doc
43 |
44 | function change(doc: Doc, options: ChangeOptions, callback: ChangeFn): Doc
45 | function change(doc: Doc, callback: ChangeFn): Doc
46 | function emptyChange>(doc: D, options?: ChangeOptions): D
47 | function applyChanges(doc: Doc, changes: BinaryChange[]): [Doc, Patch]
48 | function equals(val1: T, val2: T): boolean
49 | function encodeChange(change: Change): BinaryChange
50 | function decodeChange(binaryChange: BinaryChange): Change
51 |
52 | function getActorId(doc: Doc): string
53 | function getAllChanges(doc: Doc): BinaryChange[]
54 | function getChanges(olddoc: Doc, newdoc: Doc): BinaryChange[]
55 | function getConflicts(doc: Doc, key: keyof T): any
56 | function getHistory(doc: Doc): State[]
57 | function getLastLocalChange(doc: Doc): BinaryChange
58 | function getObjectById(doc: Doc, objectId: OpId): any
59 | function getObjectId(object: any): OpId
60 |
61 | function load(data: BinaryDocument, options?: InitOptions): Doc
62 | function save(doc: Doc): BinaryDocument
63 |
64 | function generateSyncMessage(doc: Doc, syncState: SyncState): [SyncState, BinarySyncMessage?]
65 | function receiveSyncMessage(doc: Doc, syncState: SyncState, message: BinarySyncMessage): [Doc, SyncState, Patch?]
66 | function initSyncState(): SyncState
67 |
68 | // custom CRDT types
69 |
70 | class TableRow {
71 | readonly id: UUID
72 | }
73 |
74 | class Table {
75 | constructor()
76 | add(item: T): UUID
77 | byId(id: UUID): T & TableRow
78 | count: number
79 | ids: UUID[]
80 | remove(id: UUID): void
81 | rows: (T & TableRow)[]
82 | }
83 |
84 | class List extends Array {
85 | insertAt?(index: number, ...args: T[]): List
86 | deleteAt?(index: number, numDelete?: number): List
87 | }
88 |
89 | class Text extends List {
90 | constructor(text?: string | string[])
91 | get(index: number): string
92 | toSpans(): (string | T)[]
93 | }
94 |
95 | // Note that until https://github.com/Microsoft/TypeScript/issues/2361 is addressed, we
96 | // can't treat a Counter like a literal number without force-casting it as a number.
97 | // This won't compile:
98 | // `assert.strictEqual(c + 10, 13) // Operator '+' cannot be applied to types 'Counter' and '10'.ts(2365)`
99 | // But this will:
100 | // `assert.strictEqual(c as unknown as number + 10, 13)`
101 | class Counter extends Number {
102 | constructor(value?: number)
103 | increment(delta?: number): void
104 | decrement(delta?: number): void
105 | toString(): string
106 | valueOf(): number
107 | value: number
108 | }
109 |
110 | class Int { constructor(value: number) }
111 | class Uint { constructor(value: number) }
112 | class Float64 { constructor(value: number) }
113 |
114 | // Readonly variants
115 |
116 | type ReadonlyTable = ReadonlyArray & Table
117 | type ReadonlyList = ReadonlyArray & List
118 | type ReadonlyText = ReadonlyList & Text
119 |
120 | // Front & back
121 |
122 | namespace Frontend {
123 | function applyPatch(doc: Doc, patch: Patch, backendState?: BackendState): Doc
124 | function change(doc: Doc, message: string | undefined, callback: ChangeFn): [Doc, Change]
125 | function change(doc: Doc, callback: ChangeFn): [Doc, Change]
126 | function emptyChange(doc: Doc, message?: string): [Doc, Change]
127 | function from(initialState: T | Doc, options?: InitOptions): [Doc, Change]
128 | function getActorId(doc: Doc): string
129 | function getBackendState(doc: Doc): BackendState
130 | function getConflicts(doc: Doc, key: keyof T): any
131 | function getElementIds(list: any): string[]
132 | function getLastLocalChange(doc: Doc): BinaryChange
133 | function getObjectById(doc: Doc, objectId: OpId): Doc
134 | function getObjectId(doc: Doc): OpId
135 | function init(options?: InitOptions): Doc
136 | function setActorId(doc: Doc, actorId: string): Doc
137 | }
138 |
139 | namespace Backend {
140 | function applyChanges(state: BackendState, changes: BinaryChange[]): [BackendState, Patch]
141 | function applyLocalChange(state: BackendState, change: Change): [BackendState, Patch, BinaryChange]
142 | function clone(state: BackendState): BackendState
143 | function free(state: BackendState): void
144 | function getAllChanges(state: BackendState): BinaryChange[]
145 | function getChangeByHash(state: BackendState, hash: Hash): BinaryChange
146 | function getChanges(state: BackendState, haveDeps: Hash[]): BinaryChange[]
147 | function getChangesAdded(state1: BackendState, state2: BackendState): BinaryChange[]
148 | function getHeads(state: BackendState): Hash[]
149 | function getMissingDeps(state: BackendState, heads?: Hash[]): Hash[]
150 | function getPatch(state: BackendState): Patch
151 | function init(): BackendState
152 | function load(data: BinaryDocument): BackendState
153 | function loadChanges(state: BackendState, changes: BinaryChange[]): BackendState
154 | function save(state: BackendState): BinaryDocument
155 | function generateSyncMessage(state: BackendState, syncState: SyncState): [SyncState, BinarySyncMessage?]
156 | function receiveSyncMessage(state: BackendState, syncState: SyncState, message: BinarySyncMessage): [BackendState, SyncState, Patch?]
157 | function encodeSyncMessage(message: SyncMessage): BinarySyncMessage
158 | function decodeSyncMessage(bytes: BinarySyncMessage): SyncMessage
159 | function initSyncState(): SyncState
160 | function encodeSyncState(syncState: SyncState): BinarySyncState
161 | function decodeSyncState(bytes: BinarySyncState): SyncState
162 | }
163 |
164 | // Internals
165 |
166 | type Hash = string // 64-digit hex string
167 | type OpId = string // of the form `${counter}@${actorId}`
168 |
169 | type UUID = string
170 | type UUIDGenerator = () => UUID
171 | interface UUIDFactory extends UUIDGenerator {
172 | setFactory: (generator: UUIDGenerator) => void
173 | reset: () => void
174 | }
175 | const uuid: UUIDFactory
176 |
177 | interface Clock {
178 | [actorId: string]: number
179 | }
180 |
181 | interface State {
182 | change: Change
183 | snapshot: T
184 | }
185 |
186 | interface BackendState {
187 | // no public methods or properties
188 | }
189 |
190 | type BinaryChange = Uint8Array & { __binaryChange: true }
191 | type BinaryDocument = Uint8Array & { __binaryDocument: true }
192 | type BinarySyncState = Uint8Array & { __binarySyncState: true }
193 | type BinarySyncMessage = Uint8Array & { __binarySyncMessage: true }
194 |
195 | interface SyncState {
196 | // no public methods or properties
197 | }
198 |
199 | interface SyncMessage {
200 | heads: Hash[]
201 | need: Hash[]
202 | have: SyncHave[]
203 | changes: BinaryChange[]
204 | }
205 |
206 | interface SyncHave {
207 | lastSync: Hash[]
208 | bloom: Uint8Array
209 | }
210 |
211 | interface Change {
212 | message: string
213 | actor: string
214 | time: number
215 | seq: number
216 | startOp: number
217 | hash?: Hash
218 | deps: Hash[]
219 | ops: Op[]
220 | }
221 |
222 | interface Op {
223 | action: OpAction
224 | obj: OpId
225 | key: string | number
226 | insert: boolean
227 | elemId?: OpId
228 | child?: OpId
229 | value?: number | boolean | string | null
230 | datatype?: DataType
231 | pred?: OpId[]
232 | values?: (number | boolean | string | null)[]
233 | multiOp?: number
234 | }
235 |
236 | interface Patch {
237 | actor?: string
238 | seq?: number
239 | pendingChanges: number
240 | clock: Clock
241 | deps: Hash[]
242 | diffs: MapDiff
243 | maxOp: number
244 | }
245 |
246 | // Describes changes to a map (in which case propName represents a key in the
247 | // map) or a table object (in which case propName is the primary key of a row).
248 | interface MapDiff {
249 | objectId: OpId // ID of object being updated
250 | type: 'map' | 'table' // type of object being updated
251 | // For each key/property that is changing, props contains one entry
252 | // (properties that are not changing are not listed). The nested object is
253 | // empty if the property is being deleted, contains one opId if it is set to
254 | // a single value, and contains multiple opIds if there is a conflict.
255 | props: {[propName: string]: {[opId: string]: MapDiff | ListDiff | ValueDiff }}
256 | }
257 |
258 | // Describes changes to a list or Automerge.Text object, in which each element
259 | // is identified by its index.
260 | interface ListDiff {
261 | objectId: OpId // ID of object being updated
262 | type: 'list' | 'text' // type of objct being updated
263 | // This array contains edits in the order they should be applied.
264 | edits: (SingleInsertEdit | MultiInsertEdit | UpdateEdit | RemoveEdit)[]
265 | }
266 |
267 | // Describes the insertion of a single element into a list or text object.
268 | // The element can be a nested object.
269 | interface SingleInsertEdit {
270 | action: 'insert'
271 | index: number // the list index at which to insert the new element
272 | elemId: OpId // the unique element ID of the new list element
273 | opId: OpId // ID of the operation that assigned this value
274 | value: MapDiff | ListDiff | ValueDiff
275 | }
276 |
277 | // Describes the insertion of a consecutive sequence of primitive values into
278 | // a list or text object. In the case of text, the values are strings (each
279 | // character as a separate string value). Each inserted value is given a
280 | // consecutive element ID: starting with `elemId` for the first value, the
281 | // subsequent values are given elemIds with the same actor ID and incrementing
282 | // counters. To insert non-primitive values, use SingleInsertEdit.
283 | interface MultiInsertEdit {
284 | action: 'multi-insert'
285 | index: number // the list index at which to insert the first value
286 | elemId: OpId // the unique ID of the first inserted element
287 | values: number[] | boolean[] | string[] | null[] // list of values to insert
288 | datatype?: DataType // all values must be of the same datatype
289 | }
290 |
291 | // Describes the update of the value or nested object at a particular index
292 | // of a list or text object. In the case where there are multiple conflicted
293 | // values at the same list index, multiple UpdateEdits with the same index
294 | // (but different opIds) appear in the edits array of ListDiff.
295 | interface UpdateEdit {
296 | action: 'update'
297 | index: number // the list index to update
298 | opId: OpId // ID of the operation that assigned this value
299 | value: MapDiff | ListDiff | ValueDiff
300 | }
301 |
302 | // Describes the deletion of one or more consecutive elements from a list or
303 | // text object.
304 | interface RemoveEdit {
305 | action: 'remove'
306 | index: number // index of the first list element to remove
307 | count: number // number of list elements to remove
308 | }
309 |
310 | // Describes a primitive value, optionally tagged with a datatype that
311 | // indicates how the value should be interpreted.
312 | interface ValueDiff {
313 | type: 'value'
314 | value: number | boolean | string | null
315 | datatype?: DataType
316 | }
317 |
318 | type OpAction =
319 | | 'del'
320 | | 'inc'
321 | | 'set'
322 | | 'link'
323 | | 'makeText'
324 | | 'makeTable'
325 | | 'makeList'
326 | | 'makeMap'
327 |
328 | type CollectionType =
329 | | 'list' //..
330 | | 'map'
331 | | 'table'
332 | | 'text'
333 |
334 | type DataType =
335 | | 'int'
336 | | 'uint'
337 | | 'float64'
338 | | 'counter'
339 | | 'timestamp'
340 |
341 | // TYPE UTILITY FUNCTIONS
342 |
343 | // Type utility function: Freeze
344 | // Generates a readonly version of a given object, array, or map type applied recursively to the nested members of the root type.
345 | // It's like TypeScript's `readonly`, but goes all the way down a tree.
346 |
347 | // prettier-ignore
348 | type Freeze =
349 | T extends Function ? T
350 | : T extends Text ? ReadonlyText
351 | : T extends Table ? FreezeTable
352 | : T extends List ? FreezeList
353 | : T extends Array ? FreezeArray
354 | : T extends Map ? FreezeMap
355 | : T extends string & infer O ? string & O
356 | : FreezeObject
357 |
358 | interface FreezeTable extends ReadonlyTable> {}
359 | interface FreezeList extends ReadonlyList> {}
360 | interface FreezeArray extends ReadonlyArray> {}
361 | interface FreezeMap extends ReadonlyMap, Freeze> {}
362 | type FreezeObject = { readonly [P in keyof T]: Freeze }
363 | }
364 |
--------------------------------------------------------------------------------
/frontend/index.js:
--------------------------------------------------------------------------------
1 | const { OPTIONS, CACHE, STATE, OBJECT_ID, CONFLICTS, CHANGE, ELEM_IDS } = require('./constants')
2 | const { isObject, copyObject } = require('../src/common')
3 | const uuid = require('../src/uuid')
4 | const { interpretPatch, cloneRootObject } = require('./apply_patch')
5 | const { rootObjectProxy } = require('./proxies')
6 | const { Context } = require('./context')
7 | const { Text } = require('./text')
8 | const { Table } = require('./table')
9 | const { Counter } = require('./counter')
10 | const { Float64, Int, Uint } = require('./numbers')
11 | const { Observable } = require('./observable')
12 |
/**
 * Validates an actor ID, throwing if it is unusable. Actor IDs must consist
 * only of an even number of lowercase hexadecimal digits so that they can be
 * encoded compactly in binary form.
 * @param {string} actorId - candidate actor ID
 * @throws {TypeError} if the value is not a string
 * @throws {RangeError} if the string is not lowercase hex, or has odd length
 */
function checkActorId(actorId) {
  if (typeof actorId !== 'string') {
    throw new TypeError(`Unsupported type of actorId: ${typeof actorId}`)
  }
  const isLowercaseHex = /^[0-9a-f]+$/.test(actorId)
  if (!isLowercaseHex) {
    throw new RangeError('actorId must consist only of lowercase hex digits')
  }
  const hasOddLength = actorId.length % 2 === 1
  if (hasOddLength) {
    throw new RangeError('actorId must consist of an even number of digits')
  }
}
28 |
/**
 * Takes a set of objects that have been updated (in `updated`) and an updated state object
 * `state`, and returns a new immutable document root object based on `doc` that reflects
 * those updates. Objects in the old cache that were not updated are carried over into the
 * new cache unchanged. If the document was initialized with `freeze: true`, the updated
 * objects and the new cache are frozen before being returned.
 */
function updateRootObject(doc, updated, state) {
  let newDoc = updated._root
  if (!newDoc) {
    // The change didn't touch the root object itself, so clone it to carry
    // the new CACHE/STATE metadata
    newDoc = cloneRootObject(doc[CACHE]._root)
    updated._root = newDoc
  }
  Object.defineProperty(newDoc, OPTIONS, {value: doc[OPTIONS]})
  Object.defineProperty(newDoc, CACHE, {value: updated})
  Object.defineProperty(newDoc, STATE, {value: state})

  if (doc[OPTIONS].freeze) {
    for (let objectId of Object.keys(updated)) {
      if (updated[objectId] instanceof Table) {
        // Table has its own freezing logic for its internal structures
        updated[objectId]._freeze()
      } else if (updated[objectId] instanceof Text) {
        Object.freeze(updated[objectId].elems)
        Object.freeze(updated[objectId])
      } else {
        Object.freeze(updated[objectId])
        Object.freeze(updated[objectId][CONFLICTS])
      }
    }
  }

  // Carry over unchanged objects from the previous cache
  for (let objectId of Object.keys(doc[CACHE])) {
    if (!updated[objectId]) {
      updated[objectId] = doc[CACHE][objectId]
    }
  }

  if (doc[OPTIONS].freeze) {
    Object.freeze(updated)
  }
  return newDoc
}
69 |
/**
 * Adds a new change request to the list of pending requests, and returns an
 * updated document root object.
 * The details of the change are taken from the context object `context`.
 * `options` contains properties that may affect how the change is processed; in
 * particular, the `message` property of `options` is an optional human-readable
 * string describing the change, and a numeric `time` property overrides the
 * change's timestamp (otherwise the current time, in seconds, is used).
 * If the document has an integrated backend (`doc[OPTIONS].backend`), the change
 * is applied through it immediately; otherwise the request is queued until a
 * patch arrives from a separate backend. Returns `[newDoc, change]`.
 */
function makeChange(doc, context, options) {
  const actor = getActorId(doc)
  if (!actor) {
    throw new Error('Actor ID must be initialized with setActorId() before making a change')
  }
  const state = copyObject(doc[STATE])
  state.seq += 1

  const change = {
    actor,
    seq: state.seq,
    startOp: state.maxOp + 1,
    deps: state.deps,
    // Change timestamps are in seconds, not milliseconds
    time: (options && typeof options.time === 'number') ? options.time
      : Math.round(new Date().getTime() / 1000),
    message: (options && typeof options.message === 'string') ? options.message : '',
    ops: context.ops
  }

  if (doc[OPTIONS].backend) {
    const [backendState, patch, binaryChange] = doc[OPTIONS].backend.applyLocalChange(state.backendState, change)
    state.backendState = backendState
    state.lastLocalChange = binaryChange
    // NOTE: When performing a local change, the patch is effectively applied twice -- once by the
    // context invoking interpretPatch as soon as any change is made, and the second time here
    // (after a round-trip through the backend). This is perhaps more robust, as changes only take
    // effect in the form processed by the backend, but the downside is a performance cost.
    // Should we change this?
    const newDoc = applyPatchToDoc(doc, patch, state, true)
    const patchCallback = options && options.patchCallback || doc[OPTIONS].patchCallback
    if (patchCallback) patchCallback(patch, doc, newDoc, true, [binaryChange])
    return [newDoc, change]

  } else {
    // No integrated backend: queue the request so that applyPatch() can match
    // the backend's patch to it later
    const queuedRequest = {actor, seq: change.seq, before: doc}
    state.requests = state.requests.concat([queuedRequest])
    state.maxOp = state.maxOp + countOps(change.ops)
    state.deps = []
    return [updateRootObject(doc, context ? context.updated : {}, state), change]
  }
}
119 |
/**
 * Returns the number of operations represented by the array `ops`. A 'set'
 * operation carrying a `values` array (a multi-value insertion) counts as one
 * operation per value; every other operation counts as one.
 */
function countOps(ops) {
  return ops.reduce((total, op) => {
    const isMultiValueSet = op.action === 'set' && op.values
    return total + (isMultiValueSet ? op.values.length : 1)
  }, 0)
}
131 |
/**
 * Returns the binary encoding of the last change made by the local actor,
 * or null if no such change has been recorded on the document state.
 */
function getLastLocalChange(doc) {
  const state = doc[STATE]
  if (state && state.lastLocalChange) {
    return state.lastLocalChange
  }
  return null
}
138 |
/**
 * Applies the changes described in `patch` to the document with root object
 * `doc`. The state object `state` is attached to the new root object.
 * `fromBackend` should be set to `true` if the patch came from the backend,
 * and to `false` if the patch is a transient local (optimistically applied)
 * change from the frontend. Only backend patches update the document's
 * clock, dependency hashes, maxOp counter, and (for our own changes) seq.
 */
function applyPatchToDoc(doc, patch, state, fromBackend) {
  const actor = getActorId(doc)
  const updated = {}
  interpretPatch(patch.diffs, doc, updated)

  if (fromBackend) {
    if (!patch.clock) throw new RangeError('patch is missing clock field')
    // If the backend has seen changes by our own actor, advance our seq so
    // that the next local change uses a fresh sequence number
    if (patch.clock[actor] && patch.clock[actor] > state.seq) {
      state.seq = patch.clock[actor]
    }
    state.clock = patch.clock
    state.deps = patch.deps
    state.maxOp = Math.max(state.maxOp, patch.maxOp)
  }
  return updateRootObject(doc, updated, state)
}
162 |
/**
 * Creates an empty document object with no changes.
 * `options` may be a string (shorthand for `{actorId: options}`) or an object;
 * the properties read here are `actorId`, `deferActorId`, `observable`,
 * `patchCallback`, and `backend`. Unless `deferActorId` is set, a random
 * actor ID is generated when none is supplied, and the actor ID is validated.
 */
function init(options) {
  if (typeof options === 'string') {
    options = {actorId: options}
  } else if (typeof options === 'undefined') {
    options = {}
  } else if (!isObject(options)) {
    throw new TypeError(`Unsupported value for init() options: ${options}`)
  }

  if (!options.deferActorId) {
    if (options.actorId === undefined) {
      options.actorId = uuid()
    }
    checkActorId(options.actorId)
  }

  // If an observable is supplied, chain its patchCallback after any
  // user-supplied patchCallback so that both are invoked on every patch
  if (options.observable) {
    const patchCallback = options.patchCallback, observable = options.observable
    options.patchCallback = (patch, before, after, local, changes) => {
      if (patchCallback) patchCallback(patch, before, after, local, changes)
      observable.patchCallback(patch, before, after, local, changes)
    }
  }

  const root = {}, cache = {_root: root}
  const state = {seq: 0, maxOp: 0, requests: [], clock: {}, deps: []}
  if (options.backend) {
    state.backendState = options.backend.init()
    state.lastLocalChange = null
  }
  // Attach hidden (non-enumerable) metadata to the frozen root object
  Object.defineProperty(root, OBJECT_ID, {value: '_root'})
  Object.defineProperty(root, OPTIONS, {value: Object.freeze(options)})
  Object.defineProperty(root, CONFLICTS, {value: Object.freeze({})})
  Object.defineProperty(root, CACHE, {value: Object.freeze(cache)})
  Object.defineProperty(root, STATE, {value: Object.freeze(state)})
  return Object.freeze(root)
}
203 |
/**
 * Returns a new document object initialized with the given state. This is a
 * convenience wrapper: it creates an empty document with `init(options)` and
 * applies a single change, labelled 'Initialization', that copies the
 * properties of `initialState` into the document.
 */
function from(initialState, options) {
  const emptyDoc = init(options)
  return change(emptyDoc, 'Initialization', doc => Object.assign(doc, initialState))
}
210 |
211 |
/**
 * Changes a document `doc` according to actions taken by the local user.
 * `options` is an object that can contain the following properties:
 * - `message`: an optional descriptive string that is attached to the change.
 * If `options` is a string, it is treated as `message`; `options` may also be
 * omitted entirely, in which case the second argument is the callback.
 *
 * The actual change is made within the callback function `callback`, which is
 * given a mutable (proxied) version of the document as argument. Returns a
 * two-element array `[doc, request]` where `doc` is the updated document, and
 * `request` is the change request to send to the backend. If nothing was
 * actually changed, returns the original `doc` and a `null` change request.
 */
function change(doc, options, callback) {
  if (doc[OBJECT_ID] !== '_root') {
    throw new TypeError('The first argument to Automerge.change must be the document root')
  }
  if (doc[CHANGE]) {
    throw new TypeError('Calls to Automerge.change cannot be nested')
  }

  // Support the two-argument form change(doc, callback)
  if (callback === undefined && typeof options === 'function') {
    callback = options
    options = undefined
  }
  if (typeof options === 'string') {
    options = {message: options}
  }
  if (options !== undefined && !isObject(options)) {
    throw new TypeError('Unsupported type of options')
  }

  const actorId = getActorId(doc)
  if (!actorId) {
    throw new Error('Actor ID must be initialized with setActorId() before making a change')
  }

  // Run the user's callback against a mutable proxy; any mutations are
  // recorded in the context
  const context = new Context(doc, actorId)
  callback(rootObjectProxy(context))

  const madeUpdates = Object.keys(context.updated).length > 0
  // If the callback didn't change anything, return the original document object unchanged
  return madeUpdates ? makeChange(doc, context, options) : [doc, null]
}
255 |
/**
 * Triggers a new change request on the document `doc` without actually
 * modifying its data. `options` is an object as described in the documentation
 * for the `change` function (a string is treated as `message`). This function
 * can be useful for acknowledging the receipt of some message (as it's
 * incorporated into the `deps` field of the change). Returns a two-element
 * array `[doc, request]` where `doc` is the updated document, and `request`
 * is the change request to send to the backend.
 */
function emptyChange(doc, options) {
  if (doc[OBJECT_ID] !== '_root') {
    throw new TypeError('The first argument to Automerge.emptyChange must be the document root')
  }
  if (typeof options === 'string') {
    options = {message: options}
  }
  if (options !== undefined && !isObject(options)) {
    throw new TypeError('Unsupported type of options')
  }

  const actorId = getActorId(doc)
  if (!actorId) {
    throw new Error('Actor ID must be initialized with setActorId() before making a change')
  }

  // An empty context: no operations are recorded
  const context = new Context(doc, actorId)
  return makeChange(doc, context, options)
}
281 |
/**
 * Applies `patch` to the document root object `doc`. This patch must come
 * from the backend; it may be the result of a local change or a remote change.
 * If it is the result of a local change, the `seq` field from the change
 * request should be included in the patch, so that we can match them up here.
 * When the document has an integrated backend, the caller must also pass the
 * updated `backendState` returned by that backend.
 */
function applyPatch(doc, patch, backendState = undefined) {
  if (doc[OBJECT_ID] !== '_root') {
    throw new TypeError('The first argument to Frontend.applyPatch must be the document root')
  }
  const state = copyObject(doc[STATE])

  if (doc[OPTIONS].backend) {
    if (!backendState) {
      throw new RangeError('applyPatch must be called with the updated backend state')
    }
    state.backendState = backendState
    return applyPatchToDoc(doc, patch, state, true)
  }

  // Separate-backend mode: there may be queued local change requests that
  // were applied optimistically. The patch is applied to the document as it
  // was *before* the oldest outstanding request, and that request is either
  // consumed (if the patch confirms our own change) or kept queued.
  let baseDoc

  if (state.requests.length > 0) {
    baseDoc = state.requests[0].before
    if (patch.actor === getActorId(doc)) {
      // The patch confirms our own change: it must arrive in request order
      if (state.requests[0].seq !== patch.seq) {
        throw new RangeError(`Mismatched sequence number: patch ${patch.seq} does not match next request ${state.requests[0].seq}`)
      }
      state.requests = state.requests.slice(1)
    } else {
      state.requests = state.requests.slice()
    }
  } else {
    baseDoc = doc
    state.requests = []
  }

  let newDoc = applyPatchToDoc(baseDoc, patch, state, true)
  if (state.requests.length === 0) {
    return newDoc
  } else {
    // Still-pending requests are rebased on top of the newly patched document
    state.requests[0] = copyObject(state.requests[0])
    state.requests[0].before = newDoc
    return updateRootObject(doc, {}, state)
  }
}
328 |
/**
 * Returns the Automerge object ID of the given object, or undefined if the
 * object does not carry one (i.e. it is not part of an Automerge document).
 */
function getObjectId(object) {
  return object[OBJECT_ID]
}
335 |
/**
 * Returns the object with the given Automerge object ID, looked up in the
 * document's cache, or undefined if no such object exists. Note: when called
 * within a change callback, the returned object is read-only (not a mutable
 * proxy object), so this function throws in that case.
 */
function getObjectById(doc, objectId) {
  // It would be nice to return a proxied object in a change callback.
  // However, that requires knowing the path from the root to the current
  // object, which we don't have if we jumped straight to the object by its ID.
  // If we maintained an index from object ID to parent ID we could work out the path.
  if (doc[CHANGE]) {
    throw new TypeError('Cannot use getObjectById in a change callback')
  }
  return doc[CACHE][objectId]
}
351 |
/**
 * Returns the Automerge actor ID of the given document, preferring the one
 * recorded in the document state over the one from the init options.
 */
function getActorId(doc) {
  const stateActorId = doc[STATE].actorId
  if (stateActorId) {
    return stateActorId
  }
  return doc[OPTIONS].actorId
}
358 |
/**
 * Associates the actor ID `actorId` with the document `doc`. Does not modify
 * `doc` itself; returns a new document object carrying the updated metadata.
 */
function setActorId(doc, actorId) {
  checkActorId(actorId)
  const updatedState = {...doc[STATE], actorId}
  return updateRootObject(doc, {}, updatedState)
}
368 |
/**
 * Fetches the conflicts on the property `key` of `object`, which may be any
 * object in a document (list index for lists, property name for maps).
 * Returns undefined when there is no conflict (zero or one values).
 */
function getConflicts(object, key) {
  const conflicts = object[CONFLICTS]
  if (!conflicts) return undefined
  const values = conflicts[key]
  if (!values) return undefined
  // Only a genuine conflict (more than one concurrently written value) counts
  if (Object.keys(values).length > 1) {
    return values
  }
  return undefined
}
380 |
/**
 * Returns the backend state associated with the document `doc` (only present
 * when a backend implementation was passed to `init()`). `callerName` and
 * `argPos` customise the error message thrown when `doc` is not a document root.
 */
function getBackendState(doc, callerName = null, argPos = 'first') {
  if (doc[OBJECT_ID] === '_root') {
    return doc[STATE].backendState
  }
  // Most likely cause of passing an array here is forgetting to deconstruct the return value of
  // Automerge.applyChanges().
  const extraMsg = Array.isArray(doc) ? '. Note: Automerge.applyChanges now returns an array.' : ''
  const message = callerName
    ? `The ${argPos} argument to Automerge.${callerName} must be the document root${extraMsg}`
    : `Argument is not an Automerge document root${extraMsg}`
  throw new TypeError(message)
}
398 |
/**
 * Given an array or text object from an Automerge document, returns an array
 * containing the unique element ID of each list element/character.
 */
function getElementIds(list) {
  if (list instanceof Text) {
    return list.elems.map(({elemId}) => elemId)
  }
  return list[ELEM_IDS]
}
410 |
// Public API of the frontend module
module.exports = {
  init, from, change, emptyChange, applyPatch,
  getObjectId, getObjectById, getActorId, setActorId, getConflicts, getLastLocalChange,
  getBackendState, getElementIds,
  Text, Table, Counter, Observable, Float64, Int, Uint
}
417 |
--------------------------------------------------------------------------------
/test/proxies_test.js:
--------------------------------------------------------------------------------
const assert = require('assert')
// TEST_DIST=1 runs the suite against the bundled build instead of the source tree
const Automerge = process.env.TEST_DIST === '1' ? require('../dist/automerge') : require('../src/automerge')
const { assertEqualsOneOf } = require('./helpers')
// Actor IDs are UUIDs with the hyphens removed: exactly 32 lowercase hex digits
const UUID_PATTERN = /^[0-9a-f]{32}$/
5 |
6 | describe('Automerge proxy API', () => {
7 | describe('root object', () => {
8 | it('should have a fixed object ID', () => {
9 | Automerge.change(Automerge.init(), doc => {
10 | assert.strictEqual(Automerge.getObjectId(doc), '_root')
11 | })
12 | })
13 |
14 | it('should know its actor ID', () => {
15 | Automerge.change(Automerge.init(), doc => {
16 | assert(UUID_PATTERN.test(Automerge.getActorId(doc).toString()))
17 | assert.notEqual(Automerge.getActorId(doc), '_root')
18 | assert.strictEqual(Automerge.getActorId(Automerge.init('01234567')), '01234567')
19 | })
20 | })
21 |
22 | it('should expose keys as object properties', () => {
23 | Automerge.change(Automerge.init(), doc => {
24 | doc.key1 = 'value1'
25 | assert.strictEqual(doc.key1, 'value1')
26 | })
27 | })
28 |
29 | it('should return undefined for unknown properties', () => {
30 | Automerge.change(Automerge.init(), doc => {
31 | assert.strictEqual(doc.someProperty, undefined)
32 | })
33 | })
34 |
35 | it('should support the "in" operator', () => {
36 | Automerge.change(Automerge.init(), doc => {
37 | assert.strictEqual('key1' in doc, false)
38 | doc.key1 = 'value1'
39 | assert.strictEqual('key1' in doc, true)
40 | })
41 | })
42 |
43 | it('should support Object.keys()', () => {
44 | Automerge.change(Automerge.init(), doc => {
45 | assert.deepStrictEqual(Object.keys(doc), [])
46 | doc.key1 = 'value1'
47 | assert.deepStrictEqual(Object.keys(doc), ['key1'])
48 | doc.key2 = 'value2'
49 | assertEqualsOneOf(Object.keys(doc), ['key1', 'key2'], ['key2', 'key1'])
50 | })
51 | })
52 |
53 | it('should support Object.getOwnPropertyNames()', () => {
54 | Automerge.change(Automerge.init(), doc => {
55 | assert.deepStrictEqual(Object.getOwnPropertyNames(doc), [])
56 | doc.key1 = 'value1'
57 | assert.deepStrictEqual(Object.getOwnPropertyNames(doc), ['key1'])
58 | doc.key2 = 'value2'
59 | assertEqualsOneOf(Object.getOwnPropertyNames(doc), ['key1', 'key2'], ['key2', 'key1'])
60 | })
61 | })
62 |
63 | it('should support bulk assignment with Object.assign()', () => {
64 | Automerge.change(Automerge.init(), doc => {
65 | Object.assign(doc, {key1: 'value1', key2: 'value2'})
66 | assert.deepStrictEqual(doc, {key1: 'value1', key2: 'value2'})
67 | })
68 | })
69 |
70 | it('should support JSON.stringify()', () => {
71 | Automerge.change(Automerge.init(), doc => {
72 | assert.deepStrictEqual(JSON.stringify(doc), '{}')
73 | doc.key1 = 'value1'
74 | assert.deepStrictEqual(JSON.stringify(doc), '{"key1":"value1"}')
75 | doc.key2 = 'value2'
76 | assert.deepStrictEqual(JSON.parse(JSON.stringify(doc)), {
77 | key1: 'value1', key2: 'value2'
78 | })
79 | })
80 | })
81 |
82 | it('should allow access to an object by id', () => {
83 | const doc = Automerge.change(Automerge.init(), doc => {
84 | doc.deepObj = {}
85 | doc.deepObj.deepList = []
86 | const listId = Automerge.getObjectId(doc.deepObj.deepList)
87 | assert.throws(() => { Automerge.getObjectById(doc, listId) }, /Cannot use getObjectById in a change callback/)
88 | })
89 |
90 | const objId = Automerge.getObjectId(doc.deepObj)
91 | assert.strictEqual(Automerge.getObjectById(doc, objId), doc.deepObj)
92 | const listId = Automerge.getObjectId(doc.deepObj.deepList)
93 | assert.strictEqual(Automerge.getObjectById(doc, listId), doc.deepObj.deepList)
94 | })
95 | })
96 |
97 | describe('list object', () => {
98 | let root
99 | beforeEach(() => {
100 | root = Automerge.change(Automerge.init(), doc => {
101 | doc.list = [1, 2, 3]
102 | doc.empty = []
103 | doc.listObjects = [ {id: "first"}, {id: "second"} ]
104 | })
105 | })
106 |
107 | it('should look like a JavaScript array', () => {
108 | Automerge.change(root, doc => {
109 | assert.strictEqual(Array.isArray(doc.list), true)
110 | assert.strictEqual(typeof doc.list, 'object')
111 | assert.strictEqual(toString.call(doc.list), '[object Array]')
112 | })
113 | })
114 |
115 | it('should have a length property', () => {
116 | Automerge.change(root, doc => {
117 | assert.strictEqual(doc.empty.length, 0)
118 | assert.strictEqual(doc.list.length, 3)
119 | })
120 | })
121 |
122 | it('should allow entries to be fetched by index', () => {
123 | Automerge.change(root, doc => {
124 | assert.strictEqual(doc.list[0], 1)
125 | assert.strictEqual(doc.list['0'], 1)
126 | assert.strictEqual(doc.list[1], 2)
127 | assert.strictEqual(doc.list['1'], 2)
128 | assert.strictEqual(doc.list[2], 3)
129 | assert.strictEqual(doc.list['2'], 3)
130 | assert.strictEqual(doc.list[3], undefined)
131 | assert.strictEqual(doc.list['3'], undefined)
132 | assert.strictEqual(doc.list[-1], undefined)
133 | assert.strictEqual(doc.list.someProperty, undefined)
134 | })
135 | })
136 |
137 | it('should support the "in" operator', () => {
138 | Automerge.change(root, doc => {
139 | assert.strictEqual(0 in doc.list, true)
140 | assert.strictEqual('0' in doc.list, true)
141 | assert.strictEqual(3 in doc.list, false)
142 | assert.strictEqual('3' in doc.list, false)
143 | assert.strictEqual('length' in doc.list, true)
144 | assert.strictEqual('someProperty' in doc.list, false)
145 | })
146 | })
147 |
148 | it('should support Object.keys()', () => {
149 | Automerge.change(root, doc => {
150 | assert.deepStrictEqual(Object.keys(doc.list), ['0', '1', '2'])
151 | })
152 | })
153 |
154 | it('should support Object.getOwnPropertyNames()', () => {
155 | Automerge.change(root, doc => {
156 | assert.deepStrictEqual(Object.getOwnPropertyNames(doc.list), ['length', '0', '1', '2'])
157 | })
158 | })
159 |
160 | it('should support JSON.stringify()', () => {
161 | Automerge.change(root, doc => {
162 | assert.deepStrictEqual(JSON.parse(JSON.stringify(doc)), {
163 | list: [1, 2, 3], empty: [], listObjects: [ {id: "first"}, {id: "second"} ]
164 | })
165 | assert.deepStrictEqual(JSON.stringify(doc.list), '[1,2,3]')
166 | })
167 | })
168 |
169 | it('should support iteration', () => {
170 | Automerge.change(root, doc => {
171 | let copy = []
172 | for (let x of doc.list) copy.push(x)
173 | assert.deepStrictEqual(copy, [1, 2, 3])
174 |
175 | // spread operator also uses iteration protocol
176 | assert.deepStrictEqual([0, ...doc.list, 4], [0, 1, 2, 3, 4])
177 | })
178 | })
179 |
180 | describe('should support standard array read-only operations', () => {
181 | it('concat()', () => {
182 | Automerge.change(root, doc => {
183 | assert.deepStrictEqual(doc.list.concat([4, 5, 6]), [1, 2, 3, 4, 5, 6])
184 | assert.deepStrictEqual(doc.list.concat([4], [5, [6]]), [1, 2, 3, 4, 5, [6]])
185 | })
186 | })
187 |
188 | it('entries()', () => {
189 | Automerge.change(root, doc => {
190 | let copy = []
191 | for (let x of doc.list.entries()) copy.push(x)
192 | assert.deepStrictEqual(copy, [[0, 1], [1, 2], [2, 3]])
193 | assert.deepStrictEqual([...doc.list.entries()], [[0, 1], [1, 2], [2, 3]])
194 | })
195 | })
196 |
197 | it('every()', () => {
198 | Automerge.change(root, doc => {
199 | assert.strictEqual(doc.empty.every(() => false), true)
200 | assert.strictEqual(doc.list.every(val => val > 0), true)
201 | assert.strictEqual(doc.list.every(val => val > 2), false)
202 | assert.strictEqual(doc.list.every((val, index) => index < 3), true)
203 | // check that in the callback, 'this' is set to the second argument of 'every'
204 | doc.list.every(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
205 | })
206 | })
207 |
208 | it('filter()', () => {
209 | Automerge.change(root, doc => {
210 | assert.deepStrictEqual(doc.empty.filter(() => false), [])
211 | assert.deepStrictEqual(doc.list.filter(num => num % 2 === 1), [1, 3])
212 | assert.deepStrictEqual(doc.list.filter(() => true), [1, 2, 3])
213 | doc.list.filter(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
214 | })
215 | })
216 |
217 | it('find()', () => {
218 | Automerge.change(root, doc => {
219 | assert.strictEqual(doc.empty.find(() => true), undefined)
220 | assert.strictEqual(doc.list.find(num => num >= 2), 2)
221 | assert.strictEqual(doc.list.find(num => num >= 4), undefined)
222 | doc.list.find(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
223 | })
224 | })
225 |
226 | it('findIndex()', () => {
227 | Automerge.change(root, doc => {
228 | assert.strictEqual(doc.empty.findIndex(() => true), -1)
229 | assert.strictEqual(doc.list.findIndex(num => num >= 2), 1)
230 | assert.strictEqual(doc.list.findIndex(num => num >= 4), -1)
231 | doc.list.findIndex(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
232 | })
233 | })
234 |
235 | it('forEach()', () => {
236 | Automerge.change(root, doc => {
237 | doc.empty.forEach(() => { assert.fail('was called', 'not called', 'callback error') })
238 | let binary = []
239 | doc.list.forEach(num => binary.push(num.toString(2)))
240 | assert.deepStrictEqual(binary, ['1', '10', '11'])
241 | doc.list.forEach(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
242 | })
243 | })
244 |
245 | it('includes()', () => {
246 | Automerge.change(root, doc => {
247 | assert.strictEqual(doc.empty.includes(3), false)
248 | assert.strictEqual(doc.list.includes(3), true)
249 | assert.strictEqual(doc.list.includes(1, 1), false)
250 | assert.strictEqual(doc.list.includes(2, -2), true)
251 | assert.strictEqual(doc.list.includes(0), false)
252 | })
253 | })
254 |
255 | it('indexOf()', () => {
256 | Automerge.change(root, doc => {
257 | assert.strictEqual(doc.empty.indexOf(3), -1)
258 | assert.strictEqual(doc.list.indexOf(3), 2)
259 | assert.strictEqual(doc.list.indexOf(1, 1), -1)
260 | assert.strictEqual(doc.list.indexOf(2, -2), 1)
261 | assert.strictEqual(doc.list.indexOf(0), -1)
262 | assert.strictEqual(doc.list.indexOf(undefined), -1)
263 | })
264 | })
265 |
266 | it('indexOf() with objects', () => {
267 | Automerge.change(root, doc => {
268 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[0]), 0)
269 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[1]), 1)
270 |
271 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[0], 0), 0)
272 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[0], 1), -1)
273 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[1], 0), 1)
274 | assert.strictEqual(doc.listObjects.indexOf(doc.listObjects[1], 1), 1)
275 | })
276 | })
277 |
278 | it('join()', () => {
279 | Automerge.change(root, doc => {
280 | assert.strictEqual(doc.empty.join(', '), '')
281 | assert.strictEqual(doc.list.join(), '1,2,3')
282 | assert.strictEqual(doc.list.join(''), '123')
283 | assert.strictEqual(doc.list.join(', '), '1, 2, 3')
284 | })
285 | })
286 |
287 | it('keys()', () => {
288 | Automerge.change(root, doc => {
289 | let keys = []
290 | for (let x of doc.list.keys()) keys.push(x)
291 | assert.deepStrictEqual(keys, [0, 1, 2])
292 | assert.deepStrictEqual([...doc.list.keys()], [0, 1, 2])
293 | })
294 | })
295 |
296 | it('lastIndexOf()', () => {
297 | Automerge.change(root, doc => {
298 | assert.strictEqual(doc.empty.lastIndexOf(3), -1)
299 | assert.strictEqual(doc.list.lastIndexOf(3), 2)
300 | assert.strictEqual(doc.list.lastIndexOf(3, 1), -1)
301 | assert.strictEqual(doc.list.lastIndexOf(3, -1), 2)
302 | assert.strictEqual(doc.list.lastIndexOf(0), -1)
303 | })
304 | })
305 |
306 | it('map()', () => {
307 | Automerge.change(root, doc => {
308 | assert.deepStrictEqual(doc.empty.map(num => num * 2), [])
309 | assert.deepStrictEqual(doc.list.map(num => num * 2), [2, 4, 6])
310 | assert.deepStrictEqual(doc.list.map((num, index) => index + '->' + num), ['0->1', '1->2', '2->3'])
311 | doc.list.map(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
312 | })
313 | })
314 |
315 | it('reduce()', () => {
316 | Automerge.change(root, doc => {
317 | assert.strictEqual(doc.empty.reduce((sum, val) => sum + val, 0), 0)
318 | assert.strictEqual(doc.list.reduce((sum, val) => sum + val, 0), 6)
319 | assert.strictEqual(doc.list.reduce((sum, val) => sum + val, ''), '123')
320 | assert.strictEqual(doc.list.reduce((sum, val) => sum + val), 6)
321 | assert.strictEqual(doc.list.reduce((sum, val, index) => ((index % 2 === 0) ? (sum + val) : sum), 0), 4)
322 | })
323 | })
324 |
325 | it('reduceRight()', () => {
326 | Automerge.change(root, doc => {
327 | assert.strictEqual(doc.empty.reduceRight((sum, val) => sum + val, 0), 0)
328 | assert.strictEqual(doc.list.reduceRight((sum, val) => sum + val, 0), 6)
329 | assert.strictEqual(doc.list.reduceRight((sum, val) => sum + val, ''), '321')
330 | assert.strictEqual(doc.list.reduceRight((sum, val) => sum + val), 6)
331 | assert.strictEqual(doc.list.reduceRight((sum, val, index) => ((index % 2 === 0) ? (sum + val) : sum), 0), 4)
332 | })
333 | })
334 |
335 | it('slice()', () => {
336 | Automerge.change(root, doc => {
337 | assert.deepStrictEqual(doc.empty.slice(), [])
338 | assert.deepStrictEqual(doc.list.slice(2), [3])
339 | assert.deepStrictEqual(doc.list.slice(-2), [2, 3])
340 | assert.deepStrictEqual(doc.list.slice(0, 0), [])
341 | assert.deepStrictEqual(doc.list.slice(0, 1), [1])
342 | assert.deepStrictEqual(doc.list.slice(0, -1), [1, 2])
343 | })
344 | })
345 |
346 | it('some()', () => {
347 | Automerge.change(root, doc => {
348 | assert.strictEqual(doc.empty.some(() => true), false)
349 | assert.strictEqual(doc.list.some(val => val > 2), true)
350 | assert.strictEqual(doc.list.some(val => val > 4), false)
351 | assert.strictEqual(doc.list.some((val, index) => index > 2), false)
352 | doc.list.some(function () { assert.strictEqual(this.hello, 'world'); return true }, {hello: 'world'})
353 | })
354 | })
355 |
356 | it('toString()', () => {
357 | Automerge.change(root, doc => {
358 | assert.strictEqual(doc.empty.toString(), '')
359 | assert.strictEqual(doc.list.toString(), '1,2,3')
360 | })
361 | })
362 |
363 | it('values()', () => {
364 | Automerge.change(root, doc => {
365 | let values = []
366 | for (let x of doc.list.values()) values.push(x)
367 | assert.deepStrictEqual(values, [1, 2, 3])
368 | assert.deepStrictEqual([...doc.list.values()], [1, 2, 3])
369 | })
370 | })
371 |
372 | it('should allow mutation of objects returned from built in list iteration', () => {
373 | root = Automerge.change(Automerge.init({freeze: true}), doc => {
374 | doc.objects = [{id: 1, value: 'one'}, {id: 2, value: 'two'}]
375 | })
376 | root = Automerge.change(root, doc => {
377 | for (let obj of doc.objects) if (obj.id === 1) obj.value = 'ONE!'
378 | })
379 | assert.deepStrictEqual(root, {objects: [{id: 1, value: 'ONE!'}, {id: 2, value: 'two'}]})
380 | })
381 |
382 | it('should allow mutation of objects returned from readonly list methods', () => {
383 | root = Automerge.change(Automerge.init({freeze: true}), doc => {
384 | doc.objects = [{id: 1, value: 'one'}, {id: 2, value: 'two'}]
385 | })
386 | root = Automerge.change(root, doc => {
387 | doc.objects.find(obj => obj.id === 1).value = 'ONE!'
388 | })
389 | assert.deepStrictEqual(root, {objects: [{id: 1, value: 'ONE!'}, {id: 2, value: 'two'}]})
390 | })
391 | })
392 |
393 | describe('should support standard mutation methods', () => {
394 | it('fill()', () => {
395 | root = Automerge.change(root, doc => doc.list.fill('a'))
396 | assert.deepStrictEqual(root.list, ['a', 'a', 'a'])
397 | root = Automerge.change(root, doc => doc.list.fill('c', 1).fill('b', 1, 2))
398 | assert.deepStrictEqual(root.list, ['a', 'b', 'c'])
399 | })
400 |
401 | it('pop()', () => {
402 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.pop(), 3))
403 | assert.deepStrictEqual(root.list, [1, 2])
404 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.pop(), 2))
405 | assert.deepStrictEqual(root.list, [1])
406 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.pop(), 1))
407 | assert.deepStrictEqual(root.list, [])
408 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.pop(), undefined))
409 | assert.deepStrictEqual(root.list, [])
410 | })
411 |
412 | it('push()', () => {
413 | root = Automerge.change(root, doc => doc.noodles = [])
414 | root = Automerge.change(root, doc => doc.noodles.push('udon', 'soba'))
415 | root = Automerge.change(root, doc => doc.noodles.push('ramen'))
416 | assert.deepStrictEqual(root.noodles, ['udon', 'soba', 'ramen'])
417 | assert.strictEqual(root.noodles[0], 'udon')
418 | assert.strictEqual(root.noodles[1], 'soba')
419 | assert.strictEqual(root.noodles[2], 'ramen')
420 | assert.strictEqual(root.noodles.length, 3)
421 | })
422 |
423 | it('shift()', () => {
424 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.shift(), 1))
425 | assert.deepStrictEqual(root.list, [2, 3])
426 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.shift(), 2))
427 | assert.deepStrictEqual(root.list, [3])
428 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.shift(), 3))
429 | assert.deepStrictEqual(root.list, [])
430 | root = Automerge.change(root, doc => assert.strictEqual(doc.list.shift(), undefined))
431 | assert.deepStrictEqual(root.list, [])
432 | })
433 |
434 | it('splice()', () => {
435 | root = Automerge.change(root, doc => assert.deepStrictEqual(doc.list.splice(1), [2, 3]))
436 | assert.deepStrictEqual(root.list, [1])
437 | root = Automerge.change(root, doc => assert.deepStrictEqual(doc.list.splice(0, 0, 'a', 'b', 'c'), []))
438 | assert.deepStrictEqual(root.list, ['a', 'b', 'c', 1])
439 | root = Automerge.change(root, doc => assert.deepStrictEqual(doc.list.splice(1, 2, '-->'), ['b', 'c']))
440 | assert.deepStrictEqual(root.list, ['a', '-->', 1])
441 | root = Automerge.change(root, doc => assert.deepStrictEqual(doc.list.splice(2, 200, 2), [1]))
442 | assert.deepStrictEqual(root.list, ['a', '-->', 2])
443 | })
444 |
445 | it('unshift()', () => {
446 | root = Automerge.change(root, doc => doc.noodles = [])
447 | root = Automerge.change(root, doc => doc.noodles.unshift('soba', 'udon'))
448 | root = Automerge.change(root, doc => doc.noodles.unshift('ramen'))
449 | assert.deepStrictEqual(root.noodles, ['ramen', 'soba', 'udon'])
450 | assert.strictEqual(root.noodles[0], 'ramen')
451 | assert.strictEqual(root.noodles[1], 'soba')
452 | assert.strictEqual(root.noodles[2], 'udon')
453 | assert.strictEqual(root.noodles.length, 3)
454 | })
455 | })
456 | })
457 | })
458 |
--------------------------------------------------------------------------------
/backend/sync.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Implementation of the data synchronisation protocol that brings a local and a remote document
3 | * into the same state. This is typically used when two nodes have been disconnected for some time,
4 | * and need to exchange any changes that happened while they were disconnected. The two nodes that
5 | * are syncing could be client and server, or server and client, or two peers with symmetric roles.
6 | *
7 | * The protocol is based on this paper: Martin Kleppmann and Heidi Howard. Byzantine Eventual
8 | * Consistency and the Fundamental Limits of Peer-to-Peer Databases. https://arxiv.org/abs/2012.00472
9 | *
10 | * The protocol assumes that every time a node successfully syncs with another node, it remembers
11 | * the current heads (as returned by `Backend.getHeads()`) after the last sync with that node. The
12 | * next time we try to sync with the same node, we start from the assumption that the other node's
13 | * document version is no older than the outcome of the last sync, so we only need to exchange any
14 | * changes that are more recent than the last sync. This assumption may not be true if the other
15 | * node did not correctly persist its state (perhaps it crashed before writing the result of the
16 | * last sync to disk), and we fall back to sending the entire document in this case.
17 | */
18 |
19 | const Backend = require('./backend')
20 | const { hexStringToBytes, bytesToHexString, Encoder, Decoder } = require('./encoding')
21 | const { decodeChangeMeta } = require('./columnar')
22 | const { copyObject } = require('../src/common')
23 |
const HASH_SIZE = 32 // 256 bits = 32 bytes
const MESSAGE_TYPE_SYNC = 0x42 // first byte of a sync message, for identification
const PEER_STATE_TYPE = 0x43 // first byte of an encoded peer state, for identification

// These constants correspond to a 1% false positive rate. The values can be changed without
// breaking compatibility of the network protocol, since the parameters used for a particular
// Bloom filter are encoded in the wire format.
// (BITS_PER_ENTRY sizes the bit array; NUM_PROBES is the number of bits tested per entry.)
const BITS_PER_ENTRY = 10, NUM_PROBES = 7
32 |
33 | /**
34 | * A Bloom filter implementation that can be serialised to a byte array for transmission
35 | * over a network. The entries that are added are assumed to already be SHA-256 hashes,
36 | * so this implementation does not perform its own hashing.
37 | */
class BloomFilter {
  /**
   * `arg` is either an array of SHA-256 hashes (hex strings) to add to a new
   * filter, or a Uint8Array containing a filter previously serialised by the
   * `bytes` getter (an empty Uint8Array denotes an empty filter).
   */
  constructor (arg) {
    if (Array.isArray(arg)) {
      // arg is an array of SHA256 hashes in hexadecimal encoding
      this.numEntries = arg.length
      this.numBitsPerEntry = BITS_PER_ENTRY
      this.numProbes = NUM_PROBES
      // bit array sized to numEntries * numBitsPerEntry bits, rounded up to whole bytes
      this.bits = new Uint8Array(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
      for (let hash of arg) this.addHash(hash)
    } else if (arg instanceof Uint8Array) {
      if (arg.byteLength === 0) {
        // zero-length byte array encodes the empty filter
        this.numEntries = 0
        this.numBitsPerEntry = 0
        this.numProbes = 0
        this.bits = arg
      } else {
        // deserialise the header fields followed by the raw bit array
        const decoder = new Decoder(arg)
        this.numEntries = decoder.readUint32()
        this.numBitsPerEntry = decoder.readUint32()
        this.numProbes = decoder.readUint32()
        this.bits = decoder.readRawBytes(Math.ceil(this.numEntries * this.numBitsPerEntry / 8))
      }
    } else {
      throw new TypeError('invalid argument')
    }
  }

  /**
   * Returns the Bloom filter state, encoded as a byte array.
   * An empty filter is encoded as a zero-length byte array.
   */
  get bytes() {
    if (this.numEntries === 0) return new Uint8Array(0)
    const encoder = new Encoder()
    encoder.appendUint32(this.numEntries)
    encoder.appendUint32(this.numBitsPerEntry)
    encoder.appendUint32(this.numProbes)
    encoder.appendRawBytes(this.bits)
    return encoder.buffer
  }

  /**
   * Given a SHA-256 hash (as hex string), returns an array of probe indexes indicating which bits
   * in the Bloom filter need to be tested or set for this particular entry. We do this by
   * interpreting the first 12 bytes of the hash as three little-endian 32-bit unsigned integers,
   * and then using triple hashing to compute the probe indexes. The algorithm comes from:
   *
   * Peter C. Dillinger and Panagiotis Manolios. Bloom Filters in Probabilistic Verification.
   * 5th International Conference on Formal Methods in Computer-Aided Design (FMCAD), November 2004.
   * http://www.ccis.northeastern.edu/home/pete/pub/bloom-filters-verification.pdf
   */
  getProbes(hash) {
    const hashBytes = hexStringToBytes(hash), modulo = 8 * this.bits.byteLength
    if (hashBytes.byteLength !== 32) throw new RangeError(`Not a 256-bit hash: ${hash}`)
    // on the next three lines, the right shift means interpret value as unsigned
    let x = ((hashBytes[0] | hashBytes[1] << 8 | hashBytes[2] << 16 | hashBytes[3] << 24) >>> 0) % modulo
    let y = ((hashBytes[4] | hashBytes[5] << 8 | hashBytes[6] << 16 | hashBytes[7] << 24) >>> 0) % modulo
    let z = ((hashBytes[8] | hashBytes[9] << 8 | hashBytes[10] << 16 | hashBytes[11] << 24) >>> 0) % modulo
    const probes = [x]
    // triple hashing: successive probes are x += y, y += z (mod filter size)
    for (let i = 1; i < this.numProbes; i++) {
      x = (x + y) % modulo
      y = (y + z) % modulo
      probes.push(x)
    }
    return probes
  }

  /**
   * Sets the Bloom filter bits corresponding to a given SHA-256 hash (given as hex string).
   */
  addHash(hash) {
    for (let probe of this.getProbes(hash)) {
      // probe >>> 3 selects the byte, probe & 7 the bit within that byte
      this.bits[probe >>> 3] |= 1 << (probe & 7)
    }
  }

  /**
   * Tests whether a given SHA-256 hash (given as hex string) is contained in the Bloom filter.
   * May return false positives, but never false negatives (as usual for Bloom filters).
   */
  containsHash(hash) {
    if (this.numEntries === 0) return false
    for (let probe of this.getProbes(hash)) {
      if ((this.bits[probe >>> 3] & (1 << (probe & 7))) === 0) {
        return false
      }
    }
    return true
  }
}
126 |
/**
 * Writes a sorted array of SHA-256 hashes (hexadecimal strings) to `encoder`
 * as a length-prefixed sequence of raw 32-byte values. Throws if `hashes` is
 * not an array, is not in ascending order, or contains a hash of the wrong size.
 */
function encodeHashes(encoder, hashes) {
  if (!Array.isArray(hashes)) throw new TypeError('hashes must be an array')
  encoder.appendUint32(hashes.length)
  hashes.forEach((hash, index) => {
    // require strictly ascending order so the encoding is canonical
    if (index > 0 && hashes[index - 1] >= hash) throw new RangeError('hashes must be sorted')
    const bytes = hexStringToBytes(hash)
    if (bytes.byteLength !== HASH_SIZE) throw new TypeError('heads hashes must be 256 bits')
    encoder.appendRawBytes(bytes)
  })
}
140 |
/**
 * Reads a length-prefixed sequence of raw 32-byte hashes, as produced by
 * encodeHashes(), from `decoder`, and returns them as an array of hex strings.
 */
function decodeHashes(decoder) {
  const count = decoder.readUint32()
  const hashes = []
  while (hashes.length < count) {
    hashes.push(bytesToHexString(decoder.readRawBytes(HASH_SIZE)))
  }
  return hashes
}
152 |
/**
 * Serialises a sync message of the form `{heads, need, have, changes}` into a
 * byte array for transmission, starting with the MESSAGE_TYPE_SYNC marker byte.
 */
function encodeSyncMessage(message) {
  const encoder = new Encoder()
  encoder.appendByte(MESSAGE_TYPE_SYNC)
  encodeHashes(encoder, message.heads)
  encodeHashes(encoder, message.need)
  encoder.appendUint32(message.have.length)
  message.have.forEach(({lastSync, bloom}) => {
    encodeHashes(encoder, lastSync)
    encoder.appendPrefixedBytes(bloom)
  })
  encoder.appendUint32(message.changes.length)
  message.changes.forEach(change => encoder.appendPrefixedBytes(change))
  return encoder.buffer
}
173 |
/**
 * Takes a binary-encoded sync message and decodes it into the form
 * `{heads, need, have, changes}`. Throws a RangeError if the first byte does
 * not identify the payload as a sync message.
 */
function decodeSyncMessage(bytes) {
  const decoder = new Decoder(bytes)
  const messageType = decoder.readByte()
  if (messageType !== MESSAGE_TYPE_SYNC) {
    throw new RangeError(`Unexpected message type: ${messageType}`)
  }
  const heads = decodeHashes(decoder)
  const need = decodeHashes(decoder)
  const haveCount = decoder.readUint32()
  let message = {heads, need, have: [], changes: []}
  for (let i = 0; i < haveCount; i++) {
    const lastSync = decodeHashes(decoder)
    // readPrefixedBytes takes no arguments; previously a stray `decoder`
    // argument was passed here (harmlessly ignored, but inconsistent with the
    // identical call in the changes loop below)
    const bloom = decoder.readPrefixedBytes()
    message.have.push({lastSync, bloom})
  }
  const changeCount = decoder.readUint32()
  for (let i = 0; i < changeCount; i++) {
    const change = decoder.readPrefixedBytes()
    message.changes.push(change)
  }
  // Ignore any trailing bytes -- they can be used for extensions by future versions of the protocol
  return message
}
200 |
/**
 * Serialises the durable portion of a SyncState (currently just the shared
 * heads) as a byte array, for persistence across an application restart or a
 * disconnect/reconnect. Ephemeral parts of the state, which should be reset
 * on reconnect, are deliberately not encoded.
 */
function encodeSyncState(syncState) {
  const output = new Encoder()
  output.appendByte(PEER_STATE_TYPE)
  encodeHashes(output, syncState.sharedHeads)
  return output.buffer
}
212 |
/**
 * Restores a SyncState from a byte array produced by `encodeSyncState`.
 * Fields that were not persisted are re-initialised to their default values.
 * Throws a RangeError if the marker byte does not identify a peer state record.
 */
function decodeSyncState(bytes) {
  const input = new Decoder(bytes)
  const recordType = input.readByte()
  if (recordType !== PEER_STATE_TYPE) {
    throw new RangeError(`Unexpected record type: ${recordType}`)
  }
  const sharedHeads = decodeHashes(input)
  return Object.assign(initSyncState(), { sharedHeads })
}
226 |
/**
 * Builds the `{lastSync, bloom}` entry required for the `have` field of a sync
 * message: a Bloom filter over the hashes of all changes that have been
 * applied since the document version identified by the `lastSync` heads
 * (i.e. changes that are not in `lastSync` or its transitive dependencies).
 */
function makeBloomFilter(backend, lastSync) {
  const hashes = []
  for (let change of Backend.getChanges(backend, lastSync)) {
    hashes.push(decodeChangeMeta(change, true).hash)
  }
  return {lastSync, bloom: new BloomFilter(hashes).bytes}
}
239 |
/**
 * Call this function when a sync message is received from another node. The `message` argument
 * needs to already have been decoded using `decodeSyncMessage()`. This function determines the
 * changes that we need to send to the other node in response. Returns an array of changes (as
 * byte arrays).
 */
function getChangesToSend(backend, have, need) {
  // Without any `have` entries we can only answer explicit requests by hash
  if (have.length === 0) {
    return need.map(hash => Backend.getChangeByHash(backend, hash)).filter(change => change !== undefined)
  }

  const lastSyncHashes = new Set()
  const bloomFilters = []
  for (const h of have) {
    for (const hash of h.lastSync) lastSyncHashes.add(hash)
    bloomFilters.push(new BloomFilter(h.bloom))
  }

  // Every change added since the peer's last sync point, with hash/deps metadata decoded
  const changes = Backend.getChanges(backend, [...lastSyncHashes])
    .map(change => decodeChangeMeta(change, true))

  const changeHashes = new Set()   // hashes of all candidate changes
  const dependents = new Map()     // hash -> hashes of changes that depend on it
  const hashesToSend = new Set()   // hashes the peer (probably) doesn't have
  for (const change of changes) {
    changeHashes.add(change.hash)

    // Build the reverse dependency index so we can walk forward from Bloom-negative changes
    for (const dep of change.deps) {
      if (!dependents.has(dep)) dependents.set(dep, [])
      dependents.get(dep).push(change.hash)
    }

    // A change absent from every Bloom filter is definitely unknown to the peer
    if (bloomFilters.every(bloom => !bloom.containsHash(change.hash))) {
      hashesToSend.add(change.hash)
    }
  }

  // Transitively include everything that depends on a Bloom-negative change
  const stack = [...hashesToSend]
  while (stack.length > 0) {
    const hash = stack.pop()
    for (const dep of dependents.get(hash) || []) {
      if (!hashesToSend.has(dep)) {
        hashesToSend.add(dep)
        stack.push(dep)
      }
    }
  }

  // Explicitly requested changes are always included, even if outside the candidate set
  const changesToSend = []
  for (const hash of need) {
    hashesToSend.add(hash)
    if (!changeHashes.has(hash)) { // Change is not among those returned by getMissingChanges()?
      const change = Backend.getChangeByHash(backend, hash)
      if (change) changesToSend.push(change)
    }
  }

  // Preserve the order in which getMissingChanges() returned the changes
  for (const change of changes) {
    if (hashesToSend.has(change.hash)) changesToSend.push(change.change)
  }
  return changesToSend
}
307 |
/**
 * Returns a fresh SyncState with no knowledge of any peer: no shared heads, nothing sent yet,
 * and nothing known about the other node's heads, needs, or Bloom filters.
 */
function initSyncState() {
  const emptyState = {
    sharedHeads: [],    // heads we know both peers have
    lastSentHeads: [],  // our heads as of the last message we sent
    theirHeads: null,   // peer's heads from their last message (null = unknown)
    theirNeed: null,    // hashes the peer explicitly requested
    theirHave: null,    // peer's {lastSync, bloom} entries
    sentHashes: {},     // hashes of changes we've already sent, to avoid resending
  }
  return emptyState
}
318 |
/** Returns true if arrays `a` and `b` have the same length and strictly-equal elements. */
function compareArrays(a, b) {
  if (a.length !== b.length) return false
  for (let i = 0; i < a.length; i++) {
    if (a[i] !== b[i]) return false
  }
  return true
}
322 |
/**
 * Given a backend and what we believe to be the state of our peer, generates a message which tells
 * them what we have and includes any changes we believe they need.
 *
 * Returns `[syncState, message]`: the (possibly updated) sync state, and either an encoded sync
 * message (byte array) to send to the peer, or null if we believe both peers are already in sync
 * and no message is needed.
 */
function generateSyncMessage(backend, syncState) {
  if (!backend) {
    throw new Error("generateSyncMessage called with no Automerge document")
  }
  if (!syncState) {
    throw new Error("generateSyncMessage requires a syncState, which can be created with initSyncState()")
  }

  let { sharedHeads, lastSentHeads, theirHeads, theirNeed, theirHave, sentHashes } = syncState
  const ourHeads = Backend.getHeads(backend)

  // Hashes to explicitly request from the remote peer: any missing dependencies of unapplied
  // changes, and any of the remote peer's heads that we don't know about
  const ourNeed = Backend.getMissingDeps(backend, theirHeads || [])

  // There are two reasons why ourNeed may be nonempty: 1. we might be missing dependencies due to
  // Bloom filter false positives; 2. we might be missing heads that the other peer mentioned
  // because they (intentionally) only sent us a subset of changes. In case 1, we leave the `have`
  // field of the message empty because we just want to fill in the missing dependencies for now.
  // In case 2, or if ourNeed is empty, we send a Bloom filter to request any unsent changes.
  let ourHave = []
  if (!theirHeads || ourNeed.every(hash => theirHeads.includes(hash))) {
    ourHave = [makeBloomFilter(backend, sharedHeads)]
  }

  // Fall back to a full re-sync if the sender's last sync state includes hashes
  // that we don't know. This could happen if we crashed after the last sync and
  // failed to persist changes that the other node already sent us.
  if (theirHave && theirHave.length > 0) {
    const lastSync = theirHave[0].lastSync
    if (!lastSync.every(hash => Backend.getChangeByHash(backend, hash))) {
      // We need to prompt them to send us a fresh sync message; the one they sent is unintelligible
      // to us, so we don't know what they need. A `have` entry with empty lastSync and an empty
      // Bloom filter asks the peer to restart the sync from scratch.
      const resetMsg = {heads: ourHeads, need: [], have: [{ lastSync: [], bloom: new Uint8Array(0) }], changes: []}
      return [syncState, encodeSyncMessage(resetMsg)]
    }
  }

  // XXX: we should limit ourselves to only sending a subset of all the messages, probably limited by a total message size
  // these changes should ideally be RLE encoded but we haven't implemented that yet.
  let changesToSend = Array.isArray(theirHave) && Array.isArray(theirNeed) ? getChangesToSend(backend, theirHave, theirNeed) : []

  // If the heads are equal, we're in sync and don't need to do anything further
  const headsUnchanged = Array.isArray(lastSentHeads) && compareArrays(ourHeads, lastSentHeads)
  const headsEqual = Array.isArray(theirHeads) && compareArrays(ourHeads, theirHeads)
  if (headsUnchanged && headsEqual && changesToSend.length === 0) {
    // no need to send a sync message if we know we're synced!
    return [syncState, null]
  }

  // Don't resend changes we already sent in a previous message on this connection.
  // TODO: this recomputes the SHA-256 hash of each change; we should restructure this to avoid the
  // unnecessary recomputation
  changesToSend = changesToSend.filter(change => !sentHashes[decodeChangeMeta(change, true).hash])

  // Regular response to a sync message: send any changes that the other node
  // doesn't have. We leave the "have" field empty because the previous message
  // generated by `syncStart` already indicated what changes we have.
  const syncMessage = {heads: ourHeads, have: ourHave, need: ourNeed, changes: changesToSend}
  if (changesToSend.length > 0) {
    // Record what we're sending now, without mutating the caller's sync state
    sentHashes = copyObject(sentHashes)
    for (const change of changesToSend) {
      sentHashes[decodeChangeMeta(change, true).hash] = true
    }
  }

  syncState = Object.assign({}, syncState, {lastSentHeads: ourHeads, sentHashes})
  return [syncState, encodeSyncMessage(syncMessage)]
}
394 |
/**
 * Computes the heads that we share with a peer after we have just received some changes from that
 * peer and applied them. This may not be sufficient to bring our heads in sync with the other
 * peer's heads, since they may have only sent us a subset of their outstanding changes.
 *
 * `myOldHeads` are the local heads before the most recent changes were applied, `myNewHeads` are
 * the local heads after those changes were applied, and `ourOldSharedHeads` is the previous set of
 * shared heads. Applying the changes will have replaced some heads with others, but some heads may
 * have remained unchanged (because they are for branches on which no changes have been added). Any
 * such unchanged heads remain in the sharedHeads. Any sharedHeads that were replaced by applying
 * changes are also replaced as sharedHeads. This is safe because if we received some changes from
 * another peer, that means that peer had those changes, and therefore we now both know about them.
 */
function advanceHeads(myOldHeads, myNewHeads, ourOldSharedHeads) {
  const advanced = new Set()
  // Heads introduced by the changes we just applied (the peer necessarily knows these)
  for (const head of myNewHeads) {
    if (!myOldHeads.includes(head)) advanced.add(head)
  }
  // Previously shared heads that survived the update unchanged
  for (const head of ourOldSharedHeads) {
    if (myNewHeads.includes(head)) advanced.add(head)
  }
  return [...advanced].sort()
}
414 |
415 |
/**
 * Called when a sync message is received from another node. `binaryMessage` is the raw encoded
 * sync message (as produced by the other node's `generateSyncMessage`). Applies any changes the
 * message contains, updates what we believe about the peer, and returns a triple
 * `[backend, syncState, patch]`, where `patch` is a patch for the frontend if changes were
 * applied, and null otherwise.
 */
function receiveSyncMessage(backend, oldSyncState, binaryMessage) {
  // Fixed copy-paste bug: these errors previously named generateSyncMessage
  if (!backend) {
    throw new Error("receiveSyncMessage called with no Automerge document")
  }
  if (!oldSyncState) {
    throw new Error("receiveSyncMessage requires a syncState, which can be created with initSyncState()")
  }

  let { sharedHeads, lastSentHeads, sentHashes } = oldSyncState, patch = null
  const message = decodeSyncMessage(binaryMessage)
  const beforeHeads = Backend.getHeads(backend)

  // If we received changes, we try to apply them to the document. There may still be missing
  // dependencies due to Bloom filter false positives, in which case the backend will enqueue the
  // changes without applying them. The set of changes may also be incomplete if the sender decided
  // to break a large set of changes into chunks.
  if (message.changes.length > 0) {
    [backend, patch] = Backend.applyChanges(backend, message.changes)
    sharedHeads = advanceHeads(beforeHeads, Backend.getHeads(backend), sharedHeads)
  }

  // If heads are equal, indicate we don't need to send a response message
  if (message.changes.length === 0 && compareArrays(message.heads, beforeHeads)) {
    lastSentHeads = message.heads
  }

  // If all of the remote heads are known to us, that means either our heads are equal, or we are
  // ahead of the remote peer. In this case, take the remote heads to be our shared heads.
  const knownHeads = message.heads.filter(head => Backend.getChangeByHash(backend, head))
  if (knownHeads.length === message.heads.length) {
    sharedHeads = message.heads
    // If the remote peer has lost all its data, reset our state to perform a full resync
    if (message.heads.length === 0) {
      lastSentHeads = []
      // Fixed: was `sentHashes = []` — sentHashes is an object keyed by hash (see initSyncState
      // and the `sentHashes[hash]` lookups in generateSyncMessage), not an array
      sentHashes = {}
    }
  } else {
    // If some remote heads are unknown to us, we add all the remote heads we know to
    // sharedHeads, but don't remove anything from sharedHeads. This might cause sharedHeads to
    // contain some redundant hashes (where one hash is actually a transitive dependency of
    // another), but this will be cleared up as soon as we know all the remote heads.
    sharedHeads = [...new Set(knownHeads.concat(sharedHeads))].sort()
  }

  const syncState = {
    sharedHeads, // what we have in common to generate an efficient bloom filter
    lastSentHeads,
    theirHave: message.have, // the information we need to calculate the changes they need
    theirHeads: message.heads,
    theirNeed: message.need,
    sentHashes
  }
  return [backend, syncState, patch]
}
474 |
475 | module.exports = {
476 | receiveSyncMessage, generateSyncMessage,
477 | encodeSyncMessage, decodeSyncMessage,
478 | initSyncState, encodeSyncState, decodeSyncState,
479 | BloomFilter // BloomFilter is a private API, exported only for testing purposes
480 | }
481 |
--------------------------------------------------------------------------------