├── demo
│   ├── src
│   │   └── app
│   │       ├── components
│   │       │   ├── editor.tsx
│   │       │   └── filetree.tsx
│   │       ├── favicon.ico
│   │       ├── page.module.css
│   │       ├── globals.css
│   │       ├── layout.tsx
│   │       ├── page.tsx
│   │       └── ystream.tsx
│   ├── .eslintrc.json
│   ├── next.config.mjs
│   ├── README.md
│   ├── .gitignore
│   ├── public
│   │   ├── vercel.svg
│   │   └── next.svg
│   ├── tsconfig.json
│   └── package.json
├── src
│   ├── utils
│   │   ├── websocket.node.js
│   │   └── websocket.browser.js
│   ├── comms
│   │   ├── websocket-utils.js
│   │   ├── websocket.js
│   │   └── websocket-server.js
│   ├── index.js
│   ├── bindydoc.js
│   ├── comm.js
│   ├── api
│   │   ├── authorization.js
│   │   ├── authentication.js
│   │   └── dbtypes.js
│   ├── utils.js
│   ├── messages.js
│   ├── db.js
│   ├── protocol.js
│   ├── ystream.js
│   ├── extensions
│   │   └── fs.js
│   └── operations.js
├── .well-known
│   └── funding-manifest-urls
├── .dockerignore
├── .gitignore
├── tests
│   ├── index.js
│   ├── actions.tests.js
│   ├── authentication.tests.js
│   ├── yfs.tests.js
│   ├── helpers.js
│   └── ystream.tests.js
├── Dockerfile
├── LICENSE
├── bin
│   ├── server.js
│   └── yfs.js
├── README.md
├── tsconfig.json
└── package.json
/demo/src/app/components/editor.tsx:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/src/utils/websocket.node.js:
--------------------------------------------------------------------------------
1 | export { WebSocket } from 'ws'
2 |
--------------------------------------------------------------------------------
/demo/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next/core-web-vitals"
3 | }
4 |
--------------------------------------------------------------------------------
/.well-known/funding-manifest-urls:
--------------------------------------------------------------------------------
1 | https://github.com/yjs/yjs/blob/main/funding.json
2 |
--------------------------------------------------------------------------------
/demo/src/app/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yjs/titanic/HEAD/demo/src/app/favicon.ico
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | .env
2 | .env.backup*
3 | node_modules
4 | dist
5 | coverage
6 | .test_dbs
7 | tmp
8 | .ystream
9 |
--------------------------------------------------------------------------------
/src/utils/websocket.browser.js:
--------------------------------------------------------------------------------
1 | /* eslint-env browser */
2 |
3 | export const WebSocket = window.WebSocket
4 |
--------------------------------------------------------------------------------
/demo/next.config.mjs:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = { }
3 |
4 | export default nextConfig
5 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | coverage
2 | node_modules
3 | dist
4 | .vscode
5 | .test_dbs
6 | test.html
7 | tmp
8 | .ystream
9 | coverage
10 | docs
11 |
--------------------------------------------------------------------------------
/demo/src/app/page.module.css:
--------------------------------------------------------------------------------
1 | .main {
2 | display: flex;
3 | flex-direction: column;
4 | justify-content: space-between;
5 | align-items: center;
6 | padding: 6rem;
7 | min-height: 100vh;
8 | }
9 |
--------------------------------------------------------------------------------
/demo/src/app/globals.css:
--------------------------------------------------------------------------------
1 | #main {
2 | width: 100%;
3 | height: 100%;
4 | display: flex;
5 | flex-flow: row wrap;
6 | margin: 0;
7 | }
8 |
9 | body, html, #main {
10 | height: 100%;
11 | }
12 |
13 | .filetree {
14 | max-width: 300px;
15 | }
16 |
17 | .cm-theme-light, .cm-editor {
18 | display: flex;
19 | flex-direction: column;
20 | flex-grow: 1;
21 | height: 100%;
22 | }
23 |
--------------------------------------------------------------------------------
/src/comms/websocket-utils.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Status codes for closing websocket connections
3 | *
4 | * We may specify status codes in the range of 3000-3999.
5 | * Ref: https://www.rfc-editor.org/rfc/rfc6455.html#section-7.4
6 | */
7 |
8 | export const statusNormal = 1000
9 | export const statusUnauthenticated = 3000
10 | export const statusParseError = 3100
11 | export const statusConsistencyError = 3200
12 |
--------------------------------------------------------------------------------
/demo/README.md:
--------------------------------------------------------------------------------
1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app).
2 |
3 | ## Getting Started
4 |
5 | First, run the development server:
6 |
7 | ```bash
8 | npm run dev
9 | # or
10 | yarn dev
11 | # or
12 | pnpm dev
13 | # or
14 | bun dev
15 | ```
16 |
17 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
18 |
--------------------------------------------------------------------------------
/demo/src/app/layout.tsx:
--------------------------------------------------------------------------------
1 | import type { Metadata } from "next";
2 | import { Inter } from "next/font/google";
3 | import "./globals.css";
4 |
5 | const inter = Inter({ subsets: ["latin"] });
6 |
7 | export const metadata: Metadata = {
8 | title: "@Y/stream demo",
9 | description: "Collaborative note taking app",
10 | };
11 |
12 | export default function RootLayout({
13 | children,
14 | }: Readonly<{
15 | children: React.ReactNode;
16 | }>) {
17 | return (
18 | <html lang="en">
19 | <body className={inter.className}>{children}</body>
20 | </html>
21 | );
22 | }
23 |
--------------------------------------------------------------------------------
/demo/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | .yarn/install-state.gz
8 |
9 | # testing
10 | /coverage
11 |
12 | # next.js
13 | /.next/
14 | /out/
15 |
16 | # production
17 | /build
18 |
19 | # misc
20 | .DS_Store
21 | *.pem
22 |
23 | # debug
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 | .ystream
38 | tmp
39 |
--------------------------------------------------------------------------------
/demo/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/index.js:
--------------------------------------------------------------------------------
1 | import { runTests } from 'lib0/testing.js'
2 | import { isBrowser, isNode } from 'lib0/environment.js'
3 | import * as log from 'lib0/logging'
4 | import * as ystream from './ystream.tests.js'
5 | import * as yfs from './yfs.tests.js'
6 | import * as actions from './actions.tests.js'
7 | import * as authentication from './authentication.tests.js'
8 |
9 | /* c8 ignore next 3 */
10 | if (isBrowser) {
11 | log.createVConsole(document.body)
12 | }
13 |
14 | runTests(/** @type {any} */ ({
15 | authentication,
16 | ystream,
17 | actions,
18 | yfs
19 | })).then(success => {
20 | /* istanbul ignore next 3 */
21 | if (isNode) {
22 | process.exit(success ? 0 : 1)
23 | }
24 | })
25 |
--------------------------------------------------------------------------------
/demo/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "lib": ["dom", "dom.iterable", "esnext"],
4 | "allowJs": true,
5 | "skipLibCheck": true,
6 | "strict": true,
7 | "noEmit": true,
8 | "esModuleInterop": true,
9 | "module": "esnext",
10 | "target": "esnext",
11 | "moduleResolution": "bundler",
12 | "resolveJsonModule": true,
13 | "isolatedModules": true,
14 | "jsx": "preserve",
15 | "incremental": true,
16 | "plugins": [
17 | {
18 | "name": "next"
19 | }
20 | ],
21 | "paths": {
22 | "@/*": ["./src/*"]
23 | }
24 | },
25 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
26 | "exclude": ["node_modules"]
27 | }
28 |
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | import * as isodb from 'isodb'
2 | import * as db from './db.js'
3 | import { Ystream } from './ystream.js'
4 |
5 | export { Ystream, Collection, YTransaction } from './ystream.js'
6 |
7 | export const remove = isodb.deleteDB
8 |
9 | /**
10 | * @param {string} dbname
11 | * @param {import('./ystream.js').YstreamConf} [conf]
12 | * @return {Promise<Ystream>}
13 | */
14 | export const open = async (dbname, conf) => {
15 | const { idb, isAuthenticated, user, deviceClaim, clientid } = await db.createDb(dbname)
16 | const ystream = new Ystream(dbname, idb, clientid, user, deviceClaim, conf)
17 | if (isAuthenticated) {
18 | ystream.isAuthenticated = true
19 | ystream.emit('authenticate', [])
20 | }
21 | return ystream
22 | }
23 |
--------------------------------------------------------------------------------
/demo/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "ystream-notes-app",
3 | "version": "0.1.0",
4 | "private": true,
5 | "scripts": {
6 | "dev": "next dev",
7 | "build": "next build",
8 | "start": "next start",
9 | "lint": "next lint"
10 | },
11 | "dependencies": {
12 | "@codemirror/lang-javascript": "^6.2.2",
13 | "@uiw/react-codemirror": "^4.22.1",
14 | "@y/stream": "^0.1.2",
15 | "bufferutil": "^4.0.8",
16 | "next": "14.2.3",
17 | "react": "^18",
18 | "react-arborist": "^3.4.0",
19 | "react-dom": "^18",
20 | "react-icons": "^5.2.1",
21 | "y-codemirror.next": "^0.3.4",
22 | "y-indexeddb": "^9.0.12",
23 | "y-webrtc": "^10.3.0",
24 | "y-websocket": "^2.0.3",
25 | "yjs": "^13.6.15"
26 | },
27 | "devDependencies": {
28 | "@types/node": "^20",
29 | "@types/react": "^18",
30 | "@types/react-dom": "^18",
31 | "eslint": "8.57.0",
32 | "eslint-config-next": "14.2.3",
33 | "typescript": "^5"
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Instructions to build the Docker image
2 | # docker build -t ystream .
3 |
4 | # Run the worker as follows:
5 | # docker run --env-file ./.env ystream npm run start:worker
6 |
7 | # Run the server as follows:
8 | # docker run -p 9000:9000 --env-file ./.env ystream npm run start:server
9 |
10 | # Use an official Node.js runtime as a parent image
11 | FROM node:20-alpine
12 |
13 | # Install glibc compatibility for alpine
14 | # See more at https://wiki.alpinelinux.org/wiki/Running_glibc_programs
15 | RUN apk add gcompat
16 |
17 | # Set the working directory in the container
18 | WORKDIR /usr/src/app
19 |
20 | # Copy package.json and package-lock.json (if available) to the working directory
21 | COPY package*.json ./
22 |
23 | # Install any dependencies
24 | RUN npm install
25 |
26 | # Bundle your app source inside the Docker image
27 | COPY . .
28 |
29 | EXPOSE 9000
30 |
31 | # Default command; can be overridden at runtime (e.g. docker run ... npm run start:server)
32 | CMD node ./bin/server.js
33 |
--------------------------------------------------------------------------------
/tests/actions.tests.js:
--------------------------------------------------------------------------------
1 | import * as t from 'lib0/testing'
2 | import * as utils from '../src/utils.js'
3 | import * as operations from '../src/operations.js'
4 | import * as dbtypes from '@y/stream/api/dbtypes'
5 | import { emptyUpdate } from './helpers.js'
6 |
7 | const testOwner = new Uint8Array([1, 2, 3])
8 |
9 | /**
10 | * @param {t.TestCase} _tc
11 | */
12 | export const testMergeOps = (_tc) => {
13 | const ops = [
14 | new dbtypes.OpValue(0, 0, testOwner, 'c1', 'd1', new operations.OpYjsUpdate(emptyUpdate)),
15 | new dbtypes.OpValue(1, 3, testOwner, 'c1', 'd1', new operations.OpYjsUpdate(emptyUpdate)),
16 | new dbtypes.OpValue(0, 1, testOwner, 'c1', 'd1', new operations.OpYjsUpdate(emptyUpdate)),
17 | new dbtypes.OpValue(0, 2, testOwner, 'c2', 'd1', new operations.OpYjsUpdate(emptyUpdate))
18 | ]
19 | const merged = utils.mergeOps(ops, false)
20 | t.assert(merged.length === 2)
21 | t.assert(merged[0].client === 0)
22 | t.assert(merged[0].clock === 1)
23 | t.compare(merged[0].op.update, emptyUpdate)
24 | t.assert(merged[1].client === 0)
25 | t.assert(merged[1].clock === 2)
26 | t.compare(merged[1].op.update, emptyUpdate)
27 | }
28 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2022 Kevin Jahns .
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/bin/server.js:
--------------------------------------------------------------------------------
1 | import { createWSServer } from '../src/comms/websocket-server.js'
2 | import * as json from 'lib0/json'
3 | import * as buffer from 'lib0/buffer'
4 | import * as decoding from 'lib0/decoding'
5 | import * as ecdsa from 'lib0/crypto/ecdsa'
6 | import * as dbtypes from '../src/api/dbtypes.js'
7 |
8 | // @todo this seriously should live somewhere else!
9 | const testServerIdentityRaw = {
10 | privateKey: '{"key_ops":["sign"],"ext":true,"kty":"EC","x":"CYwMakpn0onaNeCa-wqLn4Fzsris_UY4Z5gRQUA9xQOoh94YG9OHhItr6rovaYpZ","y":"74Ulju86IUMJZsYsSjxSjusLjj9U6rozZwbK9Xaqj3MgIWtnjNyjL1D-NzOP3FJ7","crv":"P-384","d":"-yKNOty9EshGL0yAOQ2q6c_b_PNCpeEK9FVPoB0wc9EUyt9BR4DZuqrC9t_DgNaF"}',
11 | user: 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6IkNZd01ha3BuMG9uYU5lQ2Etd3FMbjRGenNyaXNfVVk0WjVnUlFVQTl4UU9vaDk0WUc5T0hoSXRyNnJvdmFZcFoiLCJ5IjoiNzRVbGp1ODZJVU1KWnNZc1NqeFNqdXNMamo5VTZyb3pad2JLOVhhcWozTWdJV3Ruak55akwxRC1Oek9QM0ZKNyIsImNydiI6IlAtMzg0In0='
12 | }
13 |
14 | const testServerIdentity = {
15 | privateKey: await ecdsa.importKeyJwk(json.parse(testServerIdentityRaw.privateKey)),
16 | user: dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testServerIdentityRaw.user)))
17 | }
18 |
19 | export const server = await createWSServer({
20 | identity: testServerIdentity
21 | })
22 |
--------------------------------------------------------------------------------
/demo/public/next.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | # Titanic
3 |
4 | A *unique* attempt to create a multi-master eventually consistent database that
5 | syncs many CRDTs (e.g. Yjs documents) efficiently.
6 |
7 | - network agnostic
8 | - works everywhere (browser, node, ..)
9 | - small footprint
10 | - documents can have "capabilities" that can be synced with a filesystem.
11 | - syncs __many__ documents efficiently
12 | - Auth* integrated
13 | - (experimental software)
14 |
15 | ### Demos
16 | The demos sync changes to a server that I set up on `ystream.yjs.dev`. You can run
17 | your own "server" using `npm run ws-server`. Note that a server is not strictly
18 | required to sync ystreams. It is possible to write p2p connection providers for
19 | ystream.
20 |
21 | #### Sync collection to filesystem
22 | - run `./bin/yfs.js --init dir --collection my-collection-name` to sync `dir` to
23 | a collection.
24 | - run `./bin/yfs.js --clone clone --collection my-collection-name` to clone
25 | collection state to a filesystem
26 | - The different directories are now synced. If you open a file in one directory,
27 | it will sync the changes to the other directory. This even works on different
28 | computers. DON'T SHARE SENSITIVE DATA!
29 |
30 | #### Sync collection to a browser application for real-time editing of the files
31 | - `cd demo`
32 | - Open `demo/src/app/ystream.tsx` and change the variable `collectionName`:
33 | `const collectionName = 'my-collection-name'`
34 | - `npm run dev`
35 | - open [demo](http://localhost:3000)
36 | - You can run this demo in combination with the file-system demo to see
37 | filesystem and editor state synced in real-time.
38 |
39 | ### Work with me
40 |
41 | This project is still in the early stages. I'm looking for a company that wants
42 | to sponsor this work and integrate it in their product. Contact me
43 | .
44 |
45 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Basic Options */
4 | "target": "es2022",
5 | "module": "es2022",
6 | "lib": ["es2022", "dom"], /* Specify library files to be included in the compilation. */
7 | "allowJs": true, /* Allow javascript files to be compiled. */
8 | "checkJs": true, /* Report errors in .js files. */
9 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
10 | "declaration": true, /* Generates corresponding '.d.ts' file. */
11 | "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
12 | "outDir": "dist", // this is overwritten by `npm run types`
13 | "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
14 | "rootDir": "./",
15 | "emitDeclarationOnly": true,
16 | "strict": true,
17 | "noImplicitAny": true,
18 | "moduleResolution": "bundler",
19 | "allowSyntheticDefaultImports": true,
20 | "paths": {
21 | "@y/stream": ["./src/index.js"],
22 | "@y/stream/utils/websocket": ["./src/utils/websocket.browser.js"],
23 | "@y/stream/api/authentication": ["./src/api/authentication.js"],
24 | "@y/stream/api/actions": ["./src/api/actions.js"],
25 | "@y/stream/api/dbtypes": ["./src/api/dbtypes.js"],
26 | "@y/stream/comms/websocket": ["./src/comms/websocket.js"],
27 | "@y/stream/comms/websocket-server": ["./src/comms/websocket-server.js"],
28 | "@y/stream/extensions/fs": ["./src/extensions/fs.js"]
29 | }
30 | },
31 | "include": ["./src/**/*.js", "./tests/**/*.js", "./demo/**/*.js", "./demo/*.js", "./bin/*.js", "extensions/*.js"],
32 | "exclude": ["./dist", "./node_modules"]
33 | }
34 |
--------------------------------------------------------------------------------
/demo/src/app/page.tsx:
--------------------------------------------------------------------------------
1 | 'use client'
2 | import { useEffect, useState } from 'react'
3 | import CodeMirror from '@uiw/react-codemirror'
4 | import * as Y from 'yjs'
5 | import { yCollab } from 'y-codemirror.next'
6 | import { javascript } from '@codemirror/lang-javascript'
7 | import Filetree from './components/filetree'
8 | import { WebsocketProvider } from 'y-websocket'
9 | import { WebrtcProvider } from 'y-webrtc'
10 | import { IndexeddbPersistence } from 'y-indexeddb'
11 | import ystream from './ystream'
12 |
13 | const ydoc = new Y.Doc()
14 | const ytext = ydoc.getText()
15 | ytext.observe(event => console.log('evnet', event))
16 | //
17 | // const websocketProvider = new WebsocketProvider('wss://demos.yjs.dev/ws', 'lf24', ydoc)
18 | // const webrtcProvider = new WebrtcProvider('lf24', ydoc)
19 |
20 | ystream.then(async ({y, ycollection}) => {
21 | ycollection.bindYdoc('root', ydoc)
22 | y.transact(async tr => {
23 | ycollection.setFileInfo(tr, 'root', 'root.md', null, 'text')
24 | })
25 | })
26 |
27 | export default function Home () {
28 | const [ydoc, setYdoc] = useState(null as null | Y.Doc)
29 | return
30 |
31 | {ydoc != null ? : }
32 |
33 | }
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 | // -- copy paste in case of struggle..
69 | //
70 | // export default function Home () {
71 | // const [ydoc, setYdoc] = useState(null as null | Y.Doc)
72 | // return
73 | //
74 | // {ydoc != null ? : }
75 | //
76 | // }
77 | //
78 | //
79 | //
80 | // ystream.then(({ y, ycollection }) => {
81 | // ycollection.bindYdoc('root', ydoc)
82 | // y.transact(async tr => {
83 | // await ycollection.setFileInfo(tr, 'root', 'root.md', null, 'text')
84 | // })
85 | // })
86 |
--------------------------------------------------------------------------------
/demo/src/app/ystream.tsx:
--------------------------------------------------------------------------------
1 | import * as Ystream from '@y/stream'
2 | import * as dbtypes from '@y/stream/api/dbtypes'
3 | import * as wscomm from '@y/stream/comms/websocket'
4 | import * as authentication from '@y/stream/api/authentication'
5 | import * as ecdsa from 'lib0/crypto/ecdsa'
6 | import * as json from 'lib0/json'
7 | import * as decoding from 'lib0/decoding'
8 | import * as buffer from 'lib0/buffer'
9 |
10 | const initYstream = async (): Promise<{ y: Ystream.Ystream, ycollection: Ystream.Collection }> => {
11 | if (typeof window === 'undefined') { return {} as any }
12 | const testServerUser = 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6IkNZd01ha3BuMG9uYU5lQ2Etd3FMbjRGenNyaXNfVVk0WjVnUlFVQTl4UU9vaDk0WUc5T0hoSXRyNnJvdmFZcFoiLCJ5IjoiNzRVbGp1ODZJVU1KWnNZc1NqeFNqdXNMamo5VTZyb3pad2JLOVhhcWozTWdJV3Ruak55akwxRC1Oek9QM0ZKNyIsImNydiI6IlAtMzg0In0='
13 | const testUserRaw = {
14 | privateKey: '{"key_ops":["sign"],"ext":true,"kty":"EC","x":"pAUmLYc-UFmPIt7leafPTbhxQyygcaW7__nPcUNCuu0wH27yS9P_pWFP1GwcsoAN","y":"u3109KjrPGsNUn2k5Whn2uHLAckQPdLNqtM4GpBEpUJwlvVDvk71-lS3YOEYJ_Sq","crv":"P-384","d":"OHnRw5an9hlSqSKg966lFRvB7dow669pVSn7sFZUi7UQh_Y9Xc95SQ6pEWsofsYD"}',
15 | user: 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6InBBVW1MWWMtVUZtUEl0N2xlYWZQVGJoeFF5eWdjYVc3X19uUGNVTkN1dTB3SDI3eVM5UF9wV0ZQMUd3Y3NvQU4iLCJ5IjoidTMxMDlLanJQR3NOVW4yazVXaG4ydUhMQWNrUVBkTE5xdE00R3BCRXBVSndsdlZEdms3MS1sUzNZT0VZSl9TcSIsImNydiI6IlAtMzg0In0='
16 | }
17 | const testUser = {
18 | privateKey: await ecdsa.importKeyJwk(json.parse(testUserRaw.privateKey)),
19 | user: dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testUserRaw.user)))
20 | }
21 | const owner = buffer.toBase64(testUser.user.hash)
22 | const collectionName = 'ystream-24-10'
23 |
24 | const y: Ystream.Ystream = await Ystream.open('.ystream', {
25 | comms: [new wscomm.WebSocketComm('wss://ystream.yjs.dev', { owner, name: collectionName })]
26 | })
27 | await authentication.registerUser(y, dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testServerUser))), { isTrusted: true })
28 | await authentication.setUserIdentity(y, testUser.user, await testUser.user.publicKey, testUser.privateKey)
29 |
30 | const ycollection: Ystream.Collection = y.getCollection(owner, collectionName)
31 | return { y, ycollection }
32 | }
33 |
34 | export default initYstream()
35 |
--------------------------------------------------------------------------------
/tests/authentication.tests.js:
--------------------------------------------------------------------------------
1 | import * as t from 'lib0/testing'
2 | import * as authentication from '../src/api/authentication.js'
3 | import * as Ystream from '../src/index.js'
4 | import * as map from 'lib0/map'
5 | import * as ecdsa from 'lib0/crypto/ecdsa'
6 | import * as encoding from 'lib0/encoding'
7 | import * as promise from 'lib0/promise'
8 | import * as buffer from 'lib0/buffer'
9 | import * as json from 'lib0/json'
10 |
11 | /**
12 | * @type {Map<string, Array<Ystream.Ystream>>}
13 | */
14 | const instances = new Map()
15 |
16 | /**
17 | * @param {t.TestCase} tc
18 | */
19 | const createTestDb = async tc => {
20 | const testInstances = map.setIfUndefined(instances, tc.testName, () => /** @type {any} */ ([]))
21 | const dbname = `./.test_dbs/${tc.moduleName}-${tc.testName}-${testInstances.length}`
22 | await Ystream.remove(dbname)
23 | const y = await Ystream.open(dbname)
24 | testInstances.push(testInstances)
25 | return y
26 | }
27 |
28 | /**
29 | * @param {t.TestCase} _tc
30 | */
31 | export const testGenerateAuth = async _tc => {
32 | const userObject = await authentication.createUserIdentity({ extractable: true })
33 | const [publicKey, privateKey, user] = await promise.all([
34 | ecdsa.exportKeyJwk(userObject.publicKey),
35 | ecdsa.exportKeyJwk(userObject.privateKey),
36 | encoding.encode(encoder => userObject.userIdentity.encode(encoder))
37 | ])
38 | console.log({
39 | publicKey: json.stringify(publicKey),
40 | privateKey: json.stringify(privateKey),
41 | user: buffer.toBase64(user)
42 | })
43 | }
44 |
45 | /**
46 | * @param {t.TestCase} tc
47 | */
48 | export const testBasic = async tc => {
49 | const db1 = await createTestDb(tc)
50 | t.assert(db1.isAuthenticated === false)
51 | const { userIdentity, publicKey, privateKey } = await authentication.createUserIdentity()
52 | await authentication.setUserIdentity(db1, userIdentity, publicKey, privateKey)
53 | t.assert(db1.isAuthenticated)
54 | const db2 = await createTestDb(tc)
55 | const device2 = await authentication.getDeviceIdentity(db2)
56 | // @todo maybe createDeviceClaim should return a dbtypes.DeviceClaim
57 | const claim1 = await authentication.createDeviceClaim(db1, device2)
58 | await authentication.useDeviceClaim(db2, claim1)
59 | t.assert(db2.isAuthenticated)
60 | const uid1 = await authentication.getUserIdentity(db1)
61 | const uid2 = await authentication.getUserIdentity(db2)
62 | t.assert(uid1.ekey === uid2.ekey)
63 | }
64 |
--------------------------------------------------------------------------------
/src/bindydoc.js:
--------------------------------------------------------------------------------
1 | import * as Y from 'yjs'
2 | import * as bc from 'lib0/broadcastchannel'
3 | import * as buffer from 'lib0/buffer'
4 | import * as env from 'lib0/environment'
5 | import * as actions from './api/actions.js'
6 | import * as operations from './operations.js'
7 | import * as promise from 'lib0/promise'
8 |
9 | /**
10 | * @typedef {import('./index.js').Ystream} Ystream
11 | */
12 |
13 | /**
14 | * @param {Ystream} ystream
15 | * @param {string} owner
16 | * @param {string} collection
17 | * @param {string} doc
18 | * @param {Y.Doc} ydoc - should be an empty doc
19 | */
20 | export const bindydoc = async (ystream, owner, collection, doc, ydoc) => {
21 | const bcroom = `ystream#${ystream.dbname}#${owner}#${collection}#${doc}`
22 | const ownerBin = buffer.fromBase64(owner)
23 | // const currentClock = ..
24 | ydoc.on('updateV2', /** @type {function(Uint8Array, any)} */ (update, origin) => {
25 | if (origin !== ystream) {
26 | /* c8 ignore next 3 */
27 | if (env.isBrowser) {
28 | // @todo could use more efficient encoding - allow Uint8Array in lib0/bc
29 | bc.publish(bcroom, buffer.toBase64(update), origin)
30 | } else {
31 | // @todo iterate through opened documents in ystream and apply update
32 | // Thought: iterating through the docs should be the default
33 | }
34 | ystream.transact(tr => actions.addOp(tr, ystream, ownerBin, collection, doc, new operations.OpYjsUpdate(update)))
35 | }
36 | })
37 | const updates = await ystream.transact(async tr => {
38 | const [
39 | updates,
40 | isDeleted
41 | ] = await promise.all([
42 | actions.getDocOps(tr, ystream, ownerBin, collection, doc, operations.OpYjsUpdateType, 0),
43 | actions.isDocDeleted(tr, ystream, ownerBin, collection, doc)
44 | ])
45 | return isDeleted ? null : updates
46 | })
47 | if (updates === null) {
48 | console.error('[ystream] You opened a deleted document. The doc will be destroyed.')
49 | ydoc.destroy()
50 | return null
51 | }
52 | /* c8 ignore start */
53 | if (env.isBrowser) {
54 | const sub = bc.subscribe(bcroom, (data, origin) => {
55 | if (origin !== ystream) {
56 | Y.applyUpdateV2(ydoc, buffer.fromBase64(data), ystream)
57 | }
58 | })
59 | ydoc.on('destroy', () => {
60 | bc.unsubscribe(bcroom, sub)
61 | })
62 | }
63 | /* c8 ignore end */
64 | updates.length > 0 && Y.transact(ydoc, () => {
65 | updates.forEach(update => {
66 | if (update.op.type === operations.OpYjsUpdateType) {
67 | Y.applyUpdateV2(ydoc, update.op.update)
68 | }
69 | })
70 | }, ystream, false)
71 | ydoc.emit('load', [])
72 | }
73 |
--------------------------------------------------------------------------------
/bin/yfs.js:
--------------------------------------------------------------------------------
#!/usr/bin/env node

// Yfs daemon CLI: mirrors a local directory into a Ystream collection (and
// vice versa) over a websocket connection to a remote server.
//
//   --init <dir>     share an existing directory
//   --clone <dir>    clone the collection into a fresh directory
//   --owner, --collection   select the collection (defaults to the test user)

import fs from 'fs'
import * as Ystream from '@y/stream'
import Yfs from '@y/stream/extensions/fs'
import * as dbtypes from '@y/stream/api/dbtypes'
import * as authentication from '@y/stream/api/authentication'
import * as wscomm from '@y/stream/comms/websocket'
import * as env from 'lib0/environment'
import * as buffer from 'lib0/buffer'
import * as ecdsa from 'lib0/crypto/ecdsa'
import * as json from 'lib0/json'
import * as decoding from 'lib0/decoding'
import path from 'node:path'
import * as logging from 'lib0/logging'

// Exactly one of --clone / --init is expected; whichever is set becomes the
// directory observed by the Yfs daemon.
const cloneDir = env.getParam('--clone', '')
const initPath = env.getParam('--init', '')
const observePath = cloneDir || initPath

if (observePath === '') {
  throw new Error('Parameter --clone or --init is expected')
}

// --clone requires the target directory to not exist yet (mkdirSync throws
// otherwise); --init reuses the existing directory as-is.
if (cloneDir !== '') {
  console.log({ cloneDir, initPath })
  fs.mkdirSync(cloneDir)
}

// NOTE(review): hard-coded, publicly known demo credentials — anyone holding
// this private key can write to the default collection. Fine for a demo,
// unsuitable for production use.
const testUserRaw = {
  privateKey: '{"key_ops":["sign"],"ext":true,"kty":"EC","x":"pAUmLYc-UFmPIt7leafPTbhxQyygcaW7__nPcUNCuu0wH27yS9P_pWFP1GwcsoAN","y":"u3109KjrPGsNUn2k5Whn2uHLAckQPdLNqtM4GpBEpUJwlvVDvk71-lS3YOEYJ_Sq","crv":"P-384","d":"OHnRw5an9hlSqSKg966lFRvB7dow669pVSn7sFZUi7UQh_Y9Xc95SQ6pEWsofsYD"}',
  user: 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6InBBVW1MWWMtVUZtUEl0N2xlYWZQVGJoeFF5eWdjYVc3X19uUGNVTkN1dTB3SDI3eVM5UF9wV0ZQMUd3Y3NvQU4iLCJ5IjoidTMxMDlLanJQR3NOVW4yazVXaG4ydUhMQWNrUVBkTE5xdE00R3BCRXBVSndsdlZEdms3MS1sUzNZT0VZSl9TcSIsImNydiI6IlAtMzg0In0='
}

// Public identity of the demo server; registered below as a trusted user.
const testServerUser = 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6IkNZd01ha3BuMG9uYU5lQ2Etd3FMbjRGenNyaXNfVVk0WjVnUlFVQTl4UU9vaDk0WUc5T0hoSXRyNnJvdmFZcFoiLCJ5IjoiNzRVbGp1ODZJVU1KWnNZc1NqeFNqdXNMamo5VTZyb3pad2JLOVhhcWozTWdJV3Ruak55akwxRC1Oek9QM0ZKNyIsImNydiI6IlAtMzg0In0='

const testUser = {
  privateKey: await ecdsa.importKeyJwk(json.parse(testUserRaw.privateKey)),
  user: dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testUserRaw.user)))
}
// Owners are addressed by the base64-encoded hash of their public key.
const owner = buffer.toBase64(testUser.user.hash)

const collectionOwner = env.getParam('--owner', owner)
const collectionName = env.getParam('--collection', 'default')

// The local database lives inside the observed directory under .ystream/yfs.
const y = await Ystream.open(path.join(observePath, '.ystream/yfs'), {
  comms: [new wscomm.WebSocketComm('wss://ystream.yjs.dev', { owner: collectionOwner, name: collectionName })]
})

await authentication.registerUser(y, dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testServerUser))), { isTrusted: true })
await authentication.setUserIdentity(y, testUser.user, await testUser.user.publicKey, testUser.privateKey)

const ycollection = y.getCollection(collectionOwner, collectionName)

// Instantiating Yfs starts the directory observer / sync loop as a side effect;
// the instance itself is intentionally unused afterwards.
// eslint-disable-next-line
const _yfs = new Yfs(ycollection, { observePath })

logging.print(logging.GREEN, 'Started Yfs Daemon', logging.ORANGE, ` observePath="./${observePath}" collection="${collectionOwner},${collectionName}"`)
59 |
--------------------------------------------------------------------------------
/src/comm.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Communication channel API
3 | */
4 |
5 | import * as error from 'lib0/error'
6 | import * as utils from './utils.js'
7 | import * as observable from 'lib0/observable'
8 |
9 | // @todo rename all interfacses to have I* prefix.
10 |
11 | /**
12 | * @typedef {import('./ystream.js').Ystream} Ystream
13 | */
14 |
15 | /**
16 | * @typedef {import('./api/dbtypes.js').OpValue} OpValue
17 | */
18 |
19 | /* c8 ignore start */
20 | /**
21 | * Interface that describes a communication channel.
22 | *
23 | * @interface
24 | * @extends {observable.ObservableV2<{ authenticated: (comm:Comm) => void, "requested-ops": (comm: Comm, sub: { collection: { owner: Uint8Array?, name: string? }, clock: number }) => void }>}
25 | */
export class Comm extends observable.ObservableV2 {
  // Client id of the remote peer; -1 until known.
  get clientid () { return -1 }
  set clientid (_v) { error.methodUnimplemented() }
  /**
   * Identity of the authenticated remote user (null before authentication).
   * @type {import('./api/dbtypes.js').UserIdentity|null}
   */
  get user () { return error.methodUnimplemented() }
  set user (_u) { error.methodUnimplemented() }
  /**
   * Claim proving the remote device's identity (null before authentication).
   * @type {import('./api/dbtypes.js').DeviceClaim|null}
   */
  get deviceClaim () { return error.methodUnimplemented() }
  set deviceClaim (_u) { error.methodUnimplemented() }
  /**
   * Set of synced collections
   * @type {utils.CollectionsSet}
   */
  get synced () { return new utils.CollectionsSet() }
  set synced (_v) { error.methodUnimplemented() }
  /**
   * Challenge bytes used during authentication.
   * (The original comment "Set of synced collections" here appeared to be a
   * copy/paste leftover from `synced` above.)
   * @type {Uint8Array}
   */
  get challenge () { return new Uint8Array() }
  get isDestroyed () { return false }
  get isAuthenticated () { return false }
  set isAuthenticated (_v) { error.methodUnimplemented() }
  // Whether this side already sent its answer to the peer's challenge.
  get sentChallengeAnswer () { return false }
  set sentChallengeAnswer (_v) { error.methodUnimplemented() }

  /**
   * Sink for outgoing protocol messages.
   * @type {WritableStream<{ messages: Array, origin: any }>}
   */
  get writer () { return error.methodUnimplemented() }
  /**
   * Aborting this controller tears down the underlying message stream.
   * @type {AbortController}
   */
  get streamController () { return error.methodUnimplemented() }
  /**
   * The next expected clock from the remote client.
   * @type {number}
   */
  get nextClock () { return error.methodUnimplemented() }
  set nextClock (_v) { error.methodUnimplemented() }

  destroy () {
    error.methodUnimplemented()
  }

  /**
   * @param {number} [_code]
   * @param {string} [_reason]
   */
  close (_code, _reason) {
    error.methodUnimplemented()
  }
}
83 | /* c8 ignore end */
84 |
85 | /* c8 ignore start */
/**
 * Handle for an instantiated communication backend. Concrete backends
 * override destroy to tear down their resources; the default is a no-op.
 */
export class CommHandler {
  destroy () {}
}
89 | /* c8 ignore end */
90 |
91 | /* c8 ignore start */
92 | /**
93 | * @interface
94 | */
export class CommConfiguration {
  /**
   * Instantiate this comm backend for the given Ystream instance.
   *
   * @param {Ystream} _ystream
   * @return {CommHandler}
   */
  init (_ystream) {
    error.methodUnimplemented()
  }
}
104 | /* c8 ignore end */
105 |
--------------------------------------------------------------------------------
/src/api/authorization.js:
--------------------------------------------------------------------------------
1 | import * as dbtypes from '../api/dbtypes.js' // eslint-disable-line
2 | import * as actions from '../api/actions.js'
3 | import * as operations from '../operations.js'
4 | import * as buffer from 'lib0/buffer'
5 | import * as promise from 'lib0/promise'
6 |
7 | /**
8 | * @typedef {import('../ystream.js').Ystream} Ystream
9 | */
10 |
11 | /**
12 | * @param {import('@y/stream').YTransaction} tr
13 | * @param {Ystream} ystream
14 | * @param {Uint8Array} owner
15 | * @param {string} collection
16 | * @param {string} doc
17 | * @param {dbtypes.UserIdentity} user
18 | * @param {operations.AccessType} accessType
19 | */
export const updateCollaborator = async (tr, ystream, owner, collection, doc, user, accessType) => {
  // Merge the existing permission state (if any), then append an op that
  // updates this user's access level on top of it.
  const existingPermOp = await actions.getDocOpsMerged(tr, ystream, owner, collection, doc, operations.OpPermType)
  const userHash = buffer.toBase64(user.hash)
  const updatedPermOp = operations.createOpPermUpdate(existingPermOp?.op || null, userHash, accessType)
  actions.addOp(tr, ystream, owner, collection, doc, updatedPermOp)
}
25 |
26 | /**
27 | * @param {import('@y/stream').YTransaction} tr
28 | * @param {Ystream} ystream
29 | * @param {Uint8Array} owner
30 | * @param {string} collection
31 | * @param {string} doc
32 | * @return {Promise} accessType
33 | */
export const getPermOp = async (tr, ystream, owner, collection, doc) => {
  // Fall back to an empty permission op when the document has none yet.
  const merged = await actions.getDocOpsMerged(tr, ystream, owner, collection, doc, operations.OpPermType)
  return merged?.op || new operations.OpPerm()
}
36 |
37 | /**
38 | * @param {import('@y/stream').YTransaction} tr
39 | * @param {Ystream} ystream
40 | * @param {Uint8Array} owner
41 | * @param {string} collection
42 | * @param {string} doc
43 | * @param {function(operations.OpPerm):boolean} checker
44 | */
45 | const _checkStreamAccess = (tr, ystream, owner, collection, doc, checker) => getPermOp(tr, ystream, owner, collection, doc).then(checker)
46 |
47 | /**
48 | * @param {import('@y/stream').YTransaction} tr
49 | * @param {Ystream} ystream
50 | * @param {Uint8Array} owner
51 | * @param {string} collection
52 | * @param {string} doc
53 | * @param {function(operations.OpPerm):boolean} checker
54 | */
const checkAccess = async (tr, ystream, owner, collection, doc, checker) => {
  // A collection-wide grant (doc name '*') wins; otherwise fall back to the
  // doc-specific permission.
  const collectionWide = await _checkStreamAccess(tr, ystream, owner, collection, '*', checker)
  return collectionWide || _checkStreamAccess(tr, ystream, owner, collection, doc, checker)
}
60 |
61 | /**
62 | * @param {import('@y/stream').YTransaction} tr
63 | * @param {Ystream} ystream
64 | * @param {Uint8Array} owner
65 | * @param {string} collection
66 | * @param {string} doc
67 | * @param {dbtypes.UserIdentity} user
68 | */
69 | export const hasReadAccess = async (tr, ystream, owner, collection, doc, user) => user.isTrusted ? promise.resolveWith(true) : checkAccess(tr, ystream, owner, collection, doc, opperm => opperm.hasReadAccess(buffer.toBase64(user.hash)))
70 |
71 | /**
72 | * @param {import('@y/stream').YTransaction} tr
73 | * @param {Ystream} ystream
74 | * @param {Uint8Array} owner
75 | * @param {string} collection
76 | * @param {string} doc
77 | * @param {dbtypes.UserIdentity} user
78 | */
79 | export const hasWriteAccess = async (tr, ystream, owner, collection, doc, user) => user.isTrusted ? promise.resolveWith(true) : checkAccess(tr, ystream, owner, collection, doc, opperm => opperm.hasWriteAccess(buffer.toBase64(user.hash)))
80 |
--------------------------------------------------------------------------------
/src/utils.js:
--------------------------------------------------------------------------------
1 | import * as map from 'lib0/map'
2 | import * as Y from 'yjs'
3 | import * as dbtypes from './api/dbtypes.js' // eslint-disable-line
4 | import * as operations from './operations.js'
5 | import * as array from 'lib0/array'
6 | import * as buffer from 'lib0/buffer'
7 |
8 | /**
9 | * Merges ops on the same collection & doc
10 | *
11 | * @template {operations.OpTypes} OP
12 | * @param {Array>} ops
13 | * @param {boolean} gc
14 | * @return {Array>}
15 | */
const _mergeOpsHelper = (ops, gc) => {
  // Bucket ops by their op type. Walk right-to-left so each bucket receives
  // the latest op first (the per-type merge implementations rely on this).
  /**
   * @type {Map>}
   */
  const bucketsByType = map.create()
  for (let i = ops.length - 1; i >= 0; i--) {
    const opValue = ops[i]
    map.setIfUndefined(bucketsByType, opValue.op.type, array.create).push(opValue)
  }
  // Merge each bucket with its type's merge implementation.
  /**
   * @type {Array>}
   */
  const merged = []
  bucketsByType.forEach((sameTypeOps, type) => {
    merged.push(/** @type {typeof operations.AbstractOp} */ (operations.typeMap[type]).merge(sameTypeOps, gc))
  })
  return merged
}
32 |
33 | /**
34 | * @deprecated - remove this
35 | * @template {operations.OpTypes} OP
36 | * @param {Array>} ops
37 | * @param {boolean} gc
38 | * @return {Array>}
39 | */
export const mergeOps = (ops, gc) => {
  /**
   * Ops grouped first by collection, then by doc.
   * @type {Map>>>}
   */
  const collections = new Map()
  // Iterate from right to left so we add the "latest" ops first to the collection.
  // Then, when we generate the merged updates (based on the collections map), the ops are already in order
  for (let i = ops.length - 1; i >= 0; i--) {
    const op = ops[i]
    map.setIfUndefined(map.setIfUndefined(collections, op.collection, map.create), op.doc, array.create).push(op)
  }
  /**
   * @type {Array>}
   */
  const mergedOps = []
  collections.forEach(docs => {
    docs.forEach(docops => {
      mergedOps.push(..._mergeOpsHelper(docops, gc))
    })
  })
  // Undo the right-to-left accumulation, then order by local clock.
  // Array#sort is stable, so the reversed grouping order is preserved for
  // entries with equal localClock.
  return mergedOps.reverse().sort((a, b) => a.localClock - b.localClock)
}
62 |
63 | /**
64 | * @template {operations.OpTypes|operations.AbstractOp} OP
65 | * @param {Array>} ops
66 | * @param {boolean} gc
67 | * @return {dbtypes.OpValue|null}
68 | */
export const merge = (ops, gc) => {
  // Nothing to merge.
  if (ops.length === 0) return null
  // All ops are expected to share the type of the first one; dispatch to that
  // type's merge implementation.
  const OpClass = /** @type {typeof operations.AbstractOp} */ (operations.typeMap[ops[0].op.type])
  return /** @type {dbtypes.OpValue} */ (OpClass.merge(ops, gc))
}
75 |
76 | /**
77 | * @param {Array} ops
78 | */
export const filterYjsUpdateOps = ops => {
  // Keep only the ops that carry a Yjs document update payload.
  const yjsUpdateOps = ops.filter(op => op.op.type === operations.OpYjsUpdateType)
  return /** @type {Array>} */ (yjsUpdateOps)
}
81 |
82 | /**
83 | * @param {Array} ops
84 | */
export const mergeYjsUpdateOps = ops => {
  // Collapse all Yjs update ops into one merged v2 update payload.
  const updates = filterYjsUpdateOps(ops).map(op => op.op.update)
  return Y.mergeUpdatesV2(updates)
}
87 |
88 | /**
89 | * Registry for adding owner/collection pairs and checking whether they have been added.
90 | */
export class CollectionsSet {
  constructor () {
    /**
     * Maps the base64-encoded owner hash to the set of collection names
     * registered under that owner.
     * @type {Map>}
     */
    this.owners = new Map()
  }

  /**
   * Remove all registered owner/collection pairs.
   */
  clear () {
    this.owners.clear()
  }

  /**
   * Register a collection under the given owner.
   *
   * @param {Uint8Array} owner
   * @param {string} collection
   */
  add (owner, collection) {
    const key = buffer.toBase64(owner)
    let collections = this.owners.get(key)
    if (collections === undefined) {
      collections = new Set()
      this.owners.set(key, collections)
    }
    collections.add(collection)
  }

  /**
   * Check whether an owner/collection pair has been registered.
   *
   * @param {string} owner - base64-encoded owner hash
   * @param {string} collection
   * @return {boolean}
   */
  has (owner, collection) {
    const collections = this.owners.get(owner)
    return collections !== undefined && collections.has(collection)
  }
}
120 |
--------------------------------------------------------------------------------
/src/messages.js:
--------------------------------------------------------------------------------
1 | import * as encoding from 'lib0/encoding'
2 | import * as decoding from 'lib0/decoding'
3 | import * as error from 'lib0/error'
4 | import * as operations from './operations.js'
5 |
6 | /**
7 | * @typedef {import('isodb').IEncodable} IEncodable
8 | */
9 |
10 | export const RequestDocumentType = 0
11 | export const ResendDocumentType = 1
12 |
13 | /**
14 | * @abstract
15 | * @implements IEncodable
16 | */
export class AbstractMessage {
  /**
   * Numeric tag identifying this message class on the wire.
   * @return {number}
   */
  get type () {
    return error.methodUnimplemented()
  }

  /**
   * Serialize this message's payload into the encoder.
   * @param {encoding.Encoder} _encoder
   */
  encode (_encoder) {
    error.methodUnimplemented()
  }

  /**
   * Inverse of encode: reconstruct a message from the decoder.
   * @param {decoding.Decoder} _decoder
   * @return {AbstractMessage}
   */
  static decode (_decoder) {
    error.methodUnimplemented()
  }
}
40 |
41 | /**
42 | * @implements AbstractMessage
43 | */
export class RequestDocument {
  /**
   * Message asking a peer to (re)send a document's ops, stating which
   * clientid/clock the requester expects next.
   *
   * @param {string} collection
   * @param {string} doc
   * @param {number} expectedClientid
   * @param {number} expectedClock
   */
  constructor (collection, doc, expectedClientid, expectedClock) {
    this.collection = collection
    this.doc = doc
    this.eclientid = expectedClientid
    this.eclock = expectedClock
  }

  get type () { return RequestDocumentType }

  /**
   * Field order must match decode: collection, doc, clientid, clock.
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarString(encoder, this.collection)
    encoding.writeVarString(encoder, this.doc)
    encoding.writeVarUint(encoder, this.eclientid)
    encoding.writeVarUint(encoder, this.eclock)
  }

  /**
   * @param {decoding.Decoder} decoder
   */
  static decode (decoder) {
    // Argument evaluation is left-to-right, matching the encode field order.
    return new RequestDocument(
      decoding.readVarString(decoder),
      decoding.readVarString(decoder),
      decoding.readVarUint(decoder),
      decoding.readVarUint(decoder)
    )
  }
}
81 |
82 | /**
83 | * @todo rename to "RequestedDocument"
84 | * @implements AbstractMessage
85 | */
export class ResendDocument {
  /**
   * Reply to RequestDocument carrying a document's ops, confirming the
   * clientid/clock the sender has persisted.
   *
   * @param {string} collection
   * @param {string} doc
   * @param {number} confirmedClientId
   * @param {number} confirmedClock
   * @param {Array} ops
   */
  constructor (collection, doc, confirmedClientId, confirmedClock, ops) {
    this.collection = collection
    this.doc = doc
    this.cclientid = confirmedClientId
    this.cclock = confirmedClock
    this.ops = ops
  }

  get type () { return ResendDocumentType }

  /**
   * Field order must match decode. Each op is written as a type tag followed
   * by its own encoding.
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarString(encoder, this.collection)
    encoding.writeVarString(encoder, this.doc)
    encoding.writeVarUint(encoder, this.cclientid)
    encoding.writeVarUint(encoder, this.cclock)
    encoding.writeVarUint(encoder, this.ops.length)
    for (const op of this.ops) {
      encoding.writeVarUint(encoder, op.type)
      op.encode(encoder)
    }
  }

  /**
   * @param {decoding.Decoder} decoder
   */
  static decode (decoder) {
    const collection = decoding.readVarString(decoder)
    const doc = decoding.readVarString(decoder)
    const cclientid = decoding.readVarUint(decoder)
    const cclock = decoding.readVarUint(decoder)
    // Length-prefixed op list; dispatch each op to its class via the tag.
    const numOps = decoding.readVarUint(decoder)
    const ops = []
    while (ops.length < numOps) {
      const typeId = /** @type {operations.OpTypeIds} */ (decoding.readVarUint(decoder))
      ops.push(operations.typeMap[typeId].decode(decoder))
    }
    return new ResendDocument(collection, doc, cclientid, cclock, ops)
  }
}
137 |
/**
 * Lookup table from wire type id to message class; used to dispatch decoding.
 */
export const typeMap = {
  [RequestDocumentType]: RequestDocument,
  [ResendDocumentType]: ResendDocument
}
142 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@y/stream",
3 | "version": "0.1.2",
4 | "description": "A fast CRDT database",
5 | "sideEffects": false,
6 | "type": "module",
7 | "types": "./dist/src/index.d.ts",
8 | "funding": {
9 | "type": "GitHub Sponsors ❤",
10 | "url": "https://github.com/sponsors/dmonad"
11 | },
12 | "scripts": {
13 | "docs": "npm run docs:build && 0serve -o ./docs/index.html",
14 | "docs:build": "typedoc src/index.js",
15 | "clean": "rm -rf dist .ystream .test_dbs tmp coverage",
16 | "types": "tsc",
17 | "gentesthtml": "npx 0gentesthtml --script ./tests/index.js > test.html",
18 | "debug:browser": "npm run gentesthtml && npx 0serve -o test.html",
19 | "debug:node": "LOG=* node --inspect-brk tests/index.js",
20 | "ws-server": "LOG=* node ./bin/server.js",
21 | "ws-server:inspect": "LOG=* node --inspect-brk ./bin/server.js",
22 | "test": "LOG=* c8 node tests/index.js",
23 | "test-coverage": "npm run lint && npm run dist && nyc --check-coverage --lines 100 --branches 100 --functions 100 --statements 100 node --unhandled-rejections=strict ./dist/test.cjs --repetition-time 50 --production",
24 | "test-inspect": "node --inspect-brk --unhandled-rejections=strict ./test.js --repetition-time 50 --production",
25 | "test-extensive": "node test.js && npm test -- --repetition-time 30000 --extensive",
26 | "test-code-coverage": "npm run dist && nyc --reporter html node ./dist/test.cjs --production",
27 | "lint": "standard && tsc",
28 | "preversion": "npm run clean && npm run types && rm -rf dist/tests && npm run docs:build",
29 | "postpublish": "npm run clean",
30 | "server:docker:build": "docker build . -t ystream",
    "server:docker:run": "docker run -p 9000:9000 --name ystream ystream"
32 | },
33 | "files": [
34 | "dist",
35 | "src",
36 | "docs"
37 | ],
38 | "exports": {
39 | "./package.json": "./package.json",
40 | ".": {
41 | "types": "./dist/src/index.d.ts",
42 | "default": "./src/index.js"
43 | },
44 | "./comms/websocket": {
45 | "types": "./dist/src/comms/websocket.d.ts",
46 | "default": "./src/comms/websocket.js"
47 | },
48 | "./comms/websocket-server": {
49 | "types": "./dist/src/comms/websocket-server.d.ts",
50 | "default": "./src/comms/websocket-server.js"
51 | },
52 | "./utils/websocket": {
53 | "browser": "./src/utils/websocket.browser.js",
54 | "bun": "./src/utils/websocket.node.js",
55 | "node": "./src/utils/websocket.node.js",
56 | "types": "./dist/src/utils/websocket.browser.d.ts",
57 | "default": "./src/utils/websocket.browser.js"
58 | },
59 | "./api/authentication": {
60 | "types": "./dist/src/api/authentication.d.ts",
61 | "default": "./src/api/authentication.js"
62 | },
63 | "./api/dbtypes": {
64 | "types": "./dist/src/api/dbtypes.d.ts",
65 | "default": "./src/api/dbtypes.js"
66 | },
67 | "./api/actions": {
68 | "types": "./dist/src/api/actions.d.ts",
69 | "default": "./src/api/actions.js"
70 | },
71 | "./extensions/fs": {
72 | "types": "./dist/src/extensions/fs.d.ts",
73 | "default": "./src/extensions/fs.js"
74 | }
75 | },
76 | "bin": {
77 | "ystream-server": "./bin/server.js",
78 | "yfs": "./bin/yfs.js"
79 | },
80 | "repository": {
81 | "type": "git",
82 | "url": "git+https://github.com/yjs/ystream.git"
83 | },
84 | "author": "Kevin Jahns ",
85 | "license": "MIT",
86 | "bugs": {
87 | "url": "https://github.com/yjs/ystream/issues"
88 | },
89 | "homepage": "https://github.com/yjs/ystream#readme",
90 | "standard": {
91 | "ignore": [
92 | "/dist",
93 | "/node_modules",
94 | "/docs"
95 | ]
96 | },
97 | "engines": {
98 | "node": ">=20"
99 | },
100 | "devDependencies": {
101 | "@codemirror/lang-javascript": "^6.2.2",
102 | "@codemirror/lang-markdown": "^6.2.4",
103 | "@codemirror/state": "^6.4.1",
104 | "@codemirror/view": "^6.24.1",
105 | "@types/node": "^18.11.9",
106 | "@types/ws": "^8.5.5",
107 | "c8": "^7.13.0",
108 | "codemirror": "^6.0.1",
109 | "concurrently": "^7.6.0",
110 | "standard": "^17.0.0",
111 | "typedoc": "^0.25.13",
112 | "typescript": "^5.1.6",
113 | "y-codemirror.next": "^0.3.2"
114 | },
115 | "dependencies": {
116 | "isodb": "^0.1.6",
117 | "lib0": "^0.2.82"
118 | },
119 | "peerDependencies": {
120 | "yjs": "^13.5.43"
121 | },
122 | "optionalDependencies": {
123 | "chokidar": "^3.6.0",
124 | "uws": "github:uNetworking/uWebSockets.js#v20.43.0",
125 | "ws": "^8.14.1"
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/demo/src/app/components/filetree.tsx:
--------------------------------------------------------------------------------
1 | import { useEffect, useState } from 'react'
2 | import {
3 | Tree,
4 | CreateHandler,
5 | DeleteHandler,
6 | NodeApi,
7 | NodeRendererProps,
8 | RenameHandler,
9 | } from "react-arborist"
10 | import * as icons from 'react-icons/md'
11 | import { BsTree } from "react-icons/bs";
12 | import * as Y from 'yjs'
13 | import * as promise from 'lib0/promise'
14 | import * as random from 'lib0/random'
15 | import thePromisedYstream from '../ystream'
16 |
17 |
// File-tree panel bound to a Ystream collection: renders the collection's
// document hierarchy via react-arborist and reports the selected document's
// Y.Doc to the parent through `onDocSelection`.
// NOTE(review): several generics/JSX tags in this file appear to have been
// stripped by text extraction (e.g. `Array` without type argument, bare
// `return (...)` bodies). Code below is kept unchanged — restore from VCS.
export default function Filetree ({ onDocSelection }: { onDocSelection: (ydoc: Y.Doc|null) => void }) {
  // Tree data consumed by react-arborist: { id, name, readOnly, editable, children }.
  const [fileTree, setFileTree] = useState([] as Array)
  // Currently selected document id (null = nothing selected).
  const [docid, setDocId] = useState(null as string|null)

  // Rebuild the tree on mount and whenever the ystream instance emits 'ops'.
  useEffect(() => {
    const updateFileTree = () => thePromisedYstream.then(({ y, ycollection }) => {
      y.transact(async tr => {
        const tree = await ycollection.getDocChildrenRecursive(tr, null)
        // Recursively map collection nodes to react-arborist nodes. A node is
        // a folder when it has children or its file info says ftype === 'dir'.
        const mapper = async (node: any) => {
          const isFolder = node.children.length > 0 || (await ycollection.getFileInfo(tr, node.docid))?.ftype === 'dir'
          return {
            id: node.docid,
            name: node.docname,
            readOnly: false,
            editable: true,
            children: isFolder ? (await promise.all(node.children.map(mapper))) : null
          }
        }
        const filetree = await promise.all(tree.map(mapper))
        setFileTree(filetree)
      })
    })
    updateFileTree()
    thePromisedYstream.then(({ y }) => {
      y.on('ops', updateFileTree)
    })
    // Unsubscribe on unmount.
    return () => {
      thePromisedYstream.then(({ y }) => {
        y.off('ops', updateFileTree)
      })
    }
  }, [])

  // Bind a Y.Doc for the selected document and hand it to the parent;
  // destroy the previous doc when the selection changes or on unmount.
  useEffect(() => {
    if (docid === null) { return }
    onDocSelection(null)
    // bind new ydoc to ycollection doc
    const promisedYdoc = thePromisedYstream.then(({ ycollection }) => {
      const ydoc = ycollection.getYdoc(docid)
      onDocSelection(ydoc)
      return ydoc
    })
    return () => {
      promisedYdoc.then(ydoc => { ydoc.destroy() })
    }
  }, [docid])

  // Create a new markdown note under the clicked parent node.
  // NOTE(review): `newNoteId` is a different uuid than the one passed to
  // setFileInfo, so the returned id likely never matches the doc that was
  // actually created — verify intent.
  const onCreateFile: CreateHandler = async (node) => {
    console.log(node)
    return thePromisedYstream.then(async ({ y, ycollection }) => {
      const res = await y.transact(async tr => {
        const newNoteId = random.uuidv4()
        await ycollection.setFileInfo(tr, random.uuidv4(), `new-note#${Math.random().toString(32).slice(2)}.md`, node.parentId, 'text')
        return { id: newNoteId }
      })
      await promise.wait(300)
      return res
    })
  }

  // Delete all docs selected in the tree within a single transaction.
  const onDeleteFile: DeleteHandler = async (event) => {
    await thePromisedYstream.then(async ({ y, ycollection }) => {
      await y.transact(async tr => {
        await promise.all(event.ids.map(docid =>
          ycollection.deleteDoc(tr, docid)
        ))
      })
    })
  }

  // Rename is implemented as copy + delete: create a doc under a new id
  // carrying the merged content and the new name, then remove the old doc.
  const onRename: RenameHandler = async (node) => {
    const { y, ycollection } = await thePromisedYstream
    await y.transact(async tr => {
      const parentNode = node.node.parent
      const parentid = (parentNode?.level ?? -1) >= 0 ? parentNode?.id ?? null: null
      const newid = random.uuidv4()
      const oldContent = await ycollection.getYdocUpdates(tr, node.id)
      await ycollection.addYdocUpdate(tr, newid, Y.mergeUpdatesV2(oldContent || []))
      await ycollection.setFileInfo(tr, newid, node.name, parentid, 'text')
      // delete old doc
      await ycollection.deleteDoc(tr, node.id)
    })
  }

  // Only leaf nodes (files) can be opened as documents.
  const onActivate = (node: NodeApi) => {
    if(node.children === null) setDocId(node.id)
  }

  // NOTE(review): the JSX below was mangled by extraction — element tags are
  // missing. Kept unchanged; restore from version control before editing.
  return (


      {Node}


  )
}
114 |
// Row renderer for react-arborist: arrow + icon + name (or rename input
// while editing). Falls back to the BsTree icon when the node has none.
// NOTE(review): JSX tags were stripped by text extraction (original source
// lines 119-121 are missing entirely); kept unchanged — restore from VCS.
function Node({ node, style, dragHandle }: NodeRendererProps) {
  const Icon = node.data.icon || BsTree;
  return (
     node.isInternal && node.toggle()}
    >
      
      
      
      
        {node.isEditing ?  : node.data.name}
      
    
  );
}
131 |
// Inline rename input: selects its text on focus, commits on Enter, cancels
// on Escape or blur (react-arborist `node.reset` / `node.submit`).
// NOTE(review): the opening input tag (original lines 135-138) was stripped
// by text extraction; kept unchanged — restore from VCS.
function Input({ node }: { node: NodeApi }) {
  return (
     e.currentTarget.select()}
      onBlur={() => node.reset()}
      onKeyDown={(e) => {
        if (e.key === "Escape") node.reset();
        if (e.key === "Enter") node.submit(e.currentTarget.value);
      }}
    />
  );
}
147 |
// Expand/collapse arrow for folder rows; leaf nodes render no arrow.
// NOTE(review): the icon JSX elements were stripped by text extraction;
// kept unchanged — restore from VCS.
function FolderArrow({ node }: { node: NodeApi }) {
  if (node.isLeaf) return ;
  return (
    
      {node.isOpen ?  : }
    
  );
}
156 |
--------------------------------------------------------------------------------
/tests/yfs.tests.js:
--------------------------------------------------------------------------------
1 | import fs from 'fs'
2 | import * as t from 'lib0/testing' // eslint-disable-line
3 | import * as helpers from './helpers.js'
4 | import Yfs from '@y/stream/extensions/fs'
5 | import cp from 'child_process'
6 | import * as number from 'lib0/number'
7 | import * as promise from 'lib0/promise'
8 | import * as logging from 'lib0/logging'
9 |
10 | /**
11 | * Testing loading from the database.
12 | *
13 | * @param {t.TestCase} tc
14 | */
export const testYfsQuickTest = async tc => {
  // Best-effort cleanup of a previous run; rmSync throws when ./tmp does not
  // exist, which is fine to ignore.
  try {
    fs.rmSync('./tmp', { recursive: true })
  } catch (e) {}
  // Seed ./tmp/init with a small real directory (this repo's ./bin) and
  // create an empty ./tmp/clone that should converge to the same content.
  fs.mkdirSync('./tmp')
  fs.mkdirSync('./tmp/init')
  fs.cpSync('./bin', './tmp/init/bin', { recursive: true })
  fs.mkdirSync('./tmp/clone')
  const th = await helpers.createTestScenario(tc)
  const [{ collection: ycollection1 }, { collection: ycollection2 }] = await th.createClients(2)
  // One Yfs daemon per client, each observing its own directory.
  const yfs1 = new Yfs(ycollection1, { observePath: './tmp/init' })
  const yfs2 = new Yfs(ycollection2, { observePath: './tmp/clone' })
  const waitFilesSynced = async () => {
    console.log('before wait coll')
    await helpers.waitCollectionsSynced(ycollection1, ycollection2)
    console.log('after wait coll')
    // Poll (every 300ms, no retry limit) until both directories contain the
    // same number of entries. `find | wc -l` counts the directory itself,
    // hence the `- 1`.
    await promise.untilAsync(async () => {
      const numOfInitFiles = number.parseInt(cp.execSync('find ./tmp/init | wc -l').toString()) - 1
      const numOfClonedFiles = number.parseInt(cp.execSync('find ./tmp/clone | wc -l').toString()) - 1
      const cs1 = await ycollection1.ystream.transact(tr => ycollection1.getDocChildrenRecursive(tr, null))
      const cs2 = await ycollection2.ystream.transact(tr => ycollection2.getDocChildrenRecursive(tr, null))
      console.log({ numOfClonedFiles, numOfInitFiles })
      console.log({ cs1: JSON.stringify(cs1), cs2: JSON.stringify(cs2) })
      return numOfClonedFiles === numOfInitFiles
    }, 0, 300)
  }
  console.log('before wait files synced')
  await waitFilesSynced()
  t.info('successfully synced initial files')
  yfs1.destroy()
  yfs2.destroy()
  await th.destroy()
}
48 |
49 | /**
50 | * Testing loading from the database.
51 | *
52 | * @param {t.TestCase} tc
53 | */
export const testYfsBasics = async tc => {
  // Best-effort cleanup of a previous run; ignore "does not exist" errors.
  try {
    fs.rmSync('./tmp', { recursive: true })
  } catch (e) {}
  // Seed ./tmp/init with this repo's ./src and prepare an empty clone dir.
  fs.mkdirSync('./tmp')
  fs.cpSync('./src', './tmp/init', { recursive: true })
  fs.mkdirSync('./tmp/clone')
  const th = await helpers.createTestScenario(tc)
  const [{ collection: ycollection1 }, { collection: ycollection2 }] = await th.createClients(2)
  const yfs1 = new Yfs(ycollection1, { observePath: './tmp/init' })
  const yfs2 = new Yfs(ycollection2, { observePath: './tmp/clone' })
  // Poll until ./tmp/init and `otherdir` contain the same number of entries.
  // `find | wc -l` counts the directory itself, hence the `- 1`.
  const waitFilesSynced = async (otherCollection = ycollection2, otherdir = './tmp/clone') => {
    await helpers.waitCollectionsSynced(ycollection1, otherCollection)
    await promise.untilAsync(async () => {
      const numOfInitFiles = number.parseInt(cp.execSync('find ./tmp/init | wc -l').toString()) - 1
      const numOfClonedFiles = number.parseInt(cp.execSync(`find ${otherdir} | wc -l`).toString()) - 1
      // const cs1 = await ycollection1.ystream.transact(tr => ycollection1.getDocChildrenRecursive(tr, null))
      // const cs2 = await other.ystream.transact(tr => other.getDocChildrenRecursive(tr, null))
      logging.print('yfs file sync status', { numOfClonedFiles, numOfInitFiles })
      // console.log({ cs1, cs2 })
      return numOfClonedFiles === numOfInitFiles
    }, 0, 300)
  }
  await t.measureTimeAsync('sync initial files', async () => {
    await waitFilesSynced()
  })
  // Changes made on the *clone* side must propagate back to init.
  await t.measureTimeAsync('sync file delete', async () => {
    cp.execSync('rm -rf ./tmp/clone/actions.js')
    await waitFilesSynced()
  })
  await t.measureTimeAsync('sync folder delete', async () => {
    cp.execSync('rm -rf ./tmp/clone/api')
    await waitFilesSynced()
  })
  await t.measureTimeAsync('sync copied folder', async () => {
    cp.execSync('cp -rf ./src ./tmp/init/src-copy')
    await waitFilesSynced()
  })
  await t.measureTimeAsync('move folder', async () => {
    cp.execSync('mv ./tmp/init/src-copy ./tmp/init/src-moved')
    await waitFilesSynced()
  })
  // Content edits must sync as well; the short wait lets the file watcher
  // pick up the change before polling starts.
  await t.measureTimeAsync('edit file', async () => {
    cp.execSync('echo newcontent > ./tmp/clone/index.js')
    await promise.wait(300)
    await waitFilesSynced()
    let fileContent = fs.readFileSync('./tmp/init/index.js').toString('utf8')
    // Strip trailing newlines added by `echo` before comparing.
    while (fileContent[fileContent.length - 1] === '\n') {
      fileContent = fileContent.slice(0, -1)
    }
    t.compare(fileContent, 'newcontent')
  })
  // Stress: a large tree with many small files.
  await t.measureTimeAsync('copy node_modules', async () => {
    cp.execSync('cp -rf ./node_modules ./tmp/init/')
    await waitFilesSynced()
  })
  // A client joining late must be able to perform a full sync from scratch.
  await t.measureTimeAsync('complete a full sync with a third client', async () => {
    fs.mkdirSync('./tmp/clone2')
    const [{ collection: ycollection3 }] = await th.createClients(1)
    const yfs3 = new Yfs(ycollection3, { observePath: './tmp/clone2' })
    await waitFilesSynced(ycollection3, './tmp/clone2')
    t.info('successfully completed a full sync with a third client')
    yfs3.destroy()
  })
  await yfs1.destroy()
  await yfs2.destroy()
  await th.destroy()
}
122 |
--------------------------------------------------------------------------------
/src/api/authentication.js:
--------------------------------------------------------------------------------
1 | import * as ecdsa from 'lib0/crypto/ecdsa'
2 | import * as dbtypes from '../api/dbtypes.js'
3 | import * as jose from 'lib0/crypto/jwt'
4 | import * as time from 'lib0/time'
5 | import * as json from 'lib0/json'
6 | import * as error from 'lib0/error'
7 | import * as promise from 'lib0/promise'
8 | import * as string from 'lib0/string'
9 | import * as sha256 from 'lib0/hash/sha256'
10 |
11 | /**
12 | * @typedef {import('../ystream.js').Ystream} Ystream
13 | */
14 |
15 | /**
16 | * @note this must not be called within a transaction, as it will never finish.
17 | *
18 | * @param {Ystream} ystream
19 | * @param {dbtypes.UserIdentity} userIdentity
20 | * @param {CryptoKey} publicKey
21 | * @param {CryptoKey|null} privateKey
22 | */
export const setUserIdentity = async (ystream, userIdentity, publicKey, privateKey) => {
  // Persist the user's keys and identity, and cache the identity on the
  // ystream instance. When a private key is supplied, the transaction also
  // returns the stored device identity so a device claim can be issued below.
  const deviceIdentity = await ystream.transact(async tr => {
    // NOTE(review): debug logging left in library code — consider removing
    // or routing through lib0/logging.
    console.log(ystream.clientid, 'setting user identity', userIdentity.ekey)
    tr.objects.user.set('public', publicKey)
    privateKey && tr.objects.user.set('private', privateKey)
    // NOTE(review): unlike registerUser, this `add` is neither awaited nor
    // guarded by an existing-hash check — confirm this is intended.
    tr.tables.users.add(userIdentity)
    tr.objects.user.set('identity', userIdentity)
    ystream.user = userIdentity
    if (privateKey) {
      return /** @type {dbtypes.DeviceIdentity} */ (await tr.objects.device.get('identity'))
    }
    return null
  })
  if (deviceIdentity != null) {
    // Sign a claim (JWT issued by the user key) declaring this device,
    // then apply it. Both helpers open their own transactions, which is why
    // this runs after the transaction above (see the @note on this function).
    const claim = await createDeviceClaim(ystream, deviceIdentity)
    await useDeviceClaim(ystream, claim)
  }
}
41 |
/**
 * Generate a fresh ECDSA key pair and wrap the public key as a UserIdentity.
 *
 * @param {Object} opts
 * @param {boolean} [opts.extractable] - whether the generated keys may be exported
 */
export const createUserIdentity = async ({ extractable = false } = {}) => {
  const keyPair = await ecdsa.generateKeyPair({ extractable })
  // The identity is the JSON-stringified JWK export of the public key.
  const exportedPublicKey = await ecdsa.exportKeyJwk(keyPair.publicKey)
  const userIdentity = new dbtypes.UserIdentity(json.stringify(exportedPublicKey))
  return { userIdentity, publicKey: keyPair.publicKey, privateKey: keyPair.privateKey }
}
47 |
48 | /**
49 | * @note this function must not be awaited inside of a ystream.transact, as it must create its own
50 | * transaction.
51 | *
52 | * @param {Ystream} ystream
53 | * @param {dbtypes.DeviceIdentity} deviceIdentity
54 | */
55 | export const createDeviceClaim = async (ystream, deviceIdentity) => {
56 | const { pkey, iss, sub } = await ystream.transact(async tr => {
57 | const [privateUserKey, user] = await promise.all([
58 | tr.objects.user.get('private'),
59 | tr.objects.user.get('identity')
60 | ])
61 | if (privateUserKey == null || user == null) error.unexpectedCase()
62 | return { pkey: privateUserKey.key, iss: user.ekey, sub: deviceIdentity.ekey }
63 | })
64 | // @todo add type definition to isodb.jwtValue
65 | // @todo add expiration date `exp`
66 | return await jose.encodeJwt(pkey, {
67 | iss,
68 | iat: time.getUnixTime(),
69 | sub
70 | })
71 | }
72 |
73 | /**
74 | * Register user, allowing him to connect to this instance.
75 | *
76 | * @param {Ystream} ystream
77 | * @param {dbtypes.UserIdentity} user
78 | * @param {Object} opts
79 | * @param {boolean} [opts.isTrusted]
80 | */
81 | export const registerUser = (ystream, user, { isTrusted = false } = {}) =>
82 | ystream.transact(async tr => {
83 | if ((await tr.tables.users.indexes.hash.get(user.hash)) == null) {
84 | await tr.tables.users.add(new dbtypes.UserIdentity(user.ekey, { isTrusted }))
85 | }
86 | })
87 |
88 | /**
89 | * Checks whether a user is registered in this database.
90 | *
91 | * @param {Ystream} ystream
92 | * @param {dbtypes.UserIdentity} user
93 | */
94 | export const isRegisteredUser = (ystream, user) =>
95 | ystream.transact(tr =>
96 | tr.tables.users.indexes.hash.get(user.hash)
97 | .then(ruser => ruser?.ekey === user.ekey)
98 | )
99 |
100 | /**
101 | * Checks whether a user is registered in this database.
102 | *
103 | * @param {Ystream} ystream
104 | * @param {dbtypes.UserIdentity} user
105 | */
106 | export const getRegisteredUser = (ystream, user) =>
107 | ystream.transact(tr =>
108 | tr.tables.users.indexes.hash.get(user.hash)
109 | )
110 |
111 | /**
112 | * @param {Ystream} ystream
113 | * @param {Uint8Array} userHash
114 | * @param {string} jwt
115 | * @return {Promise}
116 | */
117 | export const verifyDeviceClaim = async (ystream, userHash, jwt) => {
118 | const user = await ystream.transact(tr =>
119 | tr.tables.users.indexes.hash.get(userHash)
120 | )
121 | if (user == null) return null
122 | const { payload } = await jose.verifyJwt(await user.publicKey, jwt)
123 | if (payload.sub !== user.ekey) {
124 | return null
125 | }
126 | return payload
127 | }
128 |
/**
 * Register a remote device: verify its claim against the (already registered)
 * user identified by `userHash`, then persist the claim in the devices table.
 *
 * @param {Ystream} ystream
 * @param {Uint8Array} userHash - hash of the user's public key (users.indexes.hash key)
 * @param {dbtypes.DeviceClaim} claim
 */
export const registerDevice = async (ystream, userHash, claim) => {
  const user = await ystream.transact(tr =>
    tr.tables.users.indexes.hash.get(userHash)
  )
  // the user must be registered before any of its devices can be
  if (user == null) error.unexpectedCase()
  // Verify against the user the caller says this device belongs to.
  // (Previously this looked up the users table with `claim.hash` — the *device*
  // hash, per DeviceClaim's construction from sha256(sub) — which cannot match a
  // user entry, and the result was discarded, so verification was a no-op.)
  const payload = await verifyDeviceClaim(ystream, userHash, claim.v)
  // reject unverifiable or mismatching claims instead of silently storing them
  if (payload == null) error.unexpectedCase()
  await ystream.transact(tr =>
    tr.tables.devices.add(claim)
  )
}
144 |
145 | /**
146 | * @param {Ystream} ystream
147 | * @param {string} jwt
148 | */
149 | export const useDeviceClaim = async (ystream, jwt) => {
150 | /**
151 | * @type {any}
152 | */
153 | let payload
154 | const userPublicKey = await ystream.transact(tr => tr.objects.user.get('public'))
155 | if (userPublicKey != null) {
156 | payload = await jose.verifyJwt(userPublicKey.key, jwt)
157 | } else {
158 | payload = jose.unsafeDecode(jwt)
159 | }
160 | const { payload: { sub, iss } } = payload
161 | if (userPublicKey == null) {
162 | // ensure that the user identity is set using the public key of the jwt
163 | const user = new dbtypes.UserIdentity(iss)
164 | await setUserIdentity(ystream, user, await user.publicKey, null)
165 | }
166 | if (sub == null) error.unexpectedCase()
167 | await ystream.transact(async tr => {
168 | // Don't call the constructor manually. This is okay only here. Use DeviceClaim.fromJwt
169 | // instead.
170 | const deviceclaim = new dbtypes.DeviceClaim(jwt, sha256.digest(string.encodeUtf8(sub)))
171 | tr.objects.device.set('claim', deviceclaim)
172 | tr.tables.devices.add(deviceclaim)
173 | ystream.deviceClaim = deviceclaim
174 | ystream.isAuthenticated = true
175 | })
176 | ystream.emit('authenticate', []) // should only be fired on deviceclaim
177 | }
178 |
179 | /**
180 | * @param {Ystream} ystream
181 | */
182 | export const getDeviceIdentity = ystream =>
183 | ystream.transact(async tr => {
184 | const did = await tr.objects.device.get('identity')
185 | if (did == null) error.unexpectedCase()
186 | return did
187 | })
188 |
189 | /**
190 | * @param {Ystream} ystream
191 | */
192 | export const getUserIdentity = ystream =>
193 | ystream.transact(async tr => {
194 | const uid = await tr.objects.user.get('identity')
195 | if (uid == null) error.unexpectedCase()
196 | return uid
197 | })
198 |
199 | /**
200 | * @param {Ystream} ystream
201 | */
202 | export const getAllRegisteredUserHashes = ystream => ystream.transact(async tr => {
203 | const users = await tr.tables.users.getValues()
204 | return users.map(user => new Uint8Array(user.hash))
205 | })
206 |
--------------------------------------------------------------------------------
/tests/helpers.js:
--------------------------------------------------------------------------------
1 | import * as promise from 'lib0/promise'
2 | import * as t from 'lib0/testing' // eslint-disable-line
3 | import * as Ystream from '../src/index.js'
4 | import * as actions from '@y/stream/api/actions'
5 | import * as Y from 'yjs'
6 | import * as array from 'lib0/array'
7 | import * as wscomm from '../src/comms/websocket.js'
8 | import * as env from 'lib0/environment'
9 | import * as random from 'lib0/random'
10 | import * as authentication from '../src/api/authentication.js'
11 | import * as json from 'lib0/json'
12 | import * as buffer from 'lib0/buffer'
13 | import * as dbtypes from '@y/stream/api/dbtypes'
14 | import * as decoding from 'lib0/decoding'
15 | import * as ecdsa from 'lib0/crypto/ecdsa'
16 | import * as fun from 'lib0/function'
17 |
18 | /**
19 | * New test runs shouldn't reuse old data
20 | */
21 | const randTestRunName = random.uint32().toString(32)
22 | console.log('random db name prefix: ' + randTestRunName)
23 |
24 | const testUserRaw = {
25 | privateKey: '{"key_ops":["sign"],"ext":true,"kty":"EC","x":"pAUmLYc-UFmPIt7leafPTbhxQyygcaW7__nPcUNCuu0wH27yS9P_pWFP1GwcsoAN","y":"u3109KjrPGsNUn2k5Whn2uHLAckQPdLNqtM4GpBEpUJwlvVDvk71-lS3YOEYJ_Sq","crv":"P-384","d":"OHnRw5an9hlSqSKg966lFRvB7dow669pVSn7sFZUi7UQh_Y9Xc95SQ6pEWsofsYD"}',
26 | user: 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6InBBVW1MWWMtVUZtUEl0N2xlYWZQVGJoeFF5eWdjYVc3X19uUGNVTkN1dTB3SDI3eVM5UF9wV0ZQMUd3Y3NvQU4iLCJ5IjoidTMxMDlLanJQR3NOVW4yazVXaG4ydUhMQWNrUVBkTE5xdE00R3BCRXBVSndsdlZEdms3MS1sUzNZT0VZSl9TcSIsImNydiI6IlAtMzg0In0='
27 | }
28 |
29 | const testServerIdentityRaw = {
30 | privateKey: '{"key_ops":["sign"],"ext":true,"kty":"EC","x":"CYwMakpn0onaNeCa-wqLn4Fzsris_UY4Z5gRQUA9xQOoh94YG9OHhItr6rovaYpZ","y":"74Ulju86IUMJZsYsSjxSjusLjj9U6rozZwbK9Xaqj3MgIWtnjNyjL1D-NzOP3FJ7","crv":"P-384","d":"-yKNOty9EshGL0yAOQ2q6c_b_PNCpeEK9FVPoB0wc9EUyt9BR4DZuqrC9t_DgNaF"}',
31 | user: 'AMgBeyJrZXlfb3BzIjpbInZlcmlmeSJdLCJleHQiOnRydWUsImt0eSI6IkVDIiwieCI6IkNZd01ha3BuMG9uYU5lQ2Etd3FMbjRGenNyaXNfVVk0WjVnUlFVQTl4UU9vaDk0WUc5T0hoSXRyNnJvdmFZcFoiLCJ5IjoiNzRVbGp1ODZJVU1KWnNZc1NqeFNqdXNMamo5VTZyb3pad2JLOVhhcWozTWdJV3Ruak55akwxRC1Oek9QM0ZKNyIsImNydiI6IlAtMzg0In0='
32 | }
33 |
34 | const testUser = {
35 | privateKey: await ecdsa.importKeyJwk(json.parse(testUserRaw.privateKey)),
36 | user: dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testUserRaw.user)))
37 | }
38 |
39 | const testServerIdentity = {
40 | privateKey: await ecdsa.importKeyJwk(json.parse(testServerIdentityRaw.privateKey)),
41 | user: dbtypes.UserIdentity.decode(decoding.createDecoder(buffer.fromBase64(testServerIdentityRaw.user)))
42 | }
43 |
44 | /**
45 | * @typedef {Object} TestClientOptions
46 | */
47 |
48 | export const owner = testUser.user.hash
49 | export const collectionsDefiniton = [
50 | { owner: buffer.toBase64(owner), collection: 'c1' }
51 | ]
52 |
53 | /**
54 | * @type {import('../src/comms/websocket-server.js').WSServer|null}
55 | */
56 | export let server = null
57 |
58 | if (env.isNode) {
59 | const fs = await import('node:fs')
60 | try {
61 | fs.rmSync('./.test_dbs', { recursive: true })
62 | } catch (e) {}
63 | const { createWSServer } = await import('../src/comms/websocket-server.js')
64 | server = await createWSServer({ dbname: `.test_dbs/${randTestRunName}-server`, identity: testServerIdentity })
65 | await authentication.registerUser(server.ystream, testUser.user)
66 | // @todo add roles and default permissions to colletcions
67 | // await authorization.updateCollaborator(server.ystream, owner, 'c1', 'ydoc', testUser.user, 'admin')
68 | console.log('server registered user hashes: ', await authentication.getAllRegisteredUserHashes(server.ystream))
69 | }
70 |
71 | /**
72 | * @param {t.TestCase} tc
73 | */
74 | export const getDbName = tc => `.test_dbs/${randTestRunName}/${tc.moduleName}/${tc.testName}`
75 |
76 | export const emptyUpdate = Y.encodeStateAsUpdateV2(new Y.Doc())
77 |
/**
 * One test participant: a Ystream instance together with an opened collection
 * and a single ydoc ('ydoc') inside it.
 */
class TestClient {
  /**
   * @param {Ystream.Ystream} ystream
   * @param {{ owner: string, name: string }} collectionDef
   */
  constructor (ystream, collectionDef) {
    this.ystream = ystream
    this.collection = ystream.getCollection(collectionDef.owner, collectionDef.name)
    this.doc1 = this.collection.getYdoc('ydoc')
  }

  /**
   * Tear down the doc first, then the stream itself.
   */
  async destroy () {
    this.doc1.destroy()
    await this.ystream.destroy()
  }
}
94 |
95 | class TestScenario {
96 | /**
97 | * @param {string} name
98 | */
99 | constructor (name) {
100 | this.name = name
101 | this.collectionDef = { owner: buffer.toBase64(owner), name: this.name }
102 | /**
103 | * @type {Array}
104 | */
105 | this.clients = []
106 | this.cliNum = 0
107 | this.server = server
108 | }
109 |
110 | /**
111 | * @param {TestClientOptions} _options
112 | */
113 | async createClient (_options = {}) {
114 | const dbname = `.test_dbs/${randTestRunName}-${this.name}-${this.cliNum++}`
115 | await Ystream.remove(dbname)
116 | const ystream = await Ystream.open(dbname, {
117 | comms: [new wscomm.WebSocketComm('ws://localhost:9000', this.collectionDef)]
118 | })
119 | console.log('registering server', testServerIdentity.user, testServerIdentity.user.hash)
120 | await authentication.registerUser(ystream, testServerIdentity.user, { isTrusted: true })
121 | await authentication.setUserIdentity(ystream, testUser.user, await testUser.user.publicKey, testUser.privateKey)
122 | const client = new TestClient(ystream, this.collectionDef)
123 | this.clients.push(client)
124 | return client
125 | }
126 |
127 | /**
128 | * @param {number} num
129 | */
130 | async createClients (num) {
131 | return promise.all(array.unfold(num, () => this.createClient()))
132 | }
133 |
134 | async destroy () {
135 | await promise.all(this.clients.map(client => client.destroy()))
136 | }
137 | }
138 |
/**
 * Scenario of the currently running test; kept so the next test can tear the
 * previous one down before starting.
 *
 * @type {TestScenario?}
 */
let currTestScenario = null

/**
 * Destroy the previous scenario (if any) and start a fresh one for this test case.
 *
 * @param {t.TestCase} tc
 */
export const createTestScenario = async tc => {
  if (currTestScenario !== null) {
    await currTestScenario.destroy()
  }
  currTestScenario = new TestScenario(getDbName(tc))
  return currTestScenario
}
152 |
/**
 * Resolve once both docs encode to byte-identical state updates (polled).
 *
 * @param {Y.Doc} ydoc1
 * @param {Y.Doc} ydoc2
 */
export const waitDocsSynced = (ydoc1, ydoc2) =>
  promise.until(0, () => {
    const state1 = Y.encodeStateAsUpdateV2(ydoc1)
    const state2 = Y.encodeStateAsUpdateV2(ydoc2)
    return array.equalFlat(state1, state2)
  })
163 |
164 | /**
165 | * @param {Ystream.Collection} ycollection1
166 | * @param {Ystream.Collection} ycollection2
167 | */
168 | export const waitCollectionsSynced = (ycollection1, ycollection2) =>
169 | promise.untilAsync(async () => {
170 | let sv1 = await ycollection1.ystream.transact(tr => actions.getStateVector(tr, ycollection1.ystream, ycollection1.ownerBin, ycollection1.collection))
171 | let sv2 = await ycollection2.ystream.transact(tr => actions.getStateVector(tr, ycollection2.ystream, ycollection2.ownerBin, ycollection2.collection))
172 | sv1 = sv1.filter(s => s.client !== server?.ystream.clientid)
173 | sv2 = sv2.filter(s => s.client !== server?.ystream.clientid)
174 | // const cs1 = await ycollection1.ystream.transact(tr => ycollection1.getDocChildrenRecursive(tr, null))
175 | // const cs2 = await ycollection2.ystream.transact(tr => ycollection2.getDocChildrenRecursive(tr, null))
176 | console.log({ sv1, sv2, clientid1: ycollection1.ystream.clientid, clientid2: ycollection2.ystream.clientid, serverClientId: server?.ystream.clientid })
177 | return fun.equalityDeep(sv1, sv2)
178 | }, 0, 500)
179 |
--------------------------------------------------------------------------------
/src/db.js:
--------------------------------------------------------------------------------
1 | import * as dbtypes from './api/dbtypes.js'
2 | import * as isodb from 'isodb'
3 | import * as webcrypto from 'lib0/webcrypto'
4 | import * as json from 'lib0/json'
5 | import * as promise from 'lib0/promise'
6 | import * as ecdsa from 'lib0/crypto/ecdsa'
7 | import * as random from 'lib0/random'
8 |
9 | /**
10 | * @todos
11 | * - implement protocol/RequestDoc (+ be able to apply it)
12 | * - implement todo queue for requesting docs
13 | */
14 |
15 | export const def = {
16 | tables: {
17 | oplog: {
18 | key: isodb.AutoKey,
19 | value: dbtypes.OpValue,
20 | indexes: {
21 | // @todo add `shallow index: {collection}/{autokey}/{doc}/{opid}` for computing the shallow (nocontent)
22 | // sync
23 | doc: {
24 | /**
25 | * @param {isodb.AutoKey} k
26 | * @param {dbtypes.OpValue} v
27 | */
28 | mapper: (k, v) => new dbtypes.DocKey(v.op.type, v.owner, v.collection, v.doc, k.v),
29 | key: dbtypes.DocKey
30 | },
31 | collection: {
32 | /**
33 | * @param {isodb.AutoKey} k
34 | * @param {dbtypes.OpValue} v
35 | */
36 | mapper: (k, v) => new dbtypes.CollectionKey(v.owner, v.collection, k.v),
37 | key: dbtypes.CollectionKey
38 | }
39 | }
40 | },
41 | /**
42 | * Maps from parent_docid/child_docname => child_docid
43 | * Useful for looking up docnames of children
44 | */
45 | childDocs: {
46 | key: dbtypes.ParentKey,
47 | value: isodb.StringValue
48 | },
49 | clocks: {
50 | key: dbtypes.ClocksKey,
51 | value: dbtypes.ClientClockValue
52 | },
53 | unsyncedDocs: {
54 | key: dbtypes.UnsyncedKey,
55 | value: isodb.NoValue
56 | },
57 | users: {
58 | key: isodb.AutoKey,
59 | value: dbtypes.UserIdentity,
60 | indexes: {
61 | hash: {
62 | key: isodb.BinaryKey, // sha256 digest of public key
63 | /**
64 | * @param {isodb.AutoKey} _k
65 | * @param {dbtypes.UserIdentity} user
66 | */
67 | mapper: (_k, user) => user.hash
68 | }
69 | }
70 | },
71 | devices: {
72 | key: isodb.AutoKey,
73 | value: dbtypes.DeviceClaim, // @todo use DeviceIdentity instead
74 | indexes: {
75 | hash: {
76 | key: isodb.BinaryKey,
77 | /**
78 | * @param {isodb.AutoKey} _k
79 | * @param {dbtypes.DeviceClaim} v
80 | */
81 | mapper: (_k, v) => v.hash
82 | }
83 | }
84 | }
85 | },
86 | objects: {
87 | db: {
88 | version: isodb.AnyValue
89 | },
90 | user: {
91 | public: isodb.CryptoKeyValue,
92 | private: isodb.CryptoKeyValue,
93 | identity: dbtypes.UserIdentity
94 | },
95 | device: {
96 | public: isodb.CryptoKeyValue,
97 | private: isodb.CryptoKeyValue,
98 | identity: dbtypes.DeviceIdentity,
99 | claim: dbtypes.DeviceClaim,
100 | clientid: isodb.AnyValue // @todo use isodb.Uint53 type
101 | }
102 | }
103 | }
104 |
105 | /**
106 | * # Algorithm to sync documents that a user just got permission to:
107 | *
108 | * The information that a document has been created (the first update) is always sent to all users.
109 | * However, users that don't have permission won't receive the content and simply store a "no
110 | * permission" information in the `oplog`.
111 | *
112 | * When we receive new permission, we store an "todo item in the `reqs`" table: "check table
113 | * starting at clock X". The name should be unique. Two separate permission changes should update
114 | * the same item. Simpler implementation: `reqs` table uses AutoKey or `collection/autokey` (by
115 | * index) and we have to work through the list until completion.
116 | *
117 | * We iterate through the table and sync each document individually until we completed the whole
118 | * collection table. Every time we synced a document, we update the todo item. Once completed,
119 | * we delete the todo item in reqs.
120 | *
121 | * # Todo
122 | * - ( ) implement request "sync document starting from clock X"
123 | * - ( ) implement requests table and figure out key-system.
124 | */
125 |
126 | /**
127 | * # The NoPerm op (possibly rename to PlaceHolder(noperm))
128 | * When a "server" mustn't send an update to a client because it lacks permission, we send
129 | * a NoPerm op instead with the current client/clock of the "server".
130 | * Once the client receives access, the client requests the content of that document from other
131 | * clients using the "RequestDoc" (collection, doc, latestClockOfLastNoPerm) protocol op. We apply
132 | * the document update op without checking clock. If the remote client has at least
133 | * "latestClockOfLastNoPerm", then we delete the todo item. Otherwise, we need to try again in the
134 | * future.
135 | */
136 |
137 | /**
138 | * # Auth & Credentials
139 | *
140 | * A user generates a public-private key that must be kept private. This could also be done by a
141 | * backend. Once a malicious user has access to the private key, the user is compromised and can no
142 | * longer sync p2p. Hence it must be kept in a secure vault. The users private key can also be
143 | * stored / generated only when needed.
144 | *
145 |  * A user is given access to a document / collection. The sha256 digest of the public key is used
146 | * for identification.
147 | *
148 |  * A device proves that it acts as a certain user using a signed message of the user (using the
149 | * private key).
150 | *
151 | * A device hence must have a different public-private key pair. The user signs the public key of
152 | * the device (similar to json web token).
153 | *
154 | * A future implementation could also require that other authorities (e.g. auth providers) sign
155 | * the device key for advanced security. We can also require that signed tokens expire.
156 | */
157 |
158 | /**
159 | * # Security Extensions
160 | *
161 | * - There should be a record of claims that a user generated. The user should be able to untrust
162 | * claims.
163 | */
164 |
165 | /**
166 | * # Protocol
167 |  * 1. Exchange Credentials (User.publicKey + DeviceClaim)
168 | * - Approach1: From now on all messages must be encrypted using the public key of the remote device
169 | * - Approach2: If the connection is secure, we can auth by sending a proof that the other side
170 | * must decrypt.
171 | * 2. Request collections [collectionname:[clientid:lastclock], ..]
172 | * - Send back all documents as "Placeholder(unsent)" op, but include all permission ops.
173 | * - User requests all Placeholder(unsent) ops individually (if they have access, X at a time, but
174 | * possible to retrieve Y early). getdoc[clientid:clock|null]
175 | * -- An alternative implementation could only retrieve documents when they are opened.
176 | * -- Check if user has access to collection
177 | * -- If user doesn't have access to a specific document, send NoPerm op instead.
178 | * The user can rerequest the document itself.
179 | */
180 |
181 | /**
182 | * @param {string} dbname
183 | */
184 | export const createDb = dbname =>
185 | promise.all([
186 | isodb.openDB(dbname, def),
187 | ecdsa.generateKeyPair()
188 | .then(({ publicKey, privateKey }) => ecdsa.exportKeyJwk(publicKey)
189 | .then(publicKeyJwk => /** @type {[CryptoKey, CryptoKey, string]} */ ([publicKey, privateKey, json.stringify(publicKeyJwk)])))
190 | ]).then(([idb, [publicDeviceKey, privateDeviceKey, publicDeviceKeyJwk]]) =>
191 | idb.transact(async tr => {
192 | const version = await tr.objects.db.get('version')
193 | let isAuthenticated = false
194 | /**
195 | * @type {dbtypes.UserIdentity|null}
196 | */
197 | let user = null
198 | /**
199 | * @type {number}
200 | */
201 | let clientid
202 | /**
203 | * @type {dbtypes.DeviceClaim|null}
204 | */
205 | let deviceClaim = null
206 | if (version == null) {
207 | // @todo derive clientid from deviceid
208 | clientid = random.uint32()
209 | // init
210 | tr.objects.db.set('version', 0)
211 | const dguid = new Uint8Array(64)
212 | webcrypto.getRandomValues(dguid)
213 | await promise.all([
214 | tr.objects.device.set('clientid', clientid),
215 | tr.objects.device.set('private', privateDeviceKey),
216 | tr.objects.device.set('public', publicDeviceKey),
217 | tr.objects.device.set('identity', new dbtypes.DeviceIdentity(publicDeviceKeyJwk))
218 | ])
219 | } else if ((deviceClaim = await tr.objects.device.get('claim'))) {
220 | isAuthenticated = true
221 | user = await tr.objects.user.get('identity')
222 | // @todo remove
223 | if (user == null) throw new Error('user should be defined')
224 | }
225 | clientid = /** @type {number} */ ((await tr.objects.device.get('clientid'))?.v)
226 | return { isAuthenticated, idb, user, deviceClaim, clientid }
227 | })
228 | )
229 |
--------------------------------------------------------------------------------
/src/comms/websocket.js:
--------------------------------------------------------------------------------
1 | import { WebSocket as WS } from '@y/stream/utils/websocket'
2 | import * as encoding from 'lib0/encoding'
3 | import * as decoding from 'lib0/decoding'
4 | import * as protocol from '../protocol.js'
5 | import * as queue from 'lib0/queue'
6 | import { ObservableV2 } from 'lib0/observable'
7 | import * as math from 'lib0/math'
8 | import * as comm from '../comm.js' // eslint-disable-line
9 | import * as logging from 'lib0/logging'
10 | import { Ystream } from '../ystream.js' // eslint-disable-line
11 | import * as webcrypto from 'lib0/webcrypto'
12 | import * as utils from '../utils.js'
13 | import * as promise from 'lib0/promise'
14 | import * as actions from '../api/actions.js'
15 | import * as buffer from 'lib0/buffer'
16 | import * as eventloop from 'lib0/eventloop'
17 |
18 | const _log = logging.createModuleLogger('@y/stream/websocket')
19 | /**
20 | * @param {WebSocketCommInstance} comm
21 | * @param {string} type
22 | * @param {...any} args
23 | */
24 | const log = (comm, type, ...args) => _log(logging.PURPLE, `(local=${comm.ystream.clientid.toString(36).slice(0, 4)},remote=${comm.clientid.toString(36).slice(0, 4)}) `, logging.ORANGE, '[' + type + '] ', logging.GREY, ...args.map(arg => typeof arg === 'function' ? arg() : arg))
25 |
26 | /**
27 | * @param {WebSocketCommInstance} comm
28 | * @param {Uint8Array} m
29 | */
30 | const addReadMessage = async (comm, m) => {
31 | const readMessageQueue = comm._readMessageQueue
32 | const wasEmpty = queue.isEmpty(readMessageQueue)
33 | queue.enqueue(readMessageQueue, new queue.QueueValue(m))
34 | if (!wasEmpty) return
35 | /**
36 | * @type {Uint8Array|undefined}
37 | */
38 | let currMessage
39 | while ((currMessage = readMessageQueue.start?.v) != null) {
40 | log(comm, 'read message', { clientid: comm.clientid, len: currMessage.length })
41 | const reply = await protocol.readMessage(encoding.createEncoder(), decoding.createDecoder(currMessage), comm.ystream, comm)
42 | if (reply) {
43 | comm.send(encoding.toUint8Array(reply))
44 | }
45 | queue.dequeue(readMessageQueue)
46 | }
47 | }
48 |
49 | /**
50 | * @implements comm.Comm
51 | * @extends {ObservableV2<{ authenticated: (comm:WebSocketCommInstance) => void }>}
52 | */
53 | class WebSocketCommInstance extends ObservableV2 {
54 | /**
55 | * @param {WebSocketHandlerInstance} handler
56 | */
57 | constructor (handler) {
58 | super()
59 | const { ystream, url } = handler
60 | this.handler = handler
61 | this.synced = new utils.CollectionsSet()
62 | this.isDestroyed = false
63 | this.comm = true
64 | this.ystream = ystream
65 | this.url = url
66 | this.clientid = -1
67 | this.isAuthenticated = false
68 | this.sentChallengeAnswer = false
69 | /**
70 | * @type {import('../api/dbtypes.js').UserIdentity|null}
71 | */
72 | this.user = null
73 | /**
74 | * @type {Uint8Array}
75 | */
76 | this.challenge = webcrypto.getRandomValues(new Uint8Array(64))
77 | /**
78 | * @type {import('../api/dbtypes.js').DeviceClaim|null}
79 | */
80 | this.deviceClaim = null
81 | /**
82 | * @type {queue.Queue>}
83 | */
84 | this._readMessageQueue = queue.create()
85 | this.streamController = new AbortController()
86 | this.wsconnected = false
87 | this.nextClock = -1
88 | /**
89 | * @type {WritableStream<{ messages: Array, origin: any }>}
90 | */
91 | this.writer = new WritableStream({
92 | write: ({ messages, origin }) => {
93 | if (!this.wsconnected) {
94 | return this.destroy()
95 | }
96 | log(this, 'sending ops', () => `buffered amount=${this.ws?.bufferedAmount}, `, () => { return `number of ops=${messages.length}` })
97 | for (let i = 0; i < messages.length; i++) {
98 | this.send(messages[i])
99 | }
100 | const maxBufferedAmount = 3_000_000
101 | if ((this.ws?.bufferedAmount || 0) > maxBufferedAmount) {
102 | return promise.until(0, () => (this.ws?.bufferedAmount || 0) < maxBufferedAmount)
103 | }
104 | }
105 | })
106 | ystream.comms.add(this)
107 | const ws = new WS(url)
108 | this.ws = ws
109 | ws.binaryType = 'arraybuffer'
110 | ws.onmessage = (event) => {
111 | addReadMessage(this, new Uint8Array(/** @type {ArrayBuffer} */ (event.data)))
112 | }
113 | ws.onerror = /** @param {any} event */ (event) => {
114 | log(this, 'error', event)
115 | handler.emit('connection-error', [/** @type {ErrorEvent} */(event), handler])
116 | this.destroy()
117 | }
118 | ws.onclose = (event) => {
119 | handler.emit('status', [{
120 | status: 'disconnected',
121 | comm: this
122 | }, handler])
123 | handler.emit('connection-close', [/** @type {any} */(event), handler])
124 | log(this, 'close', 'close-code: ', event.code)
125 | this.destroy()
126 | }
127 | ws.onopen = () => {
128 | log(this, 'open')
129 | this.wsconnected = true
130 | handler.wsUnsuccessfulReconnects = 0
131 | this.send(encoding.encode(encoder => protocol.writeInfo(encoder, ystream, this)))
132 | handler.emit('status', [{
133 | status: 'connected',
134 | comm: this
135 | }, handler])
136 | }
137 | handler.emit('status', [{
138 | status: 'connecting',
139 | comm: this
140 | }, handler])
141 | this.on('authenticated', async () => {
142 | const encoder = encoding.createEncoder()
143 | await ystream.transact(tr =>
144 | actions.getClock(tr, ystream, this.clientid, handler.collection.owner, handler.collection.name).then(async clock => {
145 | const sv = await actions.getStateVector(tr, ystream, handler.collection.owner, handler.collection.name)
146 | log(this, 'requesting ops', { clock, clientid: this.clientid, sv })
147 | this.nextClock = clock + 1
148 | return protocol.writeRequestOps(encoder, handler.collection.owner, handler.collection.name, this.nextClock)
149 | })
150 | )
151 | this.send(encoding.toUint8Array(encoder))
152 | })
153 | // try to reconnect if not connected within 10 seconds
154 | eventloop.timeout(10_000, () => {
155 | if (!this.isDestroyed && !this.wsconnected) {
156 | this.close()
157 | }
158 | })
159 | }
160 |
161 | /**
162 | * @todo this should only be called once we know this connection is synced and that ops is the
163 | * next expected op. Otherwise, fall back to unsynced and sync all local ops to backend.
164 | * @param {Uint8Array} message
165 | */
166 | send (message) {
167 | if (this.ws && this.wsconnected) {
168 | log(this, 'sending message', `Message len: ${message.length}`)
169 | // @todo handle the case that message could not be sent
170 | this.ws.send(message)
171 | return
172 | }
173 | this.destroy()
174 | }
175 |
176 | /**
177 | * Close the connection.
178 | * Use a status code from websocket-utils.js
179 | *
180 | * @param {number} [code]
181 | * @param {string} [reason]
182 | */
183 | close (code, reason) {
184 | this.wsconnected = false
185 | this.ws.close(code, reason)
186 | this.destroy()
187 | }
188 |
189 | destroy () {
190 | if (this.isDestroyed) return
191 | const shouldrecreate = this.handler.comm === this
192 | super.destroy()
193 | this.streamController.abort('destroyed')
194 | this.ystream.comms.delete(this)
195 | this.isDestroyed = true
196 | this.handler.comm = null
197 | this.wsconnected && this.ws.close()
198 | this._readMessageQueue = queue.create()
199 | if (this.wsconnected) {
200 | this.handler.wsUnsuccessfulReconnects++
201 | }
202 | this.wsconnected = false
203 | if (shouldrecreate) {
204 | this.handler._setupNewComm()
205 | }
206 | }
207 | }
208 |
209 | /**
210 | * @implements comm.CommHandler
211 | * @extends ObservableV2<{ synced: function(WebSocketHandlerInstance):any, "connection-error": function(ErrorEvent, WebSocketHandlerInstance):any, "connection-close": function(CloseEvent, WebSocketHandlerInstance):any, status: function({ status: 'connecting'|'connected'|'disconnected', comm: WebSocketCommInstance }, WebSocketHandlerInstance):any }>
212 | */
213 | class WebSocketHandlerInstance extends ObservableV2 {
214 | /**
215 | * @param {import('../ystream.js').Ystream} ystream
216 | * @param {string} serverUrl
217 | * @param {{ owner: Uint8Array, name: string }} collection
218 | */
219 | constructor (ystream, serverUrl, collection) {
220 | super()
221 | this.ystream = ystream
222 | this.serverUrl = serverUrl
223 | this.collection = collection
224 | this.shouldConnect = true
225 | this.wsUnsuccessfulReconnects = 0
226 | this.maxBackoffTime = 60000
227 | /**
228 | * @type {WebSocketCommInstance?}
229 | */
230 | this.comm = null
231 | if (this.shouldConnect) {
232 | this._setupNewComm()
233 | }
234 | }
235 |
236 | get url () {
237 | return `${this.serverUrl}/${buffer.toBase64UrlEncoded(this.collection.owner)}/${encodeURIComponent(this.collection.name)}`
238 | }
239 |
240 | _setupNewComm () {
241 | if (!this.shouldConnect) return
242 | const prevComm = this.comm
243 | this.comm = null
244 | prevComm?.destroy()
245 | // Start with no reconnect timeout and increase timeout by
246 | // using exponential backoff starting with 100ms
247 | const setup = () => {
248 | if (this.shouldConnect && this.comm === null) {
249 | this.comm = new WebSocketCommInstance(this)
250 | }
251 | }
252 | if (this.wsUnsuccessfulReconnects === 0) {
253 | setup()
254 | } else {
255 | setTimeout(
256 | setup,
257 | math.min(
258 | math.pow(2, this.wsUnsuccessfulReconnects) * 100,
259 | this.maxBackoffTime
260 | )
261 | )
262 | }
263 | }
264 |
265 | destroy () {
266 | super.destroy()
267 | this.shouldConnect = false
268 | this.comm?.close()
269 | }
270 | }
271 |
/**
 * Comm configuration for connecting a Ystream to a websocket server.
 * Ystream calls `init` to spin up the actual connection handler.
 *
 * @implements {comm.CommConfiguration}
 */
export class WebSocketComm {
  /**
   * @param {string} serverUrl
   * @param {{ owner: string, name: string }} collection - owner is base64-encoded
   */
  constructor (serverUrl, collection) {
    this.serverUrl = serverUrl
    this.collection = collection
  }

  /**
   * Create the connection handler, decoding the owner from base64 to binary.
   *
   * @param {Ystream} ystream
   */
  init (ystream) {
    const ownerBin = buffer.fromBase64(this.collection.owner)
    const collectionDef = { owner: ownerBin, name: this.collection.name }
    return new WebSocketHandlerInstance(ystream, this.serverUrl, collectionDef)
  }
}
292 |
--------------------------------------------------------------------------------
/src/comms/websocket-server.js:
--------------------------------------------------------------------------------
1 | /* A few users have reported having issues understanding backpressure and how to deal with it.
2 | *
3 | * Backpressure is the buildup of unacknowledged data; you can't just call ws.send without checking for backpressure.
4 | * Data doesn't just, poof, immediately jump from your server to the receiver - the receiver has to actually... receive it.
5 | * That happens with ACKs, controlling the transmission window.
6 | *
7 | * See https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/bufferedAmount
8 | *
9 | * Backpressure applies to all streams, files, sockets, queues, and so on. If you're building
 10 |  * web services without taking backpressure into account, you're not building robust solutions.
11 | *
12 | * Any slow receiver can DOS your whole server if you're not taking backpressure into account.
13 | *
14 | * The following is a (ridiculous) example of how data can be pushed according to backpressure.
15 | * Do not take this as a way to actually write code, this is horrible, but it shows the concept clearly.
16 | *
17 | */
18 |
19 | import * as uws from 'uws'
20 | import * as encoding from 'lib0/encoding'
21 | import * as decoding from 'lib0/decoding'
22 | import * as protocol from '../protocol.js'
23 | import * as comm from '../comm.js' // eslint-disable-line
24 | import * as Ystream from '../index.js'
25 | import * as promise from 'lib0/promise'
26 | import * as error from 'lib0/error'
27 | import * as webcrypto from 'lib0/webcrypto'
28 | import * as authentication from '../api/authentication.js'
29 | import * as dbtypes from '../api/dbtypes.js' // eslint-disable-line
30 | import * as utils from '../utils.js'
31 | import * as logging from 'lib0/logging'
32 | import * as observable from 'lib0/observable'
33 | import * as actions from '../api/actions.js'
34 | import * as buffer from 'lib0/buffer'
35 | import * as array from 'lib0/array'
36 | import * as wsUtils from './websocket-utils.js'
37 |
38 | const expectedBufferedAmount = 512 * 1024 // 512kb
39 |
40 | const _log = logging.createModuleLogger('@y/stream/websocket')
41 | /**
42 | * @param {WSClient} comm
43 | * @param {string} type
44 | * @param {...any} args
45 | */
46 | const log = (comm, type, ...args) => _log(logging.PURPLE, `(local=${comm.ystream.clientid.toString(36).slice(0, 4)},remote=${comm.clientid.toString(36).slice(0, 4)}) `, logging.ORANGE, '[' + type + '] ', logging.GREY, ...args.map(arg => typeof arg === 'function' ? arg() : arg))
47 |
48 | const maxBufferedAmount = 3000_000
49 |
50 | /**
51 | * @implements {comm.Comm}
52 | * @extends {observable.ObservableV2<{ authenticated: (comm:comm.Comm) => void, "requested-ops": (comm: comm.Comm, sub: { collection: { owner: Uint8Array?, name: string? }, clock: number }) => void }>}
53 | */
54 | class WSClient extends observable.ObservableV2 {
55 | /**
56 | * @param {uws.WebSocket<{ client: WSClient }>} ws
57 | * @param {Ystream.Ystream} ystream
58 | * @param {object} collection
59 | * @param {Uint8Array} collection.owner
60 | * @param {string} collection.name
61 | */
62 | constructor (ws, ystream, collection) {
63 | super()
64 | this.ystream = ystream
65 | this.collection = collection
66 | this.clientid = -1
67 | /**
68 | * @type {import('../api/dbtypes.js').UserIdentity|null}
69 | */
70 | this.user = null
71 | /**
72 | * @type {import('../api/dbtypes.js').DeviceClaim|null}
73 | */
74 | this.deviceClaim = null
75 | this.ws = ws
76 | /**
77 | * @type {Array>}
78 | */
79 | this.nextOps = []
80 | this._isClosed = false
81 | this._isDraining = false
82 | this.isDestroyed = false
83 | this.synced = new utils.CollectionsSet()
84 | this.isAuthenticated = false
85 | this.sentChallengeAnswer = false
86 | this.challenge = webcrypto.getRandomValues(new Uint8Array(64))
87 | this.streamController = new AbortController()
88 | this.nextClock = -1
89 | /**
90 | * @type {WritableStream<{ messages: Array, origin: any }>}
91 | */
92 | this.writer = new WritableStream({
93 | write: ({ messages, origin }) => {
94 | log(this, 'sending messages', () => { return `number of messages=${messages.length}` })
95 | for (let i = 0; i < messages.length; i++) {
96 | this.send(messages[i])
97 | }
98 | if (this.ws.getBufferedAmount() > maxBufferedAmount) {
99 | // @todo make timeout (30s) configurable
100 | return promise.until(30000, () => this.isDestroyed || this.ws.getBufferedAmount() < maxBufferedAmount)
101 | }
102 | }
103 | })
104 | this.on('authenticated', async () => {
105 | const clock = await ystream.transact(tr => actions.getClock(tr, ystream, this.clientid, this.collection.owner, this.collection.name))
106 | this.nextClock = clock + 1
107 | this.send(encoding.encode(encoder => {
108 | protocol.writeRequestOps(encoder, this.collection.owner, this.collection.name, this.nextClock)
109 | }))
110 | })
111 | this.on('requested-ops', (_comm, { collection }) => {
112 | if (collection.name !== this.collection.name || collection.owner == null || !array.equalFlat(collection.owner, this.collection.owner)) {
113 | this.close(wsUtils.statusUnauthenticated, 'cannot request ops from other collections')
114 | }
115 | })
116 | }
117 |
118 | /**
119 | * @param {Uint8Array} message
120 | */
121 | send (message) {
122 | !this.isDestroyed && this.ws.send(message, true)
123 | }
124 |
125 | /**
126 | * @param {function (encoding.Encoder): Promise} f
127 | */
128 | queueMessage (f) {
129 | const opsSize = this.nextOps.push(f)
130 | if (opsSize === 1 || this.ws.getBufferedAmount() < expectedBufferedAmount) {
131 | this._drain()
132 | }
133 | }
134 |
135 | async _drain () {
136 | if (this._isDraining) return
137 | this._isDraining = true
138 | try {
139 | let bufferedAmount = this.ws.getBufferedAmount()
140 | while (this.nextOps.length > 0 && bufferedAmount < expectedBufferedAmount) {
141 | // @todo create a test that makes sure that _drain is eventually called once buffer is freed
142 | const encoder = encoding.createEncoder()
143 | if (!(await this.nextOps[0](encoder))) {
144 | this.nextOps.shift()
145 | }
146 | if (encoding.hasContent(encoder)) {
147 | const message = encoding.toUint8Array(encoder)
148 | bufferedAmount += message.byteLength
149 | this.send(message)
150 | }
151 | }
152 | } finally {
153 | this._isDraining = false
154 | // @todo destroy conn in case of an error
155 | }
156 | }
157 |
158 | /**
159 | * Close the connection.
160 | * Use a status code from websocket-utils.js
161 | *
162 | * @param {number} [code]
163 | * @param {string} [reason]
164 | */
165 | close (code, reason) {
166 | if (this.isDestroyed) return
167 | console.log('closing conn')
168 | if (code != null && reason != null) this.ws.end(code, reason)
169 | else this.ws.end()
170 | this.destroy()
171 | }
172 |
173 | destroy () {
174 | if (this.isDestroyed) return
175 | console.log('destroyed comm', new Error('destroyed comm').stack)
176 | super.destroy()
177 | this.isDestroyed = true
178 | this.nextOps = []
179 | this.streamController.abort('destroyed')
180 | if (!this._isClosed) this.ws.end()
181 | }
182 | }
183 |
184 | /**
185 | * @param {Object} options
186 | * @param {number} [options.port]
187 | * @param {string} [options.dbname]
188 | * @param {boolean} [options.acceptNewUsers]
189 | * @param {{ user: dbtypes.UserIdentity, privateKey: CryptoKey }} [options.identity]
190 | */
191 | export const createWSServer = async ({ port = 9000, dbname = '.ystream/server', acceptNewUsers = true, identity } = {}) => {
192 | const db = await Ystream.open(dbname, { acceptNewUsers, syncsEverything: true })
193 | const server = new WSServer(db, port)
194 | if (!db.isAuthenticated) {
195 | if (identity) {
196 | await authentication.setUserIdentity(db, identity.user, await identity.user.publicKey, identity.privateKey)
197 | } else {
198 | const user = await authentication.createUserIdentity()
199 | await authentication.setUserIdentity(db, user.userIdentity, user.publicKey, user.privateKey)
200 | }
201 | }
202 | await server.ready
203 | return server
204 | }
205 |
206 | export class WSServer {
207 | /**
208 | * @param {Ystream.Ystream} ystream
209 | * @param {number} port
210 | */
211 | constructor (ystream, port) {
212 | /**
213 | * @type {Ystream.Ystream}
214 | */
215 | this.ystream = ystream
216 | this.ready = ystream.whenAuthenticated.then(() => promise.create((resolve, reject) => {
217 | console.log('starting websocket server')
218 | uws.App({}).ws('/:owner/:cname', /** @type {uws.WebSocketBehavior<{ client: WSClient, collection: { owner: Uint8Array, name: string } }>} */ ({
219 | /* Options */
220 | compression: uws.SHARED_COMPRESSOR,
221 | maxPayloadLength: 70 * 1024 * 1024 * 1024,
222 | // @todo use the "dropped" timeout to create a new reader that reads directly form the
223 | // database without consuming much memory.
224 | maxBackpressure: 70 * 1024 * 1024 * 1024 * 100,
225 | // closeOnBackpressureLimit: true, // @todo reenable once types are fixed
226 | idleTimeout: 960,
227 | upgrade: (res, req, context) => {
228 | const owner = buffer.fromBase64UrlEncoded(req.getParameter(0))
229 | const name = decodeURIComponent(req.getParameter(1))
230 | res.upgrade(
231 | { client: null, collection: { owner, name } },
232 | req.getHeader('sec-websocket-key'),
233 | req.getHeader('sec-websocket-protocol'),
234 | req.getHeader('sec-websocket-extensions'),
235 | context
236 | )
237 | },
238 | /* Handlers */
239 | open: (ws) => {
240 | const userData = ws.getUserData()
241 | const client = new WSClient(ws, ystream, userData.collection)
242 | userData.client = client
243 | client.send(encoding.encode(encoder => {
244 | protocol.writeInfo(encoder, ystream, client)
245 | }))
246 | },
247 | message: (ws, message) => {
248 | const decoder = decoding.createDecoder(new Uint8Array(message.slice(0))) // copy buffer because uws will reuse the memory space
249 | const client = ws.getUserData().client
250 | client.queueMessage(async (encoder) => {
251 | await protocol.readMessage(encoder, decoder, ystream, client)
252 | return false
253 | })
254 | },
255 | drain: ws => {
256 | ws.getUserData().client._drain()
257 | },
258 | close: (ws, code, message) => {
259 | const client = ws.getUserData().client
260 | log(client, 'close', 'client disconnected' + JSON.stringify({ code, message }))
261 | client._isClosed = true
262 | client.destroy()
263 | }
264 | })).any('/*', (res, _req) => {
265 | console.info('Oh no, you found me 🫣')
266 | res.end('Oh no, you found me 🫣')
267 | }).listen(port, (token) => {
268 | if (token) {
269 | console.log('Listening to port ' + port)
270 | resolve(port)
271 | } else {
272 | const m = 'Failed to listen to port ' + port
273 | reject(error.create(m))
274 | console.log(m)
275 | }
276 | })
277 | }))
278 | }
279 | }
280 |
--------------------------------------------------------------------------------
/tests/ystream.tests.js:
--------------------------------------------------------------------------------
1 | import * as t from 'lib0/testing'
2 | import * as promise from 'lib0/promise'
3 | import * as buffer from 'lib0/buffer'
4 | import * as math from 'lib0/math'
5 | import * as Ystream from '../src/index.js'
6 | import * as helpers from './helpers.js'
7 | import * as error from 'lib0/error'
8 | import * as authentication from '../src/api/authentication.js'
9 | import * as actions from '@y/stream/api/actions'
10 | // import * as operations from '../src/operations.js'
11 |
12 | /**
13 | * @param {string} testname
14 | */
15 | const getDbName = testname => '.test_dbs/' + testname
16 |
17 | const owner = buffer.toBase64(helpers.owner)
18 |
19 | /**
20 | * Testing loading from the database.
21 | *
22 | * @param {t.TestCase} tc
23 | */
24 | export const testYdocLoad = async tc => {
25 | const collectionName = tc.testName
26 | await Ystream.remove(getDbName(tc.testName))
27 | const ystream = await Ystream.open(getDbName(tc.testName))
28 | const collection = ystream.getCollection(owner, collectionName)
29 | const ydoc1 = collection.getYdoc('ydoc')
30 | await ydoc1.whenLoaded
31 | ydoc1.getMap().set('k', 'v')
32 | const ydoc2 = collection.getYdoc('ydoc')
33 | await ydoc2.whenLoaded
34 | t.assert(ydoc2.getMap().get('k') === 'v')
35 | ydoc1.getMap().set('k', 'v2')
36 | t.assert(ydoc1.getMap().get('k') === 'v2')
37 | console.log('before destroy')
38 | await ystream.destroy()
39 | console.log('after destroy')
40 | const ystream2 = await Ystream.open(getDbName(tc.testName))
41 | const collection2 = ystream2.getCollection(owner, collectionName)
42 | console.log('after open')
43 | const ydoc3 = collection2.getYdoc('ydoc')
44 | console.log('after getdoc')
45 | await ydoc3.whenLoaded
46 | console.log('after loaded')
47 | t.compare(ydoc3.getMap().get('k'), 'v2')
48 | }
49 |
50 | /**
51 | * @param {t.TestCase} tc
52 | */
53 | export const testComm = async tc => {
54 | const th = await helpers.createTestScenario(tc)
55 | const [{ ystream: ystream1, collection: collection1 }, { ystream: ystream2, collection: collection2 }] = await th.createClients(2)
56 | console.log('@y/stream1 user hashes: ', await authentication.getAllRegisteredUserHashes(ystream1))
57 | console.log('@y/stream2 user hashes: ', await authentication.getAllRegisteredUserHashes(ystream2))
58 | await promise.all([collection1.whenSynced, collection2.whenSynced])
59 | const ydoc1 = collection1.getYdoc('ydoc')
60 | ydoc1.getMap().set('k', 'v1')
61 | const ydoc2 = collection2.getYdoc('ydoc')
62 | await ydoc2.whenLoaded
63 | await helpers.waitDocsSynced(ydoc1, ydoc2)
64 | t.compare(ydoc2.getMap().get('k'), 'v1')
65 | ydoc1.getMap().set('k', 'v2')
66 | t.compare(ydoc1.getMap().get('k'), 'v2')
67 | await helpers.waitDocsSynced(ydoc1, ydoc2)
68 | t.compare(ydoc2.getMap().get('k'), 'v2')
69 | const { collection: collection3 } = await th.createClient()
70 | await collection3.whenSynced
71 | const ydoc3 = collection3.getYdoc('ydoc')
72 | await ydoc3.whenLoaded
73 | t.compare(ydoc3.getMap().get('k'), 'v2')
74 | }
75 |
76 | /**
77 | * @param {t.TestCase} tc
78 | */
79 | export const testPerformanceLoadingManyDocs = async tc => {
80 | const N = 10
81 | const collectionName = tc.testName
82 | await Ystream.remove(getDbName(tc.testName))
83 | const ystream = await Ystream.open(getDbName(tc.testName))
84 | const collection = ystream.getCollection(owner, collectionName)
85 | await t.measureTimeAsync(`Create ${N} documents with initial content`, async () => {
86 | for (let i = 0; i < N; i++) {
87 | const ydoc = collection.getYdoc('doc-' + i)
88 | ydoc.getMap().set('i', i)
89 | }
90 | const lastdoc = collection.getYdoc('doc-' + (N - 1))
91 | await lastdoc.whenLoaded
92 | t.assert(lastdoc.getMap().get('i') === N - 1)
93 | })
94 | const ystream2 = await Ystream.open(getDbName(tc.testName))
95 | const collection2 = ystream2.getCollection(owner, collectionName)
96 | await t.measureTimeAsync(`Loading ${N} documents with initial content`, async () => {
97 | const ps = []
98 | for (let i = 0; i < N; i++) {
99 | const ydoc = collection2.getYdoc('doc-' + i)
100 | ps.push(ydoc.whenLoaded.then(() => {
101 | if (ydoc.getMap().get('i') !== i) {
102 | return promise.reject(error.create(`content on doc ${i} not properly loaded`))
103 | }
104 | }))
105 | }
106 | await promise.all(ps)
107 | })
108 | }
109 |
110 | /**
111 | * @param {t.TestCase} tc
112 | */
113 | export const testPerformanceSyncingManyDocs = async tc => {
114 | const N = 10000
115 | const th = await helpers.createTestScenario(tc)
116 | const { owner, name: collectionName } = th.collectionDef
117 | const server = th.server
118 | if (server === null) {
119 | return t.skip()
120 | }
121 | const [{ collection: collection1 }] = await th.createClients(1)
122 | await t.measureTimeAsync(`Sync ${N} documents with content to server`, async () => {
123 | for (let i = 0; i < N; i++) {
124 | const ydoc = collection1.getYdoc('doc-' + i)
125 | ydoc.getMap().set('i', i)
126 | if (i % 10000 === 0 && i !== 0) {
127 | console.log(`progress: ${math.round(100 * i / N)}%`)
128 | // can't wait here at the moment, or every single message will be sent individually
129 | // const ydocRemote = server.ystream.getYdoc(owner, collection, 'doc-' + i)
130 | // await ydocRemote.whenLoaded
131 | // await helpers.waitDocsSynced(ydoc, ydocRemote)
132 | }
133 | ydoc.destroy()
134 | }
135 | const lastClientDoc = collection1.getYdoc('doc-' + (N - 1))
136 | await lastClientDoc.whenLoaded
137 | const lastServerDoc = server.ystream.getCollection(owner, collectionName).getYdoc('doc-' + (N - 1))
138 | await lastServerDoc.whenLoaded
139 | await helpers.waitDocsSynced(lastClientDoc, lastServerDoc)
140 | t.assert(lastServerDoc.getMap().get('i') === N - 1)
141 | })
142 | const [{ collection: collection2 }] = await th.createClients(1)
143 | await t.measureTimeAsync(`Sync ${N} documents with content from server`, async () => {
144 | const lastClientDoc = collection2.getYdoc('doc-' + (N - 1))
145 | const lastServerDoc = server.ystream.getCollection(owner, collectionName).getYdoc('doc-' + (N - 1))
146 | await lastServerDoc.whenLoaded
147 | await helpers.waitDocsSynced(lastClientDoc, lastServerDoc)
148 | t.assert(lastClientDoc.getMap().get('i') === N - 1)
149 | })
150 | }
151 |
152 | /**
153 | * Testing loading from the database.
154 | *
155 | * @param {t.TestCase} tc
156 | */
157 | export const testLww = async tc => {
158 | const th = await helpers.createTestScenario(tc)
159 | const [{ collection: collection1, ystream: ystream1 }, { collection: collection2, ystream: ystream2 }] = await th.createClients(2)
160 | await ystream1.transact(tr => collection1.setLww(tr, 'key', 'val1'))
161 | t.assert(await ystream1.transact(tr => collection1.getLww(tr, 'key')) === 'val1')
162 | await ystream2.transact(tr => collection2.setLww(tr, 'key', 'val2'))
163 | t.assert(await ystream2.transact(tr => collection2.getLww(tr, 'key')) === 'val2')
164 | while (true) {
165 | const lw1 = await ystream1.transact(tr => collection1.getLww(tr, 'key'))
166 | const lw2 = await ystream2.transact(tr => collection2.getLww(tr, 'key'))
167 | const sv1 = await collection1.ystream.transact(tr => actions.getStateVector(tr, collection1.ystream, collection1.ownerBin, collection1.collection))
168 | const sv2 = await collection2.ystream.transact(tr => actions.getStateVector(tr, collection2.ystream, collection2.ownerBin, collection2.collection))
169 | console.log({ lw1, lw2, sv1, sv2 })
170 | if (lw1 === lw2) break
171 | await promise.wait(100)
172 | }
173 | t.info('lww value converged')
174 | }
175 |
176 | /**
177 | * Testing loading from the database.
178 | *
179 | * @param {t.TestCase} tc
180 | */
181 | export const testFolderStructure = async tc => {
182 | const th = await helpers.createTestScenario(tc)
183 | const [{ collection: collection1, ystream: ystream1 }] = await th.createClients(1)
184 | await ystream1.transact(async tr => {
185 | await collection1.setFileInfo(tr, 'A', 'a', null, 'dir')
186 | await collection1.setFileInfo(tr, 'B', 'b', 'A', 'dir')
187 | await collection1.setFileInfo(tr, 'C', 'c', 'B', 'dir')
188 | await collection1.setFileInfo(tr, 'D', 'd', 'B', 'dir')
189 | /**
190 | * @param {string} docid
191 | */
192 | const getParent = docid => collection1.getFileInfo(tr, docid).then(fi => fi?.parent)
193 | /**
194 | * @param {string} docid
195 | */
196 | const getDocName = docid => collection1.getFileInfo(tr, docid).then(fi => fi?.name)
197 | t.assert(await getParent('A') === null)
198 | const a = await getParent('B')
199 | console.log(a)
200 | t.assert(await getParent('B') === 'A')
201 | t.assert(await getParent('D') === 'B')
202 | t.assert(await getParent('C') === 'B')
203 | console.log('docname A:', await getDocName('A'))
204 | t.assert(await getDocName('A') === 'a')
205 | t.assert(await getDocName('B') === 'b')
206 | t.assert(await getDocName('D') === 'd')
207 | t.assert(await getDocName('C') === 'c')
208 | t.compare(await collection1.getDocChildren(tr, 'A'), [{ docid: 'B', docname: 'b' }])
209 | t.compare(await collection1.getDocChildren(tr, 'B'), [{ docid: 'C', docname: 'c' }, { docid: 'D', docname: 'd' }]) // should return in alphabetical order
210 | t.compare(await collection1.getDocPath(tr, 'A'), [{ docid: 'A', docname: 'a', ftype: 'dir' }])
211 | t.compare(await collection1.getDocPath(tr, 'B'), [{ docid: 'A', docname: 'a', ftype: 'dir' }, { docid: 'B', docname: 'b', ftype: 'dir' }])
212 | t.compare(await collection1.getDocPath(tr, 'D'), [{ docid: 'A', docname: 'a', ftype: 'dir' }, { docid: 'B', docname: 'b', ftype: 'dir' }, { docid: 'D', docname: 'd', ftype: 'dir' }])
213 | t.compare(await collection1.getDocChildrenRecursive(tr, 'A'), [
214 | {
215 | docid: 'B',
216 | docname: 'b',
217 | children: [
218 | { docid: 'C', docname: 'c', children: [] },
219 | { docid: 'D', docname: 'd', children: [] }
220 | ]
221 | }
222 | ])
223 | t.compare(await collection1.getDocIdsFromPath(tr, 'A', ['b']), ['B'])
224 | t.compare(await collection1.getDocIdsFromPath(tr, 'A', ['b', 'c']), ['C'])
225 | t.compare(await collection1.getDocIdsFromPath(tr, 'A', ['c']), [])
226 | await collection1.setFileInfo(tr, 'B', 'b', null, 'dir')
227 | t.compare(await collection1.getDocChildrenRecursive(tr, 'A'), [])
228 | t.compare(await collection1.getDocChildrenRecursive(tr, 'B'), [
229 | { docid: 'C', docname: 'c', children: [] },
230 | { docid: 'D', docname: 'd', children: [] }
231 | ])
232 | await collection1.setFileInfo(tr, 'A', 'a', 'B', 'dir')
233 | t.compare(await collection1.getDocChildrenRecursive(tr, 'B'), [
234 | { docid: 'A', docname: 'a', children: [] },
235 | { docid: 'C', docname: 'c', children: [] },
236 | { docid: 'D', docname: 'd', children: [] }
237 | ])
238 | // @todo handle concurrent moves: parentless docs (deleted parent) should be moved to an
239 | // orphanage. Circles should be detected - the most recent "parent" should be moved to the
240 | // orhpanage.
241 | // The orhpanage should just be a local container of references. docs don't need to be reparented.
242 | // Circles are actually fine as long as the app can work with them.
243 | })
244 | }
245 |
246 | /**
247 | * Testing loading from the database.
248 | *
249 | * @param {t.TestCase} tc
250 | */
251 | export const testDeleteDoc = async tc => {
252 | const docid = 'test'
253 | const th = await helpers.createTestScenario(tc)
254 | const [{ collection: collection1, ystream: ystream1 }] = await th.createClients(1)
255 | const ydoc = collection1.getYdoc(docid)
256 | await ydoc.whenLoaded
257 | ydoc.getText().insert(0, 'hi')
258 | const ydocCheck = collection1.getYdoc(docid)
259 | await ydocCheck.whenLoaded
260 | t.compareStrings(ydocCheck.getText().toString(), 'hi')
261 | console.log('docval prev', ydocCheck.getText().toString())
262 | ydocCheck.destroy()
263 | await ystream1.transact(async tr => {
264 | await collection1.setLww(tr, docid, 'val')
265 | await collection1.setFileInfo(tr, docid, 'mydoc.md', 'parentid', 'binary')
266 | t.assert(await collection1.getLww(tr, docid) === 'val')
267 | t.compare(await collection1.getFileInfo(tr, docid), { name: 'mydoc.md', parent: 'parentid', ftype: 'binary' })
268 | await collection1.deleteDoc(tr, docid)
269 | })
270 | const ydocCheck2 = collection1.getYdoc(docid)
271 | console.log('docval prev', ydocCheck2.getText().toString())
272 | t.compareStrings(ydocCheck2.getText().toString(), '')
273 | await ystream1.transact(async tr => {
274 | t.assert(await collection1.getLww(tr, docid) === undefined)
275 | t.assert(await collection1.getFileInfo(tr, docid) === null)
276 | await collection1.setLww(tr, docid, 'val')
277 | t.assert(await collection1.getLww(tr, docid) === undefined)
278 | // @todo test if deletion works in combination with parents (integration of delete should
279 | // orphan child docs)
280 | })
281 | }
282 |
--------------------------------------------------------------------------------
/src/protocol.js:
--------------------------------------------------------------------------------
1 | import { Ystream } from './index.js' // eslint-disable-line
2 | import * as dbtypes from './api/dbtypes.js'
3 | import * as encoding from 'lib0/encoding'
4 | import * as decoding from 'lib0/decoding'
5 | import * as error from 'lib0/error'
6 | import * as array from 'lib0/array'
7 | import * as actions from './api/actions.js'
8 | import * as logging from 'lib0/logging'
9 | import * as authentication from './api/authentication.js'
10 | import * as buffer from 'lib0/buffer'
11 | import * as jose from 'lib0/crypto/jwt'
12 | import * as sha256 from 'lib0/hash/sha256'
13 | import * as string from 'lib0/string'
14 | import * as wsUtils from './comms/websocket-utils.js'
15 |
16 | const _log = logging.createModuleLogger('@y/stream/protocol')
17 | /**
18 | * @param {Ystream} ystream
19 | * @param {import('./comm.js').Comm} comm
20 | * @param {string} type
21 | * @param {...any} args
22 | */
23 | const log = (ystream, comm, type, ...args) => _log(logging.PURPLE, `(local=${ystream.clientid.toString(36).slice(0, 4)},remote=${comm.clientid.toString(36).slice(0, 4)}${ystream.syncsEverything ? ',server=true' : ''}) `, logging.ORANGE, '[' + type + '] ', logging.GREY, ...args.map(arg => typeof arg === 'function' ? arg() : arg))
24 |
25 | const messageOps = 0
26 | const messageRequestOps = 1
27 | const messageSynced = 2
28 | const messageSyncedAll = 3
29 | const messageInfo = 4 // first message
30 | const messageChallengeAnswer = 5 // second message
31 |
32 | /**
33 | * @param {encoding.Encoder} encoder
34 | * @param {Array} ops
35 | * @param {number} startClock
36 | * @param {number} endClock
37 | */
38 | export const writeOps = (encoder, ops, startClock, endClock) => {
39 | encoding.writeUint8(encoder, messageOps)
40 | encoding.writeVarUint(encoder, startClock)
41 | encoding.writeVarUint(encoder, endClock)
42 | encoding.writeVarUint(encoder, ops.length)
43 | ops.forEach(op => {
44 | op.encode(encoder)
45 | })
46 | }
47 |
48 | /**
49 | * @param {decoding.Decoder} decoder
50 | * @param {Ystream} ystream
51 | * @param {import('./comm.js').Comm} comm
52 | */
53 | const readOps = async (decoder, ystream, comm) => {
54 | const startClock = decoding.readVarUint(decoder)
55 | const endClock = decoding.readVarUint(decoder)
56 | const numOfOps = decoding.readVarUint(decoder)
57 | /**
58 | * @type {Array}
59 | */
60 | const ops = []
61 | for (let i = 0; i < numOfOps; i++) {
62 | ops.push(/** @type {dbtypes.OpValue} */ (dbtypes.OpValue.decode(decoder)))
63 | }
64 | log(ystream, comm, 'Ops', `received ${ops.length} ops. decoderlen=${decoder.arr.length}. first: clock=${ops[0].clock},client=${ops[0].client},startClock=${startClock},endClock=${endClock}`)
65 | if (comm.user == null) {
66 | error.unexpectedCase()
67 | }
68 | await actions.applyRemoteOps(ystream, comm, ops, comm.user, comm, startClock, endClock)
69 | }
70 |
71 | /**
72 | * @param {encoding.Encoder} encoder
73 | * @param {Uint8Array} owner
74 | * @param {string} collection
75 | * @param {number} nextClock
76 | */
77 | export const writeSynced = (encoder, owner, collection, nextClock) => {
78 | encoding.writeUint8(encoder, messageSynced)
79 | encoding.writeVarUint8Array(encoder, owner)
80 | encoding.writeVarString(encoder, collection)
81 | encoding.writeVarUint(encoder, nextClock)
82 | }
83 |
84 | /**
85 | * @param {encoding.Encoder} encoder
86 | * @param {number} nextClock
87 | */
88 | export const writeSyncedAll = (encoder, nextClock) => {
89 | encoding.writeUint8(encoder, messageSyncedAll)
90 | encoding.writeVarUint(encoder, nextClock)
91 | }
92 |
93 | /**
94 | * @param {encoding.Encoder} _encoder
95 | * @param {decoding.Decoder} decoder
96 | * @param {Ystream} ystream
97 | * @param {import('./comm.js').Comm|null} comm
98 | */
99 | const readSynced = async (_encoder, decoder, ystream, comm) => {
100 | const owner = decoding.readVarUint8Array(decoder)
101 | const collection = decoding.readVarString(decoder)
102 | decoding.readVarUint(decoder) // confirmed clock
103 | if (comm == null) return
104 | comm.synced.add(owner, collection)
105 | const ycol = ystream.getCollection(buffer.toBase64(owner), collection)
106 | if (ycol != null && !ycol.isSynced) {
107 | ycol.isSynced = true
108 | ycol.emit('sync', [])
109 | log(ystream, comm, 'Synced', `synced "${collection}" .. emitted sync event`)
110 | }
111 | }
112 |
113 | /**
114 | * @param {encoding.Encoder} _encoder
115 | * @param {decoding.Decoder} decoder
116 | * @param {Ystream} ystream
117 | * @param {import('./comm.js').Comm|null} comm
118 | */
119 | const readSyncedAll = async (_encoder, decoder, ystream, comm) => {
120 | decoding.readVarUint(decoder) // confirmed clock
121 | if (comm == null) return
122 | ystream.collections.forEach(c => {
123 | c.forEach(ycol => {
124 | ycol.isSynced = true
125 | ycol.emit('sync', [])
126 | })
127 | })
128 | log(ystream, comm, 'Synced', 'synced "*" collections .. emitted sync event')
129 | }
130 |
131 | /**
132 | * @param {encoding.Encoder} encoder
133 | * @param {Uint8Array} owner
134 | * @param {string} collection
135 | * @param {number} clock
136 | */
137 | export const writeRequestOps = (encoder, owner, collection, clock) => {
138 | encoding.writeUint8(encoder, messageRequestOps)
139 | encoding.writeUint8(encoder, 1) // requesting specific ops
140 | encoding.writeVarUint8Array(encoder, owner)
141 | encoding.writeVarString(encoder, collection)
142 | encoding.writeVarUint(encoder, clock)
143 | }
144 |
145 | /**
146 | * @param {encoding.Encoder} encoder
147 | * @param {number} clock
148 | */
149 | export const writeRequestAllOps = (encoder, clock) => {
150 | encoding.writeUint8(encoder, messageRequestOps)
151 | encoding.writeUint8(encoder, 0) // request all ops
152 | encoding.writeVarUint(encoder, clock)
153 | }
154 |
155 | /**
156 | * @param {decoding.Decoder} decoder
157 | * @param {Ystream} ystream
158 | * @param {import('./comm.js').Comm} comm - this is used to subscribe to messages
159 | */
160 | const readRequestOps = async (decoder, ystream, comm) => {
161 | const requestedAllOps = decoding.readUint8(decoder) === 0
162 | let owner = null
163 | let collection = null
164 | let nextClock = 0
165 | if (requestedAllOps) {
166 | nextClock = decoding.readVarUint(decoder)
167 | log(ystream, comm, 'RequestOps', 'requested all ops', () => ({ nextClock, remoteClientId: comm.clientid }))
168 | } else {
169 | // requested only a single collection
170 | owner = decoding.readVarUint8Array(decoder)
171 | collection = decoding.readVarString(decoder)
172 | nextClock = decoding.readVarUint(decoder)
173 | log(ystream, comm, 'RequestOps', `requested "${collection}" `, () => ({ nextClock, remoteClientId: comm.clientid }))
174 | }
175 | comm.emit('requested-ops', [comm, { collection: { owner, name: collection }, clock: nextClock }])
176 | log(ystream, comm, 'subscribing conn to ops', { fcid: comm.clientid, collection, owner })
177 | // @todo add method to filter by owner & collection
178 | actions.createOpsReader(ystream, nextClock, owner, collection, comm).pipeTo(comm.writer, { signal: comm.streamController.signal }).catch((reason) => {
179 | log(ystream, comm, 'ended pipe', { reason, isDestroyed: comm.isDestroyed })
180 | comm.close(wsUtils.statusParseError, 'unexpected error reading ops stream')
181 | })
182 | }
183 |
184 | /**
185 | * @todo should contain device auth, exchange of certificates, some verification by challenge, ..
186 | * @param {encoding.Encoder} encoder
187 | * @param {Ystream} ystream
188 | * @param {import('./comm.js').Comm} comm - this is used to subscribe to messages
189 | */
190 | export const writeInfo = (encoder, ystream, comm) => {
191 | encoding.writeUint8(encoder, messageInfo)
192 | encoding.writeVarUint(encoder, ystream.clientid)
193 | if (ystream.user == null || ystream.deviceClaim == null) {
194 | error.unexpectedCase()
195 | }
196 | ystream.user.encode(encoder)
197 | ystream.deviceClaim.encode(encoder)
198 | // challenge that the other user must sign using the device's private key
199 | encoding.writeVarUint8Array(encoder, comm.challenge)
200 | }
201 |
202 | /**
203 | * @todo maybe rename to SyncStep1?
204 | * @param {encoding.Encoder} encoder
205 | * @param {decoding.Decoder} decoder
206 | * @param {import('./comm.js').Comm} comm
207 | * @param {Ystream} ystream
208 | */
209 | const readInfo = async (encoder, decoder, ystream, comm) => {
210 | const clientid = decoding.readVarUint(decoder)
211 | // @todo user only has to be submitted, if we want to register a new user. For now, we simply
212 | // always send the user identity in all initial requests.
213 | const user = dbtypes.UserIdentity.decode(decoder)
214 | const deviceClaim = dbtypes.DeviceClaim.decode(decoder)
215 | const challenge = decoding.readVarUint8Array(decoder)
216 | const registeredUser = await authentication.getRegisteredUser(ystream, user)
217 | comm.clientid = clientid
218 | comm.user = registeredUser || user
219 | // @todo 1. read device claim and verify it
220 | comm.deviceClaim = deviceClaim
221 | if (!array.equalFlat(user.hash, sha256.digest(string.encodeUtf8(deviceClaim.unsafeDecode().payload.iss)))) {
222 | log(ystream, comm, 'InfoRejected', 'rejecting comm because client hash doesn\'t match with device claim', '\n', user.hash, deviceClaim.hash)
223 | error.unexpectedCase()
224 | }
225 | if (registeredUser == null) {
226 | if (ystream.acceptNewUsers) {
227 | await authentication.registerUser(ystream, user)
228 | } else {
229 | log(ystream, comm, 'destroying', 'User not registered')
230 | comm.close(wsUtils.statusUnauthenticated, 'User not registered')
231 | return
232 | }
233 | }
234 | const parsedClaim = await deviceClaim.verify(await user.publicKey)
235 | if (parsedClaim.payload.iss !== user.ekey) {
236 | comm.close(wsUtils.statusUnauthenticated, 'invalid user claim')
237 | error.unexpectedCase()
238 | }
239 | await ystream.transact(async tr => {
240 | const currClaim = await tr.tables.devices.indexes.hash.get(deviceClaim.hash)
241 | if (currClaim == null) {
242 | await tr.tables.devices.add(deviceClaim)
243 | }
244 | })
245 | // @todo send some kind of challenge
246 | log(ystream, comm, 'Info Challenge', () => Array.from(challenge))
247 | await writeChallengeAnswer(encoder, ystream, challenge, comm)
248 | }
249 |
250 | /**
251 | * @param {decoding.Decoder} decoder
252 | * @param {import('./comm.js').Comm} comm
253 | */
254 | const readChallengeAnswer = async (decoder, comm) => {
255 | const deviceClaim = comm.deviceClaim
256 | if (deviceClaim == null) {
257 | error.unexpectedCase()
258 | }
259 | const jwt = decoding.readVarString(decoder)
260 | const { payload: { sub } } = await jose.verifyJwt(await deviceClaim.dpkey, jwt)
261 | if (sub !== buffer.toBase64(comm.challenge)) {
262 | throw new Error('Wrong challenge')
263 | }
264 | comm.isAuthenticated = true
265 | if (comm.sentChallengeAnswer) comm.emit('authenticated', [comm])
266 | }
267 |
268 | /**
269 | * @todo should contain device auth, exchange of certificates, some verification by challenge, ..
270 | * @param {encoding.Encoder} encoder
271 | * @param {Ystream} ystream
272 | * @param {Uint8Array} challenge - this is used to subscribe to messages
273 | * @param {import('./comm.js').Comm} comm - this is used to subscribe to messages
274 | */
275 | export const writeChallengeAnswer = async (encoder, ystream, challenge, comm) => {
276 | encoding.writeUint8(encoder, messageChallengeAnswer)
277 | await ystream.transact(async tr => {
278 | const pk = await tr.objects.device.get('private')
279 | if (pk == null) error.unexpectedCase()
280 | const jwt = await jose.encodeJwt(pk.key, {
281 | sub: buffer.toBase64(challenge)
282 | })
283 | encoding.writeVarString(encoder, jwt)
284 | })
285 | comm.sentChallengeAnswer = true
286 | if (comm.isAuthenticated) comm.emit('authenticated', [comm])
287 | }
288 |
289 | /**
290 | * @param {encoding.Encoder} encoder
291 | * @param {decoding.Decoder} decoder
292 | * @param {Ystream} ystream
293 | * @param {import('./comm.js').Comm} comm - this is used to set the "synced" property
294 | */
295 | export const readMessage = async (encoder, decoder, ystream, comm) => {
296 | if (ystream.isDestroyed) return
297 | try {
298 | do {
299 | const messageType = decoding.readUint8(decoder)
300 | if (messageType === messageInfo) {
301 | await readInfo(encoder, decoder, ystream, comm)
302 | } else if (messageType === messageChallengeAnswer) {
303 | await readChallengeAnswer(decoder, comm)
304 | } else {
305 | if (comm.deviceClaim == null || comm.user == null || !comm.isAuthenticated) {
306 | log(ystream, comm, 'closing unauthenticated connection')
307 | comm.close(wsUtils.statusUnauthenticated, 'closing unauthenticated connection')
308 | }
309 | switch (messageType) {
310 | case messageOps: {
311 | await readOps(decoder, ystream, comm)
312 | break
313 | }
314 | case messageRequestOps: {
315 | await readRequestOps(decoder, ystream, comm)
316 | break
317 | }
318 | case messageSynced: {
319 | await readSynced(encoder, decoder, ystream, comm)
320 | break
321 | }
322 | case messageSyncedAll: {
323 | await readSyncedAll(encoder, decoder, ystream, comm)
324 | break
325 | }
326 | /* c8 ignore next 3 */
327 | default:
328 | // Unknown message-type
329 | error.unexpectedCase()
330 | }
331 | }
332 | } while (decoding.hasContent(decoder))
333 | if (encoding.hasContent(encoder)) {
334 | return encoder
335 | }
336 | return null
337 | } catch (err) {
338 | log(ystream, comm, 'Info rejection', 'Closing connection because of unexpected error', /** @type {Error} */ (err).stack)
339 | comm.close(wsUtils.statusParseError, 'Unexpected error when parsing message')
340 | }
341 | }
342 |
--------------------------------------------------------------------------------
/src/ystream.js:
--------------------------------------------------------------------------------
1 | import * as Y from 'yjs'
2 | import * as map from 'lib0/map'
3 | import { bindydoc } from './bindydoc.js'
4 | import * as promise from 'lib0/promise'
5 | import * as isodb from 'isodb' // eslint-disable-line
6 | import * as db from './db.js' // eslint-disable-line
7 | import { ObservableV2 } from 'lib0/observable'
8 | import * as actions from './api/actions.js'
9 | import * as dbtypes from './api/dbtypes.js' // eslint-disable-line
10 | import * as bc from 'lib0/broadcastchannel'
11 | import * as buffer from 'lib0/buffer'
12 | import * as logging from 'lib0/logging'
13 | import * as eventloop from 'lib0/eventloop'
14 |
const _log = logging.createModuleLogger('@y/stream')

/**
 * Logs with a prefix identifying this ystream instance. Function arguments are resolved
 * lazily, so expensive log message construction can be deferred by passing a closure.
 *
 * @param {Ystream} ystream
 * @param {string} type
 * @param {...any} args
 */
const log = (ystream, type, ...args) => {
  const localId = ystream.clientid.toString(36).slice(0, 4)
  const serverSuffix = ystream.syncsEverything ? ',server=true' : ''
  const resolvedArgs = args.map(arg => typeof arg === 'function' ? arg() : arg)
  _log(logging.PURPLE, `(local=${localId}${serverSuffix}) `, logging.ORANGE, '[' + type + '] ', logging.GREY, ...resolvedArgs)
}
22 |
23 | /**
24 | * @typedef {Object} YstreamConf
25 | * @property {Array} [YstreamConf.comms]
26 | * @property {boolean} [YstreamConf.acceptNewUsers]
27 | * @property {boolean} [YstreamConf.syncsEverything]
28 | */
29 |
30 | /**
31 | * Fires the `ops` event.
32 | *
33 | * The `ops` event guarantees that ops are emitted in-order (sorted by localClock).
34 | *
35 | * However, because of async ops from other threads, we can't guarantee that `_emitOpsEvent`
36 | * receives ops in order. So we need to wait before emitting ops.
37 | *
38 | * This happens from inside of a transaction, so it can't overlap with other transactions.
39 | *
40 | * @param {YTransaction} tr
41 | * @param {Ystream} ystream
42 | * @param {Array} ops
43 | * @param {any} origin
44 | */
45 | const emitOpsEvent = async (tr, ystream, ops, origin) => {
46 | if (ops.length > 0) {
47 | if (ystream._eclock == null) {
48 | ystream._eclock = ops[0].localClock
49 | }
50 | const eclock = ystream._eclock
51 | ops.sort((o1, o2) => o1.localClock - o2.localClock)
52 | while (ops[0].localClock < eclock) {
53 | ops.shift()
54 | }
55 | for (let i = 0; i < ops.length - 1; i++) {
56 | if (ops[i].localClock + 1 !== ops[i + 1].localClock) {
57 | throw new Error('expected emitted ops to be without holes')
58 | }
59 | }
60 | if (ops[0].localClock !== eclock) {
61 | origin = 'db'
62 | // not expected op, pull from db again
63 | ops = await tr.tables.oplog.getEntries({
64 | start: new isodb.AutoKey(eclock)
65 | }).then(colEntries => colEntries.map(update => {
66 | update.value.localClock = update.key.v
67 | if (update.value.client === ystream.clientid) {
68 | update.value.clock = update.key.v
69 | }
70 | return update.value
71 | }))
72 | }
73 | if (ops.length > 0) {
74 | bc.publish('@y/stream#' + ystream.dbname, ops[ops.length - 1].localClock, ystream)
75 | ystream._eclock = ops[ops.length - 1].localClock + 1
76 | eventloop.enqueue(() => {
77 | // @todo make this a proper log
78 | log(ystream, 'emitting ops', () => `localClockRange=${ops[0].localClock}-${ops[ops.length - 1].localClock}`)
79 | ystream.emit('ops', [ops, tr.origin, tr.isRemote])
80 | })
81 | }
82 | }
83 | }
84 |
/**
 * Thin wrapper around an isodb transaction that additionally collects the ops created
 * during the transaction so they can be emitted (via the 'ops' event) after commit.
 */
export class YTransaction {
  /**
   * @param {isodb.ITransaction} db
   * @param {any} origin
   */
  constructor (db, origin) {
    this.db = db
    // direct shortcuts into the underlying isodb transaction
    this.tables = db.tables
    this.objects = db.objects
    /**
     * Ops created during this transaction; consumed by emitOpsEvent after the body ran.
     * @type {Array}
     */
    this.ops = []
    this.origin = origin
    // presumably flipped to true when ops originate from a remote peer — set elsewhere;
    // TODO confirm
    this.isRemote = false
  }
}
102 |
103 | /**
104 | * @extends ObservableV2<{ ops:function(Array,any,boolean):void, authenticate:function():void, "collection-opened":(collection:Collection)=>void, "destroy": (ystream: Ystream)=>void }>
105 | */
106 | export class Ystream extends ObservableV2 {
107 | /**
108 | * @param {string} dbname
109 | * @param {isodb.IDB} _db
110 | * @param {number} clientid
111 | * @param {dbtypes.UserIdentity|null} user
112 | * @param {dbtypes.DeviceClaim|null} deviceClaim
113 | * @param {YstreamConf} conf
114 | */
115 | constructor (dbname, _db, clientid, user, deviceClaim, { comms = [], acceptNewUsers = false, syncsEverything = false } = {}) {
116 | super()
117 | this.dbname = dbname
118 | /**
119 | * @type {isodb.IDB}
120 | */
121 | this._db = _db
122 | this.acceptNewUsers = acceptNewUsers
123 | /**
124 | * @type {Map>}
125 | */
126 | this.collections = new Map()
127 | /**
128 | * Whether to sync all collections (usually only done by a server)
129 | */
130 | this.syncsEverything = syncsEverything
131 | this.clientid = clientid
132 | /**
133 | * @type {dbtypes.UserIdentity|null}
134 | */
135 | this.user = user
136 | /**
137 | * @type {dbtypes.DeviceClaim|null}
138 | */
139 | this.deviceClaim = deviceClaim
140 | /**
141 | * Instance is authenticated once a user identity is set and the device claim has been set.
142 | */
143 | this.isAuthenticated = false
144 | this.whenAuthenticated = promise.create(resolve => this.once('authenticate', resolve))
145 | /**
146 | * Next expected localClock for emitting ops.
147 | * @type {number|null}
148 | */
149 | this._eclock = null
150 | /**
151 | * Subscribe to broadcastchannel event that is fired whenever an op is added to the database.
152 | * The localClock of the last op will be emitted.
153 | */
154 | this._esub = bc.subscribe('@y/stream#' + this.dbname, /** @param {number} lastOpId */ async (lastOpId, origin) => {
155 | if (origin !== this) {
156 | log(this, 'received ops via broadcastchannel', lastOpId)
157 | // @todo reintroduce pulling from a database
158 | // const ops = await actions.getOps(this, opids[0])
159 | // _emitOpsEvent(this, ops, 'broadcastchannel')
160 | }
161 | })
162 | /**
163 | * @type {Set}
164 | */
165 | this.comms = new Set()
166 | /**
167 | * @type {Set}
168 | */
169 | this.commHandlers = new Set()
170 | this.whenAuthenticated.then(() => {
171 | log(this, 'adding comms', comms)
172 | comms.forEach(comm =>
173 | this.commHandlers.add(comm.init(this))
174 | )
175 | })
176 | this.isDestroyed = false
177 | }
178 |
179 | /**
180 | * @param {string} owner
181 | * @param {string} collection
182 | */
183 | getCollection (owner, collection) {
184 | return map.setIfUndefined(map.setIfUndefined(this.collections, owner, map.create), collection, () => new Collection(this, owner, collection))
185 | }
186 |
187 | /**
188 | * @todo Transactions should have an origin, children should only be added if they have the same
189 | * origin, never to system transactions
190 | * @template T
191 | * @param {(tr:YTransaction) => Promise} f
192 | * @param {any} origin
193 | * @return {Promise}
194 | */
195 | transact (f, origin = null) {
196 | return this._db.transact(async db => {
197 | const tr = new YTransaction(db, origin)
198 | const res = await f(tr)
199 | await emitOpsEvent(tr, this, tr.ops, tr.origin)
200 | return res
201 | })
202 | }
203 |
204 | destroy () {
205 | if (this.isDestroyed) return
206 | this.isDestroyed = true
207 | this.collections.forEach(owner => {
208 | owner.forEach(collection => {
209 | collection.destroy()
210 | })
211 | })
212 | this.commHandlers.forEach(handler => handler.destroy())
213 | bc.unsubscribe('@y/stream#' + this.dbname, this._esub)
214 | this.emit('destroy', [this])
215 | return this._db.destroy()
216 | }
217 | }
218 |
219 | /**
220 | * @extends ObservableV2<{ sync:function():void, ops:function(Array,any,boolean):void }>
221 | */
222 | export class Collection extends ObservableV2 {
223 | /**
224 | * @param {Ystream} stream
225 | * @param {string} owner
226 | * @param {string} collection
227 | */
228 | constructor (stream, owner, collection) {
229 | super()
230 | this.ystream = stream
231 | this.owner = owner
232 | this.ownerBin = buffer.fromBase64(owner)
233 | this.collection = collection
234 | /**
235 | * @type {Map>}
236 | */
237 | this.docs = new Map()
238 | this.isSynced = false
239 | this.whenSynced = promise.create(resolve =>
240 | this.once('sync', resolve)
241 | )
242 | stream.emit('collection-opened', [this])
243 | }
244 |
245 | /**
246 | * @param {string} docname
247 | */
248 | getYdoc (docname) {
249 | const ydoc = new Y.Doc({
250 | guid: docname
251 | })
252 | return this.bindYdoc(docname, ydoc)
253 | }
254 |
255 | /**
256 | * @param {string} docname
257 | * @param {Y.Doc} ydoc
258 | */
259 | bindYdoc (docname, ydoc) {
260 | const docset = map.setIfUndefined(this.docs, docname, () => new Set())
261 | docset.add(ydoc)
262 | ydoc.on('destroy', () => {
263 | docset.delete(ydoc)
264 | })
265 | bindydoc(this.ystream, this.owner, this.collection, docname, ydoc)
266 | return ydoc
267 | }
268 |
269 | /**
270 | * @param {import('@y/stream').YTransaction} tr
271 | * @param {string} docid
272 | */
273 | getYdocUpdates (tr, docid) {
274 | return actions.getYDocUpdates(tr, this.ystream, this.ownerBin, this.collection, docid)
275 | }
276 |
277 | /**
278 | * @param {import('@y/stream').YTransaction} tr
279 | * @param {string} docid
280 | * @param {Uint8Array} update
281 | */
282 | addYdocUpdate (tr, docid, update) {
283 | return actions.addYDocUpdate(tr, this.ystream, this.ownerBin, this.collection, docid, update)
284 | }
285 |
286 | /**
287 | * @param {import('@y/stream').YTransaction} tr
288 | * @param {string} docid
289 | * @return {Promise<{ name: string, parent: null | string, ftype: 'dir'|'binary'|'text' } | null>}
290 | */
291 | async getFileInfo (tr, docid) {
292 | return actions.getFileInfo(tr, this.ystream, this.ownerBin, this.collection, docid)
293 | }
294 |
295 | /**
296 | * This functions sets the fileinfo - making it possible to represent this as a file on a
297 | * filesystem.
298 | *
299 | * It is possible to query the children of a parent. The children can be identified by the docid
300 | * (immutable) OR the docname (mutable, but not guaranteed to be unique across devices).
301 | *
302 | * This function does not overwrite content. The existing file should be deleted manually.
303 | *
304 | * @param {import('@y/stream').YTransaction} tr
305 | * @param {string} docid
306 | * @param {string} docname
307 | * @param {string|null} parentDoc
308 | * @param {'dir'|'binary'|'text'} ftype
309 | */
310 | async setFileInfo (tr, docid, docname, parentDoc, ftype) {
311 | return actions.setFileInfo(tr, this.ystream, this.ownerBin, this.collection, docid, docname, parentDoc, ftype)
312 | }
313 |
314 | /**
315 | * @param {import('@y/stream').YTransaction} tr
316 | * @param {string} docid
317 | * @param {number} [endLocalClock]
318 | */
319 | getDocPath (tr, docid, endLocalClock) {
320 | return actions.getDocPath(tr, this.ystream, this.ownerBin, this.collection, docid, endLocalClock)
321 | }
322 |
323 | /**
324 | * @param {import('@y/stream').YTransaction} tr
325 | * @param {string|null} rootid
326 | * @param {Array} path
327 | * @return {Promise>}
328 | */
329 | getDocIdsFromPath (tr, rootid, path) {
330 | return actions.getDocIdsFromPath(tr, this.ystream, this.ownerBin, this.collection, rootid, path)
331 | }
332 |
333 | /**
334 | * This function retrieves the children on a document. It simulates the behavior of the `ls` unix
335 | * command.
336 | *
337 | * @param {import('@y/stream').YTransaction} tr
338 | * @param {string?} docid
339 | * @return {Promise>}
340 | */
341 | getDocChildren (tr, docid) {
342 | return actions.getDocChildren(tr, this.ystream, this.ownerBin, this.collection, docid)
343 | }
344 |
345 | /**
346 | * This function retrieves the children on a document. It simulates the behavior of the `ls **\/*
347 | * -l` unix command.
348 | *
349 | * @param {import('@y/stream').YTransaction} tr
350 | * @param {string?} docname
351 | */
352 | getDocChildrenRecursive (tr, docname) {
353 | return actions.getDocChildrenRecursive(tr, this.ystream, this.ownerBin, this.collection, docname)
354 | }
355 |
356 | /**
357 | * @param {import('@y/stream').YTransaction} tr
358 | * @param {string} key
359 | * @returns {any|undefined} undefined if the value was not defined previously
360 | */
361 | getLww (tr, key) {
362 | return actions.getLww(tr, this.ystream, this.ownerBin, this.collection, key)
363 | }
364 |
365 | /**
366 | * @param {import('@y/stream').YTransaction} tr
367 | * @param {string} key
368 | * @param {any} val
369 | * @return the previous values
370 | */
371 | setLww (tr, key, val) {
372 | return actions.setLww(tr, this.ystream, this.ownerBin, this.collection, key, val)
373 | }
374 |
375 | /**
376 | * @param {import('@y/stream').YTransaction} tr
377 | * @param {string} docid
378 | */
379 | deleteDoc (tr, docid) {
380 | return actions.deleteDoc(tr, this.ystream, this.ownerBin, this.collection, docid)
381 | }
382 |
383 | /**
384 | * @param {import('@y/stream').YTransaction} tr
385 | * @param {string} docid
386 | */
387 | isDocDeleted (tr, docid) {
388 | return actions.isDocDeleted(tr, this.ystream, this.ownerBin, this.collection, docid)
389 | }
390 |
391 | destroy () {
392 | this.ystream.collections.get(this.owner)?.delete(this.collection)
393 | }
394 | }
395 |
--------------------------------------------------------------------------------
/src/extensions/fs.js:
--------------------------------------------------------------------------------
1 | import chokidar from 'chokidar'
2 | import fs from 'node:fs'
3 | import path from 'node:path'
4 | import * as Y from 'yjs'
5 |
6 | import * as random from 'lib0/random'
7 | import * as array from 'lib0/array'
8 | import * as observable from 'lib0/observable'
9 | import * as error from 'lib0/error'
10 | import * as Ystream from '@y/stream' // eslint-disable-line
11 | import * as actions from '@y/stream/api/actions' // eslint-disable-line
12 | import * as logging from 'lib0/logging'
13 | import * as diff from 'lib0/diff'
14 |
// file extensions (lower-case, without the dot) that are synced as collaborative text
const textFileExtensions = new Set([
  'txt', 'md', 'js', 'ts', 'tsx', 'jsx', 'css', 'norg'
])

// captures the extension of a filename; requires at least one character before the dot
const _fileExtensionRegex = /.+\.(\w+)$/

/**
 * Extracts the file extension (without the dot), or returns null when there is none.
 * @param {string} fname
 */
const getFileExtension = fname => {
  const match = _fileExtensionRegex.exec(fname)
  return match == null ? null : match[1]
}

/**
 * Whether a file should be treated as collaborative text, based on its extension.
 * @param {string} fname
 */
const isTextFile = (fname) => {
  const ext = getFileExtension(fname)
  if (ext == null) return false
  return textFileExtensions.has(ext)
}
32 |
33 | /**
34 | * @extends {observable.ObservableV2<{}>}
35 | */
36 | export default class Yfs extends observable.ObservableV2 {
37 | /**
38 | * @param {import('@y/stream').Collection} ycollection
39 | * @param {Object} opts
40 | * @param {string} opts.observePath
41 | */
42 | constructor (ycollection, { observePath = '.' }) {
43 | super()
44 | this.ycollection = ycollection
45 | this.ystream = ycollection.ystream
46 | this.observedPath = observePath
47 | /**
48 | * @type {Array<{ clock: number, docid: string }>}
49 | */
50 | this._filesToRender = []
51 | /**
52 | * @type {Set}
53 | */
54 | this._filesToRenderDocNames = new Set()
55 | /**
56 | * @type {(ops: Array, origin: any) => void}
57 | */
58 | this._opsObserver = (ops, _origin) => {
59 | const shouldStartRender = this._filesToRender.length === 0
60 | // console.log({ ops })
61 | for (let i = 0; i < ops.length; i++) {
62 | const op = ops[i]
63 | if (!this._filesToRenderDocNames.has(op.doc)) {
64 | // console.log('doc added to files to render', { docid: op.doc, type: op.op.type, op: op.op, opC: op.op.val?.toString?.().slice(0, 50) })
65 | this._filesToRender.push({ clock: op.localClock, docid: op.doc })
66 | this._filesToRenderDocNames.add(op.doc)
67 | }
68 | }
69 | if (shouldStartRender) {
70 | _renderFiles(this)
71 | }
72 | }
73 | // @todo start consuming starting at last checkpoint
74 | this.ystream.on('ops', this._opsObserver)
75 | this._destroyObserver = this.destroy.bind(this)
76 | this.ystream.on('destroy', this._destroyObserver)
77 |
78 | this.chokidarWatch = chokidar.watch(observePath, { ignoreInitial: false, ignored: /(\.ystream|node_modules|\.git)/ /*, awaitWriteFinish: true */ })
79 | .on('all', (type, cwdPath) => {
80 | const observeRelPath = path.relative(observePath, cwdPath)
81 | if (observeRelPath === '' || observeRelPath === '.' || observeRelPath.startsWith('..')) return
82 | let content = null
83 | if (type === 'add' || type === 'change') {
84 | content = fs.readFileSync(path.join(observePath, observeRelPath))
85 | try {
86 | if (isTextFile(observeRelPath)) {
87 | content = content.toString('utf8')
88 | }
89 | } catch (e) {
90 | console.warn('error parsing text file', e)
91 | }
92 | }
93 | _eventsToCompute.push({ type, path: observeRelPath, content })
94 | if (_eventsToCompute.length === 1) _computeEvents(this)
95 | })
96 | }
97 |
98 | async destroy () {
99 | super.destroy()
100 | this.ystream.off('ops', this._opsObserver)
101 | await this.chokidarWatch.close()
102 | }
103 | }
104 |
105 | /**
106 | * @param {import('@y/stream').YTransaction} tr
107 | * @param {Yfs} yfs
108 | * @param {string} docid
109 | * @return {Promise<{ type: 'binaryFile', content: Buffer }|{ type: 'dir' }|{ type: 'skip' }|{ type: 'text', content: Y.Text }|null>}
110 | */
111 | const getFileContent = async (tr, yfs, docid) => {
112 | const fi = await yfs.ycollection.getFileInfo(tr, docid)
113 | if (fi == null) {
114 | return null
115 | }
116 | if (fi.ftype === 'binary') {
117 | const content = await yfs.ycollection.getLww(tr, docid)
118 | if (content == null) {
119 | // skip for now, until content arrives
120 | return { type: 'skip' }
121 | }
122 | return { type: 'binaryFile', content }
123 | }
124 | if (fi.ftype === 'text') {
125 | const ydoc = new Y.Doc()
126 | const yupdates = await yfs.ycollection.getYdocUpdates(tr, docid)
127 | if (yupdates == null) return null
128 | ydoc.transact(tr => {
129 | yupdates.forEach(update => {
130 | Y.applyUpdateV2(ydoc, update)
131 | })
132 | })
133 | return { type: 'text', content: ydoc.getText() }
134 | }
135 | if (fi.ftype === 'dir') {
136 | return { type: 'dir' }
137 | }
138 | error.unexpectedCase()
139 | }
140 |
141 | /**
142 | * @param {Yfs} yfs
143 | */
144 | const _renderFiles = async (yfs) => {
145 | let shouldBreak = false
146 | let filesrendered = 0
147 | const filesToRender = yfs._filesToRender
148 | while (filesToRender.length > 0 && !shouldBreak) {
149 | await yfs.ystream.transact(async tr => {
150 | // perform a max of 100 changes before creating a new transaction
151 | for (let i = 0; i < 300 && filesToRender.length > 0; i++) {
152 | const { docid, clock: opClock } = filesToRender[0]
153 | const ycontent = await getFileContent(tr, yfs, docid)
154 | const docPath = await actions.getDocPath(tr, yfs.ystream, yfs.ycollection.ownerBin, yfs.ycollection.collection, docid, ycontent == null ? opClock - 1 : undefined)
155 | // console.log('getting file content', { docid, ycontent, docPath })
156 | const docnamee = docPath?.[docPath.length - 1].docname
157 | // const docdeleted = await actions.isDocDeleted(tr, yfs.ystream, yfs.ycollection.ownerBin, yfs.ycollection.collection, docid)
158 | // console.log({ docnamee, docdeleted, docid, ycontent: /** @type {any} */ (ycontent)?.content?.toString?.().slice(0, 50) || ycontent, docPath, opClock })
159 | const strPath = path.join(yfs.observedPath, docPath?.map(p => p.docname).join('/') || '')
160 | if (docnamee == null || docPath == null) {
161 | docnamee == null && console.warn('docname should not be empty') // @todo
162 | docPath == null && console.warn('docPath should not be empty') // @todo
163 | // @todo this edge case is ignored for now
164 | // error.unexpectedCase()
165 | } else if (ycontent == null) {
166 | // console.log('removing file/dir ', { strPath })
167 | try {
168 | const stat = fs.statSync(strPath)
169 | if (stat.isDirectory()) {
170 | fs.rmdirSync(strPath)
171 | } else if (stat.isFile()) {
172 | fs.rmSync(strPath)
173 | } else {
174 | console.log('File doesnt exist anymore')
175 | }
176 | } catch (e) {
177 | // console.log('error in fs.stat', e)
178 | }
179 | } else if (ycontent.type === 'skip') {
180 | // skip for now, will be rendered when all content arrives
181 | // nop
182 | } else if (ycontent.type === 'binaryFile') {
183 | // console.log('trying to read file', { strPath, docPath, docid })
184 | const fileContent = fs.existsSync(strPath) ? fs.readFileSync(strPath) : null
185 | if (fileContent == null || !array.equalFlat(fileContent, ycontent.content)) {
186 | // console.log('writing file', { docPath, strPath, ycontent: /** @type {any} */ (ycontent).content?.toString?.().slice(0, 50) || ycontent, fileContent: fileContent?.toString?.().slice(0, 50), ypath: docPath })
187 | fs.writeFileSync(strPath, ycontent.content)
188 | // console.log('file written!', { strPath })
189 | }
190 | } else if (ycontent.type === 'text') {
191 | const ycontentStr = ycontent.content.toString()
192 | // console.log('trying to read file', { strPath, docPath, docid })
193 | const fileContent = fs.existsSync(strPath) ? fs.readFileSync(strPath) : null
194 | let fileContentStr = null
195 | if (fileContent != null) {
196 | try {
197 | fileContentStr = fileContent.toString('utf8')
198 | } catch (e) { /* nop */ }
199 | }
200 | if (fileContentStr !== ycontentStr) {
201 | fs.writeFileSync(strPath, ycontentStr)
202 | }
203 | } else {
204 | // console.log('checking if folder exists', { strPath })
205 | if (!fs.existsSync(strPath)) {
206 | // console.log(strPath, ' does notexist , lets creat it..')
207 | fs.mkdirSync(strPath)
208 | // console.log('folder exists now', {
209 | // strPath, exists: fs.existsSync(strPath)
210 | // })
211 | }
212 | }
213 | yfs._filesToRenderDocNames.delete(docid)
214 | filesToRender.shift()
215 | filesrendered++
216 | if (filesToRender.length === 0) {
217 | shouldBreak = true
218 | }
219 | }
220 | })
221 | logging.print(logging.ORANGE, `${filesrendered}/${filesToRender.length + filesrendered} files rendered (clientid=${yfs.ystream.clientid})`)
222 | }
223 | }
224 |
225 | /**
226 | * Creates a document and creates parent documents as necessary. Works similarly to `mkdir -p`.
227 | *
228 | * @param {import('@y/stream').YTransaction} tr
229 | * @param {Ystream.Ystream} ystream
230 | * @param {Uint8Array} owner
231 | * @param {string} collection
232 | * @param {string|null} rootid
233 | * @param {Array} path
234 | * @param {'dir'|'binary'|'text'} finalFtype
235 | * @return {Promise<{ docid: string, isNew: boolean }>}
236 | */
237 | export const mkPath = async (tr, ystream, owner, collection, rootid, path, finalFtype) => {
238 | let isNew = false
239 | if (path.length === 0) error.unexpectedCase()
240 | const ftype = path.length === 1 ? finalFtype : 'dir'
241 | let children = await tr.tables.childDocs.getValues({ prefix: { owner, collection, parent: rootid, docname: path[0] } }).then(cs => cs.map(c => c.v))
242 | if (children.length === 0) {
243 | const newChildId = random.uuidv4() + path.join('/')
244 | isNew = true
245 | await actions.setFileInfo(tr, ystream, owner, collection, newChildId, path[0], rootid, ftype)
246 | children = [newChildId]
247 | } else if (path.length === 1) {
248 | const fi = await actions.getFileInfo(tr, ystream, owner, collection, children[0])
249 | if (fi?.ftype !== finalFtype) {
250 | await actions.setFileInfo(tr, ystream, owner, collection, children[0], path[0], rootid, ftype)
251 | }
252 | }
253 | if (path.length === 1) {
254 | return { docid: children[0], isNew }
255 | }
256 | return mkPath(tr, ystream, owner, collection, children[0], path.slice(1), finalFtype)
257 | }
258 |
259 | /**
260 | * @type {Array<{ type: string, path: string, content: string|Buffer|null }>}}
261 | */
262 | const _eventsToCompute = []
263 | /**
264 | * @param {Yfs} yfs
265 | */
266 | const _computeEvents = async yfs => {
267 | const ycollection = yfs.ycollection
268 | // console.log('all events to compute', _eventsToCompute)
269 | while (_eventsToCompute.length > 0) {
270 | await yfs.ystream.transact(async tr => {
271 | for (let iterations = 0; _eventsToCompute.length > 0 && iterations < 600; iterations++) {
272 | const event = _eventsToCompute[0]
273 | const arrPath = event.path.split(path.sep)
274 | // const filePath = arrPath.slice(0, -1)
275 | // const fileName = arrPath[arrPath.length - 1]
276 | // console.log(event.type, { path: event.path, filePath, fileName, content: event.content?.toString?.().slice(0, 50) || event.content })
277 | switch (event.type) {
278 | case 'add':
279 | case 'change': {
280 | // console.log('ids for path', {
281 | // filePath,
282 | // ids: await ycollection.getDocIdsFromPath(tr, null, filePath)
283 | // })
284 | //
285 | const isTextContent = typeof event.content === 'string'
286 | const { docid, isNew } = await mkPath(tr, ycollection.ystream, ycollection.ownerBin, ycollection.collection, null, arrPath, isTextContent ? 'text' : 'binary')
287 | if (isNew) {
288 | if (isTextContent) {
289 | const ydoc = new Y.Doc()
290 | ydoc.getText().insert(0, /** @type {string} */ (event.content))
291 | await actions.addYDocUpdate(tr, ycollection.ystream, ycollection.ownerBin, ycollection.collection, docid, Y.encodeStateAsUpdateV2(ydoc))
292 | } else {
293 | await ycollection.setLww(tr, docid, event.content)
294 | }
295 | } else {
296 | if (isTextContent) {
297 | const currDocUpdates = await ycollection.getYdocUpdates(tr, docid)
298 | const currDoc = new Y.Doc()
299 | if (currDocUpdates != null) {
300 | currDoc.transact(() => {
301 | currDocUpdates.forEach(update => {
302 | Y.applyUpdateV2(currDoc, update)
303 | })
304 | })
305 | }
306 | const textContent = /** @type {string} */ (event.content)
307 | const d = diff.simpleDiffString(currDoc.getText().toString(), textContent)
308 | // apply diff and catch the updates
309 | /**
310 | * @type {Array}
311 | */
312 | const updates = []
313 | currDoc.on('updateV2', update => updates.push(update))
314 | /**
315 | * @type {Array}
316 | */
317 | const qdelta = [{ retain: d.index }]
318 | if (d.remove > 0) {
319 | qdelta.push({ delete: d.remove })
320 | }
321 | if (d.insert.length > 0) {
322 | qdelta.push({ insert: d.insert })
323 | }
324 | if (qdelta.length > 1) {
325 | currDoc.getText().applyDelta(qdelta)
326 | }
327 | for (let i = 0; i < updates.length; i++) {
328 | actions.addYDocUpdate(tr, ycollection.ystream, ycollection.ownerBin, ycollection.collection, docid, updates[i])
329 | }
330 | } else {
331 | const currContent = await ycollection.getLww(tr, docid)
332 | // console.log('updating file', { filePath, currContent: Buffer.from(currContent).toString().slice(0, 50), eventContent: event.content?.toString().slice(0, 50) })
333 | if (Buffer.isBuffer(event.content) && currContent instanceof Uint8Array && array.equalFlat(currContent, event.content)) {
334 | // console.log('nop...')
335 | // nop
336 | } else {
337 | await ycollection.setLww(tr, docid, event.content)
338 | }
339 | }
340 | }
341 | break
342 | }
343 | case 'unlink':
344 | case 'unlinkDir': {
345 | const docid = await ycollection.getDocIdsFromPath(tr, null, arrPath).then(ids => ids[0])
346 | if (docid) {
347 | await ycollection.deleteDoc(tr, docid)
348 | }
349 | break
350 | }
351 | case 'addDir': {
352 | await mkPath(tr, ycollection.ystream, ycollection.ownerBin, ycollection.collection, null, arrPath, 'dir')
353 | break
354 | }
355 | }
356 | _eventsToCompute.shift()
357 | }
358 | })
359 | }
360 | }
361 |
--------------------------------------------------------------------------------
/src/operations.js:
--------------------------------------------------------------------------------
1 | import * as encoding from 'lib0/encoding'
2 | import * as decoding from 'lib0/decoding'
3 | import * as error from 'lib0/error'
4 | import * as Y from 'yjs'
5 | import * as math from 'lib0/math'
6 | import * as array from 'lib0/array'
7 | import * as dbtypes from './api/dbtypes.js'
8 | import { getDocOps, mergeDocOps } from './api/actions.js'
9 | import * as number from 'lib0/number'
10 | import * as promise from 'lib0/promise'
11 | import * as object from 'lib0/object'
12 |
13 | /**
14 | * @typedef {import('isodb').IEncodable} IEncodable
15 | */
16 |
17 | export const OpYjsUpdateType = 0
18 | export const OpNoPermissionType = 1
19 | export const OpPermType = 2
20 | export const OpLwwType = 3
21 | export const OpFileInfoType = 4
22 | export const OpDeleteDocType = 5
23 |
/**
 * @typedef {keyof typeof typeMap} OpTypeIds
 */

/**
 * @typedef {InstanceType<(typeof typeMap)[OpTypeIds]>} OpTypes
 */
31 |
/**
 * Abstract base class of every operation type stored in the database (see
 * `typeMap` at the bottom of this file for the concrete implementations).
 * Every method here is abstract and throws.
 *
 * @todo rename all interfaces to I[* / AbstractOp]
 * @implements IEncodable
 */
export class AbstractOp {
  /**
   * @param {any} _anyarg
   */
  constructor (_anyarg) {
    error.methodUnimplemented()
  }

  /**
   * Numeric type id, used as the discriminator when encoding/decoding.
   * @return {OpTypeIds}
   */
  get type () {
    return error.methodUnimplemented()
  }

  /**
   * Serialize the op payload. The type id itself is written by the container
   * (see dbtypes.OpValue.encode), not here.
   * @param {encoding.Encoder} _encoder
   */
  encode (_encoder) {
    error.methodUnimplemented()
  }

  /**
   * @param {decoding.Decoder} _decoder
   * @return {OpTypes}
   */
  static decode (_decoder) {
    error.methodUnimplemented()
  }

  /**
   * Merge several ops of the same type on the same document into one OpValue.
   * @param {Array<import('./api/dbtypes.js').OpValue>} _ops
   * @param {boolean} _gc whether content that merges away may be garbage-collected
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (_ops, _gc) {
    error.methodUnimplemented()
  }

  /**
   * Apply this op's side effects to the database.
   *
   * Note that op.localClock must be set before calling integrate!
   *
   * @todo There is probably a better abstraction than integrate / unintegrate to achive consistency
   * (e.g. AbstractOp.cleanup(merged, deletedOps), which is called only once before calling event
   * handlers or returning the promise)
   *
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./api/dbtypes.js').OpValue} _op
   * @return {Promise<void>|void}
   */
  integrate (_tr, _ystream, _op) {
    error.methodUnimplemented()
  }

  /**
   * Undo the side effects of a previous `integrate`.
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./ystream.js').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   * @return {Promise<void>|void}
   */
  unintegrate (_ystream, _tr, _op) {
    error.methodUnimplemented()
  }
}
101 |
102 | /**
103 | * @typedef {'noaccess'|'r'|'rw'|'admin'} AccessType
104 | */
105 |
/**
 * An operation that contains information about which users have access to a document.
 *
 * Access levels are stored as integers whose meaning is `value % 4`:
 * 0 = noaccess, 1 = read, 2 = read+write, 3 = admin. The multiple-of-4 part
 * acts as a generation counter (see `createOpPermUpdate`), which lets `merge`
 * simply keep the per-user maximum.
 *
 * @implements AbstractOp
 */
export class OpPerm {
  constructor () {
    /**
     * Maps the hash of a user to its encoded access level.
     * @type {Map<string,number>}
     */
    this.access = new Map()
  }

  /**
   * Read access is granted for 'r', 'rw', and 'admin' (access % 4 > 0).
   * @param {string} userhash
   */
  hasReadAccess (userhash) {
    // bugfix: this previously computed `% 3`, which wrongly denied read
    // access for some encodings of admin (e.g. access=3 → 3 % 3 === 0).
    // Every other accessor of this class decodes the level with `% 4`.
    return (this.access.get(userhash) || 0) % 4 > 0
  }

  /**
   * Write access is granted for 'rw' and 'admin' (access % 4 > 1).
   * @param {string} userhash
   */
  hasWriteAccess (userhash) {
    return (this.access.get(userhash) || 0) % 4 > 1
  }

  /**
   * Admin access is granted only for 'admin' (access % 4 === 3).
   * @param {string} userhash
   */
  hasAdminAccess (userhash) {
    return (this.access.get(userhash) || 0) % 4 === 3
  }

  /**
   * @param {string} userhash
   * @return {AccessType}
   */
  getAccessType (userhash) {
    switch ((this.access.get(userhash) || 0) % 4) {
      case 0:
        return 'noaccess'
      case 1:
        return 'r'
      case 2:
        return 'rw'
      case 3:
        return 'admin'
      default:
        error.unexpectedCase()
    }
  }

  /**
   * @return {OpPermType}
   */
  get type () {
    return OpPermType
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint(encoder, this.access.size)
    this.access.forEach((perm, userhash) => {
      encoding.writeVarString(encoder, userhash)
      encoding.writeVarUint(encoder, perm)
    })
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {OpPerm}
   */
  static decode (decoder) {
    const op = new this()
    const size = decoding.readVarUint(decoder)
    for (let i = 0; i < size; i++) {
      const userhash = decoding.readVarString(decoder)
      const perm = decoding.readVarUint(decoder)
      op.access.set(userhash, perm)
    }
    return op
  }

  /**
   * Merges by keeping, per user, the maximum encoded access level. The merged
   * map is stored on (and returned as) the last op in `ops`.
   *
   * @todo maybe return ops that can safely be removed
   * @param {Array<import('./api/dbtypes.js').OpValue<OpPerm>>} ops
   * @param {boolean} _gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, _gc) {
    const mergedOp = ops[0].op
    for (let i = 1; i < ops.length; i++) {
      const op = ops[i]
      op.op.access.forEach((perm, userhash) => {
        mergedOp.access.set(userhash, math.max(mergedOp.access.get(userhash) || 0, perm))
      })
    }
    const lastOp = ops[ops.length - 1]
    lastOp.op = mergedOp
    return lastOp
  }

  /**
   * Permission ops have no further side effects to apply.
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  integrate (_tr, _ystream, _op) {
  }

  /**
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  unintegrate (_ystream, _tr, _op) {
  }
}
227 |
/**
 * Compute a new OpPerm that changes `userhash`'s access level to `accessType`.
 *
 * The new encoded value must be strictly larger than the currently stored
 * value so that `OpPerm.merge` (which keeps the per-user maximum) picks up the
 * change: we round the current value up to the next multiple of 4 and add the
 * new access type (0-3).
 *
 * @param {OpPerm|null} currentPermOp
 * @param {string} userhash
 * @param {AccessType} accessType
 */
export const createOpPermUpdate = (currentPermOp, userhash, accessType) => {
  const curr = currentPermOp?.access.get(userhash) || 0
  let newAccessType = 0
  switch (accessType) {
    case 'noaccess':
      newAccessType = 0
      break
    case 'r':
      newAccessType = 1
      break
    case 'rw':
      newAccessType = 2
      break
    case 'admin':
      newAccessType = 3
      break
    default:
      error.unexpectedCase()
  }
  // bugfix: the previous implementation reduced the current value mod 4 before
  // computing the new value, so the result was always `4 + newAccessType`.
  // Once a stored value reached 8 or more, later permission changes produced a
  // smaller number and were silently dropped by the max-based merge. Using the
  // full stored value keeps the result strictly increasing.
  const newPermOp = new OpPerm()
  newPermOp.access.set(userhash, curr - (curr % 4) + 4 + newAccessType)
  return newPermOp
}
257 |
/**
 * Placeholder op stored for a document we may not read. It is replaced once
 * access is requested again.
 *
 * @implements AbstractOp
 */
export class OpNoPermission {
  /**
   * @return {OpNoPermissionType}
   */
  get type () {
    return OpNoPermissionType
  }

  /**
   * Carries no payload, so nothing is written.
   * @param {encoding.Encoder} _encoder
   */
  encode (_encoder) {}

  /**
   * @param {decoding.Decoder} _decoder
   * @return {OpNoPermission}
   */
  static decode (_decoder) {
    return new this()
  }

  /**
   * All placeholders are equivalent — only the latest one is retained.
   * @param {Array<import('./api/dbtypes.js').OpValue<OpNoPermission>>} ops
   * @param {boolean} _gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, _gc) {
    return ops[ops.length - 1]
  }

  /**
   * No side effects to apply.
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  integrate (_tr, _ystream, _op) {}

  /**
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  unintegrate (_ystream, _tr, _op) {}
}
308 |
/**
 * Last-writer-wins register holding an arbitrary encodable value.
 *
 * @implements AbstractOp
 */
export class OpLww {
  /**
   * @param {number} cnt counter used to decide which write wins
   * @param {any} val
   */
  constructor (cnt, val) {
    this.cnt = cnt
    this.val = val
  }

  /**
   * @return {OpLwwType}
   */
  get type () {
    return OpLwwType
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint(encoder, this.cnt)
    encoding.writeAny(encoder, this.val)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {OpLww}
   */
  static decode (decoder) {
    const cnt = decoding.readVarUint(decoder)
    const val = decoding.readAny(decoder)
    return new this(cnt, val)
  }

  /**
   * This returns the "last writer". There is no merging. The other updates (even the ones that
   * happen "later"), can safely be removed without causing sync-issues. The winner is the op
   * with the highest cnt; ties are broken by the higher client id.
   *
   * @param {Array<import('./api/dbtypes.js').OpValue<OpLww>>} ops
   * @param {boolean} _gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, _gc) {
    return array.fold(ops, ops[0], (winner, cand) => {
      const keepWinner = winner.op.cnt > cand.op.cnt || (winner.op.cnt === cand.op.cnt && winner.client > cand.client)
      return keepWinner ? winner : cand
    })
  }

  /**
   * No side effects to apply.
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  integrate (_tr, _ystream, _op) {}

  /**
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  unintegrate (_ystream, _tr, _op) {}
}
373 |
/**
 * Describes a document's place in the file hierarchy: its name, its parent
 * document id, and its file type.
 *
 * @todo This currently handles parent-child relation AND the name of a document. It's easier to
 * manage the database like this.
 *
 * However, it does make sense to enable users renaming a document while another user is moving
 * content. This implementation might be problematic in a shared filesystem.
 *
 * @implements AbstractOp
 */
export class OpFileInfo {
  /**
   * @param {number} cnt counter used to decide which write wins (lww-style)
   * @param {string} docname
   * @param {string|null} parent id of the parent document, null for top-level docs
   * @param {'dir'|'binary'|'text'} ftype
   */
  constructor (cnt, docname, parent, ftype) {
    this.cnt = cnt
    this.parent = parent
    this.name = docname
    this.ftype = ftype
  }

  /**
   * @return {OpFileInfoType}
   */
  get type () {
    return OpFileInfoType
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    // info byte — bit0: has parent
    const hasParent = this.parent !== null
    encoding.writeUint8(encoder, hasParent ? 1 : 0)
    encoding.writeVarUint(encoder, this.cnt)
    if (hasParent) {
      encoding.writeVarString(encoder, /** @type {string} */ (this.parent))
    }
    encoding.writeVarString(encoder, this.name)
    encoding.writeVarString(encoder, this.ftype)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {OpFileInfo}
   */
  static decode (decoder) {
    const info = decoding.readUint8(decoder)
    const cnt = decoding.readVarUint(decoder)
    const parent = (info & 1) === 1 ? decoding.readVarString(decoder) : null
    const docname = decoding.readVarString(decoder)
    const ftype = /** @type {any} */ (decoding.readVarString(decoder))
    return new OpFileInfo(cnt, docname, parent, ftype)
  }

  /**
   * This works similarly to the lww merge: the highest cnt wins, ties are
   * broken by the higher client id.
   *
   * @param {Array<import('./api/dbtypes.js').OpValue<OpFileInfo>>} ops
   * @param {boolean} _gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, _gc) {
    return array.fold(ops, ops[0], (winner, cand) =>
      (winner.op.cnt > cand.op.cnt || (winner.op.cnt === cand.op.cnt && winner.client > cand.client)) ? winner : cand
    )
  }

  /**
   * Registers the doc under its parent and re-merges fileinfo ops so that
   * conflicting entries are unintegrated.
   *
   * @param {import('@y/stream').YTransaction} tr
   * @param {import('./ystream.js').Ystream} ystream
   * @param {import('./api/dbtypes.js').OpValue} op
   */
  async integrate (tr, ystream, op) {
    const key = new dbtypes.ParentKey(op.owner, op.collection, this.parent, this.name ?? op.doc, op.localClock)
    tr.tables.childDocs.set(key, op.doc)
    // force that conflicts are unintegrated
    await mergeDocOps(tr, ystream, op.owner, op.collection, op.doc, this.type)
  }

  /**
   * Removes the parent-child table entry created by `integrate`.
   *
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('@y/stream').YTransaction} tr
   * @param {import('./api/dbtypes.js').OpValue} op
   */
  unintegrate (_ystream, tr, op) {
    const key = new dbtypes.ParentKey(op.owner, op.collection, this.parent, this.name ?? op.doc, op.localClock)
    tr.tables.childDocs.remove(key)
  }
}
460 |
/**
 * Carries a Yjs updateV2 payload for a document.
 *
 * @implements AbstractOp
 */
export class OpYjsUpdate {
  /**
   * @param {Uint8Array} update encoded Yjs updateV2
   */
  constructor (update) {
    this.update = update
  }

  /**
   * @return {OpYjsUpdateType}
   */
  get type () {
    return OpYjsUpdateType
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint8Array(encoder, this.update)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {OpYjsUpdate}
   */
  static decode (decoder) {
    return new this(decoding.readVarUint8Array(decoder))
  }

  /**
   * Merges all updates into a single update. With `gc` enabled, the updates
   * are materialized in a Ydoc so deleted content can be garbage-collected;
   * otherwise they are merged without instantiating a document.
   *
   * @param {Array<import('./api/dbtypes.js').OpValue<OpYjsUpdate>>} ops
   * @param {boolean} gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, gc) {
    /**
     * @type {Uint8Array}
     */
    let merged
    // @todo if ops.length === 1 return ops[0]
    if (gc) {
      const ydoc = new Y.Doc({ guid: '' })
      ydoc.transact(() => {
        ops.forEach(o => Y.applyUpdateV2(ydoc, o.op.update))
      })
      merged = Y.encodeStateAsUpdateV2(ydoc)
    } else {
      merged = Y.mergeUpdatesV2(ops.map(o => o.op.update))
    }
    const lastOp = array.fold(ops, ops[0], (a, b) => a.localClock > b.localClock ? a : b)
    lastOp.op = new OpYjsUpdate(merged)
    return lastOp
  }

  /**
   * No side effects to apply.
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  integrate (_tr, _ystream, _op) {}

  /**
   * @param {import('./ystream.js').Ystream} _stream
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  unintegrate (_stream, _tr, _op) {}
}
534 |
/**
 * Marks a document as deleted. Integrating this op unintegrates every other
 * op of the document.
 *
 * @implements AbstractOp
 */
export class OpDeleteDoc {
  /**
   * @return {OpDeleteDocType}
   */
  get type () {
    return OpDeleteDocType
  }

  /**
   * Carries no payload, so nothing is written.
   * @param {encoding.Encoder} _encoder
   */
  encode (_encoder) { }

  /**
   * @param {decoding.Decoder} _decoder
   * @return {OpDeleteDoc}
   */
  static decode (_decoder) {
    return new this()
  }

  /**
   * This returns the "last writer". There is no merging. The other updates (even the ones that
   * happen "later"), can safely be removed without causing sync-issues.
   *
   * @param {Array<import('./api/dbtypes.js').OpValue<OpDeleteDoc>>} ops
   * @param {boolean} _gc
   * @return {import('./api/dbtypes.js').OpValue}
   */
  static merge (ops, _gc) {
    // we retain the op with the lowest localClock
    return array.fold(ops, ops[0], (best, cand) => best.localClock < cand.localClock ? best : cand)
  }

  /**
   * Unintegrates all ops of every other type on the deleted document.
   *
   * @todo add a test case that creates files and folders and then delete them. The
   * "getChildrenRecursive" function should not return the deleted children
   * @todo, specifically only call types that need to be unintegrated
   *
   * @param {import('@y/stream').YTransaction} tr
   * @param {import('./ystream.js').Ystream} ystream
   * @param {import('./api/dbtypes.js').OpValue} op
   */
  async integrate (tr, ystream, op) {
    await promise.all(object.keys(typeMap).map(async typeKey => {
      const type = /** @type {any} */ (number.parseInt(typeKey))
      if (type === OpDeleteDocType) return
      const docOps = await getDocOps(tr, ystream, op.owner, op.collection, op.doc, type)
      await promise.all(docOps.map(dop => dop.op.unintegrate(ystream, tr, dop)))
    }))
  }

  /**
   * @param {import('./ystream.js').Ystream} _ystream
   * @param {import('@y/stream').YTransaction} _tr
   * @param {import('./api/dbtypes.js').OpValue} _op
   */
  unintegrate (_ystream, _tr, _op) { }
}
596 |
/**
 * Maps each operation type id (the byte written first by
 * dbtypes.OpValue.encode) to the class implementing that operation.
 * Decoders use this table to dispatch on the encoded type byte.
 */
export const typeMap = {
  [OpYjsUpdateType]: OpYjsUpdate,
  [OpNoPermissionType]: OpNoPermission,
  [OpPermType]: OpPerm,
  [OpLwwType]: OpLww,
  [OpFileInfoType]: OpFileInfo,
  [OpDeleteDocType]: OpDeleteDoc
}
605 |
--------------------------------------------------------------------------------
/src/api/dbtypes.js:
--------------------------------------------------------------------------------
1 | /**
2 | * @todo add "type" fields to applicable types reserved for future usage
3 | */
4 |
5 | import * as encoding from 'lib0/encoding'
6 | import * as decoding from 'lib0/decoding'
7 | import * as error from 'lib0/error'
8 | import * as isodb from 'isodb' // eslint-disable-line
9 | import * as requests from '../messages.js'
10 | import * as operations from '../operations.js'
11 | import * as binary from 'lib0/binary'
12 | import * as string from 'lib0/string'
13 | import * as sha256 from 'lib0/hash/sha256'
14 | import * as jose from 'lib0/crypto/jwt'
15 | import * as json from 'lib0/json'
16 | import * as ecdsa from 'lib0/crypto/ecdsa'
17 |
/**
 * Wraps an operation together with its origin (client & clock) and the
 * owner / collection / document it applies to.
 *
 * @todo "owner" could be mapped to an integer
 * @todo client should actually be a map to a deviceid
 * @template {operations.OpTypes|operations.AbstractOp} [OP=operations.AbstractOp]
 * @implements isodb.IEncodable
 */
export class OpValue {
  /**
   * @param {number} client
   * @param {number} clock
   * @param {Uint8Array} owner hash of a user
   * @param {string} collection
   * @param {string} doc
   * @param {OP} op
   */
  constructor (client, clock, owner, collection, doc, op) {
    // position in the local database; starts at 0 and is set by the caller
    // before the op is integrated (see AbstractOp.integrate)
    this.localClock = 0
    this.client = client
    this.clock = clock
    this.owner = owner
    this.collection = collection
    // @todo rename to docid to avoid resemblence to docname
    this.doc = doc
    this.op = op
  }

  /**
   * Writes the op's type id first so `decode` can dispatch on it.
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeUint8(encoder, this.op.type)
    encoding.writeVarUint(encoder, this.client)
    encoding.writeVarUint(encoder, this.clock)
    encoding.writeVarUint8Array(encoder, this.owner)
    encoding.writeVarString(encoder, this.collection)
    encoding.writeVarString(encoder, this.doc)
    this.op.encode(encoder)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    const type = /** @type {operations.OpTypeIds} */ (decoding.readUint8(decoder))
    const client = decoding.readVarUint(decoder)
    const clock = decoding.readVarUint(decoder)
    const owner = decoding.readVarUint8Array(decoder)
    const collection = decoding.readVarString(decoder)
    const doc = decoding.readVarString(decoder)
    const op = operations.typeMap[type].decode(decoder)
    return new OpValue(client, clock, owner, collection, doc, op)
  }
}
72 |
/**
 * Wrapper around a request object stored in the database.
 *
 * @todo create a "Request" type that is used in protocol
 * @template {requests.RequestDocument} [REQ=requests.RequestDocument]
 * @implements isodb.IEncodable
 */
export class RequestValue {
  /**
   * @param {REQ} req
   */
  constructor (req) {
    this.req = req
  }

  /**
   * Writes the request's type id first so `decode` can dispatch on it.
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint(encoder, this.req.type)
    this.req.encode(encoder)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    switch (decoding.readVarUint(decoder)) {
      case requests.RequestDocumentType:
        return requests.RequestDocument.decode(decoder)
      default:
        error.methodUnimplemented()
    }
  }
}
109 |
/**
 * Associates a client's clock with the corresponding local clock.
 *
 * @implements isodb.IEncodable
 */
export class ClientClockValue {
  /**
   * @param {number} clock
   * @param {number} localClock
   */
  constructor (clock, localClock) {
    this.clock = clock
    this.localClock = localClock
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint(encoder, this.clock)
    encoding.writeVarUint(encoder, this.localClock)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {ClientClockValue}
   */
  static decode (decoder) {
    const clock = decoding.readVarUint(decoder)
    return new ClientClockValue(clock, decoding.readVarUint(decoder))
  }
}
141 |
/**
 * Database key identifying an op within a collection.
 *
 * @implements isodb.IEncodable
 */
export class CollectionKey {
  /**
   * @param {Uint8Array} owner
   * @param {string} collection
   * @param {number} opid
   */
  constructor (owner, collection, opid) {
    this.owner = owner
    this.collection = collection
    this.opid = opid
  }

  /**
   * The opid is written big-endian (keeps numeric order under bytewise key
   * comparison).
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint8Array(encoder, this.owner)
    encoding.writeVarString(encoder, this.collection)
    encoding.writeUint32BigEndian(encoder, this.opid)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    const owner = decoding.readVarUint8Array(decoder)
    const collection = decoding.readVarString(decoder)
    return new CollectionKey(owner, collection, decoding.readUint32BigEndian(decoder))
  }
}
177 |
/**
 * A user, identified by its stringified public signing key.
 *
 * @implements isodb.IEncodable
 */
export class UserIdentity {
  /**
   * @param {string} encodedPublicKey stringified jwk
   * @param {object} opts
   * @param {boolean} [opts.isTrusted]
   */
  constructor (encodedPublicKey, { isTrusted = false } = {}) {
    this.ekey = encodedPublicKey
    this.isTrusted = isTrusted
    // lazily computed caches for the `hash` / `publicKey` getters
    this._hash = null
    this._publicKey = null
  }

  // imported public key (promise), computed on first access and cached
  get publicKey () {
    return this._publicKey || (this._publicKey = ecdsa.importKeyJwk(json.parse(this.ekey)))
  }

  /**
   * sha256 digest of the utf8-encoded public key, computed on first access.
   * @return {Uint8Array}
   */
  get hash () {
    return this._hash || (this._hash = sha256.digest(string.encodeUtf8(this.ekey)))
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint(encoder, this.isTrusted ? 1 : 0)
    encoding.writeVarString(encoder, this.ekey)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {UserIdentity}
   */
  static decode (decoder) {
    const isTrusted = decoding.readVarUint(decoder) === 1
    return new UserIdentity(decoding.readVarString(decoder), { isTrusted })
  }
}
223 |
/**
 * A device, identified by its stringified public signing key.
 *
 * @implements isodb.IEncodable
 */
export class DeviceIdentity {
  /**
   * @param {string} encodedPublicKey stringified jwk
   */
  constructor (encodedPublicKey) {
    this.ekey = encodedPublicKey
    // lazily computed caches for the `hash` / `publicKey` getters
    this._hash = null
    this._publicKey = null
  }

  // imported public key (promise), computed on first access and cached
  get publicKey () {
    return this._publicKey || (this._publicKey = ecdsa.importKeyJwk(json.parse(this.ekey)))
  }

  /**
   * sha256 digest of the utf8-encoded public key, computed on first access.
   * @return {Uint8Array}
   */
  get hash () {
    return this._hash || (this._hash = sha256.digest(string.encodeUtf8(this.ekey)))
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    // leading "type" byte reserved for future usage
    encoding.writeVarUint(encoder, 0)
    encoding.writeVarString(encoder, this.ekey)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    decoding.readVarUint(decoder) // read a "type" byte that is reserved for future usage
    /**
     * @todo validate that the read key conforms to a specific format and doesn't allow to contain
     * "junk" that could be used to generate keys for a specific hash
     */
    return new DeviceIdentity(decoding.readVarString(decoder))
  }
}
270 |
271 | /**
272 | * @typedef {Object} JwtDeviceClaim
273 | * @property {number} JwtDeviceClaim.iat
274 | * @property {string} JwtDeviceClaim.sub public key of the device
275 | * @property {string} JwtDeviceClaim.iss "issuer" hash of the user that created this claim
276 | */
277 |
/**
 * A JWT claim, signed by a user, asserting that a device (identified by the
 * public key in the jwt's `sub` field — see JwtDeviceClaim) acts on behalf of
 * that user.
 *
 * @implements isodb.IEncodable
 * @extends isodb.JwtValue
 */
export class DeviceClaim extends isodb.JwtValue {
  /**
   * @note It should never be necessary for you to call the constructor!
   * Use the static `DeviceClaim.create` method instead.
   *
   * @param {string} v the encoded jwt
   * @param {Uint8Array} phash sha256 hash of the device's public key (the jwt's `sub`)
   */
  constructor (v, phash) {
    super(v)
    this.hash = phash
    /**
     * Public key of the device
     * @type {Promise<CryptoKey>?}
     */
    this._dpkey = null
  }

  /**
   * Public key of the device, imported from the jwt's `sub` claim on first
   * access and cached.
   */
  get dpkey () {
    return this._dpkey || (this._dpkey = ecdsa.importKeyJwk(json.parse(this.unsafeDecode().payload.sub)))
  }

  /**
   * Verifies the jwt against the user's public key, then constructs the claim.
   *
   * @param {string} jwt
   * @param {CryptoKey} userPublicKey
   */
  static async fromJwt (jwt, userPublicKey) {
    const { payload } = await jose.verifyJwt(userPublicKey, jwt)
    const hash = sha256.digest(string.encodeUtf8(payload.sub))
    return new this(jwt, hash)
  }

  /**
   * NOTE(review): decoding does NOT verify the jwt signature — the payload is
   * read with `unsafeDecode`. Verification presumably happened before the
   * claim was stored; confirm against callers.
   *
   * @param {decoding.Decoder} decoder
   * @return {DeviceClaim}
   */
  static decode (decoder) {
    const jwt = decoding.readVarString(decoder)
    const payload = jose.unsafeDecode(jwt).payload
    const hash = sha256.digest(string.encodeUtf8(payload.sub))
    return new this(jwt, hash)
  }
}
328 |
/**
 * Key identifying a client's clock state, optionally scoped to an owner and
 * collection.
 *
 * @implements isodb.IEncodable
 */
export class ClocksKey {
  /**
   * @param {number} clientid
   * @param {Uint8Array?} owner
   * @param {string?} collection
   */
  constructor (clientid, owner, collection) {
    this.clientid = clientid
    this.owner = owner
    this.collection = collection
  }

  /**
   * Prefix for range queries over all clients of a collection.
   * NOTE(review): writes info byte 3 (owner+collection present) unconditionally
   * but only writes the fields when `owner` is truthy — callers appear expected
   * to always pass both; confirm against usage.
   *
   * @param {{ owner: Uint8Array, collection:string }} prefix
   */
  static prefix ({ owner, collection }) {
    return encoding.encode(encoder => {
      encoding.writeUint8(encoder, 3)
      if (owner) {
        encoding.writeVarUint8Array(encoder, owner)
        encoding.writeVarString(encoder, collection)
      }
    })
  }

  /**
   * Layout: info byte (bit0 = has owner, bit1 = has collection), the optional
   * fields, then the clientid.
   *
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    const info = (this.owner ? 1 : 0) | (this.collection ? 2 : 0)
    encoding.writeUint8(encoder, info)
    if (this.owner) {
      encoding.writeVarUint8Array(encoder, this.owner)
      this.collection && encoding.writeVarString(encoder, this.collection)
    }
    encoding.writeUint32(encoder, this.clientid)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {ClocksKey}
   */
  static decode (decoder) {
    const info = decoding.readUint8(decoder)
    const owner = (info & 1) > 0 ? decoding.readVarUint8Array(decoder) : null
    const collection = (info & 2) > 0 ? decoding.readVarString(decoder) : null
    return new ClocksKey(decoding.readUint32(decoder), owner, collection)
  }
}
382 |
/**
 * Key addressing the ops of a single document, separated by op type.
 *
 * @implements isodb.IEncodable
 */
export class DocKey {
  /**
   * @param {number} type op type id (see operations.typeMap)
   * @param {Uint8Array} owner
   * @param {string} collection
   * @param {string} doc
   * @param {number} opid
   */
  constructor (type, owner, collection, doc, opid) {
    this.type = type
    this.owner = owner
    this.collection = collection
    this.doc = doc
    this.opid = opid
  }

  /**
   * Prefix for range queries over all ops of a (type, owner, collection[, doc]).
   * @param {{ type:number, owner: Uint8Array, collection:string, doc?:string }} prefix
   */
  static prefix ({ type, owner, collection, doc }) {
    return encoding.encode(encoder => {
      encoding.writeUint16(encoder, type)
      encoding.writeVarUint8Array(encoder, owner)
      encoding.writeVarString(encoder, collection)
      doc != null && encoding.writeVarString(encoder, doc)
    })
  }

  /**
   * The opid is written big-endian (keeps numeric order under bytewise key
   * comparison).
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeUint16(encoder, this.type)
    encoding.writeVarUint8Array(encoder, this.owner)
    encoding.writeVarString(encoder, this.collection)
    encoding.writeVarString(encoder, this.doc)
    encoding.writeUint32BigEndian(encoder, this.opid)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    const type = decoding.readUint16(decoder)
    const owner = decoding.readVarUint8Array(decoder)
    const collection = decoding.readVarString(decoder)
    const doc = decoding.readVarString(decoder)
    return new DocKey(type, owner, collection, doc, decoding.readUint32BigEndian(decoder))
  }
}
438 |
/**
 * Key marking a document range that still needs to be synced.
 *
 * @implements isodb.IEncodable
 */
export class UnsyncedKey {
  /**
   * @param {Uint8Array} owner
   * @param {string} collection
   * @param {string?} doc `null` marks the end of the collection's range
   */
  constructor (owner, collection, doc) {
    this.owner = owner
    this.collection = collection
    this.doc = doc
  }

  /**
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint8Array(encoder, this.owner)
    encoding.writeVarString(encoder, this.collection)
    if (this.doc != null) {
      // use empty string '' as start
      encoding.writeVarString(encoder, this.doc)
    } else {
      // end-of-range sentinel — this mustn't be decoded
      encoding.writeUint32(encoder, binary.BITS32)
    }
  }

  /**
   * Only valid for keys encoded with a non-null doc (the end sentinel cannot
   * be decoded).
   *
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    const owner = decoding.readVarUint8Array(decoder)
    const collection = decoding.readVarString(decoder)
    return new this(owner, collection, decoding.readVarString(decoder))
  }
}
481 |
// @todo this can be removed
export class NoPermissionIndexKey {
  /**
   * @param {Uint8Array} owner
   * @param {string} collection
   * @param {string} doc
   * @param {number} clock
   */
  constructor (owner, collection, doc, clock) {
    this.owner = owner
    this.collection = collection
    this.doc = doc
    this.clock = clock
  }

  /**
   * The clock is written big-endian (keeps numeric order under bytewise key
   * comparison).
   * @param {encoding.Encoder} encoder
   */
  encode (encoder) {
    encoding.writeVarUint8Array(encoder, this.owner)
    encoding.writeVarString(encoder, this.collection)
    encoding.writeVarString(encoder, this.doc)
    encoding.writeUint32BigEndian(encoder, this.clock)
  }

  /**
   * @param {decoding.Decoder} decoder
   * @return {isodb.IEncodable}
   */
  static decode (decoder) {
    const owner = decoding.readVarUint8Array(decoder)
    const collection = decoding.readVarString(decoder)
    const doc = decoding.readVarString(decoder)
    return new NoPermissionIndexKey(owner, collection, doc, decoding.readUint32BigEndian(decoder))
  }
}
519 |
520 | /**
521 | * @implements isodb.IEncodable
522 | */
523 | export class ParentKey {
524 | /**
525 | * @param {Uint8Array} owner
526 | * @param {string} collection
527 | * @param {string?} parent
528 | * @param {string} childname
529 | * @param {number} localClock
530 | */
531 | constructor (owner, collection, parent, childname, localClock) {
532 | this.owner = owner
533 | this.collection = collection
534 | this.parent = parent
535 | this.childname = childname
536 | this.localClock = localClock
537 | }
538 |
539 | /**
540 | * @param {{ owner: Uint8Array, collection: string, parent: string|null, docname?: string }} prefix
541 | */
542 | static prefix ({ owner, collection, parent, docname }) {
543 | return encoding.encode(encoder => {
544 | encoding.writeVarUint8Array(encoder, owner)
545 | encoding.writeVarString(encoder, collection)
546 | if (parent == null) {
547 | encoding.writeUint8(encoder, 0)
548 | } else {
549 | encoding.writeUint8(encoder, 1)
550 | encoding.writeVarString(encoder, parent)
551 | }
552 | if (docname != null) {
553 | encoding.writeTerminatedString(encoder, docname)
554 | }
555 | })
556 | }
557 |
558 | /**
559 | * @param {encoding.Encoder} encoder
560 | */
561 | encode (encoder) {
562 | encoding.writeVarUint8Array(encoder, this.owner)
563 | encoding.writeVarString(encoder, this.collection)
564 | if (this.parent == null) {
565 | encoding.writeUint8(encoder, 0)
566 | } else {
567 | encoding.writeUint8(encoder, 1)
568 | encoding.writeVarString(encoder, this.parent)
569 | }
570 | encoding.writeTerminatedString(encoder, this.childname)
571 | encoding.writeVarUint(encoder, this.localClock)
572 | }
573 |
574 | /**
575 | * @param {decoding.Decoder} decoder
576 | * @return {isodb.IEncodable}
577 | */
578 | static decode (decoder) {
579 | const owner = decoding.readVarUint8Array(decoder)
580 | const collection = decoding.readVarString(decoder)
581 | const hasParent = decoding.readUint8(decoder) === 1
582 | const doc = hasParent ? decoding.readVarString(decoder) : null
583 | const childname = decoding.readTerminatedString(decoder)
584 | const localClock = decoding.readVarUint(decoder)
585 | return new this(owner, collection, doc, childname, localClock)
586 | }
587 | }
588 |
--------------------------------------------------------------------------------