├── .gitignore ├── src ├── lib │ ├── util.ts │ ├── crypto.ts │ └── lock.ts ├── base.ts ├── oplog.ts ├── wire-extensions │ └── hsfw-writerctrl.ts ├── structures.ts └── index.ts ├── tsconfig.json ├── repl.js ├── test ├── history.ts ├── util │ └── util.ts ├── ydocs.ts ├── writer-management.ts ├── forks.ts ├── dual-writers.ts └── single-writer.ts ├── package.json ├── bench └── many-files.js └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .DS_Store 3 | dist 4 | package-lock.json 5 | -------------------------------------------------------------------------------- /src/lib/util.ts: -------------------------------------------------------------------------------- 1 | export function toBuffer (v: string|Buffer): Buffer { 2 | return Buffer.isBuffer(v) ? v : Buffer.from(v, 'hex') 3 | } 4 | 5 | export function toHex (v: string|Buffer): string { 6 | if (Buffer.isBuffer(v)) return v.toString('hex') 7 | return v 8 | } -------------------------------------------------------------------------------- /src/lib/crypto.ts: -------------------------------------------------------------------------------- 1 | import { createHash, randomBytes } from 'crypto' 2 | 3 | export function hash (buf: Buffer): string { 4 | const hashSum = createHash('sha256') 5 | hashSum.update(buf) 6 | return `sha256-${hashSum.digest('hex')}` 7 | } 8 | 9 | export function genId (len = 8) { 10 | return randomBytes(len).toString('hex') 11 | } 12 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "include": ["src/**/*"], 4 | "compilerOptions": { 5 | "outDir": "dist", 6 | "typeRoots": [ "./types", "./node_modules/@types"], 7 | "lib": ["es2020"], 8 | "module": "es2020", 9 | "moduleResolution": "node", 10 | "target": "es2020", 11 | "declaration": 
true, 12 | 13 | "checkJs": true, 14 | "strict": true, 15 | "esModuleInterop": true, 16 | "skipLibCheck": true, 17 | "forceConsistentCasingInFileNames": true 18 | } 19 | } -------------------------------------------------------------------------------- /src/base.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore no types available yet -prf 2 | import Corestore from 'corestore' 3 | // @ts-ignore no types available yet -prf 4 | import Hypercore from 'hypercore' 5 | 6 | export class BaseWorkspaceCore { 7 | core: Hypercore 8 | constructor (public store: Corestore, public publicKey: Buffer, public secretKey?: Buffer) { 9 | this.core = store.get({publicKey, secretKey}) 10 | } 11 | 12 | get writable () { 13 | return !!this.secretKey 14 | } 15 | 16 | toJSON () { 17 | return { 18 | key: this.publicKey.toString('hex'), 19 | writable: this.writable 20 | } 21 | } 22 | 23 | serialize () { 24 | return { 25 | publicKey: this.publicKey.toString('hex'), 26 | secretKey: this.secretKey?.toString('hex'), 27 | } 28 | } 29 | } -------------------------------------------------------------------------------- /repl.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node --experimental-repl-await 2 | 3 | import repl from 'repl' 4 | import ram from 'random-access-memory' 5 | import Hyperswarm from 'hyperswarm' 6 | import Corestore from 'corestore' 7 | import crypto from 'hypercore-crypto' 8 | import * as hsfw from './dist/index.js' 9 | 10 | const swarmKeyPair = crypto.keyPair() 11 | const swarm = new Hyperswarm({keyPair: swarmKeyPair}) 12 | const store = new Corestore(ram) 13 | swarm.on('connection', connection => store.replicate(connection)) 14 | let ws = undefined 15 | 16 | async function create () { 17 | ws = await hsfw.Workspace.createNew(store, swarmKeyPair) 18 | rinst.context.ws = ws 19 | swarm.join(ws.getOwner().core.discoveryKey) 20 | console.log('Workspace created.', ws) 21 | } 22 
| 23 | async function load (key) { 24 | const swarmKeyPair = crypto.keyPair() 25 | swarm.join(crypto.discoveryKey(Buffer.from(key, 'hex'))) 26 | ws = await hsfw.Workspace.load(store, swarmKeyPair, key) 27 | rinst.context.ws = ws 28 | console.log('Workspace loaded.', ws) 29 | } 30 | 31 | const rinst = repl.start('> ') 32 | Object.assign(rinst.context, { 33 | hsfw, 34 | create, 35 | load 36 | }) -------------------------------------------------------------------------------- /test/history.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { setupOne } from './util/util.js' 3 | import * as sfw from '../src/index.js' 4 | 5 | ava('listHistory filters', async t => { 6 | const { ws } = await setupOne(t) 7 | 8 | for (let i = 0; i < 4; i++) { 9 | await ws.writeFile('/test.txt', ''+i, 'utf8') 10 | for (let j = 0; j < 10; j++) { 11 | await ws.writeFile(`/folder/test${j}.txt`, ''+i, 'utf8') 12 | await ws.writeFile(`/sub/folder/test${j}.txt`, ''+i, 'utf8') 13 | } 14 | } 15 | 16 | t.is((await ws.listHistory()).length, 84) 17 | t.is((await ws.listHistory({path: '/test.txt'})).length, 4) 18 | t.is((await ws.listHistory({path: '/folder/test0.txt'})).length, 4) 19 | t.is((await ws.listHistory({path: '/sub/folder/test0.txt'})).length, 4) 20 | t.is((await ws.listHistory({path: '/folder/*.txt'})).length, 40) 21 | t.is((await ws.listHistory({path: '/sub/folder/*'})).length, 40) 22 | t.is((await ws.listHistory({path: '/sub/**'})).length, 40) 23 | }) 24 | 25 | ava('read historic values', async t => { 26 | const { ws } = await setupOne(t) 27 | 28 | for (let i = 0; i < 10; i++) { 29 | await ws.writeFile('/test.txt', ''+i, 'utf8') 30 | } 31 | 32 | const history = await ws.listHistory() 33 | t.is(history.length, 10) 34 | for (let i = 0; i < history.length; i++) { 35 | const v = await ws.readFile('/test.txt', {change: history[i].id, encoding: 'utf8'}) 36 | t.is(v, `${i}`) 37 | } 38 | }) 
-------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hyper-sfw", 3 | "version": "1.0.0", 4 | "description": "A p2p collaborative filestructure (built on hypercore)", 5 | "main": "dist/index.js", 6 | "type": "module", 7 | "scripts": { 8 | "build": "tsc --project tsconfig.json", 9 | "test": "ava test/*.ts" 10 | }, 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/atek-cloud/p2wiki.git" 14 | }, 15 | "author": "Paul Frazee ", 16 | "license": "MIT", 17 | "bugs": { 18 | "url": "https://github.com/atek-cloud/p2wiki/issues" 19 | }, 20 | "homepage": "https://github.com/atek-cloud/p2wiki#readme", 21 | "dependencies": { 22 | "autobase": "^1.0.0-alpha.2", 23 | "bytes": "^3.0.0", 24 | "codecs": "^2.2.0", 25 | "compact-encoding": "^2.5.1", 26 | "concat-stream": "^2.0.0", 27 | "corestore": "^6.0.1-alpha.6", 28 | "hyperbee": "^1.7.0", 29 | "hypercore": "^10.0.0-alpha.15", 30 | "hypercore-crypto": "^3.1.0", 31 | "micromatch": "^4.0.4", 32 | "monotonic-lexicographic-timestamp": "^1.0.0", 33 | "msgpackr": "^1.5.0", 34 | "pump": "^3.0.0", 35 | "through2": "^4.0.2" 36 | }, 37 | "devDependencies": { 38 | "@types/bytes": "^3.1.1", 39 | "@types/codecs": "^2.2.3", 40 | "@types/concat-stream": "^1.6.1", 41 | "@types/micromatch": "^4.0.2", 42 | "@types/node": "^16.11.7", 43 | "@types/pump": "^1.1.1", 44 | "@types/through2": "^2.0.36", 45 | "ava": "^3.15.0", 46 | "hyperswarm": "^3.0.0", 47 | "random-access-memory": "^4.0.0", 48 | "tmp": "^0.2.1", 49 | "ts-node": "^10.4.0", 50 | "typescript": "^4.4.4", 51 | "yjs": "^13.5.21" 52 | }, 53 | "ava": { 54 | "extensions": { 55 | "ts": "module" 56 | }, 57 | "nonSemVerExperiments": { 58 | "configurableModuleFormat": true 59 | }, 60 | "nodeArguments": [ 61 | "--loader=ts-node/esm" 62 | ] 63 | } 64 | } 65 | -------------------------------------------------------------------------------- 
/src/oplog.ts: -------------------------------------------------------------------------------- 1 | import { unpack, pack } from 'msgpackr' 2 | // @ts-ignore no types available yet -prf 3 | import crypto from 'hypercore-crypto' 4 | // @ts-ignore no types available yet -prf 5 | import Corestore from 'corestore' 6 | import { BaseWorkspaceCore } from './base.js' 7 | import * as struct from './structures.js' 8 | import { toBuffer } from './lib/util.js' 9 | 10 | export interface WorkspaceWriterOpts { 11 | isOwner?: boolean 12 | name?: string 13 | isAdmin?: boolean 14 | isFrozen?: boolean 15 | } 16 | 17 | export class WorkspaceWriter extends BaseWorkspaceCore { 18 | isOwner = false 19 | name = '' 20 | isAdmin = false 21 | isFrozen = false 22 | 23 | constructor (store: Corestore, publicKey: Buffer, secretKey: Buffer|undefined, opts?: WorkspaceWriterOpts) { 24 | super(store, publicKey, secretKey) 25 | this.isOwner = opts?.isOwner || false 26 | this.name = opts?.name || '' 27 | this.isAdmin = opts?.isAdmin || false 28 | this.isFrozen = opts?.isFrozen || false 29 | } 30 | 31 | static createNew (store: Corestore, opts?: WorkspaceWriterOpts) { 32 | const keyPair = crypto.keyPair() 33 | return new WorkspaceWriter(store, keyPair.publicKey, keyPair.secretKey, opts) 34 | } 35 | 36 | static load (store: Corestore, publicKey: string|Buffer, secretKey?: string|Buffer, opts?: WorkspaceWriterOpts) { 37 | return new WorkspaceWriter( 38 | store, 39 | toBuffer(publicKey), 40 | secretKey ? 
toBuffer(secretKey) : undefined, 41 | opts 42 | ) 43 | } 44 | 45 | static packop (value: struct.DeclareOp|struct.ChangeOp|struct.BlobChunkOp) { 46 | validateOp(value) 47 | return pack(value) 48 | } 49 | 50 | static unpackop (buf: Buffer): struct.DeclareOp|struct.ChangeOp|struct.BlobChunkOp { 51 | const value = unpack(buf) 52 | validateOp(value) 53 | return value 54 | } 55 | } 56 | 57 | function validateOp (value: struct.DeclareOp|struct.ChangeOp|struct.BlobChunkOp) { 58 | switch (value.op) { 59 | case struct.OP_DECLARE: 60 | if (!struct.isDeclareOp(value)) throw new Error(`Invalid Declare operation`) 61 | break 62 | case struct.OP_CHANGE: 63 | if (!struct.isChangeOp(value)) throw new Error(`Invalid Change operation`) 64 | break 65 | case struct.OP_BLOB_CHUNK: 66 | if (!struct.isBlobChunkOp(value)) throw new Error(`Invalid BlobChunk operation`) 67 | break 68 | default: 69 | throw new Error(`Invalid op code: ${value.op}`) 70 | } 71 | } -------------------------------------------------------------------------------- /bench/many-files.js: -------------------------------------------------------------------------------- 1 | import tmp from 'tmp' 2 | import Corestore from 'corestore' 3 | import { randomBytes } from 'crypto' 4 | import { promises as fs } from 'fs' 5 | import * as sfw from '../dist/index.js' 6 | 7 | const NUM_FILES = 1e3 8 | const NUM_READS = 100 9 | const FILE_SIZE = 1e3 10 | const VALUE = randomBytes(FILE_SIZE) 11 | tmp.setGracefulCleanup() 12 | 13 | const dir = tmp.dirSync() 14 | const store = new Corestore(dir.name) 15 | const ws = await sfw.Workspace.createNew(store) 16 | 17 | console.log('SFW') 18 | console.log('===') 19 | 20 | console.log('') 21 | console.log(`Writing ${NUM_FILES} files of ${FILE_SIZE} bytes each`) 22 | await bench(NUM_FILES, async () => { 23 | for (let i = 0; i < NUM_FILES; i++) { 24 | await ws.writeFile(`/file${i}`, VALUE) 25 | } 26 | }) 27 | 28 | console.log('') 29 | console.log(`Listing all files ${NUM_READS} times`) 30 | await 
bench(NUM_READS, async () => { 31 | for (let i = 0; i < NUM_READS; i++) { 32 | await ws.listFiles() 33 | } 34 | }) 35 | 36 | // console.log('') 37 | // console.log(`Listing all history ${NUM_READS} times`) 38 | // await bench(NUM_READS, async () => { 39 | // for (let i = 0; i < NUM_READS; i++) { 40 | // await ws.listHistory() 41 | // } 42 | // }) 43 | 44 | console.log('') 45 | console.log(`Reading a file ${NUM_READS} times`) 46 | await bench(NUM_READS, async () => { 47 | for (let i = 0; i < NUM_READS; i++) { 48 | await ws.readFile('/file0') 49 | } 50 | }) 51 | 52 | console.log('') 53 | console.log('Normal FS') 54 | console.log('=========') 55 | 56 | const dir2 = tmp.dirSync() 57 | console.log('') 58 | console.log(`Writing ${NUM_FILES} files of ${FILE_SIZE} bytes each`) 59 | await bench(NUM_FILES, async () => { 60 | for (let i = 0; i < NUM_FILES; i++) { 61 | await fs.writeFile(`${dir2.name}/file${i}`, VALUE) 62 | } 63 | }) 64 | 65 | console.log('') 66 | console.log(`Listing all files ${NUM_READS} times`) 67 | await bench(NUM_READS, async () => { 68 | for (let i = 0; i < NUM_READS; i++) { 69 | await fs.readdir(dir2.name) 70 | } 71 | }) 72 | 73 | console.log('') 74 | console.log(`Reading a file ${NUM_READS} times`) 75 | await bench(NUM_READS, async () => { 76 | for (let i = 0; i < NUM_READS; i++) { 77 | await fs.readFile(`${dir2.name}/file0`) 78 | } 79 | }) 80 | 81 | async function bench (numOps, fn) { 82 | const start = Date.now() 83 | await fn() 84 | const total = Date.now() - start 85 | console.log(`=> ${total}ms (${numOps / (total / 1000)} ops/s average)`) 86 | } -------------------------------------------------------------------------------- /src/lib/lock.ts: -------------------------------------------------------------------------------- 1 | /* 2 | await-lock 3 | NOTE copied into here because it struggles with compilation and it's too small to bother with anything else 4 | 5 | The MIT License (MIT) 6 | 7 | Copyright (c) 2015-present James Ide 8 | */ 9 | 10 | /** 
11 | * A mutex lock for coordination across async functions 12 | */ 13 | class AwaitLock { 14 | _acquired: boolean 15 | _waitingResolvers: ((value: unknown) => void)[] 16 | 17 | constructor() { 18 | this._acquired = false; 19 | this._waitingResolvers = []; 20 | } 21 | /** 22 | * Whether the lock is currently acquired or not. Accessing this property does not affect the 23 | * status of the lock. 24 | */ 25 | get acquired() { 26 | return this._acquired; 27 | } 28 | /** 29 | * Acquires the lock, waiting if necessary for it to become free if it is already locked. The 30 | * returned promise is fulfilled once the lock is acquired. 31 | * 32 | * After acquiring the lock, you **must** call `release` when you are done with it. 33 | */ 34 | acquireAsync() { 35 | if (!this._acquired) { 36 | this._acquired = true; 37 | return Promise.resolve(); 38 | } 39 | return new Promise((resolve) => { 40 | this._waitingResolvers.push(resolve); 41 | }); 42 | } 43 | /** 44 | * Acquires the lock if it is free and otherwise returns immediately without waiting. Returns 45 | * `true` if the lock was free and is now acquired, and `false` otherwise, 46 | */ 47 | tryAcquire() { 48 | if (!this._acquired) { 49 | this._acquired = true; 50 | return true; 51 | } 52 | return false; 53 | } 54 | /** 55 | * Releases the lock and gives it to the next waiting acquirer, if there is one. Each acquirer 56 | * must release the lock exactly once. 
57 | */ 58 | release() { 59 | if (!this._acquired) { 60 | throw new Error(`Cannot release an unacquired lock`); 61 | } 62 | if (this._waitingResolvers.length > 0) { 63 | const resolve = this._waitingResolvers.shift(); 64 | if (resolve) { 65 | resolve(undefined); 66 | } 67 | } 68 | else { 69 | this._acquired = false; 70 | } 71 | } 72 | } 73 | 74 | // wraps await-lock in a simpler interface, with many possible locks 75 | interface LocksMap { 76 | [key: string]: AwaitLock 77 | } 78 | var locks: LocksMap = {} 79 | 80 | /** 81 | * Create a new lock 82 | * @example 83 | * var lock = require('./lock') 84 | * async function foo () { 85 | * var release = await lock('bar') 86 | * // ... 87 | * release() 88 | * } 89 | */ 90 | export default async function (key: string): Promise<() => void> { 91 | if (!(key in locks)) locks[key] = new AwaitLock() 92 | 93 | var lock = locks[key] 94 | await lock.acquireAsync() 95 | return lock.release.bind(lock) 96 | }; -------------------------------------------------------------------------------- /test/util/util.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore no types 2 | import ram from 'random-access-memory' 3 | // @ts-ignore no types 4 | import Hypercore from 'hypercore' 5 | // @ts-ignore no types 6 | import Corestore from 'corestore' 7 | // @ts-ignore no types 8 | import crypto from 'hypercore-crypto' 9 | import * as sfw from '../../src/index.js' 10 | 11 | class Sim { 12 | stores: Corestore[] = [] 13 | swarmKeyPairs: sfw.KeyPair[] = [] 14 | workspaces: sfw.Workspace[] = [] 15 | writers: sfw.WorkspaceWriter[] = [] 16 | streams: Record = {} 17 | 18 | addStore () { 19 | this.stores.push(new Corestore(ram)) 20 | this.swarmKeyPairs.push(crypto.keyPair()) 21 | } 22 | 23 | async createWorkspace (store: Corestore, swarmKeyPair: sfw.KeyPair) { 24 | const ws = await sfw.Workspace.createNew(store, swarmKeyPair) 25 | this.workspaces.push(ws) 26 | this.writers.push(ws.writers[0]) 27 | return ws 28 
| } 29 | 30 | async cloneWorkspace (store: Corestore, swarmKeyPair: sfw.KeyPair, ws1: sfw.Workspace) { 31 | const ws2 = await sfw.Workspace.load(store, swarmKeyPair, ws1.key.toString('hex')) 32 | 33 | // TODO: use invite flows once we solve the wire protocol bug 34 | const writer2 = await ws2._createWriter() 35 | await ws1.putWriter(writer2.publicKey, {name: 'second writer'}) 36 | // const invite = await ws1.createInvite('Second writer') 37 | // const writer2 = await ws2.useInvite(invite) 38 | 39 | this.workspaces.push(ws2) 40 | this.writers.push(writer2) 41 | return ws2 42 | } 43 | 44 | connect (store1: Corestore, store2: Corestore) { 45 | let i1 = this.stores.indexOf(store1) 46 | let i2 = this.stores.indexOf(store2) 47 | const kp1 = this.swarmKeyPairs[i1] 48 | const kp2 = this.swarmKeyPairs[i2] 49 | if (i1 > i2) [i1, i2] = [i2, i1] 50 | if (!this.streams[`${i1}:${i2}`]) { 51 | const s = store1.replicate(Hypercore.createProtocolStream(true, {keyPair: kp1})) 52 | s.pipe(store2.replicate(Hypercore.createProtocolStream(false, {keyPair: kp2}))).pipe(s) 53 | this.streams[`${i1}:${i2}`] = s 54 | } 55 | } 56 | 57 | disconnect (store1: Corestore, store2: Corestore) { 58 | let i1 = this.stores.indexOf(store1) 59 | let i2 = this.stores.indexOf(store2) 60 | if (i1 > i2) [i1, i2] = [i2, i1] 61 | if (this.streams[`${i1}:${i2}`]) { 62 | this.streams[`${i1}:${i2}`].destroy() 63 | delete this.streams[`${i1}:${i2}`] 64 | } 65 | } 66 | } 67 | 68 | export async function setupOne (t: any) { 69 | const sim = new Sim() 70 | sim.addStore() 71 | 72 | const ws = await sim.createWorkspace(sim.stores[0], sim.swarmKeyPairs[0]) 73 | t.truthy(ws.key) 74 | 75 | t.is(ws.writers.length, 1) 76 | 77 | return {sim, ws, writer: sim.writers[0]} 78 | } 79 | 80 | export async function setupTwo (t: any) { 81 | const sim = new Sim() 82 | sim.addStore() 83 | sim.addStore() 84 | sim.connect(sim.stores[0], sim.stores[1]) 85 | 86 | const ws1 = await sim.createWorkspace(sim.stores[0], sim.swarmKeyPairs[0]) 87 
| t.truthy(ws1.key) 88 | const ws2 = await sim.cloneWorkspace(sim.stores[1], sim.swarmKeyPairs[1], sim.workspaces[0]) 89 | t.truthy(ws2.key) 90 | t.deepEqual(ws1.key, ws2.key) 91 | 92 | t.is(ws1.writers.length, 2) 93 | t.is(ws2.writers.length, 2) 94 | 95 | return {sim, ws1, ws2, writer1: sim.writers[0], writer2: sim.writers[1]} 96 | } -------------------------------------------------------------------------------- /src/wire-extensions/hsfw-writerctrl.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore no types available yet -prf 2 | import Hypercore from 'hypercore' 3 | import EventEmitter from 'events' 4 | import * as msgpackr from 'msgpackr' 5 | import { Workspace } from '../index.js' 6 | 7 | export const EXTENSION_ID = 'hsfw-writerctrl' 8 | const USE_INVITE_TIMEOUT = 30e3 9 | 10 | interface OnRemoteSupportsInfo { 11 | extension: any 12 | peer: any 13 | } 14 | 15 | export interface InviteDetails { 16 | code: string 17 | token: string 18 | } 19 | 20 | export class WriterCtrlExtension extends EventEmitter { 21 | ext: any 22 | tokens: string[] = [] 23 | constructor (public ws: Workspace, public core: Hypercore) { 24 | super() 25 | this.ext = core.registerExtension(EXTENSION_ID, { 26 | encoding: 'binary', 27 | onmessage: this._onmessage.bind(this) 28 | }) 29 | } 30 | 31 | async _onmessage (message: Buffer, peer: any) { 32 | let parsed 33 | try { 34 | parsed = msgpackr.unpack(message) 35 | switch (parsed.msg) { 36 | case 'USE_INVITE': { 37 | const invite = this.ws.getInvite(parsed.token) 38 | if (!invite) { 39 | return this.sendUseInviteResponse(peer, false, `Invalid invite code (${parsed.token})`) 40 | } 41 | if (!Buffer.isBuffer(parsed.writerKey) || parsed.writerKey.length !== 32) { 42 | return this.sendUseInviteResponse(peer, false, `Invite code (${parsed.token}) is valid but the writer key is not`) 43 | } 44 | this.ws.delInvite(parsed.token) 45 | await this.ws.putWriter(parsed.writerKey, { 46 | name: 
invite.recipientName, 47 | admin: false, 48 | frozen: false 49 | }) 50 | this.emit('invite-used', invite) 51 | this.sendUseInviteResponse(peer, true) 52 | break 53 | } 54 | case 'USE_INVITE_RES': 55 | this.emit('use-invite-res', parsed) 56 | break 57 | default: 58 | throw new Error(`Unknown message type: ${parsed.message}`) 59 | } 60 | } catch (e) { 61 | console.error(`Error handling message in ${EXTENSION_ID} wire protocol.`, parsed || 'Invalid msgpack encoding.', e) 62 | } 63 | } 64 | 65 | async useInvite (invite: string, writerKey: Buffer) { 66 | const [prefix, peerPublicKeyHex, token] = invite.split(':') 67 | if (prefix !== 'invite') throw new Error('Not an invite code') 68 | if (!peerPublicKeyHex || !token) throw new Error('Incomplete invite code') 69 | 70 | const p = new Promise(r => { 71 | this.once('use-invite-res', r) 72 | }) 73 | 74 | let sent = false 75 | for (const peer of this.core.peers) { 76 | const remotePublicKeyHex = peer.protocol.noiseStream.remotePublicKey.toString('hex') 77 | if (remotePublicKeyHex === peerPublicKeyHex) { 78 | this.sendUseInvite(peer, token, writerKey) 79 | sent = true 80 | } 81 | } 82 | 83 | if (!sent) { 84 | throw new Error(`Can't find the user that created this invite. Are they online? 
(Are you?)`) 85 | } 86 | 87 | const timeoutP = new Promise((resolve, reject) => setTimeout(() => reject(new Error('Timed out waiting for a response')), USE_INVITE_TIMEOUT)) 88 | const res: any = await Promise.race([p, timeoutP]) 89 | if (!res.success) { 90 | throw new Error(res.error || 'Invite failed') 91 | } 92 | } 93 | 94 | sendUseInvite (peer: any, token: string, writerKey: Buffer) { 95 | this.ext.send(msgpackr.pack({msg: 'USE_INVITE', token, writerKey}), peer) 96 | } 97 | 98 | sendUseInviteResponse (peer: any, success: boolean, error?: string) { 99 | this.ext.send(msgpackr.pack({msg: 'USE_INVITE_RES', success, error}), peer) 100 | } 101 | } -------------------------------------------------------------------------------- /test/ydocs.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { setupOne, setupTwo } from './util/util.js' 3 | import * as Y from 'yjs' 4 | 5 | ava('ydoc read/write', async t => { 6 | const {ws} = await setupOne(t) 7 | 8 | const ydoc1 = new Y.Doc() 9 | 10 | const readFile = async (path: string) => { 11 | const ydoc2 = new Y.Doc() 12 | const state = await ws.readAllFileStates(path) 13 | for (const item of state) { 14 | Y.applyUpdate(ydoc2, item.data) 15 | } 16 | return String(ydoc2.getText()) 17 | } 18 | 19 | // write 1 20 | 21 | ydoc1.getText().insert(0, 'Hello, world!') 22 | await ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(ydoc1)), {noMerge: true}) 23 | t.deepEqual(await readFile('/test.txt'), 'Hello, world!') 24 | 25 | // write 2 26 | 27 | ydoc1.getText().delete(7, 13) 28 | ydoc1.getText().insert(7, 'universe!') 29 | await ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(ydoc1)), {noMerge: true}) 30 | t.deepEqual(await readFile('/test.txt'), 'Hello, universe!') 31 | }) 32 | 33 | ava('ydoc read/write two writers', async t => { 34 | const {ws1, ws2} = await setupTwo(t) 35 | 36 | const writer1 = {ws: ws1, ydoc: new Y.Doc()} 37 | const writer2 = 
{ws: ws2, ydoc: new Y.Doc()} 38 | 39 | // write 1 40 | 41 | writer1.ydoc.getText().insert(0, 'Hello, world!') 42 | await writer1.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer1.ydoc)), {noMerge: true}) 43 | 44 | for (const writer of [writer1, writer2]) { 45 | const state = await writer.ws.readAllFileStates('/test.txt') 46 | for (const item of state) { 47 | Y.applyUpdate(writer.ydoc, item.data) 48 | } 49 | t.deepEqual(String(writer.ydoc.getText()), 'Hello, world!') 50 | } 51 | 52 | // write 2 53 | 54 | writer2.ydoc.getText().delete(7, 13) 55 | writer2.ydoc.getText().insert(7, 'universe!') 56 | await writer2.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer2.ydoc)), {noMerge: true}) 57 | 58 | for (const writer of [writer1, writer2]) { 59 | const state = await writer.ws.readAllFileStates('/test.txt') 60 | for (const item of state) { 61 | Y.applyUpdate(writer.ydoc, item.data) 62 | } 63 | t.deepEqual(String(writer.ydoc.getText()), 'Hello, universe!') 64 | } 65 | 66 | // write 3 67 | 68 | writer2.ydoc.getText().delete(7, 13) 69 | writer2.ydoc.getText().insert(7, 'UNIVERSE!') 70 | await writer2.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer2.ydoc)), {noMerge: true}) 71 | 72 | for (const writer of [writer1, writer2]) { 73 | const state = await writer.ws.readAllFileStates('/test.txt') 74 | for (const item of state) { 75 | Y.applyUpdate(writer.ydoc, item.data) 76 | } 77 | t.deepEqual(String(writer.ydoc.getText()), 'Hello, UNIVERSE!') 78 | } 79 | 80 | // file noted as "noMerge" rather than "in conflict" 81 | 82 | for (const writer of [writer1, writer2]) { 83 | const info = await writer.ws.statFile('/test.txt') 84 | t.is(info?.conflict, false) 85 | t.is(info?.noMerge, true) 86 | t.is(info?.otherChanges?.length, 1) 87 | } 88 | }) 89 | 90 | ava('conflicted copies and moves not allowed', async t => { 91 | const {ws1, ws2} = await setupTwo(t) 92 | 93 | const writer1 = {ws: ws1, ydoc: new Y.Doc()} 94 | const writer2 = {ws: 
ws2, ydoc: new Y.Doc()} 95 | 96 | // write 97 | 98 | writer1.ydoc.getText().insert(0, 'Hello, world!') 99 | await writer1.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer1.ydoc)), {noMerge: true}) 100 | writer2.ydoc.getText().insert(0, 'Hello, world!') 101 | await writer2.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer2.ydoc)), {noMerge: true}) 102 | 103 | // copy & write fail 104 | 105 | await t.throwsAsync(() => writer1.ws.moveFile('/test.txt', '/test2.txt')) 106 | await t.throwsAsync(() => writer1.ws.copyFile('/test.txt', '/test2.txt')) 107 | await t.throwsAsync(() => writer2.ws.moveFile('/test.txt', '/test2.txt')) 108 | await t.throwsAsync(() => writer2.ws.copyFile('/test.txt', '/test2.txt')) 109 | }) 110 | 111 | ava('ydoc read/write during a fork', async t => { 112 | const {sim, ws1, ws2} = await setupTwo(t) 113 | 114 | const writer1 = {ws: ws1, ydoc: new Y.Doc()} 115 | const writer2 = {ws: ws2, ydoc: new Y.Doc()} 116 | 117 | const readFile = async (writer: any, path: string) => { 118 | const state = await writer.ws.readAllFileStates(path) 119 | for (const item of state) { 120 | Y.applyUpdate(writer.ydoc, item.data) 121 | } 122 | return String(writer.ydoc.getText()) 123 | } 124 | 125 | // forked writes 126 | 127 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 128 | await ws1.listFiles() 129 | await ws2.listFiles() 130 | 131 | sim.disconnect(sim.stores[0], sim.stores[1]) 132 | 133 | writer1.ydoc.getText().insert(0, 'writer1') 134 | await writer1.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer1.ydoc)), {noMerge: true}) 135 | writer2.ydoc.getText().insert(0, 'writer2') 136 | await writer2.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer2.ydoc)), {noMerge: true}) 137 | 138 | t.deepEqual(await readFile(writer1, 'test.txt'), 'writer1') 139 | t.deepEqual(await readFile(writer2, 'test.txt'), 'writer2') 140 | 141 | // merge 142 | 143 
| sim.connect(sim.stores[0], sim.stores[1]) 144 | t.deepEqual(await readFile(writer1, 'test.txt'), await readFile(writer2, 'test.txt')) 145 | 146 | // forked writes 2 147 | 148 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 149 | await ws1.listFiles() 150 | await ws2.listFiles() 151 | 152 | sim.disconnect(sim.stores[0], sim.stores[1]) 153 | 154 | const orgValue = (await readFile(writer1, 'test.txt')) 155 | writer1.ydoc.getText().delete(0, orgValue.length) 156 | await writer1.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer1.ydoc)), {noMerge: true}) 157 | writer2.ydoc.getText().insert(orgValue.length, ' and more text') 158 | await writer2.ws.writeFile('/test.txt', Buffer.from(Y.encodeStateAsUpdate(writer2.ydoc)), {noMerge: true}) 159 | 160 | t.deepEqual(await readFile(writer1, 'test.txt'), '') 161 | t.deepEqual(await readFile(writer2, 'test.txt'), `${orgValue} and more text`) 162 | 163 | // merge 164 | 165 | sim.connect(sim.stores[0], sim.stores[1]) 166 | t.deepEqual(await readFile(writer1, 'test.txt'), ' and more text') 167 | t.deepEqual(await readFile(writer2, 'test.txt'), ' and more text') 168 | }) 169 | -------------------------------------------------------------------------------- /test/writer-management.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | // @ts-ignore no types 3 | import Hypercore from 'hypercore' 4 | // @ts-ignore no types 5 | import ram from 'random-access-memory' 6 | // @ts-ignore no types 7 | import Corestore from 'corestore' 8 | // @ts-ignore no types 9 | import crypto from 'hypercore-crypto' 10 | import * as sfw from '../src/index.js' 11 | 12 | function inst () { 13 | const swarmKeyPair = crypto.keyPair() 14 | const store = new Corestore(ram) 15 | return {swarmKeyPair, store} 16 | } 17 | 18 | function connect (inst1: any, inst2: any) { 19 | const s = 
inst1.store.replicate(Hypercore.createProtocolStream(true, {keyPair: inst1.swarmKeyPair})) 20 | s.pipe(inst2.store.replicate(Hypercore.createProtocolStream(false, {keyPair: inst2.swarmKeyPair}))).pipe(s) 21 | } 22 | 23 | ava('writer invite flow', async t => { 24 | const inst1 = inst() 25 | const inst2 = inst() 26 | connect(inst1, inst2) 27 | 28 | const ws1 = await sfw.Workspace.createNew(inst1.store, inst1.swarmKeyPair) 29 | const ws2 = await sfw.Workspace.load(inst2.store, inst2.swarmKeyPair, ws1.key) 30 | 31 | t.deepEqual(ws1.key, ws2.key) 32 | t.is(ws1.writers.length, 1) 33 | t.is(ws2.writers.length, 1) 34 | 35 | const invite = await ws1.createInvite('user two') 36 | await ws2.useInvite(invite) 37 | 38 | t.is(ws1.writers.length, 2) 39 | t.is(ws2.writers.length, 2) 40 | t.is(ws2.getMyWriter()?.name, 'user two') 41 | }) 42 | 43 | ava('writer perms: admins can edit all, non-admins can only edit self and cant self-promote to admin', async t => { 44 | const inst1 = inst() 45 | const inst2 = inst() 46 | connect(inst1, inst2) 47 | 48 | const ws1 = await sfw.Workspace.createNew(inst1.store, inst1.swarmKeyPair) 49 | const ws2 = await sfw.Workspace.load(inst2.store, inst2.swarmKeyPair, ws1.key) 50 | 51 | t.deepEqual(ws1.key, ws2.key) 52 | t.is(ws1.writers.length, 1) 53 | t.is(ws2.writers.length, 1) 54 | 55 | const invite = await ws1.createInvite('user two') 56 | await ws2.useInvite(invite) 57 | 58 | t.is(ws1.writers.length, 2) 59 | t.is(ws2.writers.length, 2) 60 | 61 | const writer1 = ws1.getMyWriter() 62 | const writer2 = ws2.getMyWriter() 63 | 64 | t.truthy(writer1) 65 | t.truthy(writer2) 66 | 67 | if (writer1 && writer2) { 68 | await t.throwsAsync(ws2.putWriter(writer1.publicKey, {name: 'bob'})) 69 | await ws2.putWriter(writer2.publicKey, {name: 'bob'}) 70 | await t.throwsAsync(ws2.putWriter(writer2.publicKey, {admin: true})) 71 | 72 | for (const ws of [ws1, ws2]) { 73 | const writers = await ws.listWriters() 74 | const w2 = writers.find(w => 
w.publicKey.equals(writer2.publicKey)) 75 | t.truthy(w2) 76 | if (w2) { 77 | t.is(w2.name, 'bob') 78 | t.falsy(w2.isAdmin) 79 | t.falsy(w2.isFrozen) 80 | } 81 | } 82 | 83 | await ws1.putWriter(writer1.publicKey, {name: 'alice'}) 84 | await ws1.putWriter(writer2.publicKey, {name: 'robert'}) 85 | await ws1.putWriter(writer2.publicKey, {admin: true}) 86 | 87 | for (const ws of [ws1, ws2]) { 88 | const writers = await ws.listWriters() 89 | const w1 = writers.find(w => w.publicKey.equals(writer1.publicKey)) 90 | t.truthy(w1) 91 | if (w1) { 92 | t.is(w1.name, 'alice') 93 | t.truthy(w1.isAdmin) 94 | t.falsy(w1.isFrozen) 95 | } 96 | const w2 = writers.find(w => w.publicKey.equals(writer2.publicKey)) 97 | t.truthy(w2) 98 | if (w2) { 99 | t.is(w2.name, 'robert') 100 | t.truthy(w2.isAdmin) 101 | t.falsy(w2.isFrozen) 102 | } 103 | } 104 | 105 | // writer2 is now admin 106 | await ws2.putWriter(writer1.publicKey, {name: 'ALICE'}) 107 | await ws2.putWriter(writer2.publicKey, {name: 'ROBERT'}) 108 | await ws2.putWriter(writer1.publicKey, {admin: false}) 109 | 110 | for (const ws of [ws1, ws2]) { 111 | const writers = await ws.listWriters() 112 | const w1 = writers.find(w => w.publicKey.equals(writer1.publicKey)) 113 | t.truthy(w1) 114 | if (w1) { 115 | t.is(w1.name, 'ALICE') 116 | t.falsy(w1.isAdmin) 117 | t.falsy(w1.isFrozen) 118 | } 119 | const w2 = writers.find(w => w.publicKey.equals(writer2.publicKey)) 120 | t.truthy(w2) 121 | if (w2) { 122 | t.is(w2.name, 'ROBERT') 123 | t.truthy(w2.isAdmin) 124 | t.falsy(w2.isFrozen) 125 | } 126 | } 127 | 128 | // writer1 is no longer admin 129 | await t.throwsAsync(ws1.putWriter(writer2.publicKey, {name: 'bob'})) 130 | await ws2.putWriter(writer1.publicKey, {name: 'ALICIA'}) 131 | await t.throwsAsync(ws1.putWriter(writer1.publicKey, {admin: true})) 132 | 133 | for (const ws of [ws1, ws2]) { 134 | const writers = await ws.listWriters() 135 | const w1 = writers.find(w => w.publicKey.equals(writer1.publicKey)) 136 | t.truthy(w1) 137 | if 
(w1) { 138 | t.is(w1.name, 'ALICIA') 139 | t.falsy(w1.isAdmin) 140 | t.falsy(w1.isFrozen) 141 | } 142 | const w2 = writers.find(w => w.publicKey.equals(writer2.publicKey)) 143 | t.truthy(w2) 144 | if (w2) { 145 | t.is(w2.name, 'ROBERT') 146 | t.truthy(w2.isAdmin) 147 | t.falsy(w2.isFrozen) 148 | } 149 | } 150 | } 151 | }) 152 | 153 | ava('cant write without being a writer', async t => { 154 | const inst1 = inst() 155 | const inst2 = inst() 156 | connect(inst1, inst2) 157 | 158 | const ws1 = await sfw.Workspace.createNew(inst1.store, inst1.swarmKeyPair) 159 | const ws2 = await sfw.Workspace.load(inst2.store, inst2.swarmKeyPair, ws1.key) 160 | 161 | t.deepEqual(ws1.key, ws2.key) 162 | t.is(ws1.writers.length, 1) 163 | t.is(ws2.writers.length, 1) 164 | 165 | await t.throwsAsync(ws2.writeFile('/foo.txt', 'bar'), {message: 'Not a writer'}) 166 | }) 167 | 168 | ava('invalid invites', async t => { 169 | const inst1 = inst() 170 | const inst2 = inst() 171 | connect(inst1, inst2) 172 | 173 | const ws1 = await sfw.Workspace.createNew(inst1.store, inst1.swarmKeyPair) 174 | const ws2 = await sfw.Workspace.load(inst2.store, inst2.swarmKeyPair, ws1.key) 175 | 176 | t.deepEqual(ws1.key, ws2.key) 177 | t.is(ws1.writers.length, 1) 178 | t.is(ws2.writers.length, 1) 179 | 180 | const invite = await ws1.createInvite('user two') 181 | const [prefix, key, token] = invite.split(':') 182 | 183 | await t.throwsAsync(ws2.useInvite(`foo`), {message: 'Not an invite code'}) 184 | await t.throwsAsync(ws2.useInvite(`${prefix}:${crypto.keyPair().publicKey.toString('hex')}:${token}`), {message: 'Can\'t find the user that created this invite. Are they online? 
(Are you?)'}) 185 | await t.throwsAsync(ws2.useInvite(`${prefix}:${key}:12345`), {message: 'Invalid invite code (12345)'}) 186 | }) -------------------------------------------------------------------------------- /src/structures.ts: -------------------------------------------------------------------------------- 1 | import bytes from 'bytes' 2 | 3 | export const OP_DECLARE = 1 4 | export const OP_CHANGE = 2 5 | export const OP_BLOB_CHUNK = 3 6 | 7 | export const OP_CHANGE_ACT_PUT = 1 8 | export const OP_CHANGE_ACT_COPY = 2 9 | export const OP_CHANGE_ACT_DEL = 3 10 | export const OP_CHANGE_ACT_PUT_WRITER = 4 11 | 12 | export const BLOB_CHUNK_BYTE_LENGTH = bytes('4mb') 13 | 14 | /* 15 | 16 | Index bee's structure: 17 | 18 | /_meta = IndexedMeta 19 | /files/{...path: string} = IndexedFile 20 | /changes/{id: string} = IndexedChange 21 | /history/{mlts: string} = string // mlts is a monotonic lexicographic timestamp 22 | /blobs/{id: string} = {} 23 | /blobchunks/{id: string}/{chunk: number} = Buffer 24 | 25 | */ 26 | 27 | // ops 28 | // = 29 | 30 | export interface Op { 31 | op: number 32 | } 33 | 34 | export interface DeclareOp extends Op { 35 | index: Buffer // key of the owner's index bee 36 | timestamp: Date // local clock time of declaration 37 | } 38 | 39 | export interface BlobChunkOp extends Op { 40 | blob: string // blob id 41 | chunk: number // chunk number 42 | value: Buffer 43 | } 44 | 45 | export interface ChangeOpFilesAct { 46 | path: string // path of file being changed 47 | } 48 | 49 | export interface ChangeOpPut extends ChangeOpFilesAct { 50 | action: number // OP_CHANGE_ACT_PUT 51 | blob: string // ID of blob to create 52 | bytes: number // number of bytes in this blob 53 | chunks: number // number of chunks in the blob (will follow this op) 54 | noMerge: boolean // is this a "no merge" put? 
55 | } 56 | 57 | export interface ChangeOpCopy extends ChangeOpFilesAct { 58 | action: number // OP_CHANGE_ACT_COPY 59 | blob: string // ID of blob to copy 60 | bytes: number // number of bytes in this blob 61 | } 62 | 63 | export interface ChangeOpDel extends ChangeOpFilesAct { 64 | action: number // OP_CHANGE_ACT_DEL 65 | } 66 | 67 | export interface ChangeOpPutWriter { 68 | action: number // OP_CHANGE_ACT_PUT_WRITER 69 | key: Buffer // writer's key 70 | name?: string // writer's name 71 | admin?: boolean // is admin? 72 | frozen?: boolean // is frozen? 73 | } 74 | 75 | export interface ChangeOp extends Op { 76 | id: string // random generated ID 77 | parents: string[] // IDs of changes which preceded this change 78 | timestamp: Date // local clock time of change 79 | details: ChangeOpPut|ChangeOpCopy|ChangeOpDel|ChangeOpPutWriter 80 | } 81 | 82 | export function isDeclareOp (v: any): v is DeclareOp { 83 | if (!v) return false 84 | const check = new TypeCheck() 85 | check.is(v.op, OP_DECLARE) 86 | check.is(Buffer.isBuffer(v.index), true) 87 | check.is(v.timestamp instanceof Date, true) 88 | return check.valid 89 | } 90 | 91 | export function isBlobChunkOp (v: any): v is BlobChunkOp { 92 | if (!v) return false 93 | const check = new TypeCheck() 94 | check.is(v.op, OP_BLOB_CHUNK) 95 | check.type(v.blob, 'string') 96 | check.type(v.chunk, 'number') 97 | check.is(Buffer.isBuffer(v.value), true) 98 | return check.valid 99 | } 100 | 101 | export function isChangeOp (v: any): v is ChangeOp { 102 | if (!v) return false 103 | const check = new TypeCheck() 104 | check.is(v.op, OP_CHANGE) 105 | check.type(v.id, 'string') 106 | check.arrayType(v.parents, 'string') 107 | check.is(v.timestamp instanceof Date, true) 108 | check.type(v.details, 'object') 109 | if (v.details) { 110 | validateChangeAction(check, v) 111 | } 112 | return check.valid 113 | } 114 | 115 | export function isChangeOpFileAct (op: ChangeOp): boolean { 116 | return ( 117 | op.details.action === 
OP_CHANGE_ACT_PUT 118 | || op.details.action === OP_CHANGE_ACT_COPY 119 | || op.details.action === OP_CHANGE_ACT_DEL 120 | ) 121 | } 122 | 123 | export function isChangeOpMetaAct (op: ChangeOp): boolean { 124 | return ( 125 | op.details.action === OP_CHANGE_ACT_PUT_WRITER 126 | ) 127 | } 128 | 129 | // indexed data 130 | // = 131 | 132 | export interface IndexedMetaWriter { 133 | key: Buffer // writer key 134 | name: string // user-facing user name 135 | admin: boolean // can assign other writers? 136 | frozen: boolean // are further updates disabled? 137 | } 138 | 139 | export interface IndexedMeta { 140 | owner: Buffer // owner key 141 | ownerIndex: Buffer // owner's index key 142 | writers: IndexedMetaWriter[] 143 | timestamp: Date // local clock time of last change 144 | change: string // last change id 145 | } 146 | 147 | export interface IndexedChange { 148 | id: string // random generated ID 149 | parents: string[] // IDs of changes which preceded this change 150 | writer: Buffer // key of the core that authored the change 151 | timestamp: Date // local clock time of change 152 | details: ChangeOpPut|ChangeOpCopy|ChangeOpDel|ChangeOpPutWriter 153 | } 154 | 155 | export interface IndexedFile { 156 | path: string // path of the file in the tree 157 | timestamp: Date // local clock time of last change 158 | bytes: number // number of bytes in this blob (0 if delete or move) 159 | 160 | writer: Buffer // key of the core that authored the change 161 | blob: string|undefined // blob ID 162 | 163 | change: string // last change id 164 | noMerge: boolean // in no-merge mode? 
165 | otherChanges: string[] // other current change ids 166 | } 167 | 168 | export function isIndexedMeta (v: any): v is IndexedMeta { 169 | if (!v) return false 170 | const check = new TypeCheck() 171 | check.is(Buffer.isBuffer(v.owner), true) 172 | check.is(Buffer.isBuffer(v.ownerIndex), true) 173 | check.arrayIs(v.writers, (w: any) => Buffer.isBuffer(w.key) && typeof w.name === 'string' && typeof w.admin === 'boolean' && typeof w.frozen === 'boolean') 174 | check.is(v.timestamp instanceof Date, true) 175 | check.type(v.change, 'string') 176 | return check.valid 177 | } 178 | 179 | export function isIndexedChange (v: any): v is IndexedChange { 180 | if (!v) return false 181 | const check = new TypeCheck() 182 | check.type(v.id, 'string') 183 | check.arrayType(v.parents, 'string') 184 | check.is(Buffer.isBuffer(v.writer), true) 185 | check.is(v.timestamp instanceof Date, true) 186 | check.type(v.details, 'object') 187 | if (v.details) { 188 | validateChangeAction(check, v) 189 | } 190 | return check.valid 191 | } 192 | 193 | export function isIndexedFile (v: any): v is IndexedFile { 194 | if (!v) return false 195 | const check = new TypeCheck() 196 | check.type(v.path, 'string') 197 | check.is(v.timestamp instanceof Date, true) 198 | check.type(v.bytes, 'number') 199 | check.is(Buffer.isBuffer(v.writer), true) 200 | if (v.blob) check.type(v.blob, 'string') 201 | check.type(v.change, 'string') 202 | check.type(v.noMerge, 'boolean') 203 | check.arrayType(v.otherChanges, 'string') 204 | return check.valid 205 | } 206 | 207 | // api structures 208 | // = 209 | 210 | export interface FileInfo { 211 | path: string // path of the file in the tree 212 | timestamp: Date // local clock time of change 213 | bytes: number // number of bytes in this blob (0 if delete or move) 214 | writer: Buffer // key of the core that authored the change 215 | 216 | change: string // last change ids 217 | conflict?: boolean // in conflict? 218 | noMerge?: boolean // in no-merge mode? 
219 | otherChanges?: FileInfo[] // conflicting file infos 220 | } 221 | 222 | // internal methods 223 | // = 224 | 225 | function validateChangeAction (check: TypeCheck, v: any) { 226 | check.type(v.details.action, 'number') 227 | if (v.details.action === OP_CHANGE_ACT_PUT) { 228 | check.type(v.details.path, 'string') 229 | check.type(v.details.blob, 'string') 230 | check.type(v.details.bytes, 'number') 231 | check.type(v.details.chunks, 'number') 232 | check.type(v.details.noMerge, 'boolean') 233 | } else if (v.details.action === OP_CHANGE_ACT_COPY) { 234 | check.type(v.details.path, 'string') 235 | check.type(v.details.blob, 'string') 236 | check.type(v.details.bytes, 'number') 237 | } else if (v.details.action === OP_CHANGE_ACT_DEL) { 238 | check.type(v.details.path, 'string') 239 | } else if (v.details.action === OP_CHANGE_ACT_PUT_WRITER) { 240 | check.is(Buffer.isBuffer(v.details.key), true) 241 | if (typeof v.details.name !== 'undefined') check.type(v.details.name, 'string') 242 | if (typeof v.details.admin !== 'undefined') check.type(v.details.admin, 'boolean') 243 | if (typeof v.details.frozen !== 'undefined') check.type(v.details.frozen, 'boolean') 244 | } 245 | } 246 | 247 | class TypeCheck { 248 | valid = true 249 | 250 | is (v: any, expected: any) { 251 | if (v !== expected) { 252 | this.valid = false 253 | } 254 | } 255 | type (v: any, t: string) { 256 | if (typeof v !== t) { 257 | this.valid = false 258 | } 259 | } 260 | arrayIs (v: any, test: (item: any) => boolean) { 261 | if (!Array.isArray(v)) { 262 | this.valid = false 263 | } else if (!v.reduce((acc, item) => acc && test(item), true)) { 264 | this.valid = false 265 | } 266 | } 267 | arrayType (v: any, t: string) { 268 | if (!Array.isArray(v)) { 269 | this.valid = false 270 | } else if (!v.reduce((acc, item) => acc && typeof item === t, true)) { 271 | this.valid = false 272 | } 273 | } 274 | } -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # HSFW: Hyper Synced Files Workspace 2 | 3 | Dropbox-like p2p file syncing built on [Hypercore's new multiwriter Autobase](https://github.com/hypercore-protocol/autobase). 4 | 5 | **Not yet published to npm** 6 | 7 | ## TODOs 8 | 9 | - [Externalizing blobs to a separate storage and transfer protocol](https://github.com/pfrazee/hyper-sfw/issues/1) 10 | - [Decaching old core messages](https://github.com/pfrazee/hyper-sfw/issues/2) 11 | - Events / reactive APIs 12 | - Unique codes on each error 13 | - BUG: In multiple cases, I needed to read the current state to ensure sync between writers (look for HACKs in code) 14 | 15 | ## Example usage 16 | 17 | ```typescript 18 | import crypto from 'hypercore-crypto' 19 | import { Workspace } from 'hsfw' 20 | 21 | const mySwarmKeypair = crypto.keyPair() 22 | const ws = await Workspace.createNew(corestore, mySwarmKeypair) 23 | // or load an existing workspace: const ws = await Workspace.load(corestore, mySwarmKeypair, workspacePublicKey) 24 | 25 | // general metadata 26 | // = 27 | 28 | ws.key // the key that identifies this HSFW 29 | 30 | // basic file ops 31 | // = 32 | 33 | await ws.writeFile('/file.txt', 'Hello, world!') 34 | await ws.statFile('/file.txt') /* => { 35 | path: '/file.txt', 36 | timestamp: Date(January 1, 1969), 37 | bytes: 13, 38 | writer: Buffer<...>, 39 | change: 'b3c316fdc136bde5', 40 | conflict: false, 41 | noMerge: false, 42 | otherChanges: [] 43 | } */ 44 | await ws.listFiles() // => [{...}] 45 | await ws.readFile('/file.txt', 'utf-8') // => 'Hello, world!'
46 | 47 | await ws.copyFile('/file.txt', '/file2.txt') 48 | await ws.moveFile('/file2.txt', '/file3.txt') 49 | await ws.deleteFile('/file3.txt') 50 | 51 | // history 52 | // = 53 | 54 | await ws.listHistory() 55 | await ws.listHistory({path: '/directory/*'}) 56 | await ws.listHistory({path: '/file.txt'}) 57 | 58 | // writer management 59 | // = 60 | 61 | await ws.listWriters() // fetch and list the current writers 62 | ws.getWriter(pubkey) /* get one of the writers (from the current cache) 63 | => WorkspaceWriter { 64 | core: Hypercore 65 | publicKey: Buffer 66 | secretKey?: Buffer 67 | isOwner: boolean 68 | name: string 69 | isAdmin: boolean 70 | isFrozen: boolean 71 | }*/ 72 | ws.getOwner() // get the "owner" writer of this HSFW 73 | ws.isOwner // am I the "owner" of this HSFW? 74 | ws.getMyWriter() // get my writer instance, if I am one 75 | ws.writable // am I a writer? 76 | ws.isAdmin // am I an admin writer? (able to change other writers) 77 | await ws.putWriter(key, {name: 'Bob', admin: false}) // create/update a writer 78 | 79 | const invite = await ws.createInvite(recipientName: string) // create a writer invite 80 | await ws.useInvite(invite) // use the invite to become a writer 81 | ``` 82 | 83 | ## Implementation notes 84 | 85 | ### Hypercore schemas 86 | 87 | The repo is an Autobase which uses oplog inputs and a Hyperbee for the index. All data is encoded using msgpack. 
88 | 89 | The Hyperbee index uses the following layout: 90 | 91 | ``` 92 | /_meta = IndexedMeta 93 | /files/{...path: string} = IndexedFile 94 | /changes/{id: string} = IndexedChange 95 | /history/{mlts: string} = string // mlts is a monotonic lexicographic timestamp 96 | /blobs/{hash: string} = {} 97 | /blobchunks/{hash: string}/{chunk: number} = Buffer 98 | 99 | IndexedMeta { 100 | owner: Buffer // owner key 101 | ownerIndex: Buffer // owner's index key 102 | writers: IndexedMetaWriter[] 103 | timestamp: Date // local clock time of last change 104 | change: string // last change id 105 | 106 | IndexedMetaWriter { 107 | key: Buffer // writer key 108 | name: string // user-facing user name 109 | admin: boolean // can assign other writers? 110 | frozen: boolean // are further updates disabled? 111 | } 112 | } 113 | 114 | IndexedChange { 115 | id: string // random generated ID 116 | parents: string[] // IDs of changes which preceded this change 117 | writer: Buffer // key of the core that authored the change 118 | 119 | path: string // path of file being changed 120 | timestamp: Date // local clock time of change 121 | details: ChangeOpPut|ChangeOpCopy|ChangeOpDel|ChangeOpPutWriter 122 | } 123 | 124 | IndexedFile { 125 | path: string // path of the file in the tree 126 | timestamp: Date // local clock time of change 127 | bytes: number // number of bytes in this blob (0 if delete or move) 128 | 129 | writer: Buffer // key of the core that authored the change 130 | blob: string|undefined // blob sha256 hash 131 | 132 | change: string // last change id 133 | noMerge: boolean // in no-merge mode? 
134 | otherChanges: string[] // other current change ids 135 | } 136 | ``` 137 | 138 | The oplogs include one of the following message types: 139 | 140 | ``` 141 | DeclareOp { 142 | op: 1 143 | index: Buffer // key of the owner's index bee 144 | timestamp: Date // local clock time of declaration 145 | } 146 | 147 | ChangeOp { 148 | op: 2 149 | id: string // random generated ID 150 | parents: string[] // IDs of changes which preceded this change 151 | timestamp: Date // local clock time of change 152 | details: ChangeOpPut|ChangeOpCopy|ChangeOpDel|ChangeOpPutWriter 153 | 154 | ChangeOpPut { 155 | action: number // OP_CHANGE_ACT_PUT 156 | path: string // path of file being changed 157 | blob: string // sha256 hash of blob to create 158 | bytes: number // number of bytes in this blob 159 | chunks: number // number of chunks in the blob (will follow this op) 160 | noMerge: boolean // is this a "no merge" put? 161 | } 162 | 163 | ChangeOpCopy { 164 | action: number // OP_CHANGE_ACT_COPY 165 | path: string // path of file being changed 166 | blob: string // sha256 hash of blob to copy 167 | bytes: number // number of bytes in this blob 168 | } 169 | 170 | ChangeOpDel { 171 | action: number // OP_CHANGE_ACT_DEL 172 | path: string // path of file being changed 173 | } 174 | 175 | ChangeOpPutWriter { 176 | action: number // OP_CHANGE_ACT_PUT_WRITER 177 | key: Buffer // writer's key 178 | name?: string // writer's name 179 | admin?: boolean // is admin? 180 | frozen?: boolean // is frozen? 181 | } 182 | } 183 | 184 | BlobChunkOp { 185 | op: 3 186 | blob: string // sha256 hash of blob 187 | chunk: number // chunk number 188 | value: Buffer 189 | } 190 | ``` 191 | 192 | ### Writer management 193 | 194 | The Hypercore team is planning to create "Autoboot," a toolkit for managing writers. In the meantime, HSFW has implemented its own form of writer management. 
195 | 196 | The basics are pretty straightforward: to add a writer, create an "invite" with `createInvite()` and send that invite to the other user. That user then calls `useInvite()` and they will be added as a writer. (Note: The creator of the invite has to be online when it runs.) 197 | 198 | ```js 199 | // on the existing writer's side 200 | const inviteCode = await workspace.createInvite('Bob') // bob will be the name assigned to the new writer 201 | 202 | // on the joining user's side 203 | await workspace.useInvite(inviteCode) 204 | ``` 205 | 206 | You can modify existing writers with `putWriter()` to change their name or give them admin powers. You must be an admin to add new writers or modify other writers. 207 | 208 | ```js 209 | // on the admin's device 210 | if (workspace.isAdmin) { 211 | await workspace.putWriter(bob.key, {name: 'Robert', admin: true}) 212 | } 213 | ``` 214 | 215 | ### Folder behaviors 216 | 217 | Folders are created automatically based on paths. HSFW does not prohibit files from being created which conflict with a folder name. 218 | 219 | Changes to a folder (renames, moves, deletes) must be written as individual `Change` messages for each file. 220 | 221 | ### Detecting conflicts in changes 222 | 223 | All change operations have a random ID and list the parent changes by their ID. When the indexer handles a change, it compares the listed parents to the current file's "head changes." If one of the head changes is not included in the list of parents, the file is put in conflict state. Conflict state is tracked by a list of change numbers in the file entry. 224 | 225 | ### No-merge writes 226 | 227 | You can write a file with the `noMerge` option set to true. This circumvents the merging behavior, essentially causing the file to go into "conflict" on purpose. (HSFW notes that it is a `noMerge` write and accordingly doesn't indicate the write as a conflict.)
228 | 229 | Non-merged files essentially maintain separate copies for each writer. You can fetch each writer's copy using `readAllFileStates()`. 230 | 231 | This is particularly useful for [Y.js](https://yjs.dev), as each writer's state updates can be written separately and then merged on read. 232 | 233 | ```js 234 | import * as Y from 'yjs' 235 | 236 | const ydoc = new Y.Doc() 237 | ydoc.getText().insert(0, 'Hello, world!') 238 | 239 | // write the state update in "no merge" mode 240 | await ws.writeFile('/ydoc.txt', Buffer.from(Y.encodeStateAsUpdate(ydoc)), {noMerge: true}) 241 | 242 | // now create another ydoc instance and read each writer's updates into it 243 | const ydoc2 = new Y.Doc() 244 | const state = await ws.readAllFileStates('/ydoc.txt') 245 | for (const item of state) Y.applyUpdate(ydoc2, item.data) 246 | String(ydoc2.getText()) // 'Hello, world!' 247 | ``` -------------------------------------------------------------------------------- /test/forks.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { setupTwo } from './util/util.js' 3 | 4 | ava('conflicting and merging writes to individual file', async t => { 5 | const VALUES = [ 6 | Buffer.from('writer1', 'utf-8'), 7 | Buffer.from('writer2', 'utf-8') 8 | ] 9 | 10 | const {sim, ws1, ws2, writer1, writer2} = await setupTwo(t) 11 | 12 | // conflicting writes 13 | 14 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 15 | await ws1.indexCore.update() 16 | await ws2.indexCore.update() 17 | 18 | console.log('\nDISCONNECT\n') 19 | sim.disconnect(sim.stores[0], sim.stores[1]) 20 | 21 | await ws1.writeFile('/test.txt', VALUES[0]) 22 | await ws2.writeFile('/test.txt', VALUES[1]) 23 | 24 | // not yet synced 25 | 26 | t.deepEqual(await ws1.readFile('/test.txt'), VALUES[0]) 27 | t.deepEqual(await ws2.readFile('/test.txt'), VALUES[1]) 28 | 29 | // synced but in conflict state 30 | 31 
| console.log('\nCONNECT\n') 32 | sim.connect(sim.stores[0], sim.stores[1]) 33 | 34 | t.deepEqual( 35 | await ws1.readFile('/test.txt'), 36 | await ws2.readFile('/test.txt') 37 | ) 38 | { 39 | const info1 = await ws1.statFile('/test.txt') 40 | t.truthy(info1) 41 | const info2 = await ws2.statFile('/test.txt') 42 | t.truthy(info2) 43 | if (info1 && info2) { 44 | t.deepEqual(info1, info2) 45 | } 46 | if (info1) { 47 | t.is(info1.conflict, true) 48 | t.is(info1.otherChanges?.length, 1) 49 | } 50 | } 51 | 52 | // merging write 53 | 54 | await ws1.writeFile('/test.txt', VALUES[0]) 55 | t.deepEqual( 56 | await ws1.readFile('/test.txt'), 57 | await ws2.readFile('/test.txt') 58 | ) 59 | { 60 | const info1 = await ws1.statFile('/test.txt') 61 | t.truthy(info1) 62 | const info2 = await ws2.statFile('/test.txt') 63 | t.truthy(info2) 64 | if (info1 && info2) { 65 | t.deepEqual(info1, info2) 66 | } 67 | if (info1) { 68 | t.is(info1.conflict, false) 69 | t.is(info1.otherChanges?.length, 0) // no conflicts 70 | } 71 | } 72 | }) 73 | 74 | ava('conflicting and merging writes & deletes to individual file', async t => { 75 | const VALUES = [ 76 | Buffer.from('first write', 'utf-8'), 77 | Buffer.from('second write', 'utf-8'), 78 | Buffer.from('third write', 'utf-8') 79 | ] 80 | 81 | const {sim, ws1, ws2, writer1, writer2} = await setupTwo(t) 82 | 83 | // create a file 84 | 85 | await ws1.writeFile('/test.txt', VALUES[0]) 86 | 87 | // conflicting write & delete 88 | 89 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 90 | await ws1.indexCore.update() 91 | await ws2.indexCore.update() 92 | 93 | console.log('\nDISCONNECT\n') 94 | sim.disconnect(sim.stores[0], sim.stores[1]) 95 | 96 | await ws1.deleteFile('/test.txt') 97 | await ws2.writeFile('/test.txt', VALUES[1]) 98 | 99 | // not yet synced 100 | 101 | t.deepEqual(await ws1.readFile('/test.txt'), undefined) 102 | t.deepEqual(await ws2.readFile('/test.txt'), VALUES[1]) 103 | 
104 | // synced but in conflict state 105 | 106 | console.log('\nCONNECT\n') 107 | sim.connect(sim.stores[0], sim.stores[1]) 108 | 109 | t.deepEqual( 110 | await ws1.readFile('/test.txt'), 111 | await ws2.readFile('/test.txt') 112 | ) 113 | { 114 | const info1 = await ws1.statFile('/test.txt') 115 | t.truthy(info1) 116 | const info2 = await ws2.statFile('/test.txt') 117 | t.truthy(info2) 118 | if (info1 && info2) { 119 | t.deepEqual(info1, info2) 120 | } 121 | if (info1) { 122 | t.is(info1.conflict, true) 123 | t.is(info1.otherChanges?.length, 1) 124 | } 125 | } 126 | 127 | // file is still present in listing even though it may be in a "deleted" state 128 | t.is((await ws1.listFiles('/')).length, 1) 129 | t.is((await ws2.listFiles('/')).length, 1) 130 | 131 | // merging write 132 | 133 | await ws1.writeFile('/test.txt', VALUES[2]) 134 | t.deepEqual( 135 | await ws1.readFile('/test.txt'), 136 | await ws2.readFile('/test.txt') 137 | ) 138 | { 139 | const info1 = await ws1.statFile('/test.txt') 140 | t.truthy(info1) 141 | const info2 = await ws2.statFile('/test.txt') 142 | t.truthy(info2) 143 | if (info1 && info2) { 144 | t.deepEqual(info1, info2) 145 | } 146 | if (info1){ 147 | t.is(info1.conflict, false) 148 | t.is(info1.otherChanges?.length, 0) // no conflicts 149 | } 150 | } 151 | }) 152 | 153 | ava('conflicting and merging writes & moves to individual file', async t => { 154 | const VALUES = [ 155 | Buffer.from('first write', 'utf-8'), 156 | Buffer.from('second write', 'utf-8'), 157 | Buffer.from('third write', 'utf-8') 158 | ] 159 | 160 | const {sim, ws1, ws2, writer1, writer2} = await setupTwo(t) 161 | 162 | // create a file 163 | 164 | await ws1.writeFile('/test.txt', VALUES[0]) 165 | 166 | // conflicting write & delete 167 | 168 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 169 | await ws1.indexCore.update() 170 | await ws2.indexCore.update() 171 | 172 | console.log('\nDISCONNECT\n') 173 | 
sim.disconnect(sim.stores[0], sim.stores[1]) 174 | 175 | await ws1.moveFile('/test.txt', '/test2.txt') 176 | await ws2.writeFile('/test.txt', VALUES[1]) 177 | 178 | // not yet synced 179 | 180 | t.deepEqual(await ws1.readFile('/test.txt'), undefined) 181 | t.deepEqual(await ws1.readFile('/test2.txt'), VALUES[0]) 182 | t.deepEqual(await ws2.readFile('/test.txt'), VALUES[1]) 183 | 184 | // synced but in conflict state 185 | 186 | console.log('\nCONNECT\n') 187 | sim.connect(sim.stores[0], sim.stores[1]) 188 | 189 | t.deepEqual( 190 | await ws1.readFile('/test.txt'), 191 | await ws2.readFile('/test.txt') 192 | ) 193 | { 194 | const info1 = await ws1.statFile('/test.txt') 195 | t.truthy(info1) 196 | const info2 = await ws2.statFile('/test.txt') 197 | t.truthy(info2) 198 | if (info1 && info2) { 199 | t.deepEqual(info1, info2) 200 | } 201 | if (info1){ 202 | t.is(info1.conflict, true) 203 | t.is(info1.otherChanges?.length, 1) 204 | } 205 | } 206 | 207 | t.deepEqual( 208 | await ws1.readFile('/test2.txt'), 209 | await ws2.readFile('/test2.txt') 210 | ) 211 | { 212 | const info1 = await ws1.statFile('/test2.txt') 213 | t.truthy(info1) 214 | const info2 = await ws2.statFile('/test2.txt') 215 | t.truthy(info2) 216 | if (info1 && info2) { 217 | t.deepEqual(info1, info2) 218 | } 219 | if (info1){ 220 | t.is(info1.conflict, false) 221 | t.is(info1.otherChanges?.length, 0) 222 | } 223 | } 224 | 225 | // file is still present in listing even though it may be in a "deleted" state 226 | t.is((await ws1.listFiles('/')).length, 2) 227 | t.is((await ws2.listFiles('/')).length, 2) 228 | }) 229 | 230 | ava('conflicting and merging writes & copies to individual file', async t => { 231 | const VALUES = [ 232 | Buffer.from('first write', 'utf-8'), 233 | Buffer.from('second write', 'utf-8'), 234 | Buffer.from('third write', 'utf-8') 235 | ] 236 | 237 | const {sim, ws1, ws2, writer1, writer2} = await setupTwo(t) 238 | 239 | // create two file 240 | 241 | await ws1.writeFile('/test.txt', 
VALUES[0]) 242 | await ws1.writeFile('/test2.txt', VALUES[1]) 243 | 244 | // conflicting write & delete 245 | 246 | // HACK sync state prior to disconnect, works around https://github.com/hypercore-protocol/autobase/issues/7 247 | await ws1.indexCore.update() 248 | await ws2.indexCore.update() 249 | 250 | console.log('\nDISCONNECT\n') 251 | sim.disconnect(sim.stores[0], sim.stores[1]) 252 | 253 | await ws1.copyFile('/test2.txt', '/test.txt') 254 | await ws2.writeFile('/test.txt', VALUES[2]) 255 | 256 | // not yet synced 257 | 258 | t.deepEqual(await ws1.readFile('/test.txt'), VALUES[1]) 259 | t.deepEqual(await ws1.readFile('/test2.txt'), VALUES[1]) 260 | t.deepEqual(await ws2.readFile('/test.txt'), VALUES[2]) 261 | 262 | // synced but in conflict state 263 | 264 | console.log('\nCONNECT\n') 265 | sim.connect(sim.stores[0], sim.stores[1]) 266 | 267 | t.deepEqual( 268 | await ws1.readFile('/test.txt'), 269 | await ws2.readFile('/test.txt') 270 | ) 271 | { 272 | const info1 = await ws1.statFile('/test.txt') 273 | t.truthy(info1) 274 | const info2 = await ws2.statFile('/test.txt') 275 | t.truthy(info2) 276 | if (info1 && info2) { 277 | t.deepEqual(info1, info2) 278 | } 279 | if (info1){ 280 | t.is(info1.conflict, true) 281 | t.is(info1.otherChanges?.length, 1) 282 | } 283 | } 284 | 285 | t.deepEqual( 286 | await ws1.readFile('/test2.txt'), 287 | await ws2.readFile('/test2.txt') 288 | ) 289 | { 290 | const info1 = await ws1.statFile('/test2.txt') 291 | t.truthy(info1) 292 | const info2 = await ws2.statFile('/test2.txt') 293 | t.truthy(info2) 294 | if (info1 && info2) { 295 | t.deepEqual(info1, info2) 296 | } 297 | if (info1){ 298 | t.is(info1.conflict, false) 299 | t.is(info1.otherChanges?.length, 0) 300 | } 301 | } 302 | 303 | // file is still present in listing even though it may be in a "deleted" state 304 | t.is((await ws1.listFiles('/')).length, 2) 305 | t.is((await ws2.listFiles('/')).length, 2) 306 | }) 
-------------------------------------------------------------------------------- /test/dual-writers.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { setupTwo } from './util/util.js' 3 | import * as sfw from '../src/index.js' 4 | 5 | ava('dual-writers individual file', async t => { 6 | const VALUES = [ 7 | Buffer.from('Hello, world', 'utf-8'), 8 | Buffer.from('Hello, universe', 'utf-8') 9 | ] 10 | 11 | const {ws1, ws2, writer1, writer2} = await setupTwo(t) 12 | 13 | t.deepEqual(await ws1.listFiles('/'), []) 14 | t.falsy(await ws1.statFile('/test.txt')) 15 | t.falsy(await ws1.statFile('test.txt')) 16 | t.deepEqual(await ws2.listFiles('/'), []) 17 | t.falsy(await ws2.statFile('/test.txt')) 18 | t.falsy(await ws2.statFile('test.txt')) 19 | 20 | // first write 21 | 22 | await ws1.writeFile('/test.txt', VALUES[0]) 23 | 24 | for (const ws of [ws1, ws2]) { 25 | t.deepEqual(await ws.readFile('/test.txt'), VALUES[0]) 26 | { 27 | const info = await ws.statFile('/test.txt') 28 | t.truthy(info) 29 | if (info) { 30 | t.is(info.path, '/test.txt') 31 | t.truthy(info.timestamp instanceof Date) 32 | t.truthy(info.writer.equals(writer1.publicKey)) 33 | t.truthy(typeof info.change === 'string') 34 | t.is(info.conflict, false) 35 | t.is(info.otherChanges?.length, 0) 36 | t.is(info.bytes, VALUES[0].length) 37 | } 38 | const info2 = await ws.statFile('test.txt') 39 | t.deepEqual(info, info2) 40 | } 41 | { 42 | const info = await ws.statFile('/test.txt') 43 | t.truthy(info) 44 | const listing = await ws.listFiles('/') 45 | t.is(listing.length, 1) 46 | if (info) { 47 | t.deepEqual(info, listing[0]) 48 | } 49 | } 50 | { 51 | const info = await ws.statFile('/test.txt') 52 | t.truthy(info) 53 | const history = await ws.listHistory() 54 | t.is(history.length, 2) 55 | if (info) { 56 | t.is(history[1].id, info.change) 57 | t.is(history[1].parents.length, 0) 58 | t.truthy(history[1].writer.equals(writer1.publicKey)) 59 | 
t.deepEqual(history[1].timestamp, info.timestamp) 60 | t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT) 61 | t.is((history[1].details as sfw.ChangeOpPut).path, '/test.txt') 62 | t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string') 63 | t.is((history[1].details as sfw.ChangeOpPut).bytes, info.bytes) 64 | } 65 | } 66 | } 67 | 68 | // second write 69 | 70 | await ws2.writeFile('/test.txt', VALUES[1]) 71 | 72 | for (const ws of [ws1, ws2]) { 73 | t.deepEqual(await ws.readFile('/test.txt'), VALUES[1]) 74 | { 75 | const info = await ws.statFile('/test.txt') 76 | t.truthy(info) 77 | if (info) { 78 | t.is(info.path, '/test.txt') 79 | t.truthy(info.timestamp instanceof Date) 80 | t.truthy(info.writer.equals(writer2.publicKey)) 81 | t.truthy(typeof info.change === 'string') 82 | t.is(info.conflict, false) 83 | t.is(info.otherChanges?.length, 0) 84 | t.is(info.bytes, VALUES[1].length) 85 | } 86 | const info2 = await ws.statFile('test.txt') 87 | t.deepEqual(info, info2) 88 | } 89 | { 90 | const info = await ws.statFile('/test.txt') 91 | t.truthy(info) 92 | const listing = await ws.listFiles('/') 93 | t.is(listing.length, 1) 94 | if (info) { 95 | t.deepEqual(info, listing[0]) 96 | } 97 | } 98 | { 99 | const info = await ws.statFile('/test.txt') 100 | t.truthy(info) 101 | const history = await ws.listHistory() 102 | t.is(history.length, 3) 103 | if (info) { 104 | t.is(history[2].id, info.change) 105 | t.is(history[2].parents.length, 1) 106 | t.truthy(history[2].writer.equals(writer2.publicKey)) 107 | t.deepEqual(history[2].timestamp, info.timestamp) 108 | t.is(history[2].details.action, sfw.OP_CHANGE_ACT_PUT) 109 | t.is((history[2].details as sfw.ChangeOpPut).path, '/test.txt') 110 | t.is(typeof (history[2].details as sfw.ChangeOpPut).blob, 'string') 111 | t.is((history[2].details as sfw.ChangeOpPut).bytes, info.bytes) 112 | } 113 | } 114 | } 115 | 116 | // delete 117 | 118 | await ws1.deleteFile('/test.txt') 119 | 120 | for (const ws of [ws1, ws2]) { 121 | 
t.deepEqual(await ws.readFile('/test.txt'), undefined) 122 | { 123 | const info = await ws.statFile('/test.txt') 124 | t.falsy(info) 125 | const info2 = await ws.statFile('test.txt') 126 | t.falsy(info2) 127 | } 128 | { 129 | const listing = await ws.listFiles('/') 130 | t.is(listing.length, 0) 131 | } 132 | { 133 | const history = await ws.listHistory() 134 | t.is(history.length, 4) 135 | t.is(typeof history[3].id, 'string') 136 | t.is(history[3].parents.length, 1) 137 | t.truthy(history[3].writer.equals(writer1.publicKey)) 138 | t.truthy(history[3].timestamp instanceof Date) 139 | t.is(history[3].details.action, sfw.OP_CHANGE_ACT_DEL) 140 | t.is((history[3].details as sfw.ChangeOpDel).path, '/test.txt') 141 | } 142 | } 143 | 144 | // third write 145 | 146 | await ws2.writeFile('test.txt', VALUES[0]) 147 | 148 | for (const ws of [ws1, ws2]) { 149 | t.deepEqual(await ws.readFile('/test.txt'), VALUES[0]) 150 | { 151 | const info = await ws.statFile('/test.txt') 152 | t.truthy(info) 153 | if (info) { 154 | t.is(info.path, '/test.txt') 155 | t.truthy(info.timestamp instanceof Date) 156 | t.truthy(info.writer.equals(writer2.publicKey)) 157 | t.truthy(typeof info.change === 'string') 158 | t.is(info.conflict, false) 159 | t.is(info.otherChanges?.length, 0) 160 | t.is(info.bytes, VALUES[0].length) 161 | } 162 | const info2 = await ws.statFile('test.txt') 163 | t.deepEqual(info, info2) 164 | } 165 | { 166 | const info = await ws.statFile('/test.txt') 167 | t.truthy(info) 168 | const listing = await ws.listFiles('/') 169 | t.is(listing.length, 1) 170 | if (info) { 171 | t.deepEqual(info, listing[0]) 172 | } 173 | } 174 | { 175 | const info = await ws.statFile('/test.txt') 176 | t.truthy(info) 177 | const history = await ws.listHistory() 178 | t.is(history.length, 5) 179 | if (info) { 180 | t.is(history[4].id, info.change) 181 | t.is(history[4].parents.length, 0) 182 | t.truthy(history[4].writer.equals(writer2.publicKey)) 183 | t.deepEqual(history[4].timestamp, 
info.timestamp) 184 | t.is(history[4].details.action, sfw.OP_CHANGE_ACT_PUT) 185 | t.is((history[4].details as sfw.ChangeOpPut).path, '/test.txt') 186 | t.is(typeof (history[4].details as sfw.ChangeOpPut).blob, 'string') 187 | t.is((history[4].details as sfw.ChangeOpPut).bytes, info.bytes) 188 | } 189 | } 190 | } 191 | }) 192 | 193 | ava('dual-writers copy file', async t => { 194 | const VALUES = [ 195 | Buffer.from('Hello, world', 'utf-8') 196 | ] 197 | 198 | const {ws1, ws2, writer1, writer2} = await setupTwo(t) 199 | 200 | await ws1.writeFile('/test1.txt', VALUES[0]) 201 | await ws2.copyFile('/test1.txt', '/test2.txt') 202 | 203 | for (const ws of [ws1, ws2]) { 204 | t.deepEqual(await ws.readFile('/test1.txt'), VALUES[0]) 205 | t.deepEqual(await ws.readFile('/test2.txt'), VALUES[0]) 206 | { 207 | const info1 = await ws.statFile('/test1.txt') 208 | t.truthy(info1) 209 | if (info1) { 210 | t.is(info1.path, '/test1.txt') 211 | t.truthy(info1.timestamp instanceof Date) 212 | t.truthy(info1.writer.equals(writer1.publicKey)) 213 | t.truthy(typeof info1.change === 'string') 214 | t.is(info1.otherChanges?.length, 0) 215 | t.is(info1.bytes, VALUES[0].length) 216 | } 217 | const info2 = await ws.statFile('/test2.txt') 218 | t.truthy(info2) 219 | if (info2) { 220 | t.is(info2.path, '/test2.txt') 221 | t.truthy(info2.timestamp instanceof Date) 222 | t.truthy(info2.writer.equals(writer2.publicKey)) 223 | t.truthy(typeof info2.change === 'string') 224 | t.is(info2.otherChanges?.length, 0) 225 | t.is(info2.bytes, VALUES[0].length) 226 | } 227 | } 228 | { 229 | const info1 = await ws.statFile('/test1.txt') 230 | t.truthy(info1) 231 | const info2 = await ws.statFile('/test2.txt') 232 | t.truthy(info2) 233 | const listing = await ws.listFiles('/') 234 | t.is(listing.length, 2) 235 | if (info1) { 236 | t.deepEqual(info1, listing.find(i => i.path === '/test1.txt')) 237 | } 238 | if (info2) { 239 | t.deepEqual(info2, listing.find(i => i.path === '/test2.txt')) 240 | } 241 | } 242 | 
{ 243 | const info1 = await ws.statFile('/test1.txt') 244 | t.truthy(info1) 245 | const info2 = await ws.statFile('/test2.txt') 246 | t.truthy(info2) 247 | const history = await ws.listHistory() 248 | t.is(history.length, 3) 249 | if (info1) { 250 | t.is(history[1].id, info1.change) 251 | t.is(history[1].parents.length, 0) 252 | t.truthy(history[1].writer.equals(writer1.publicKey)) 253 | t.deepEqual(history[1].timestamp, info1.timestamp) 254 | t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT) 255 | t.is((history[1].details as sfw.ChangeOpPut).path, '/test1.txt') 256 | t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string') 257 | t.is((history[1].details as sfw.ChangeOpPut).bytes, info1.bytes) 258 | } 259 | if (info2) { 260 | t.is(history[2].id, info2.change) 261 | t.is(history[2].parents.length, 0) 262 | t.truthy(history[2].writer.equals(writer2.publicKey)) 263 | t.deepEqual(history[2].timestamp, info2.timestamp) 264 | t.is(history[2].details.action, sfw.OP_CHANGE_ACT_COPY) 265 | t.is((history[2].details as sfw.ChangeOpCopy).path, '/test2.txt') 266 | t.is(typeof (history[2].details as sfw.ChangeOpCopy).blob, 'string') 267 | t.is((history[2].details as sfw.ChangeOpCopy).bytes, info2.bytes) 268 | } 269 | } 270 | } 271 | }) 272 | 273 | ava('dual-writers move file', async t => { 274 | const VALUES = [ 275 | Buffer.from('Hello, world', 'utf-8') 276 | ] 277 | 278 | const {ws1, ws2, writer1, writer2} = await setupTwo(t) 279 | 280 | await ws1.writeFile('/test1.txt', VALUES[0]) 281 | await ws2.moveFile('/test1.txt', '/test2.txt') 282 | 283 | for (const ws of [ws1, ws2]) { 284 | t.deepEqual(await ws.readFile('/test1.txt'), undefined) 285 | t.deepEqual(await ws.readFile('/test2.txt'), VALUES[0]) 286 | { 287 | const info1 = await ws.statFile('/test1.txt') 288 | t.falsy(info1) 289 | const info2 = await ws.statFile('/test2.txt') 290 | t.truthy(info2) 291 | if (info2) { 292 | t.is(info2.path, '/test2.txt') 293 | t.truthy(info2.timestamp instanceof Date) 294 | 
t.truthy(info2.writer.equals(writer2.publicKey)) 295 | t.truthy(typeof info2.change === 'string') 296 | t.is(info2.otherChanges?.length, 0) 297 | t.is(info2.bytes, VALUES[0].length) 298 | } 299 | } 300 | { 301 | const info2 = await ws.statFile('/test2.txt') 302 | t.truthy(info2) 303 | const listing = await ws.listFiles('/') 304 | t.is(listing.length, 1) 305 | if (info2) { 306 | t.deepEqual(info2, listing.find(i => i.path === '/test2.txt')) 307 | } 308 | } 309 | { 310 | const info2 = await ws.statFile('/test2.txt') 311 | t.truthy(info2) 312 | const history = await ws.listHistory() 313 | t.is(history.length, 4) 314 | 315 | t.is(typeof history[1].id, 'string') 316 | t.is(history[1].parents.length, 0) 317 | t.truthy(history[1].writer.equals(writer1.publicKey)) 318 | t.truthy(history[1].timestamp instanceof Date) 319 | t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT) 320 | t.is((history[1].details as sfw.ChangeOpPut).path, '/test1.txt') 321 | t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string') 322 | if (info2) { 323 | t.is((history[1].details as sfw.ChangeOpPut).bytes, info2.bytes) 324 | } 325 | 326 | if (info2) { 327 | t.is(history[2].id, info2.change) 328 | t.is(history[2].parents.length, 0) 329 | t.truthy(history[2].writer.equals(writer2.publicKey)) 330 | t.deepEqual(history[2].timestamp, info2.timestamp) 331 | t.is(history[2].details.action, sfw.OP_CHANGE_ACT_COPY) 332 | t.is((history[2].details as sfw.ChangeOpCopy).path, '/test2.txt') 333 | t.is(typeof (history[2].details as sfw.ChangeOpCopy).blob, 'string') 334 | t.is((history[2].details as sfw.ChangeOpCopy).bytes, info2.bytes) 335 | } 336 | 337 | t.is(typeof history[3].id, 'string') 338 | t.is(history[3].parents.length, 1) 339 | t.truthy(history[3].writer.equals(writer2.publicKey)) 340 | t.truthy(history[3].timestamp instanceof Date) 341 | t.is(history[3].details.action, sfw.OP_CHANGE_ACT_DEL) 342 | t.is((history[3].details as sfw.ChangeOpDel).path, '/test1.txt') 343 | } 344 | } 345 | }) 
--------------------------------------------------------------------------------
/test/single-writer.ts:
--------------------------------------------------------------------------------
// Tests of a workspace with a single local writer, provided by setupOne()
// (test/util/util.js). The sole writer's core is reachable as ws.writers[0].
// Unlike the dual-writer suite, listHistory() here contains only the file ops
// (index [0] is the first put), with no extra leading entry.
import ava from 'ava'
import { setupOne } from './util/util.js'
import * as sfw from '../src/index.js'

// Write / overwrite / delete / re-create a single file and verify stat,
// listing, and history after every step.
ava('single-writer individual file', async t => {
  const VALUES = [
    Buffer.from('Hello, world', 'utf-8'),
    Buffer.from('Hello, universe', 'utf-8')
  ]

  const {ws} = await setupOne(t)

  // starts empty; statFile accepts paths with or without a leading '/'
  t.deepEqual(await ws.listFiles('/'), [])
  t.falsy(await ws.statFile('/test.txt'))
  t.falsy(await ws.statFile('test.txt'))

  // first write

  await ws.writeFile('/test.txt', VALUES[0])
  t.deepEqual(await ws.readFile('/test.txt'), VALUES[0])

  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('test.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    // initial put is a root change (no parents)
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 1)
    if (info) {
      t.is(history[0].id, info.change)
      t.is(history[0].parents.length, 0)
      t.truthy(history[0].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[0].timestamp, info.timestamp)
      t.is(history[0].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[0].details as sfw.ChangeOpPut).path, '/test.txt')
      t.is(typeof (history[0].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[0].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }

  // second write

  await ws.writeFile('/test.txt', VALUES[1])
  t.deepEqual(await ws.readFile('/test.txt'), VALUES[1])

  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[1].length)
    }
    const info2 = await ws.statFile('test.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    // the overwrite descends from the first put, hence one parent
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 2)
    if (info) {
      t.is(history[1].id, info.change)
      t.is(history[1].parents.length, 1)
      t.truthy(history[1].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[1].timestamp, info.timestamp)
      t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[1].details as sfw.ChangeOpPut).path, '/test.txt')
      t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[1].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }

  // delete

  await ws.deleteFile('/test.txt')
  t.deepEqual(await ws.readFile('/test.txt'), undefined)

  {
    const info = await ws.statFile('/test.txt')
    t.falsy(info)
    const info2 = await ws.statFile('test.txt')
    t.falsy(info2)
  }
  {
    const listing = await ws.listFiles('/')
    t.is(listing.length, 0)
  }
  {
    // the delete is recorded in history even though the file is gone
    const history = await ws.listHistory()
    t.is(history.length, 3)
    t.is(typeof history[2].id, 'string')
    t.is(history[2].parents.length, 1)
    t.truthy(history[2].writer.equals(ws.writers[0].publicKey))
    t.truthy(history[2].timestamp instanceof Date)
    t.is((history[2].details as sfw.ChangeOpDel).path, '/test.txt')
    t.is(history[2].details.action, sfw.OP_CHANGE_ACT_DEL)
  }

  // third write (re-creating the file; path given without a leading '/')

  await ws.writeFile('test.txt', VALUES[0])
  t.deepEqual(await ws.readFile('/test.txt'), VALUES[0])

  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('test.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    // re-creation after delete starts a fresh change chain: zero parents
    const info = await ws.statFile('/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 4)
    if (info) {
      t.is(history[3].id, info.change)
      t.is(history[3].parents.length, 0)
      t.truthy(history[3].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[3].timestamp, info.timestamp)
      t.is(history[3].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[3].details as sfw.ChangeOpPut).path, '/test.txt')
      t.is(typeof (history[3].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[3].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }
})

// Two independent files written back to back; both appear in the listing and
// history in write order.
ava('single-writer multiple files', async t => {
  const VALUES = [
    Buffer.from('Hello, world', 'utf-8'),
    Buffer.from('Hello, universe', 'utf-8')
  ]

  const {ws} = await setupOne(t)

  // first write

  await ws.writeFile('/test1.txt', VALUES[0])
  await ws.writeFile('test2.txt', VALUES[1])

  {
    const info = await ws.statFile('/test1.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/test1.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('test1.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/test2.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/test2.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[1].length)
    }
    const info2 = await ws.statFile('test2.txt')
    t.deepEqual(info, info2)
  }
  {
    const info1 = await ws.statFile('/test1.txt')
    t.truthy(info1)
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 2)
    if (info1) {
      t.deepEqual(listing.find(i => i.path === '/test1.txt'), info1)
    }
    if (info2) {
      t.deepEqual(listing.find(i => i.path === '/test2.txt'), info2)
    }
  }
  {
    // each file's put is an independent root change
    const info1 = await ws.statFile('/test1.txt')
    t.truthy(info1)
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const history = await ws.listHistory()
    t.is(history.length, 2)
    if (info1) {
      t.is(history[0].id, info1.change)
      t.is(history[0].parents.length, 0)
      t.truthy(history[0].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[0].timestamp, info1.timestamp)
      t.is(history[0].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[0].details as sfw.ChangeOpPut).path, '/test1.txt')
      t.is(typeof (history[0].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[0].details as sfw.ChangeOpPut).bytes, info1.bytes)
    }
    if (info2) {
      t.is(history[1].id, info2.change)
      t.is(history[1].parents.length, 0)
      t.truthy(history[1].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[1].timestamp, info2.timestamp)
      t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[1].details as sfw.ChangeOpPut).path, '/test2.txt')
      t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[1].details as sfw.ChangeOpPut).bytes, info2.bytes)
    }
  }
})

// Same lifecycle as 'individual file' but inside a subfolder; also exercises
// folder listings with and without a trailing '/'.
ava('single-writer individual file in a folder', async t => {
  const VALUES = [
    Buffer.from('Hello, world', 'utf-8'),
    Buffer.from('Hello, universe', 'utf-8')
  ]

  const {ws} = await setupOne(t)

  t.deepEqual(await ws.listFiles('/folder'), [])
  t.falsy(await ws.statFile('/folder/test.txt'))
  t.falsy(await ws.statFile('folder/test.txt'))

  // first write

  await ws.writeFile('/folder/test.txt', VALUES[0])
  t.deepEqual(await ws.readFile('/folder/test.txt'), VALUES[0])

  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/folder/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('folder/test.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/folder')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 1)
    if (info) {
      t.is(history[0].id, info.change)
      t.is(history[0].parents.length, 0)
      t.truthy(history[0].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[0].timestamp, info.timestamp)
      t.is(history[0].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[0].details as sfw.ChangeOpPut).path, '/folder/test.txt')
      t.is(typeof (history[0].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[0].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }

  // second write

  await ws.writeFile('/folder/test.txt', VALUES[1])
  t.deepEqual(await ws.readFile('/folder/test.txt'), VALUES[1])

  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/folder/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[1].length)
    }
    const info2 = await ws.statFile('folder/test.txt')
    t.deepEqual(info, info2)
  }
  {
    // trailing slash on the folder path must be accepted too
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/folder/')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 2)
    if (info) {
      t.is(history[1].id, info.change)
      t.is(history[1].parents.length, 1)
      t.truthy(history[1].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[1].timestamp, info.timestamp)
      t.is(history[1].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[1].details as sfw.ChangeOpPut).path, '/folder/test.txt')
      t.is(typeof (history[1].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[1].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }

  // delete

  await ws.deleteFile('/folder/test.txt')
  t.deepEqual(await ws.readFile('/folder/test.txt'), undefined)

  {
    const info = await ws.statFile('/folder/test.txt')
    t.falsy(info)
    const info2 = await ws.statFile('folder/test.txt')
    t.falsy(info2)
  }
  {
    const listing = await ws.listFiles('/folder/')
    t.is(listing.length, 0)
  }
  {
    const history = await ws.listHistory()
    t.is(history.length, 3)
    t.is(typeof history[2].id, 'string')
    t.is(history[2].parents.length, 1)
    t.truthy(history[2].writer.equals(ws.writers[0].publicKey))
    t.truthy(history[2].timestamp instanceof Date)
    t.is(history[2].details.action, sfw.OP_CHANGE_ACT_DEL)
    t.is((history[2].details as sfw.ChangeOpDel).path, '/folder/test.txt')
  }

  // third write (path without a leading '/')

  await ws.writeFile('folder/test.txt', VALUES[0])
  t.deepEqual(await ws.readFile('/folder/test.txt'), VALUES[0])

  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    if (info) {
      t.is(info.path, '/folder/test.txt')
      t.truthy(info.timestamp instanceof Date)
      t.truthy(info.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info.change === 'string')
      t.is(info.conflict, false)
      t.is(info.otherChanges?.length, 0)
      t.is(info.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('folder/test.txt')
    t.deepEqual(info, info2)
  }
  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const listing = await ws.listFiles('/folder/')
    t.is(listing.length, 1)
    if (info) {
      t.deepEqual(info, listing[0])
    }
  }
  {
    const info = await ws.statFile('/folder/test.txt')
    t.truthy(info)
    const history = await ws.listHistory()
    t.is(history.length, 4)
    if (info) {
      t.is(history[3].id, info.change)
      t.is(history[3].parents.length, 0)
      t.truthy(history[3].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[3].timestamp, info.timestamp)
      t.is(history[3].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[3].details as sfw.ChangeOpPut).path, '/folder/test.txt')
      t.is(typeof (history[3].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[3].details as sfw.ChangeOpPut).bytes, info.bytes)
    }
  }
})

// Copy within a single workspace: both files exist afterwards, and the copy is
// recorded as a COPY op referencing the same blob size.
ava('single-writer copy file', async t => {
  const VALUES = [
    Buffer.from('Hello, world', 'utf-8')
  ]

  const {ws} = await setupOne(t)

  await ws.writeFile('/test1.txt', VALUES[0])
  await ws.copyFile('/test1.txt', '/test2.txt')
  t.deepEqual(await ws.readFile('/test1.txt'), VALUES[0])
  t.deepEqual(await ws.readFile('/test2.txt'), VALUES[0])

  {
    const info1 = await ws.statFile('/test1.txt')
    t.truthy(info1)
    if (info1) {
      t.is(info1.path, '/test1.txt')
      t.truthy(info1.timestamp instanceof Date)
      t.truthy(info1.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info1.change === 'string')
      t.is(info1.otherChanges?.length, 0)
      t.is(info1.bytes, VALUES[0].length)
    }
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    if (info2) {
      t.is(info2.path, '/test2.txt')
      t.truthy(info2.timestamp instanceof Date)
      t.truthy(info2.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info2.change === 'string')
      t.is(info2.otherChanges?.length, 0)
      t.is(info2.bytes, VALUES[0].length)
    }
  }
  {
    const info1 = await ws.statFile('/test1.txt')
    t.truthy(info1)
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 2)
    if (info1) {
      t.deepEqual(info1, listing.find(i => i.path === '/test1.txt'))
    }
    if (info2) {
      t.deepEqual(info2, listing.find(i => i.path === '/test2.txt'))
    }
  }
  {
    // history: [0] the PUT, [1] the COPY — both root changes
    const info1 = await ws.statFile('/test1.txt')
    t.truthy(info1)
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const history = await ws.listHistory()
    t.is(history.length, 2)
    if (info1) {
      t.is(history[0].id, info1.change)
      t.is(history[0].parents.length, 0)
      t.truthy(history[0].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[0].timestamp, info1.timestamp)
      t.is(history[0].details.action, sfw.OP_CHANGE_ACT_PUT)
      t.is((history[0].details as sfw.ChangeOpPut).path, '/test1.txt')
      t.is(typeof (history[0].details as sfw.ChangeOpPut).blob, 'string')
      t.is((history[0].details as sfw.ChangeOpPut).bytes, info1.bytes)
    }
    if (info2) {
      t.is(history[1].id, info2.change)
      t.is(history[1].parents.length, 0)
      t.truthy(history[1].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[1].timestamp, info2.timestamp)
      t.is(history[1].details.action, sfw.OP_CHANGE_ACT_COPY)
      t.is((history[1].details as sfw.ChangeOpCopy).path, '/test2.txt')
      t.is(typeof (history[1].details as sfw.ChangeOpCopy).blob, 'string')
      t.is((history[1].details as sfw.ChangeOpCopy).bytes, info2.bytes)
    }
  }
})

// Move within a single workspace: asserted to decompose in history as a COPY
// to the destination followed by a DEL of the source.
ava('single-writer move file', async t => {
  const VALUES = [
    Buffer.from('Hello, world', 'utf-8')
  ]

  const {ws} = await setupOne(t)

  await ws.writeFile('/test1.txt', VALUES[0])
  await ws.moveFile('/test1.txt', '/test2.txt')
  t.deepEqual(await ws.readFile('/test1.txt'), undefined)
  t.deepEqual(await ws.readFile('/test2.txt'), VALUES[0])

  {
    const info1 = await ws.statFile('/test1.txt')
    t.falsy(info1)
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    if (info2) {
      t.is(info2.path, '/test2.txt')
      t.truthy(info2.timestamp instanceof Date)
      t.truthy(info2.writer.equals(ws.writers[0].publicKey))
      t.truthy(typeof info2.change === 'string')
      t.is(info2.otherChanges?.length, 0)
      t.is(info2.bytes, VALUES[0].length)
    }
  }
  {
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const listing = await ws.listFiles('/')
    t.is(listing.length, 1)
    if (info2) {
      t.deepEqual(info2, listing.find(i => i.path === '/test2.txt'))
    }
  }
  {
    // history: [0] PUT of source, [1] COPY to destination, [2] DEL of source
    const info2 = await ws.statFile('/test2.txt')
    t.truthy(info2)
    const history = await ws.listHistory()
    t.is(history.length, 3)

    t.is(typeof history[0].id, 'string')
    t.is(history[0].parents.length, 0)
    t.truthy(history[0].writer.equals(ws.writers[0].publicKey))
    t.truthy(history[0].timestamp instanceof Date)
    t.is(history[0].details.action, sfw.OP_CHANGE_ACT_PUT)
    t.is((history[0].details as sfw.ChangeOpPut).path, '/test1.txt')
    t.is(typeof (history[0].details as sfw.ChangeOpPut).blob, 'string')
    if (info2) {
      t.is((history[0].details as sfw.ChangeOpPut).bytes, info2.bytes)
    }

    if (info2) {
      t.is(history[1].id, info2.change)
      t.is(history[1].parents.length, 0)
      t.truthy(history[1].writer.equals(ws.writers[0].publicKey))
      t.deepEqual(history[1].timestamp, info2.timestamp)
      t.is(history[1].details.action, sfw.OP_CHANGE_ACT_COPY)
      t.is((history[1].details as sfw.ChangeOpCopy).path, '/test2.txt')
      t.is(typeof (history[1].details as sfw.ChangeOpCopy).blob, 'string')
      t.is((history[1].details as sfw.ChangeOpCopy).bytes, info2.bytes)
    }

    t.is(typeof history[2].id, 'string')
    t.is(history[2].parents.length, 1)
    t.truthy(history[2].writer.equals(ws.writers[0].publicKey))
    t.truthy(history[2].timestamp instanceof Date)
    t.is(history[2].details.action, sfw.OP_CHANGE_ACT_DEL)
    t.is((history[2].details as sfw.ChangeOpDel).path, '/test1.txt')
  }
})
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
// @ts-ignore no types available yet -prf
import crypto from 'hypercore-crypto'
// @ts-ignore no types available yet -prf
import Corestore from 'corestore'
// @ts-ignore no types available yet -prf
import Hypercore from 'hypercore'
// @ts-ignore no types available yet -prf
import Autobase from 'autobase'
// @ts-ignore no types available yet -prf
import Hyperbee from 'hyperbee'
// @ts-ignore no types available yet -prf
import HyperbeeMessages from 'hyperbee/lib/messages.js'
// @ts-ignore no types available yet -prf
import MonotonicLexicographicTimestamp from 'monotonic-lexicographic-timestamp'
import pump from 'pump'
import concat from 'concat-stream'
import through from 'through2'
import * as msgpackr from 'msgpackr'
import match from 'micromatch'
import { BaseWorkspaceCore } from './base.js'
import { WorkspaceWriter } from './oplog.js'
import * as structs from './structures.js'
import { WriterCtrlExtension } from
'./wire-extensions/hsfw-writerctrl.js' 24 | import { genId, hash } from './lib/crypto.js' 25 | import lock from './lib/lock.js' 26 | import { toBuffer, toHex } from './lib/util.js' 27 | 28 | export * from './base.js' 29 | export * from './oplog.js' 30 | export * from './structures.js' 31 | 32 | const mlts = MonotonicLexicographicTimestamp() 33 | 34 | export interface KeyPair { 35 | publicKey: Buffer 36 | secretKey?: Buffer 37 | } 38 | 39 | export interface WorkspaceOpts { 40 | store: Corestore 41 | swarmKeyPair: KeyPair 42 | writers?: WorkspaceWriter[] 43 | indexes: WorkspaceIndex[] 44 | } 45 | 46 | export interface ReadFileOpts { 47 | change?: string 48 | encoding?: string 49 | } 50 | 51 | export interface WriteFileOpts { 52 | encoding?: string 53 | noMerge?: boolean 54 | } 55 | 56 | export interface WriteOpts { 57 | writer?: Buffer|Hypercore 58 | prefix?: string 59 | } 60 | 61 | export interface WorkspaceWireExtensions { 62 | writerCtrl?: WriterCtrlExtension 63 | } 64 | 65 | export interface WorkspaceInvite { 66 | token: string 67 | creatorSwarmPublicKey: Buffer 68 | recipientName: string 69 | } 70 | 71 | export class WorkspaceIndex extends BaseWorkspaceCore { 72 | constructor (public store: Corestore, public publicKey: Buffer, public secretKey: Buffer|undefined, public isOwnerIndex: boolean) { 73 | super(store, publicKey, secretKey) 74 | } 75 | 76 | static createNew (store: Corestore, isOwnerIndex: boolean) { 77 | const keyPair = crypto.keyPair() 78 | return new WorkspaceIndex(store, keyPair.publicKey, keyPair.secretKey, isOwnerIndex) 79 | } 80 | 81 | static load (store: Corestore, publicKey: string|Buffer, secretKey: string|Buffer|undefined, isOwnerIndex: boolean) { 82 | return new WorkspaceIndex( 83 | store, 84 | toBuffer(publicKey), 85 | secretKey ? 
// Module-level counter backing the per-instance debugId label.
let _debugIdCounter = 1

  // Debug-friendly instance label (Workspace1, Workspace2, ...).
  debugId = `Workspace${_debugIdCounter++}`
  autobase: Autobase
  indexBee: Hyperbee
  indexCore: Hypercore
  store: Corestore

  writers: WorkspaceWriter[]
  indexes: WorkspaceIndex[]

  swarmKeyPair: KeyPair
  wireExt: WorkspaceWireExtensions = {}
  invites: WorkspaceInvite[] = []

  /**
   * Wires the writer cores into an Autobase (index cores as outputs, the
   * first writable writer core as the default input), linearizes it through
   * _apply(), and wraps the linearized core in a msgpack-encoded Hyperbee.
   */
  constructor ({store, writers, indexes, swarmKeyPair}: WorkspaceOpts) {
    this.store = store
    this.writers = writers || []
    this.indexes = indexes
    this.swarmKeyPair = swarmKeyPair
    const inputs = this.writers.map(w => w.core)
    // Our own (writable) core, if any, is the input we append ops to.
    const defaultInput = inputs.find(core => core.writable)

    this.autobase = new Autobase(inputs, {outputs: indexes.map(idx => idx.core), input: defaultInput})

    // All index reads/writes flow through _apply via the linearized view.
    this.indexCore = this.autobase.linearize({
      unwrap: true,
      apply: this._apply.bind(this)
    })
    this.indexBee = new Hyperbee(this.indexCore, {
      extension: false,
      keyEncoding: 'utf-8',
      // Values are msgpack-encoded; encodingLength re-packs, which is
      // wasteful but correct.
      valueEncoding: {
        encode: (v: any) => msgpackr.pack(v),
        encodingLength: (v: any) => msgpackr.pack(v).length,
        decode: (v: any) => msgpackr.unpack(v)
      }
    })

    this._setupWireExtensions()
  }

  /**
   * Attaches the writer-control wire extension to the owner's index core.
   * Idempotent: does nothing once writerCtrl exists, or while no owner
   * index has been loaded yet (see _loadFromDeclaration).
   */
  _setupWireExtensions () {
    if (this.wireExt.writerCtrl) return
    const ownerIdx = this.indexes.find(idx => idx.isOwnerIndex)
    if (ownerIdx) {
      this.wireExt.writerCtrl = new WriterCtrlExtension(this, ownerIdx.core)
    }
  }
opts.stylize(toHex(w.publicKey), 'string') + '\n' + 150 | indent + ' name: ' + opts.stylize(w.name, 'string') + '\n' + 151 | indent + ' admin: ' + opts.stylize(w.isAdmin, 'boolean') + '\n' + 152 | indent + ' owner: ' + opts.stylize(w.isOwner, 'boolean') + '\n' + 153 | indent + ' writable: ' + opts.stylize(w.core.writable, 'boolean') + '\n' + 154 | indent + ' }\n' 155 | ) 156 | } 157 | const inspectWsIndex = (idx: WorkspaceIndex) => { 158 | return ( 159 | indent + ' {\n' + 160 | indent + ' key: ' + opts.stylize(toHex(idx.publicKey), 'string') + '\n' + 161 | indent + ' writable: ' + opts.stylize(idx.core.writable, 'boolean') + '\n' + 162 | indent + ' }\n' 163 | ) 164 | } 165 | 166 | return this.constructor.name + '(\n' + 167 | indent + ' key: ' + opts.stylize((toHex(this.key)), 'string') + '\n' + 168 | indent + ' writable: ' + opts.stylize(this.writable, 'boolean') + '\n' + 169 | indent + ' admin: ' + opts.stylize(this.isAdmin, 'boolean') + '\n' + 170 | indent + ' owner: ' + opts.stylize(this.isOwner, 'boolean') + '\n' + 171 | indent + ' swarmPubKey: ' + opts.stylize(toHex(this.swarmKeyPair.publicKey), 'string') + '\n' + 172 | indent + ' writers: [\n' + this.writers.map(inspectWsWriter).join('') + ' ]\n' + 173 | indent + ' indexes: [\n' + this.indexes.map(inspectWsIndex).join('') + ' ]\n' + 174 | indent + ')' 175 | } 176 | 177 | static async createNew (store: Corestore, swarmKeyPair: KeyPair) { 178 | const writer = await WorkspaceWriter.createNew(store, {isOwner: true, isAdmin: true}) 179 | await writer.core.ready() 180 | const index = WorkspaceIndex.createNew(store, true) 181 | await index.core.ready() 182 | const workspace = new Workspace({ 183 | store, 184 | swarmKeyPair, 185 | writers: [writer], 186 | indexes: [index] 187 | }) 188 | await workspace.ready() 189 | await workspace._writeDeclaration() 190 | return workspace 191 | } 192 | 193 | static async load (store: Corestore, swarmKeyPair: KeyPair, publicKey: Buffer|string) { 194 | publicKey = toBuffer(publicKey) 
195 | const ownerWriter = await WorkspaceWriter.load(store, publicKey, undefined, {isOwner: true, isAdmin: true}) 196 | await ownerWriter.core.ready() 197 | const localIndex = WorkspaceIndex.createNew(store, false) 198 | await localIndex.core.ready() 199 | const workspace = new Workspace({ 200 | store, 201 | swarmKeyPair, 202 | writers: [ownerWriter], 203 | indexes: [localIndex] 204 | }) 205 | await workspace.ready() 206 | await workspace._loadFromDeclaration(ownerWriter.core) 207 | await workspace._loadFromMeta() 208 | return workspace 209 | } 210 | 211 | async ready () { 212 | await this.autobase.ready() 213 | await this.indexBee.ready() 214 | } 215 | 216 | get key () { 217 | const owner = this.getOwner() 218 | if (owner) return owner.publicKey 219 | throw new Error('No owner writer set') 220 | } 221 | 222 | serialize () { 223 | return { 224 | key: this.key.toString('hex'), 225 | writers: this.writers.map(w => w.serialize()), 226 | indexes: this.indexes.map(idx => idx.serialize()) 227 | } 228 | } 229 | 230 | toJSON () { 231 | return { 232 | key: this.key.toString('hex'), 233 | writable: this.writable, 234 | writers: this.writers.map(w => w.toJSON()) 235 | } 236 | } 237 | 238 | // writers 239 | // = 240 | 241 | getOwner () { 242 | return this.writers.find(w => w.isOwner) 243 | } 244 | 245 | get isOwner () { 246 | return Boolean(this.getOwner()?.core.writable) 247 | } 248 | 249 | getOwnerIndex () { 250 | return this.writers.find(w => w.isOwner) 251 | } 252 | 253 | getMyWriter (): WorkspaceWriter|undefined { 254 | return this.writers.find(w => w.core.writable) 255 | } 256 | 257 | getMyWriterCore (): Hypercore { 258 | const writer = this.getMyWriter() 259 | if (writer) return writer.core 260 | throw new Error('Not a writer') 261 | } 262 | 263 | getWriter (publicKey: string|Buffer): WorkspaceWriter|undefined { 264 | const publicKeyBuf = toBuffer(publicKey) 265 | return this.writers.find(w => w.publicKey.equals(publicKeyBuf)) 266 | } 267 | 268 | get writable () { 269 | 
  /**
   * Create or update a writer record by appending a PUT_WRITER change op.
   *
   * Permission model (enforced locally; re-checked in _applyChangeOpMetaAct):
   * non-admins may only modify their own record, and may not touch the
   * admin/frozen flags at all.
   *
   * @param key - public key of the writer to create/update
   * @throws if the local node is not a writer, or lacks admin rights for
   *   the requested modification
   */
  async putWriter (key: Buffer, {name, admin, frozen}: {name?: string, admin?: boolean, frozen?: boolean} = {}) {
    await this._loadFromMeta() // ensure writers are fresh

    const writer = this.getMyWriter()
    if (!writer) throw new Error(`Can't modify writers: not a writer`)
    if (!writer.isAdmin && !writer.publicKey.equals(key)) throw new Error(`Can't modify other writers: not an admin`)
    if (!writer.isAdmin && admin) throw new Error(`Can't modify admin settings: not an admin`)
    if (!writer.isAdmin && frozen) throw new Error(`Can't modify frozen settings: not an admin`)
    await this.autobase.append(WorkspaceWriter.packop({
      op: structs.OP_CHANGE,
      id: genId(),
      parents: [], // writer-meta ops don't participate in file change ancestry
      timestamp: new Date(),
      details: {
        action: structs.OP_CHANGE_ACT_PUT_WRITER,
        key,
        name,
        admin,
        frozen
      }
    }), null, writer.core)
    // Re-read meta so the local writers list reflects the op we just wrote.
    await this._loadFromMeta()
  }
  /**
   * Redeem an invite string to become a writer: creates a fresh local writer
   * core, asks the inviter (over the writerCtrl wire extension) to register
   * its key, then adds the new core as an autobase input.
   *
   * @param invite - the `invite:<swarmPubKey>:<token>` string from createInvite()
   * @returns the newly created local WorkspaceWriter
   * @throws if the owner index (and thus the wire extension) isn't loaded,
   *   if we're already a writer, or if the inviter rejects the invite
   */
  async useInvite (invite: string) {
    if (!this.wireExt.writerCtrl) throw new Error(`Unable to access invite protocol. Is the owner index loaded?`)
    if (this.getMyWriter()) throw new Error(`Can't use invite: already a writer`)

    const writer = await WorkspaceWriter.createNew(this.store, {isOwner: false, isAdmin: false})
    await writer.core.ready()
    await this.wireExt.writerCtrl.useInvite(invite, writer.publicKey) // throws if unsuccessful

    // Only register the core locally after the inviter accepted it.
    this.writers.push(writer)
    await this.autobase.addInput(writer.core)
    await this._loadFromMeta()

    return writer
  }
(indexedFile.writer.equals(writer)) parents.push(indexedFile.change) 385 | for (const changeId of indexedFile.otherChanges) { 386 | const change = await this.getChange(changeId) 387 | if (change && change.writer.equals(writer)) parents.push(change.id) 388 | } 389 | return parents 390 | } 391 | 392 | async _getFileInfo (indexedFile: structs.IndexedFile): Promise { 393 | // @ts-ignore typescript isn't recognizing the filter operation 394 | const otherChanges: structs.IndexedChange[] = ( 395 | indexedFile.otherChanges?.length > 0 396 | ? await Promise.all(indexedFile.otherChanges.map(c => this.getChange(c))) 397 | : [] 398 | ).filter(Boolean) 399 | return { 400 | path: indexedFile.path, 401 | timestamp: indexedFile.timestamp, 402 | bytes: indexedFile.bytes, 403 | writer: indexedFile.writer, 404 | change: indexedFile.change, 405 | noMerge: indexedFile.noMerge, 406 | conflict: otherChanges.length > 0 && !indexedFile.noMerge, 407 | otherChanges: otherChanges.map((c: structs.IndexedChange, i: number) => ({ 408 | path: (c.details as structs.ChangeOpFilesAct).path, 409 | timestamp: c.timestamp, 410 | bytes: ('bytes' in c.details) ? 
c.details.bytes : 0, 411 | writer: c.writer, 412 | change: indexedFile.otherChanges[i] 413 | })) 414 | } 415 | } 416 | 417 | async listFiles (path = '/', opts?: any): Promise { 418 | await this.indexCore.update() 419 | const self = this 420 | const sub = this._filepathTraverse(path.split('/').filter(Boolean)) 421 | return await new Promise((resolve, reject) => { 422 | pump( 423 | sub.createReadStream(opts), 424 | through.obj(function (entry, enc, cb) { 425 | if (structs.isIndexedFile(entry?.value)) { 426 | self._getFileInfo(entry.value).then( 427 | v => { 428 | this.push(v) 429 | cb() 430 | }, 431 | err => cb(err) 432 | ) 433 | } else { 434 | cb() 435 | } 436 | }), 437 | concat((entries: any) => { 438 | resolve(entries as structs.FileInfo[]) 439 | }), 440 | reject 441 | ) 442 | }) 443 | } 444 | 445 | async statFile (path: string): Promise { 446 | await this.indexCore.update() 447 | const indexedFile = await this._getIndexedFile(path) 448 | if (!indexedFile) return undefined 449 | return await this._getFileInfo(indexedFile) 450 | } 451 | 452 | async readFile (path: string, opts?: string|ReadFileOpts): Promise { 453 | await this.indexCore.update() 454 | 455 | if (typeof opts === 'string') { 456 | opts = {encoding: opts} 457 | } 458 | 459 | let blob 460 | if (typeof opts?.change === 'string') { 461 | const indexedChange = await this.getChange(opts.change) 462 | if (indexedChange?.details.action === structs.OP_CHANGE_ACT_PUT) { 463 | blob = (indexedChange.details as structs.ChangeOpPut).blob 464 | } else if (indexedChange?.details.action === structs.OP_CHANGE_ACT_COPY) { 465 | blob = (indexedChange.details as structs.ChangeOpCopy).blob 466 | } 467 | } else { 468 | const indexedFile = await this._getIndexedFile(path) 469 | blob = indexedFile?.blob 470 | } 471 | if (!blob) return undefined 472 | return this._getBlobData(blob, opts) 473 | } 474 | 475 | async readAllFileStates (path: string): Promise<{writer: Buffer, data: Buffer}[]> { 476 | await this.indexCore.update() 
477 | const indexedFile = await this._getIndexedFile(path) 478 | if (!indexedFile) return [] 479 | 480 | const buffers = [] 481 | if (indexedFile.blob) { 482 | buffers.push({ 483 | writer: indexedFile.writer, 484 | data: (await this._getBlobData(indexedFile.blob)) as Buffer 485 | }) 486 | } 487 | for (const changeId of indexedFile.otherChanges) { 488 | const change = await this.getChange(changeId) 489 | if (change && (change.details as structs.ChangeOpPut).blob) { 490 | buffers.push({ 491 | writer: indexedFile.writer, 492 | data: (await this._getBlobData((change.details as structs.ChangeOpPut).blob)) as Buffer 493 | }) 494 | } 495 | } 496 | return buffers 497 | } 498 | 499 | async writeFile (path: string, value: Buffer|string, opts?: string|WriteFileOpts) { 500 | await this.indexCore.update() 501 | if (typeof opts === 'string') { 502 | opts = {encoding: opts} 503 | } 504 | 505 | let blob: Buffer 506 | if (Buffer.isBuffer(value)) { 507 | blob = value 508 | } else { 509 | blob = Buffer.from(value, (opts?.encoding || 'utf-8') as BufferEncoding) 510 | } 511 | 512 | path = `/${path.split('/').filter(Boolean).join('/')}` 513 | const writerCore = this.getMyWriterCore() 514 | const blobChunks = [] 515 | { 516 | let i = 0 517 | while (i < blob.length) { 518 | blobChunks.push(blob.slice(i, i + structs.BLOB_CHUNK_BYTE_LENGTH)) 519 | i += structs.BLOB_CHUNK_BYTE_LENGTH 520 | } 521 | } 522 | 523 | const release = await lock(`write:${this.key.toString('hex')}`) 524 | try { 525 | const blobId = hash(blob) 526 | const parents = opts?.noMerge 527 | ? 
  /**
   * Move a file by appending two ops under one write lock: a COPY op at
   * dstPath (reusing the source blob id, no re-upload) followed by a DEL op
   * at srcPath. Parent tracking: the copy links to dst's current changes,
   * the delete links to src's, so both sides merge cleanly.
   *
   * @throws if the source doesn't exist or is in conflict, or if this node
   *   is not a writer (getMyWriterCore)
   */
  async moveFile (srcPath: string, dstPath: string) {
    await this.indexCore.update()
    // Normalize to '/'-rooted paths with empty segments stripped.
    srcPath = `/${srcPath.split('/').filter(Boolean).join('/')}`
    dstPath = `/${dstPath.split('/').filter(Boolean).join('/')}`
    const writerCore = this.getMyWriterCore()
    const release = await lock(`write:${this.key.toString('hex')}`)
    try {
      const indexedSrcFile = await this._getIndexedFile(srcPath)
      if (!indexedSrcFile) {
        throw new Error(`Cannot move ${srcPath}: file does not exist`)
      }
      if (indexedSrcFile.otherChanges.length) {
        throw new Error(`Cannot move ${srcPath}: file is in conflict`)
      }
      const srcParents = this._gatherIndexedFileChangeParents(indexedSrcFile)
      const dstParents = await this._getIndexedChangeParents(dstPath)
      await this.autobase.append(WorkspaceWriter.packop({
        op: structs.OP_CHANGE,
        id: genId(),
        parents: dstParents,
        timestamp: new Date(),
        details: {
          action: structs.OP_CHANGE_ACT_COPY,
          path: dstPath,
          blob: indexedSrcFile.blob,
          bytes: indexedSrcFile.bytes
        }
      }), null, writerCore)
      await this.autobase.append(WorkspaceWriter.packop({
        op: structs.OP_CHANGE,
        id: genId(),
        parents: srcParents,
        timestamp: new Date(),
        details: {
          action: structs.OP_CHANGE_ACT_DEL,
          path: srcPath
        }
      }), null, writerCore)
    } finally {
      release()
    }
  }
release() 650 | } 651 | } 652 | 653 | // history 654 | // = 655 | 656 | async getChange (changeId: string): Promise { 657 | await this.indexCore.update() 658 | const entry = await this.indexBee.sub('changes').get(changeId) 659 | if (structs.isIndexedChange(entry?.value)) return entry.value 660 | } 661 | 662 | async listHistory (opts?: any): Promise { 663 | await this.indexCore.update() 664 | const self = this 665 | const matcher = typeof opts?.path === 'string' ? match.matcher(opts.path) : undefined 666 | return await new Promise((resolve, reject) => { 667 | pump( 668 | this.indexBee.sub('history').createReadStream(opts), 669 | through.obj(function (entry, enc, cb) { 670 | if (typeof entry.value !== 'string') return cb() 671 | self.getChange(entry.value).then( 672 | change => { 673 | if (change) { 674 | if (matcher && (!('path' in change.details) || !matcher(change.details.path))) { 675 | // skip 676 | } else { 677 | this.push(change) 678 | } 679 | } 680 | cb() 681 | }, 682 | err => cb(err) 683 | ) 684 | }), 685 | concat((entries: any) => { 686 | resolve(entries as structs.IndexedChange[]) 687 | }), 688 | reject 689 | ) 690 | }) 691 | } 692 | 693 | // blobs 694 | // = 695 | 696 | async _getBlobData (blobId: string, opts?: ReadFileOpts): Promise { 697 | const buf: Buffer = await new Promise((resolve, reject) => { 698 | pump( 699 | this.indexBee.sub('blobchunks').sub(blobId).createReadStream(), 700 | concat((entries: any) => { 701 | resolve(Buffer.concat(entries.map((entry: any) => entry.value))) 702 | }), 703 | reject 704 | ) 705 | }) 706 | if (opts?.encoding && opts?.encoding !== 'binary') { 707 | return buf.toString(opts?.encoding as BufferEncoding) 708 | } 709 | return buf 710 | } 711 | 712 | // meta 713 | // = 714 | 715 | async _writeDeclaration () { 716 | await this.autobase.append(WorkspaceWriter.packop({ 717 | op: structs.OP_DECLARE, 718 | index: this.indexes[0].core.key, 719 | timestamp: new Date() 720 | }), null, this.getMyWriterCore()) 721 | } 722 | 723 | 
async _readDeclaration (core: Hypercore): Promise { 724 | const chunk = await this.autobase._getInputNode(core, 0) 725 | const op = WorkspaceWriter.unpackop(chunk.value) 726 | if (structs.isDeclareOp(op)) { 727 | return op 728 | } 729 | throw new Error(`Declaration Op not found`) 730 | } 731 | 732 | async _loadFromDeclaration (core: Hypercore): Promise { 733 | await this.indexCore.update() 734 | const declOp = await this._readDeclaration(core) 735 | const ownerIndex = WorkspaceIndex.load(this.store, declOp.index, undefined, true) 736 | await ownerIndex.core.ready() 737 | this.indexes.push(ownerIndex) 738 | this.autobase.addDefaultOutput(ownerIndex.core) 739 | this._setupWireExtensions() 740 | } 741 | 742 | async _loadFromMeta (meta?: structs.IndexedMeta): Promise { 743 | // TODO: why does this fail? 744 | // await this.indexCore.update() 745 | if (!meta) { 746 | const metaEntry = await this.indexBee.get('_meta') 747 | meta = structs.isIndexedMeta(metaEntry?.value) ? metaEntry.value : undefined 748 | } 749 | if (!meta) return 750 | for (const w of meta.writers) { 751 | let writer = this.getWriter(w.key) 752 | if (writer) { 753 | writer.name = w.name 754 | writer.isAdmin = w.admin 755 | writer.isFrozen = w.frozen 756 | } else { 757 | writer = WorkspaceWriter.load(this.store, w.key, undefined, { 758 | isOwner: false, 759 | name: w.name, 760 | isAdmin: w.admin, 761 | isFrozen: w.frozen 762 | }) 763 | await writer.core.ready() 764 | this.writers.push(writer) 765 | this.autobase.addInput(writer.core) 766 | } 767 | } 768 | } 769 | 770 | async _createWriter () { 771 | const writer = WorkspaceWriter.createNew(this.store) 772 | await writer.core.ready() 773 | this.writers.push(writer) 774 | this.autobase.addInput(writer.core) 775 | return writer 776 | } 777 | 778 | async _addWriter (publicKey: string) { 779 | const writer = WorkspaceWriter.load(this.store, publicKey, undefined) 780 | await writer.core.ready() 781 | this.writers.push(writer) 782 | await 
  /**
   * Autobase apply function: folds a batch of oplog nodes into the index
   * bee. Handles three op families: DECLARE (seeds '_meta', owner core
   * only), CHANGE (file or writer-meta actions, each also recorded under
   * 'changes' and 'history'), and BLOB_CHUNK ('blobs'/'blobchunks').
   * Malformed or unrecognized entries are logged and skipped; a failure
   * applying one op does not abort the rest of the batch.
   *
   * @param batch - oplog nodes to apply
   * @param clocks - autobase clock info (unused here)
   * @param change - public key of the input core the batch came from
   */
  async _apply (batch: any[], clocks: any, change: Buffer) {
    if (this.indexBee._feed.length === 0) {
      // TODO needed?
      // HACK
      // when the indexBee is using the in-memory rebased core
      // (because it doesnt have one of its own, and is relying on a remote index)
      // it doesn't correctly write its header
      // so we do it here
      // -prf
      // await this.indexBee._feed.append(HyperbeeMessages.Header.encode({
      //   protocol: 'hyperbee'
      // }))
    }

    // All writes go through one batch, flushed atomically at the end.
    const b = this.indexBee.batch({ update: false })
    for (const node of batch) {
      try {
        // `var` (not const) so `op` stays visible to the catch below.
        var op = WorkspaceWriter.unpackop(node.value)
      } catch (e) {
        // skip: not an op
        console.error('Warning: not an op', node.value, e)
        continue
      }

      try {
        // console.debug('OP', this.debugId, op)
        if (structs.isDeclareOp(op)) {
          // Declarations are only honored on the owner's own core.
          const owner = this.getOwner()
          if (owner && change.equals(owner.publicKey)) {
            const indexedMeta: structs.IndexedMeta = {
              owner: change,
              ownerIndex: op.index,
              // The owner is the initial (admin, unfrozen) writer.
              writers: [{key: change, name: '', admin: true, frozen: false}],
              timestamp: op.timestamp,
              change: ''
            }
            await b.put('_meta', indexedMeta)
          } else {
            console.error('Error: declaration operation found on non-owner core, key:', change, 'op:', op)
          }
        } else if (structs.isChangeOp(op)) {
          if (structs.isChangeOpFileAct(op)) {
            await this._applyChangeOpFileAct(op, b, change)
          } else if (structs.isChangeOpMetaAct(op)) {
            await this._applyChangeOpMetaAct(op, b, change)
          } else {
            console.error('Warning: invalid change op', op)
            continue
          }

          // Every change op is also indexed by id and appended to history
          // (keyed by a fresh monotonic timestamp).
          const indexedChange: structs.IndexedChange = {
            id: op.id,
            parents: op.parents,
            writer: change,
            timestamp: op.timestamp,
            details: op.details
          }
          await b.put(`changes\x00${indexedChange.id}`, indexedChange)
          await b.put(`history\x00${mlts()}`, indexedChange.id)
        } else if (structs.isBlobChunkOp(op)) {
          await b.put(`blobs\x00${op.blob}`, {})
          await b.put(`blobchunks\x00${op.blob}\x00${op.chunk}`, op.value)
        } else {
          // skip: not an op
          console.error('Warning: invalid op', op)
          continue
        }
      } catch (e) {
        console.error('Failed to apply operation', op, e)
      }
    }
    await b.flush()
  }
currIndexedFileEntry.value : undefined 883 | const currParents = this._gatherIndexedFileChangeParents(currIndexedFile) 884 | // @ts-ignore for some reason the isChangeOp() type guard isn't enforcing here 885 | const otherChanges = currParents.filter(parent => !op.parents.includes(parent)) 886 | 887 | const indexedFile: structs.IndexedFile = { 888 | path, 889 | timestamp: op.timestamp, // local clock time of change 890 | bytes: 0, 891 | 892 | writer: change, 893 | blob: undefined, 894 | 895 | change: op.id, 896 | noMerge: false, 897 | otherChanges 898 | } 899 | 900 | // TODO track blobs in use and delete unused blobs if possible 901 | 902 | switch (op.details.action) { 903 | case structs.OP_CHANGE_ACT_PUT: { 904 | const putDetails = op.details as structs.ChangeOpPut 905 | indexedFile.blob = putDetails.blob 906 | indexedFile.bytes = putDetails.bytes 907 | indexedFile.noMerge = putDetails.noMerge 908 | await b.put(beekey, indexedFile) 909 | break 910 | } 911 | case structs.OP_CHANGE_ACT_COPY: { 912 | const copyDetails = op.details as structs.ChangeOpCopy 913 | indexedFile.blob = copyDetails.blob 914 | indexedFile.bytes = copyDetails.bytes 915 | await b.put(beekey, indexedFile) 916 | break 917 | } 918 | case structs.OP_CHANGE_ACT_DEL: 919 | if (otherChanges.length === 0) { 920 | await b.del(beekey) 921 | } else { 922 | await b.put(beekey, indexedFile) 923 | } 924 | break 925 | } 926 | } 927 | 928 | async _applyChangeOpMetaAct (op: structs.ChangeOp, b: any, change: Buffer) { 929 | const putWriterDetails = op.details as structs.ChangeOpPutWriter 930 | 931 | const currIndexedMetaEntry = await b.get('_meta', {update: false}) 932 | const currIndexedMeta: structs.IndexedMeta = structs.isIndexedMeta(currIndexedMetaEntry?.value) ? currIndexedMetaEntry.value : undefined 933 | if (!currIndexedMeta) { 934 | console.error('Unable to update writers, _meta entry invalid. 
Entry:', currIndexedMetaEntry) 935 | return 936 | } 937 | 938 | const writer = this.getWriter(change) 939 | if (!writer || (!writer.isAdmin && !putWriterDetails.key.equals(writer.publicKey))) { 940 | console.error('Non-admin attempted to edit writers, key:', change, 'op:', op) 941 | return 942 | } 943 | const hasAdminPowers = writer.isAdmin 944 | 945 | const existingEntry = currIndexedMeta.writers.find((w: structs.IndexedMetaWriter) => w.key.equals(putWriterDetails.key)) 946 | if (existingEntry) { 947 | if ('name' in putWriterDetails) existingEntry.name = isString(putWriterDetails.name) ? putWriterDetails.name : existingEntry.name 948 | if (hasAdminPowers && 'admin' in putWriterDetails) existingEntry.admin = isBoolean(putWriterDetails.admin) ? putWriterDetails.admin : existingEntry.admin 949 | if (hasAdminPowers && 'frozen' in putWriterDetails) existingEntry.frozen = isBoolean(putWriterDetails.frozen) ? putWriterDetails.frozen : existingEntry.frozen 950 | } else { 951 | const indexedMetaWriter: structs.IndexedMetaWriter = { 952 | key: putWriterDetails.key, 953 | name: putWriterDetails.name || '', 954 | admin: hasAdminPowers && Boolean(putWriterDetails.admin), 955 | frozen: hasAdminPowers && Boolean(putWriterDetails.frozen) 956 | } 957 | currIndexedMeta.writers.push(indexedMetaWriter) 958 | } 959 | 960 | currIndexedMeta.change = op.id 961 | currIndexedMeta.timestamp = op.timestamp 962 | await b.put('_meta', currIndexedMeta) 963 | await this._loadFromMeta(currIndexedMeta) 964 | } 965 | } 966 | 967 | function isString (v: any): v is string { 968 | return typeof v === 'string' 969 | } 970 | 971 | function isBoolean (v: any): v is boolean { 972 | return typeof v === 'boolean' 973 | } 974 | --------------------------------------------------------------------------------