├── .gitignore ├── README.md ├── bin.js ├── docs ├── api-reference-readme.md ├── cli.md ├── contract-apis.md ├── implementation.md ├── vm-runtime.md ├── whitepaper-preamble.tex ├── whitepaper.csl ├── whitepaper.md └── whitepaper.pdf ├── examples ├── counter.js ├── kv-store.js └── membership-voting.js ├── package.json ├── src ├── cli.ts ├── core │ ├── database.ts │ ├── errors.ts │ ├── executor.ts │ ├── fraud-proofs.ts │ ├── inclusion-proofs.ts │ ├── log.ts │ ├── monitor.ts │ ├── storage.ts │ ├── testing │ │ ├── executor.ts │ │ └── local-dht.ts │ ├── transactions.ts │ └── vm.ts ├── index.ts ├── schemas.ts ├── server │ ├── config.ts │ ├── data-directory.ts │ ├── process.ts │ ├── rpc.ts │ └── server.ts ├── types.ts └── util │ ├── async.ts │ ├── hyper.ts │ ├── lock.ts │ ├── parser.ts │ ├── resource.ts │ ├── resources-manager.ts │ └── usage-manager.ts ├── test ├── base.ts ├── contract-code.ts ├── debugging.ts ├── index.ts ├── networking.ts ├── oplogs.ts ├── proofs.ts ├── transactions.ts └── verification.ts ├── tsconfig.json └── types ├── corestore └── index.d.ts ├── hyperbee └── index.d.ts ├── hypercore-crypto └── index.d.ts ├── hypercore └── index.d.ts └── hyperswarm └── index.d.ts /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist 3 | .DS_Store 4 | package-lock.json 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Vitra 2 | 3 | ``` 4 | ██╗ ██╗██╗████████╗██████╗ █████╗ 5 | ██║ ██║██║╚══██╔══╝██╔══██╗██╔══██╗ 6 | ██║ ██║██║ ██║ ██████╔╝███████║ 7 | ╚██╗ ██╔╝██║ ██║ ██╔══██╗██╔══██║ 8 | ╚████╔╝ ██║ ██║ ██║ ██║██║ ██║ 9 | ╚═══╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝╚═╝ ╚═╝ 10 | ``` 11 | 12 | Cooperative databases using smart contracts. [Read the white paper](https://github.com/pfrazee/vitra/blob/master/docs/whitepaper.md). 13 | 14 | ## Introduction 15 | 16 | Vitra is a research project for exploring the limits of smart contracts *without* blockchains -- specifically, without using decentralized consensus algorithms like Proof-of-Work or Proof-of-Stake. Its purpose is research and education. Contributors are welcome, but the software is not stable and should not be used in production. 17 | 18 | ## Overview 19 | 20 | Vitra is a hybrid of blockchains and traditional databases. It takes inspiration from [Certificate Transparency](https://certificate.transparency.dev/) and [Layer 2 Optimistic Rollups](https://ethereum.org/en/developers/docs/scaling/layer-2-rollups/) to create a hosted smart-contract protocol called ["Execution Transparency (ET)."](https://github.com/pfrazee/vitra/blob/master/docs/whitepaper.md) 21 | 22 | Vitra databases use [verifiable logs](https://transparency.dev/verifiable-data-structures) to record all transactions in a publicly-auditable structure. A contract written in Javascript then enforces the schemas and business logic of the database. By replaying the logs, users can audit the execution of the database and ensure that each participant is playing by the rules. Vitra also responds to every transaction with an inclusion proof, giving end-users an efficient solution to proving their data in the database. 23 | 24 | **When is this useful?** 25 | 26 | - Public/community services that need to publish very sensitive data, like user encryption-keys or software packages. Vitra gives clear external auditability of every change that occurs, much like Certificate Transparency does for PKI. 
27 | - Decentralized organizations where a database needs to be shared among people who don't totally trust each other. The smart contract ensures that the operator of the database can't cheat the community; it effectively protects users from the owners of a service. 28 | - Large multi-org collaborations (think enterprises with multiple vendors) where data-sharing needs to be coordinated and consistent. Vitra protects you from incompetence in the same way it protects you from malice: the system is transparent and self-auditing. 29 | 30 | Vitra uses the [Hypercore Protocol](https://hypercore-protocol.org) to implement its verifiable logs. 31 | 32 | ## Tutorial video 33 | 34 | [Watch the tutorial video here.](https://www.youtube.com/watch?v=6lS7FMGzMZk) 35 | 36 | [![Vitra tutorial video](https://img.youtube.com/vi/6lS7FMGzMZk/0.jpg)](https://www.youtube.com/watch?v=6lS7FMGzMZk) 37 | 38 | ## Docs 39 | 40 | - [White paper](./docs/whitepaper.md) 41 | - [Command-line docs](./docs/cli.md) 42 | - [Smart contract API docs](./docs/contract-apis.md) 43 | - [API Reference](https://pfrazee.github.io/vitra/) 44 | - [Example contracts](./examples/) 45 | - Technical docs 46 | - [Implementation specs](./docs/implementation.md) 47 | - [VM Runtime](./docs/vm-runtime.md) 48 | 49 | ## Example 50 | 51 | This very simple contract maintains a counter which can only ever increment. The contract exports two calls, `get()` and `increment({amount})`, which we can use to interact with the database. 52 | 53 | ```js 54 | /** 55 | * Counter 56 | * 57 | * This contract maintains a single numeric value which can only be incremented. 58 | */ 59 | 60 | import { index } from 'contract' 61 | 62 | // database api 63 | // = 64 | 65 | export async function get () { 66 | const entry = await index.get(`/counter`) 67 | return Number(entry?.value || 0) 68 | } 69 | 70 | export function increment (opts = {}, emit) { 71 | const amount = typeof opts?.amount === 'number' ? opts.amount : 1 72 | emit({op: 'INCREMENT', amount}) 73 | } 74 | 75 | // transaction handler 76 | // = 77 | 78 | export const apply = { 79 | async INCREMENT (tx, op) { 80 | const current = await get() 81 | tx.put(`/counter`, current + op.amount) 82 | } 83 | } 84 | ``` 85 | 86 | You'll notice that transactions are handled in two phases: first publishing an operation with `emit()`, and then applying the operation with `apply.INCREMENT()`. This separation exists because Vitra databases may have multiple *participants* who can generate ops, but only one *executor* who can execute the ops. When we verify a contract, we're replaying the emitted operations against the apply functions to make sure the executor has been honest. 87 | 88 | Let's create a database using our contract. We'll use the API for this README, but the interactive CLI is generally much easier. 
89 | 90 | ```js 91 | import { Database } from 'vitra' 92 | 93 | // Create the DB 94 | const db = await Database.create('./db-storage-path', { 95 | contract: {source: COUNTER_CONTRACT} 96 | }) 97 | db.swarm() // share on the hypercore network 98 | console.log('New database created, public key:', db.pubkey.toString('hex')) 99 | 100 | // Read the current state 101 | const tx1 = await db.call('get', {}) 102 | console.log(tx1.response) // => 0 103 | 104 | // Increment a few times 105 | const tx2 = await db.call('increment', {}) 106 | const tx3 = await db.call('increment', {amount: 2}) 107 | const tx4 = await db.call('increment', {}) 108 | 109 | // Wait for those increments to be processed 110 | await Promise.all([tx2.whenProcessed(), tx3.whenProcessed(), tx4.whenProcessed()]) 111 | 112 | // Read the new state 113 | const tx5 = await db.call('get', {}) 114 | console.log(tx5.response) // => 4 115 | ``` 116 | 117 | As you can see, Vitra is a programmable database. We're interacting with the DB using the contract's API. 118 | 119 | To verify the execution, we can use one of two methods: `verify()` or `monitor()`. The difference is whether we want to verify persistently; `monitor()` watches for new updates and verifies them continuously. 120 | 121 | ```js 122 | await db.verify() // check all current state 123 | 124 | const mon = await db.monitor() // persistently monitor transactions 125 | mon.on('violation', console.log) 126 | ``` 127 | 128 | Generally we try *not* to violate a contract; violations are unrecoverable and will require users to switch to an entirely new database. This is on purpose: if a contract has been violated, then your database's executor has either suffered a serious technical issue, or they're trying to defraud you and shouldn't be trusted! 129 | 130 | For this example, however, we'll force a violation to see what happens: 131 | 132 | ```js 133 | await db.index.dangerousBatch([{type: 'put', path: '/counter', value: 1}]) 134 | 135 | try { 136 | await db.verify() 137 | } catch (e) { 138 | console.log(e) // => ContractFraudProof (The executor has violated the contract) 139 | } 140 | ``` 141 | 142 | We just violated the contract by setting the counter back to 1. This particular violation is an unprompted change -- no operation caused this write -- but if the executor had responded to an operation with the wrong changes, or skipped over an operation, or tried to unpublish a change, it would be caught the same way. 143 | 144 | ## License 145 | 146 | MIT licensed, Copyright 2022 Blue Link Labs. 147 | 148 | ## Future improvements 149 | 150 | ### Transaction-result inclusion proofs 151 | 152 | Calls to a contract (transactions) may produce one or more operations, and each operation may produce one or more changes (results). Operations are published by the contract participants by writing to their "oplogs," while the operation results are always published by the executor in the "index log." Using Hypercore, we're able to generate inclusion proofs for any log message. 153 | 154 | Inclusion proofs consist of a log message's sequence number, the root hash of the log's merkle tree, and a signature over the root hash by the log's keypair. We can use the inclusion proof to independently verify that a log message was published by a log, and to prove mischief if the log owner ever attempts to unpublish a message. 
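To make this concrete, here is a minimal sketch of how a proof like that could be checked against a log's public key. The proof field names (`seq`, `rootHash`, `signature`) and the exact payload being signed are assumptions for illustration; the real structures are the proof classes listed in the API reference (e.g. `BlockInclusionProof`).

```js
// Minimal sketch: checking an inclusion proof against a log's public key.
import crypto from 'hypercore-crypto'

function checkInclusionProof (logPubkey, proof) {
  // The log's keypair signed the merkle root hash that covers message `proof.seq`.
  // If the signature verifies, the log has irrevocably committed to that message,
  // and a later attempt to unpublish or rewrite it can be proven as fraud.
  return crypto.verify(proof.rootHash, proof.signature, logPubkey)
}
```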
155 | 156 | Vitra can easily generate an inclusion proof for *operations* when handling a transaction because there's a local interactive session with the participant that's executing the transaction. For the *results* published to the index log, there's no guarantee of an interactive session as the participant may not be the executor. The Hypercore protocol has mechanisms for requesting log inclusion proofs over a connection (this is fundamental to the protocol) but the implementation embeds this in the replication logic and does not currently include APIs to fetch proofs for arbitrary messages in a log. By adding those APIs to Hypercore, we can add transaction-result inclusion proofs to Vitra's API. 157 | 158 | ### Additional append-only fraud proof detection 159 | 160 | Violations of the append-only constraint are currently detected when verifying an inclusion proof. It is possible to detect append-only violations more aggressively by checking for them during replication. (In this framework, forking explicitly with Hypercore's truncate() API and forking implicitly with split logs are both considered violations.) 161 | 162 | ### Native-code contract runtime 163 | 164 | Currently Vitra uses [isolated-vm](https://github.com/laverdet/isolated-vm) to execute contracts (via the [Confine Sandbox](https://github.com/confine-sandbox) framework). This could be optimized by replacing the Confine guest process with a C++ program that embeds V8, which would reduce the amount of marshalling between V8 contexts. 165 | 166 | ### Edge-case protocols 167 | 168 | Vitra is currently designed to follow the contract with no external mutations allowed. This means that operator error could leave a Vitra database in an unrecoverable state. We could solve this kind of problem with "edge-case protocols." Some edge-case protocols to consider: 169 | 170 | - **Contract rollback**. A broken contract could leave the database in an inoperable state (e.g. a runtime error stops execution). An edge-case protocol for rolling back to a previous version could help solve this. 171 | 172 | ### ZK-SNARKs 173 | 174 | Vitra uses transaction logs and log-replays to audit execution of a database. Novel research in Layer 2 rollups has recently focused on using zero-knowledge proofs to create a more compact and efficient approach to auditing (ZK-Rollups). It should be possible to apply the same research to Vitra. 175 | -------------------------------------------------------------------------------- /bin.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node --experimental-repl-await 2 | 3 | import './dist/cli.js' -------------------------------------------------------------------------------- /docs/api-reference-readme.md: -------------------------------------------------------------------------------- 1 | # Vitra API Reference 2 | 3 | This site is the API reference for [Vitra](https://github.com/pfrazee/vitra). You can find more detailed documentation, including the whitepaper, specs, and CLI usage, in [the git repository](https://github.com/pfrazee/vitra). 4 | 5 | ## Quick reference 6 | 7 | - [`Database`](./classes/Database.html). The main API for using Vitra. 8 | - [`Database.create()`](./classes/Database.html#create). Create a new Vitra database. 9 | - [`Database.load()`](./classes/Database.html#load). Load an existing Vitra database. 10 | - [`database#call`](./classes/Database.html#call). Execute a transaction on the database. 
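For orientation, the sketch below (adapted from the repository README) shows how these calls fit together; the storage path, `CONTRACT_SOURCE`, and `PUBKEY` are placeholders.

```js
import { Database } from 'vitra'

// Create a new database from a contract source string...
const db = await Database.create('./db-storage-path', {
  contract: {source: CONTRACT_SOURCE}
})

// ...or load an existing one by its public key:
// const db = await Database.load('./db-storage-path', PUBKEY)

// Execute a transaction by calling a method exported by the contract.
const tx = await db.call('get', {})
console.log(tx.response)
```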
11 | 12 | #### Common 13 | 14 | - [`Transaction`](./classes/Transaction.html). The class returned by [`database#call`](./classes/Database.html#call). 15 | - [`Operation`](./classes/Operation.html). Individual operations included in a `Transaction`. 16 | - [`OpLog`](./classes/OpLog.html). The API for participant operation logs. 17 | - [`IndexLog`](./classes/IndexLog.html). The API for the output index log. 18 | 19 | #### Proofs 20 | 21 | - [`BlockInclusionProof`](./classes/BlockInclusionProof.html) 22 | - [`BlockRewriteFraudProof`](./classes/BlockRewriteFraudProof.html) 23 | - [`ContractFraudProof`](./classes/ContractFraudProof.html) 24 | - [`LogForkFraudProof`](./classes/LogForkFraudProof.html) -------------------------------------------------------------------------------- /docs/cli.md: -------------------------------------------------------------------------------- 1 | # Vitra CLI docs 2 | 3 | Vitra uses an interactive CLI. Install vitra by calling: 4 | 5 | ``` 6 | npm i -g vitra 7 | ``` 8 | 9 | Then start vitra by simply calling the `vitra` command. 10 | 11 | ## Tutorial video 12 | 13 | [Watch the tutorial video here.](https://www.youtube.com/watch?v=6lS7FMGzMZk) 14 | 15 | [![Vitra tutorial video](https://img.youtube.com/vi/6lS7FMGzMZk/0.jpg)](https://www.youtube.com/watch?v=6lS7FMGzMZk) 16 | 17 | ## Overview 18 | 19 | The interactive CLI is a REPL session, meaning you can run Javascript inside it. However: the majority of the time you'll use the builtin commands, all of which start with a `.`. 20 | 21 | During the CLI session, you have a current working directory. This is the directory in which your database's data is stored. You can check the current directory and database with the `.status` command. You can set the current directory with the `.use {path}` command. 22 | 23 | ## Commands 24 | 25 | ``` 26 | .bg Move the hosting process to a background process that will persist after this session. 27 | .call Call a method on the current database. 28 | .deloplog Delete an oplog on this database. (Must not be an active participant.) 29 | .destroy Destroy the database in the current working path. 30 | .fg Stop the background process, if it exists, and move the host back into this session. 31 | .fraud View a tracked fraud-proof with this database. 32 | .fraudlist List all tracked fraud-proofs with this database. 33 | .get Get an entry in the index at the given path. 34 | .help Print this help message 35 | .history Output the history of a log. (Pass the pubkey or "index".) 36 | .info Output detailed information about the current database. 37 | .init Create a new database in the current working path. 38 | .list List entries in the index at the given path. 39 | .load Load an existing database into the current working path. 40 | .methods Output the methods exported by the current database's contract. 41 | .mkoplog Create a new oplog for this database. 42 | .monitor Persistently watch and verify the execution of this database. 43 | .monitorend Stop monitoring this database. 44 | .source Output the source code for the current database's contract. 45 | .status Get the current session information. 46 | .sync Sync the latest state of the current database. 47 | .syncall Sync the full history of the current database. 48 | .test Start a testing sandbox. 49 | .tx View a tracked transaction with this database. 50 | .txlist List all tracked transactions with this database. 51 | .txverify Verify the inclusion of a tracked transaction with this database. 52 | .use Set the current working path. 
53 | .verify Verify the execution of this database. 54 | ``` 55 | 56 | ### `.bg` 57 | 58 | Move the hosting process to a background process that will persist after this session. 59 | 60 | ### `.call` 61 | 62 | ``` 63 | .call {method} [params...] 64 | ``` 65 | 66 | Call a method on the current database. Params are specified using bash-style `--` switches; for instance, to call `put({key: 'foo', value: 'bar'})`, you would type `.call put --key foo --value bar`. 67 | 68 | ### `.deloplog` 69 | 70 | ``` 71 | .deloplog {pubkey} 72 | ``` 73 | 74 | Delete an oplog on this database. (Must not be an active participant.) 75 | 76 | ### `.destroy` 77 | 78 | Destroy the database in the current working path. 79 | 80 | ### `.fg` 81 | 82 | Stop the background process, if it exists, and move the host back into this session. 83 | 84 | ### `.fraud` 85 | 86 | ``` 87 | .fraud {id} 88 | ``` 89 | 90 | View a tracked fraud-proof with this database. 91 | 92 | ### `.fraudlist` 93 | 94 | List all tracked fraud-proofs with this database. 95 | 96 | ### `.get` 97 | 98 | ``` 99 | .get {path} 100 | ``` 101 | 102 | Get an entry in the index at the given path. 103 | 104 | ### `.help` 105 | 106 | Print a command listing. 107 | 108 | ### `.history` 109 | 110 | ``` 111 | .history [pubkey] 112 | ``` 113 | 114 | Output the history of a log. (Pass the pubkey or "index".) 115 | 116 | ### `.info` 117 | 118 | Output detailed information about the current database. 119 | 120 | ### `.init` 121 | 122 | ``` 123 | .init {contract-js-path} 124 | ``` 125 | 126 | Create a new database in the current working path. 127 | 128 | ### `.list` 129 | 130 | ``` 131 | .list [path] 132 | ``` 133 | 134 | List entries in the index at the given path. 135 | 136 | ### `.load` 137 | 138 | ``` 139 | .load {pubkey} 140 | ``` 141 | 142 | Load an existing database into the current working path. 143 | 144 | ### `.methods` 145 | 146 | Output the methods exported by the current database's contract. 147 | 148 | ### `.mkoplog` 149 | 150 | Create a new oplog for this database. 151 | 152 | ### `.monitor` 153 | 154 | Persistently watch and verify the execution of this database. 155 | 156 | ### `.monitorend` 157 | 158 | Stop monitoring this database. 159 | 160 | ### `.source` 161 | 162 | Output the source code for the current database's contract. 163 | 164 | ### `.status` 165 | 166 | Get the current session information. 167 | 168 | ### `.sync` 169 | 170 | Sync the latest state of the current database. 171 | 172 | ### `.syncall` 173 | 174 | Sync the full history of the current database. 175 | 176 | ### `.test` 177 | 178 | ``` 179 | .test {contract-js-path} 180 | ``` 181 | 182 | Start a testing sandbox. 183 | 184 | ### `.tx` 185 | 186 | ``` 187 | .tx {id} 188 | ``` 189 | 190 | View a tracked transaction with this database. 191 | 192 | ### `.txlist` 193 | 194 | List all tracked transactions with this database. 195 | 196 | ### `.txverify` 197 | 198 | ``` 199 | .txverify {id} 200 | ``` 201 | 202 | Verify the inclusion of a tracked transaction with this database. 203 | 204 | ### `.use` 205 | 206 | ``` 207 | .txverify {db-path} 208 | ``` 209 | 210 | Set the current working path. 211 | 212 | ### `.verify` 213 | 214 | Verify the execution of this database. 215 | -------------------------------------------------------------------------------- /docs/contract-apis.md: -------------------------------------------------------------------------------- 1 | # Contract APIs 2 | 3 | Vitra contracts are a javascript modules. 
They must be a single file (you can't import from other .js files) but there are some APIs defined which you can import. 4 | 5 | The programming model of the contract is as follows: 6 | 7 | - A set of functions are exported. These are the contract's API. 8 | - Some functions may emit "operations" which are published in an oplog. 9 | - Operations are handled by database's "executor" using the `apply()` function. 10 | 11 | Here is a simple example contract to help explain this: 12 | 13 | ```js 14 | /** 15 | * Counter 16 | * 17 | * This contract maintains a singe numeric value which can only be incremented. 18 | */ 19 | 20 | import { index } from 'contract' 21 | 22 | // database api 23 | // = 24 | 25 | export async function get () { 26 | const entry = await index.get(`/counter`) 27 | return Number(entry?.value || 0) 28 | } 29 | 30 | export function increment (_, emit) { 31 | emit({op: 'INCREMENT'}) 32 | } 33 | 34 | // transaction handler 35 | // = 36 | 37 | export const apply = { 38 | async INCREMENT (tx, op) { 39 | const current = await get() 40 | tx.put(`/counter`, current + 1) 41 | } 42 | } 43 | ``` 44 | 45 | This contract exports two methods, `get()` and `increment()`. The increment method publishes an `INCREMENT` operation which is then processed by the `apply.INCREMENT` method. 46 | 47 | ## Exported functions 48 | 49 | Each contract exports the following functions: 50 | 51 | - `apply` Required. Translates operations into changes to the index. Must not have any side-effects. Called only on the executor instance or during verification flows. 52 | - `process` Optional. Returns metadata to be attached to operation `ack` messages. Cannot be async. Called only on the executor instance. 53 | 54 | They can also export any number of functions for working with the contract. 55 | 56 | ### The `apply()` method 57 | 58 | The `apply` function MUST be provided. It is called by the executor to transform operations into changes to the database. It is also called by monitors to double-check that the executor is following the contract. 59 | 60 | ```typescript 61 | // version 1 62 | export async function apply (tx, op, ack) { 63 | // apply the operation 64 | } 65 | 66 | // version 2 67 | export const apply = { 68 | async OPERATION_NAME (tx, op, ack) { 69 | // apply the operation 70 | } 71 | } 72 | ``` 73 | 74 | Three parameters are passed into apply: `tx`, `op`, and `ack`. The `op` is the operation you're currently processing, and it will simply be the object you passed with the `emit()` call in one of the API methods. 
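For example, a handler that uses all three parameters might look like the following sketch. The `SET_OWNER` operation and the `/owner` key are invented for illustration; the pattern mirrors the kv-store example contract in this repository.

```js
import assert from 'assert'
import { index } from 'contract'

export const apply = {
  async SET_OWNER (tx, op, ack) {
    // `op` is exactly the object that was passed to emit()
    assert(typeof op.pubkey === 'string', 'Pubkey must be a string')
    // `ack.origin` identifies the oplog that emitted the operation
    const current = await index.get('/owner')
    assert(!current || current.value.pubkey === ack.origin, 'Only the current owner may change ownership')
    // `tx` queues the resulting changes to the index
    tx.put('/owner', {pubkey: op.pubkey})
  }
}
```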
75 | 76 | The `tx` parameter is how you make changes to the database state and will follow the following API: 77 | 78 | ```typescript 79 | interface ApplyTransactor { 80 | get(key: string): any // gets the queued operation at the given key 81 | 82 | // general mutators 83 | put(key: string, value: any): void 84 | delete(key: string): void 85 | 86 | // system mutators 87 | addOplog(value: {pubkey: string}): void 88 | removeOplog(value: {pubkey: string}): void 89 | setContractSource(value: {code: string}): void 90 | } 91 | ``` 92 | 93 | The `ack` object is some additional metadata which will look like this: 94 | 95 | ```javascript 96 | interface Ack { 97 | origin: string // the pubkey of the op author (hex string) 98 | ts: Date // the timestamp of the executor's ack (Date) 99 | metadata: any // any metadata provided by `process()`; may be undefined 100 | } 101 | ``` 102 | 103 | ## The `process()` method 104 | 105 | The process method is an optional export which allows you to attach metadata to an operation. It's useful for attaching non-deterministic information such as a timestamp or random bits. It is only called by the executor, and it is called right before `apply()` is called. It should look like this: 106 | 107 | ```typescript 108 | export async function process (op) { 109 | return {some: 'metadata'} 110 | } 111 | ``` 112 | 113 | The result of `process()` is provided in the `ack.metadata` field in `apply()`. 114 | 115 | ### Other exported functions 116 | 117 | All other exported functions should follow the following signature: 118 | 119 | ```typescript 120 | export async function someMethod (params, emit) { 121 | // do whatever 122 | } 123 | ``` 124 | 125 | The first parameter, `params` will always be an object. It contains parameters supplied by the caller. 126 | 127 | The second parameter, `emit`, queues operations to be handled by `apply()`. The exact object passed to `emit()` will be the operation published. It's strongly recommended that every emitted operation includes an `op` attribute, which is the ID of the operation. For example, `emit({op: 'SET_VALUE', key: 'foo', value: 'bar'})`. 128 | 129 | ## Modules 130 | 131 | The contract environment includes a few standard modules. 
The most important is the `contract` module which provides APIs for interacting with the contract: 132 | 133 | ```js 134 | import { index } from 'contract' 135 | 136 | await index.get('/foo') 137 | await index.list('/') 138 | ``` 139 | 140 | ### `assert` 141 | 142 | ```typescript 143 | import ok, * as assert from 'assert' 144 | 145 | interface Assert { 146 | ok (value: any, message: string): void 147 | deepEqual (v1: any, v2: any, message: string): void 148 | doesNotMatch (str: string, regex: RegExp, message: string): void 149 | equal (v1: any, v2: any, message: string): void 150 | fail (message: string): void 151 | match (str: string, regex: RegExp, message: string): void 152 | notDeepEqual (v1: any, v2: any, message: string): void 153 | notEqual (v1: any, v2: any, message: string): void 154 | } 155 | ``` 156 | 157 | ### `contract` 158 | 159 | ```typescript 160 | import { index, oplog, isWriter, listOplogs } from 'contract' 161 | 162 | type listOplogs = () => ContractOplog[] 163 | type isWriter = boolean 164 | 165 | interface ContractIndex { 166 | list (prefix: string, opts: any): Promise 167 | get (key: string): Promise 168 | } 169 | 170 | interface ContractIndexEntry { 171 | key: string 172 | value: any 173 | } 174 | 175 | interface ContractOplog { 176 | getLength (): Promise 177 | get (seq: number): Promise 178 | } 179 | ``` 180 | 181 | ### `util` 182 | 183 | ```typescript 184 | import { genUUID } from 'util' 185 | 186 | type genUUID = () => string 187 | ``` 188 | -------------------------------------------------------------------------------- /docs/implementation.md: -------------------------------------------------------------------------------- 1 | # Implementation specs 2 | 3 | - Contracts are strongly isolated in virtual machines and do not share resources 4 | - Every contract is bound to a hypercore (the index log) 5 | - Every contract has a public key (the index log key) 6 | - Contract code is published on the index log 7 | - If you know the public key of a contract you can access it 8 | - A contract "instance" is created locally when the contract is accessed 9 | - Each contract instance may have an oplog 10 | 11 | Contracts are instantiated on any device that's interested in accessing it. This means there's always a "local instance" of the contract running. Contracts export an API for external interaction. 12 | 13 | > ℹ️ It is possible to examine the datacores of contracts without instantiating the contract, but this is not the default behavior. 14 | 15 | In addition to the contract's index core, each instance may or may not have an oplog core. All state-mutations are appended as operations to the local instance's oplog core. A required function, `apply()`, is then called to handle each oplog cores' operations and update the index core's state. 16 | 17 | The contract instance that owns the index core is known as the "executor." Contract instances that own oplog cores are known as "participants." 18 | 19 | ## Glossary 20 | 21 | |Name|Description| 22 | |-|-| 23 | |Contract|A program executed using the ITO framework. This term may refer to only the code or to the code and all the state and participants.| 24 | |Contract code|The source code defining the contract.| 25 | |Log|An append-only listing of messages. 
Sometimes called a Hypercore or "core" due to the technology ITO is implemented upon.| 26 | |Oplog|A log which produces operations to be executed by the contract.| 27 | |Index log|The log which represents the current state of the contract which are the results of processed operations.| 28 | |Executor|The party responsible for executing the contract.| 29 | |Participant|Any party with an oplog declared in the contract.| 30 | |Monitor|Any party who chooses to validate the contract's execution.| 31 | |Operation|Any message published on an oplog. Will be processed by the executor.| 32 | |Transaction|A collection of operations and resulting changes to the index which result from a call to the contract.| 33 | |Full verification|Conduct a full audit of the contract's execution.| 34 | |Transaction verification|Audit the execution of an individual transaction.| 35 | |Proof|An independently-verifiable assertion of some contract state.| 36 | |Inclusion proof|A proof that some state (e.g. an operation) was processed by the contract.| 37 | |Fraud proof|A proof that the executor or a participant in a contract has violated some invariant.| 38 | 39 | ## Index layout (output log) 40 | 41 | The output log has a set of fixed entries: 42 | 43 | |Key|Usage| 44 | |-|-| 45 | |`.sys/contract/source`|The source code of the contract| 46 | |`.sys/inputs/{pubkey-hex}`|Declarations of oplogs| 47 | |`.sys/acks/{pubkey-hex}/{seq}`|Acknowledgements of processed ops| 48 | 49 | Entries under `.sys/acks/` can not be modified by the contract. 50 | 51 | > ℹ️ Acks are stored in the output logs to ensure atomicity of transaction-handling. 52 | 53 | ## Encodings 54 | 55 | - Oplog values: messagepack. 56 | - Output index keys: utf8 with a `\x00` separator. 57 | - Output index values: messagepack. 58 | 59 | ## Flows 60 | 61 | ### Initialization flow (executor) 62 | 63 | The executor host initializes a contract with the following steps: 64 | 65 | - The index is created. 66 | - The Hyperbee header is written to the index log. (block 0) 67 | - The `.sys/contract/source` entry is written to the index log with the source code of the contract. (block 1) 68 | - Any number of `.sys/inputs/{key}` entries may be written. 69 | - An empty entry is written at `.sys/acks/genesis` indicating that initialization is complete and that all further entries will be dictated by the contract. 70 | 71 | ### Operation processing flow (executor) 72 | 73 | The executor host watches all active oplogs for new entries and enters the following flow as each entry is detected: 74 | 75 | - Place the vm in "restricted mode." 76 | - If the contract exports a `process()` function 77 | - Call `process(op)` and retain the returned metadata. 78 | - Create a new `ack` object which includes: 79 | - The oplog pubkey. 80 | - The op sequence number. 81 | - The root hash of the oplog. 82 | - A local timestamp. 83 | - Metadata returned by `process()`. 84 | - Call `apply()` with the following arguments: 85 | - `tx` An object with `put(key, value)` and `del(key)` operations for queueing updates to the index. 86 | - `op` The operation. 87 | - `ack` The generated ack. 88 | - If the `apply()` call: 89 | - Returns a resolved promise 90 | - Set `ack.success` to true 91 | - Returns a rejected promise 92 | - Set `ack.success` to false 93 | - Set `ack.error` to a string (the message of the error) 94 | - Empty the `tx` queue of actions 95 | - Place the vm in "unrestricted mode." 96 | - Prepend the `ack` entry to the `tx` with a path of `.sys/acks/{oplog-pubkey-hex}/{seq}`. 
97 | - Atomically apply the queued actions in `tx` to the index. 98 | - Iterate the actions in `tx` using offset `i`: 99 | - If the `tx[i]` key is `.sys/contract/source`: 100 | - Replace the active VM with the value of `tx[i]`. 101 | - If the `tx[i]` key is prefixed by `.sys/input/`: 102 | - If the `tx[i]` action is `put`: 103 | - Add the encoded oplog to the active oplogs. 104 | - If the `tx[i]` action is `delete`: 105 | - Remove the encoded oplog from the active oplogs. 106 | 107 | ### Transaction flow (participant) 108 | 109 | > ℹ️ A "transaction" is an API call on the contract, including reads. Every transaction returns an inclusion proof called a "transaction proof" or "tx proof." See the `ContractTransaction` API for information about what each tx proof includes. 110 | 111 | The transaction flow is divided into "creation" and "receiving" as time may pass between the initial op-generating call and result processing by the executor. 112 | 113 | **Creating the transaction** 114 | 115 | - Sync the index head from the network. 116 | - Initialize the contract VM with the current contract source. 117 | - Capture the index root proof as `indexProof`. 118 | - Call the specified contract method. 119 | - Respond with the following information: 120 | - Was the call successful? 121 | - The response or error returned by the call. 122 | - `indexProof` 123 | - An array of all operations generated, including: 124 | - The oplog root proof. 125 | - The operation value. 126 | 127 | **Receiving transaction result** 128 | 129 | - Iterate each operation generated by the transaction as `op`: 130 | - Await the matching ack in the index. 131 | - Fetch the matching ack as `ack`. 132 | - Fetch all mutations to the index matching `ack` as `mutations`. 133 | - Fetch the index root proof at the seq of the `ack` as `indexProof`. 134 | - Respond with the following information: 135 | - `op` 136 | - `ack` 137 | - `mutations` 138 | - `indexProof` 139 | 140 | ### Full verification flow (monitor) 141 | 142 | > ℹ️ Verification occurs by iterating the index's log and comparing published tx-results against generated tx-results. All transactions are preceded by an ack entry, so the majority of the flow is looking for acks at expected places, comparing all updates that result from the message indicated by the ack, and then skipping forward. 143 | 144 | The monitor host verifies a contract using the following flow: 145 | 146 | **Verify Initialization Entries** 147 | 148 | - Verify that `idxLog[0]` is a valid Hyperbee header. 149 | - Read contract source from `idxLog[1]` and instantiate the VM with it. 150 | - Place the vm in "restricted mode." 151 | - Create a map `processedSeqs` to for each active oplog with each entry initialized at `-1`. 152 | - Set `idxSeq` to `2` 153 | - While `idxSeq < idxLog.length`: 154 | - If `idxLog[idxSeq]` key is `.sys/ack/0`, exit while loop. 155 | - If `idxLog[idxSeq]` key is not `.sys/inputs/{key}`, fail verification. 156 | - Increment `idxSeq` 157 | - Increment `idxSeq` 158 | 159 | **Verify Execution Entries** 160 | 161 | - While `idxSeq < idxLog.length`: 162 | - Set `ack` to `idxLog[idxSeq]` 163 | - If `ack` key does not match `.sys/ack/{pubkey}/{seq}`, fail verification. 164 | - If `ack` write-type is not `put`, fail verification. 165 | - If the `{seq}` segment of the key does not equal `processedSeqs[pubkey] + 1`, fail verification. 166 | - Fetch the `op` from the oplog specified by the `ack` value. 167 | - Rewind VM `index` state to `idxSeq`. 
168 | - Call `apply()` with the following arguments: 169 | - `tx` An object with `put(key, value)` and `del(key)` operations for queueing updates to the index. 170 | - `op` The operation. 171 | - `ack` The `ack` value. 172 | - Set `newContractSource` to `null`. 173 | - Set `oplogChanges` to an empty array. 174 | - Iterate the actions in `tx` using offset `i`: 175 | - If the `tx[i]` type does not equal the `idxLog[idxSeq + i]` type, fail verification. 176 | - If the `tx[i]` key does not equal the `idxLog[idxSeq + i]` key, fail verification. 177 | - If the `tx[i]` value does not equal the `idxLog[idxSeq + i]` value, fail verification. 178 | - If the `tx[i]` key is `.sys/contract/source`, set `newContractSource` to the `tx[i]` value. 179 | - If the `tx[i]` key matches `.sys/inputs/{pubkey}`, add the value to `oplogChanges`. 180 | - Set `processedSeqs[pubkey]` to the `{seq}` segment of the `ack` key. 181 | - Increment `idxLogSeq` by `tx.length + 1`. 182 | - If `newContractSource` is not `null`: 183 | - Replace the active VM with `newContractSource` 184 | - Iterate each entry in `oplogChanges`: 185 | - Add or remove oplogs according to the encoded change. 186 | 187 | ### Transaction verification flow (monitor) 188 | 189 | TODO -------------------------------------------------------------------------------- /docs/vm-runtime.md: -------------------------------------------------------------------------------- 1 | # VM Runtime 2 | 3 | A contract is an ES module. Source code must be defined as a single "blob" and therefore no imports are enabled. 4 | 5 | ## Restricted Mode 6 | 7 | In "restricted mode," all host calls are disabled except for reads from the `ContractIndex` instance located at `globals.index`. Additionally, only one call to the contract may be active at a time. 8 | 9 | > ℹ️ Restricted mode is used during `process()` and `apply()` calls. 10 | 11 | ## Standard Exports 12 | 13 | Each contract exports the following functions: 14 | 15 | - `apply` Required. Translates operations into changes to the index. Must not have any side-effects. Called only on the executor instance or during verification flows. 16 | - `process` Optional. Returns metadata to be attached to operation `ack` messages. Cannot be async. Called only on the executor instance. 17 | 18 | Apply is expected to conform to one of the following signature: 19 | 20 | ```typescript 21 | // signature one 22 | (tx: ApplyTransactor, op: any, ack: Ack) => Promise 23 | 24 | // signature two 25 | Record Promise> 26 | ``` 27 | 28 | In the latter case, the `.op` attribute of the operation is used to lookup the correct apply function. 29 | 30 | Process is expected to conform to the following signature: 31 | 32 | ```typescript 33 | (op: any) => any|Promise 34 | ``` 35 | 36 | Every other export is expected to conform to the following signature: 37 | 38 | ```typescript 39 | (params: any, emit: (op: any) => void) => any|Promise 40 | ``` 41 | 42 | The passed APIs are defined hereafter. 
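As an illustration, a contract whose exports conform to these signatures might look like the sketch below. The `PING` operation and the `/last-ping` key are invented for this example.

```js
import { index } from 'contract'

// API method, signature: (params, emit) => any|Promise<any>
export function ping (params, emit) {
  emit({op: 'PING', note: params?.note})
}

// process, signature: (op) => any; called by the executor before apply()
// to attach non-deterministic metadata (here, a timestamp) to the ack
export function process (op) {
  return {receivedAt: Date.now()}
}

// apply, signature two: a record of per-operation handlers
export const apply = {
  async PING (tx, op, ack) {
    const prev = await index.get('/last-ping')
    const count = (prev?.value?.count || 0) + 1
    tx.put('/last-ping', {note: op.note, origin: ack.origin, count, receivedAt: ack.metadata?.receivedAt})
  }
}
```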
43 | 44 | ### `ApplyTransactor` 45 | 46 | This is the API of the first parameter passed into `apply()` 47 | 48 | ```typescript 49 | interface ApplyTransactor { 50 | get(key: string): any // gets the queued operation at the given key 51 | 52 | // general mutators 53 | put(key: string, value: any): void 54 | delete(key: string): void 55 | 56 | // system mutators 57 | addOplog(value: {pubkey: string}): void 58 | removeOplog(value: {pubkey: string}): void 59 | setContractSource(value: {code: string}): void 60 | } 61 | ``` 62 | 63 | > ℹ️ `put` and `delete` are not allowed to modify `.sys/*` keys directly, and instead must use the system mutators. This is for security reasons and to reduce the potential for malformed system entries. 64 | 65 | ### `Ack` 66 | 67 | This is the API of the third parameter passed into `apply()` 68 | 69 | ```javascript 70 | interface Ack { 71 | origin: string // the pubkey of the op author (hex string) 72 | ts: Date // the timestamp of the executor's ack (Date) 73 | metadata: any // any metadata provided by `process()`; may be undefined 74 | } 75 | ``` 76 | 77 | ## Standard Modules 78 | 79 | ### `assert` 80 | 81 | ```typescript 82 | import ok, * as assert from 'assert' 83 | 84 | interface Assert { 85 | ok (value: any, message: string): void 86 | deepEqual (v1: any, v2: any, message: string): void 87 | doesNotMatch (str: string, regex: RegExp, message: string): void 88 | equal (v1: any, v2: any, message: string): void 89 | fail (message: string): void 90 | match (str: string, regex: RegExp, message: string): void 91 | notDeepEqual (v1: any, v2: any, message: string): void 92 | notEqual (v1: any, v2: any, message: string): void 93 | } 94 | ``` 95 | 96 | ### `contract` 97 | 98 | ```typescript 99 | import { index, oplog, isWriter, listOplogs } from 'contract' 100 | 101 | type listOplog = () => ContractOplog[] 102 | type isWriter = boolean 103 | 104 | interface ContractIndex { 105 | list (prefix: string, opts: any): Promise 106 | get (key: string): Promise 107 | } 108 | 109 | interface ContractIndexEntry { 110 | key: string 111 | value: any 112 | } 113 | 114 | interface ContractOplog { 115 | getLength (): Promise 116 | get (seq: number): Promise 117 | } 118 | ``` 119 | 120 | ### `util` 121 | 122 | ```typescript 123 | import { genUUID } from 'util' 124 | 125 | type genUUID = () => string 126 | ``` 127 | 128 | ## Globals 129 | 130 | The environment includes all items listed in https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects except for the following: 131 | 132 | - `eval` 133 | - `Atomics` 134 | - `WebAssembly` 135 | - `Function` 136 | - `AsyncFunction` 137 | -------------------------------------------------------------------------------- /docs/whitepaper-preamble.tex: -------------------------------------------------------------------------------- 1 | \setlength{\parindent}{20pt} 2 | \newcommand{\hideFromPandoc}[1]{#1} 3 | \hideFromPandoc{ 4 | \let\Begin\begin 5 | \let\End\end 6 | } 7 | -------------------------------------------------------------------------------- /docs/whitepaper.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pfrazee/vitra/29f362f6fce595057cff867bcd2714c481dd1a2c/docs/whitepaper.pdf -------------------------------------------------------------------------------- /examples/counter.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Counter 3 | * 4 | * This contract maintains a singe numeric value which can 
only be incremented. 5 | */ 6 | 7 | import { index } from 'contract' 8 | 9 | // database api 10 | // = 11 | 12 | export async function get () { 13 | const entry = await index.get(`/counter`) 14 | return Number(entry?.value || 0) 15 | } 16 | 17 | export function increment (_, emit) { 18 | emit({op: 'INCREMENT'}) 19 | } 20 | 21 | // transaction handler 22 | // = 23 | 24 | export const apply = { 25 | async INCREMENT (tx, op) { 26 | const current = await get() 27 | tx.put(`/counter`, current + 1) 28 | } 29 | } -------------------------------------------------------------------------------- /examples/kv-store.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Key/Value store with admin 3 | * 4 | * This contract is a simple key/value store with an appointed "admin" who can add or remove participants. 5 | */ 6 | 7 | import assert from 'assert' 8 | import { index, isWriter } from 'contract' 9 | 10 | // database api 11 | // = 12 | 13 | export function get ({key}) { 14 | assert(typeof key === 'string', 'Key must be a string') 15 | if (!key.startsWith('/')) key = `/${key}` 16 | return index.get(`/values${key}`) 17 | } 18 | 19 | export function list ({prefix}) { 20 | assert(typeof prefix === 'string', 'Prefix must be a string') 21 | if (!prefix.startsWith('/')) prefix = `/${prefix}` 22 | return index.list(`/values${prefix}`) 23 | } 24 | 25 | export function put ({key, value}, emit) { 26 | assert(isWriter, 'Must be a writer') 27 | assert(typeof key === 'string', 'Key must be a string') 28 | assert(typeof value !== 'undefined', 'Value cannot be undefined') 29 | emit({op: 'PUT', key, value}) 30 | } 31 | 32 | export function del ({key}, emit) { 33 | assert(isWriter, 'Must be a writer') 34 | assert(typeof key === 'string', 'Key must be a string') 35 | emit({op: 'DEL', key}) 36 | } 37 | 38 | export function getAdmin () { 39 | return index.get('/admin') 40 | } 41 | 42 | export function setAdmin ({pubkey}, emit) { 43 | assert(isWriter, 'Must be a writer') 44 | assert(typeof pubkey === 'string', 'Pubkey must be a string') 45 | assert(pubkey.length === 64, 'Pubkey must be 64 characters long') 46 | emit({op: 'SET_ADMIN', pubkey}) 47 | } 48 | 49 | export function addParticipant ({pubkey}, emit) { 50 | assert(isWriter, 'Must be a writer') 51 | assert(typeof pubkey === 'string', 'Pubkey must be a string') 52 | assert(pubkey.length === 64, 'Pubkey must be 64 characters long') 53 | emit({op: 'ADD_PARTICIPANT', pubkey}) 54 | } 55 | 56 | export function removeParticipant ({pubkey}, emit) { 57 | assert(isWriter, 'Must be a writer') 58 | assert(typeof pubkey === 'string', 'Pubkey must be a string') 59 | assert(pubkey.length === 64, 'Pubkey must be 64 characters long') 60 | emit({op: 'REMOVE_PARTICIPANT', pubkey}) 61 | } 62 | 63 | // transaction handler 64 | // = 65 | 66 | export const apply = { 67 | PUT (tx, op) { 68 | assert(typeof op.key === 'string') 69 | assert(typeof op.value !== 'undefined') 70 | if (!op.key.startsWith('/')) op.key = `/${op.key}` 71 | tx.put(`/values${op.key}`, op.value) 72 | }, 73 | 74 | DEL (tx, op) { 75 | assert(typeof op.key === 'string') 76 | if (!op.key.startsWith('/')) op.key = `/${op.key}` 77 | tx.delete(`/values${op.key}`) 78 | }, 79 | 80 | async SET_ADMIN (tx, op, ack) { 81 | assert(typeof op.pubkey === 'string', 'Pubkey must be a string') 82 | assert(op.pubkey.length === 64, 'Pubkey must be 64 characters long') 83 | const adminEntry = await index.get('/admin') 84 | assert(!adminEntry || adminEntry.value.pubkey === ack.origin, 'Must 
be the admin to set the admin') 85 | tx.put('/admin', {pubkey: op.pubkey}) 86 | }, 87 | 88 | async ADD_PARTICIPANT (tx, op, ack) { 89 | assert(typeof op.pubkey === 'string', 'Pubkey must be a string') 90 | assert(op.pubkey.length === 64, 'Pubkey must be 64 characters long') 91 | const adminEntry = await index.get('/admin') 92 | assert(adminEntry?.value.pubkey === ack.origin, 'Must be the admin to modify participants') 93 | tx.addOplog({pubkey: op.pubkey}) 94 | }, 95 | 96 | async REMOVE_PARTICIPANT (tx, op, ack) { 97 | assert(typeof op.pubkey === 'string', 'Pubkey must be a string') 98 | assert(op.pubkey.length === 64, 'Pubkey must be 64 characters long') 99 | const adminEntry = await index.get('/admin') 100 | assert(adminEntry?.value.pubkey === ack.origin, 'Must be the admin to modify participants') 101 | tx.removeOplog({pubkey: op.pubkey}) 102 | } 103 | } -------------------------------------------------------------------------------- /examples/membership-voting.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Membership voting 3 | * 4 | * This example contract allows participants to be added or removed, but only if 50% of the current participants agree. 5 | */ 6 | 7 | import assert from 'assert' 8 | import { genUUID } from 'util' 9 | import { index, listOplogs, isWriter } from 'contract' 10 | 11 | export function listProposals () { 12 | return index.list('/proposals') 13 | } 14 | 15 | export function getProposal ({propId}) { 16 | assert(typeof propId === 'string') 17 | return index.get(`/proposals/${propId}`) 18 | } 19 | 20 | export function propose ({action, candidate}, emit) { 21 | assert(isWriter) 22 | assert(['add', 'remove'].includes(action)) 23 | assert(typeof candidate === 'string') 24 | assert(candidate.length === 64) 25 | const propId = genUUID() 26 | emit({ 27 | op: 'PROPOSE', 28 | propId, 29 | action, 30 | candidate 31 | }) 32 | return {propId} 33 | } 34 | 35 | export function vote ({propId, vote}, emit) { 36 | assert(isWriter) 37 | assert(typeof propId === 'string') 38 | assert(['yes', 'no'].includes(vote)) 39 | emit({op: 'VOTE', propId, vote}) 40 | } 41 | 42 | export const apply = { 43 | async PROPOSE (tx, op, ack) { 44 | assert(typeof op.propId === 'string') 45 | assert(['add', 'remove'].includes(op.action)) 46 | assert(typeof op.candidate === 'string') 47 | assert(op.candidate.length === 64) 48 | 49 | let proposal = await index.get(`/proposals/${op.propId}`) 50 | assert(!proposal, 'Duplicate proposition ID') 51 | 52 | proposal = { 53 | propId: op.propId, 54 | action: op.action, 55 | candidate: op.candidate, 56 | author: ack.origin, 57 | status: 'voting', 58 | votes: [{vote: 'yes', author: ack.origin}], 59 | ts: ack.ts 60 | } 61 | 62 | if (listOplogs().length <= 2) { 63 | proposal.status = 'accepted' 64 | enactProposal(tx, proposal) 65 | } 66 | 67 | tx.put(`/proposals/${op.propId}`, proposal) 68 | }, 69 | 70 | async VOTE (tx, op, ack) { 71 | const majority = Math.floor(listOplogs().length / 2) 72 | assert(typeof op.propId === 'string') 73 | assert(['yes', 'no'].includes(op.vote)) 74 | 75 | const proposal = await index.get(`/proposals/${op.propId}`) 76 | assert(proposal, 'Proposal does not exist') 77 | assert(proposal.status === 'voting', 'Proposal no longer in voting period') 78 | 79 | const existingVote = proposal.votes.find(v => v.author === ack.origin) 80 | if (existingVote) { 81 | existingVote.vote = op.vote 82 | } else { 83 | proposal.votes.push({vote: op.vote, author: ack.origin}) 84 | } 85 | 86 | const numYesVotes = 
proposal.votes.reduce((v, acc) => acc + (v.vote === 'yes' ? 1 : 0), 0) 87 | const numNoVotes = proposal.votes.reduce((v, acc) => acc + (v.vote === 'no' ? 1 : 0), 0) 88 | 89 | if (numYesVotes >= majority) { 90 | enactProposal(tx, proposal) 91 | proposal.status = 'accepted' 92 | } else if (numNoVotes >= majority) { 93 | proposal.status = 'rejected' 94 | } 95 | tx.put(`/proposals/${proposal.propId}`, proposal) 96 | } 97 | } 98 | 99 | function enactProposal (tx, proposal) { 100 | if (proposal.action === 'add') { 101 | tx.addOplog({pubkey: proposal.candidate}) 102 | } else if (proposal.action === 'remove') { 103 | tx.removeOplog({pubkey: proposal.candidate}) 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "vitra", 3 | "version": "0.1.3", 4 | "description": "A framework for cooperatively-run databases using smart contracts.", 5 | "main": "dist/index.js", 6 | "type": "module", 7 | "bin": { 8 | "vitra": "./bin.js" 9 | }, 10 | "scripts": { 11 | "build": "tsc --project tsconfig.json", 12 | "docs": "typedoc --readme ./docs/api-reference-readme.md --out ./generated-docs src/index.ts --excludedFunctionOrMethod \"^_\" && gh-pages -d ./generated-docs && rm -Rf ./generated-docs", 13 | "pdf": "pandoc docs/whitepaper.md --pdf-engine=xelatex -o docs/whitepaper.pdf -H docs/whitepaper-preamble.tex --citeproc --csl docs/whitepaper.csl", 14 | "test": "ava -s test/*.ts" 15 | }, 16 | "ava": { 17 | "extensions": { 18 | "ts": "module" 19 | }, 20 | "nonSemVerExperiments": { 21 | "configurableModuleFormat": true 22 | }, 23 | "nodeArguments": [ 24 | "--loader=ts-node/esm" 25 | ] 26 | }, 27 | "author": "Paul Frazee ", 28 | "license": "MIT", 29 | "dependencies": { 30 | "@babel/parser": "^7.16.8", 31 | "@hyperswarm/dht": "^5.0.12", 32 | "chalk": "^4.1.2", 33 | "compact-encoding": "^2.5.1", 34 | "confine-sandbox": "^0.3.3", 35 | "core-js-pure": "^3.20.1", 36 | "corestore": "^6.0.1-alpha.9", 37 | "event-iterator": "^2.0.0", 38 | "frame-stream": "^3.0.0", 39 | "hyperbee": "^1.7.0", 40 | "hypercore": "^10.0.0-alpha.19", 41 | "hyperswarm": "^3.0.1", 42 | "jsonrpc-lite": "^2.2.0", 43 | "lodash.isequal": "^4.5.0", 44 | "minimist": "^1.2.5", 45 | "msgpackr": "^1.5.2", 46 | "pump": "^3.0.0", 47 | "random-access-file": "^2.2.0", 48 | "random-access-memory": "^4.0.0", 49 | "stream-to-it": "^0.2.4", 50 | "streamx": "^2.12.0", 51 | "vitra-confine-runtime": "^0.5.1", 52 | "vorpal": "^1.12.0" 53 | }, 54 | "devDependencies": { 55 | "@types/lodash.isequal": "^4.5.5", 56 | "@types/minimist": "^1.2.2", 57 | "@types/node": "^16.6.2", 58 | "@types/pump": "^1.1.1", 59 | "@types/streamx": "^2.9.1", 60 | "ava": "^3.15.0", 61 | "gh-pages": "^3.2.3", 62 | "ts-node": "^10.2.1", 63 | "typedoc": "^0.22.10", 64 | "typedoc-plugin-reference-excluder": "^1.0.0", 65 | "typescript": "^4.3.5" 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /src/core/database.ts: -------------------------------------------------------------------------------- 1 | import { Resource } from '../util/resource.js' 2 | import { ResourcesManager } from '../util/resources-manager.js' 3 | import { UsageManager } from '../util/usage-manager.js' 4 | import { timeout } from '../util/async.js' 5 | import * as assert from 'assert' 6 | import Hyperswarm from 'hyperswarm' 7 | import { 8 | DatabaseOpts, 9 | DatabaseCreateOpts, 10 | SandboxDatabaseCreateOpts, 11 | 
ExecutorBehavior, 12 | IndexBatchEntry, 13 | OperationResults, 14 | ApplyActions, 15 | IndexHistoryEntry, 16 | Key, 17 | keyToStr, 18 | keyToBuf 19 | } from '../types.js' 20 | import { 21 | CONTRACT_SOURCE_PATH, 22 | genParticipantPath, 23 | PARTICIPANT_PATH_PREFIX, 24 | GENESIS_ACK_PATH, 25 | genAckPath, 26 | InputSchema, 27 | AckSchema 28 | } from '../schemas.js' 29 | import { beekeyToPath } from '../util/hyper.js' 30 | import { Storage, StorageInMemory } from './storage.js' 31 | import { Operation, Transaction } from './transactions.js' 32 | import { IndexLog, OpLog } from './log.js' 33 | import { ContractExecutor } from './executor.js' 34 | import { TestContractExecutor } from './testing/executor.js' 35 | import { getOrCreateLocalDHT } from './testing/local-dht.js' 36 | import { ContractMonitor } from './monitor.js' 37 | import { VM } from './vm.js' 38 | import lock from '../util/lock.js' 39 | 40 | export class Database extends Resource { 41 | storage: Storage 42 | index: IndexLog 43 | oplogs: ResourcesManager = new ResourcesManager() 44 | vm: VM|undefined 45 | vmManager = new UsageManager() 46 | executor: ContractExecutor|undefined 47 | 48 | private _swarm: Hyperswarm|undefined = undefined 49 | private _lockPrefix = '' 50 | private _localOplogOverride: OpLog|undefined 51 | private _indexWatcher: AsyncGenerator|undefined 52 | 53 | constructor (storage: Storage, index: IndexLog) { 54 | super() 55 | this.storage = storage 56 | this.index = index 57 | this._lockPrefix = keyToStr(this.pubkey) 58 | 59 | this.oplogs.on('added', oplog => { 60 | if (this._swarm) this._swarm.join(oplog.core.discoveryKey) 61 | }) 62 | this.oplogs.on('removed', oplog => { 63 | if (this._swarm) this._swarm.leave(oplog.core.discoveryKey) 64 | }) 65 | } 66 | 67 | get pubkey (): Buffer { 68 | return this.index.pubkey 69 | } 70 | 71 | get isExecutor (): boolean { 72 | return this.index.writable 73 | } 74 | 75 | get localOplog (): OpLog|undefined { 76 | return this._localOplogOverride || this.oplogs.find(oplog => oplog.writable) 77 | } 78 | 79 | async setLocalOplog (log: OpLog|undefined) { 80 | if (log) assert.ok(log.writable, 'Oplog must be writable') 81 | this._localOplogOverride = log 82 | await this._restartVM() 83 | } 84 | 85 | get isParticipant (): boolean { 86 | return !!this.localOplog 87 | } 88 | 89 | isOplogParticipant (oplog: OpLog|Buffer): boolean { 90 | return Buffer.isBuffer(oplog) ? 
this.oplogs.has(l => l.pubkey.equals(oplog)) : this.oplogs.has(oplog) 91 | } 92 | 93 | getParticipant (pubkey: Buffer): OpLog|undefined { 94 | return this.oplogs.find(l => l.pubkey.equals(pubkey)) 95 | } 96 | 97 | get isSwarming (): boolean { 98 | return !!this._swarm 99 | } 100 | 101 | get numPeers (): number { 102 | return this._swarm?.peers.size || 0 103 | } 104 | 105 | lock (name: string): Promise<() => void> { 106 | return lock(`${this._lockPrefix}:${name}`) 107 | } 108 | 109 | [Symbol.for('nodejs.util.inspect.custom')] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 110 | let indent = '' 111 | if (opts.indentationLvl) { 112 | while (indent.length < opts.indentationLvl) indent += ' ' 113 | } 114 | return this.constructor.name + '(\n' + 115 | indent + ' key: ' + opts.stylize(keyToStr(this.pubkey), 'string') + '\n' + 116 | indent + ' opened: ' + opts.stylize(this.opened, 'boolean') + '\n' + 117 | indent + ' isExecutor: ' + opts.stylize(this.isExecutor, 'boolean') + '\n' + 118 | indent + ' isParticipant: ' + opts.stylize(this.isParticipant, 'boolean') + '\n' + 119 | indent + ')' 120 | } 121 | 122 | // management 123 | // = 124 | 125 | static async create (storage: Storage|string, opts: DatabaseCreateOpts): Promise { 126 | if (typeof storage === 'string') storage = new Storage(storage) 127 | assert.ok(storage instanceof Storage, 'storage is required') 128 | assert.equal(typeof opts?.contract?.source, 'string', 'opts.code.source is required') 129 | 130 | await storage.open() 131 | const index = await IndexLog.create(storage) 132 | const db = new Database(storage, index) 133 | db.oplogs.add(await OpLog.create(storage)) // executor oplog 134 | await db._writeInitBlocks(opts?.contract?.source) 135 | await db.open(opts) 136 | 137 | return db 138 | } 139 | 140 | static async load (storage: Storage|string, pubkey: Key, opts?: DatabaseOpts): Promise { 141 | const _storage: Storage = (typeof storage === 'string') ? 
new Storage(storage) : storage 142 | assert.ok(_storage instanceof Storage, '_storage is required') 143 | pubkey = keyToBuf(pubkey) // keyToBuf() will validate the key 144 | 145 | await _storage.open() 146 | const indexCore = await _storage.getHypercore(pubkey) 147 | const index = new IndexLog(indexCore) 148 | const db = new Database(_storage, index) 149 | const oplogs = (await timeout(5e3, db.index.listOplogs()).catch(e => [])) as {pubkey: Key}[] 150 | await Promise.all(oplogs.map(async (oplog) => { 151 | db.oplogs.add(new OpLog(await _storage.getHypercore(oplog.pubkey))) 152 | })) 153 | await db.open(opts) 154 | 155 | return db 156 | } 157 | 158 | static async createSandbox (opts: SandboxDatabaseCreateOpts): Promise { 159 | assert.ok(opts.from || opts.contract?.source, 'Must specify {from} or {contract}') 160 | 161 | const storage = new StorageInMemory() 162 | await storage.open() 163 | 164 | const index = await IndexLog.create(storage) 165 | if (opts.from) { 166 | await opts.from.index._dangerousCopyInto(index) 167 | } 168 | 169 | const db = new Database(storage, index) 170 | 171 | if (opts.from) { 172 | for (const fromOplog of opts.from.oplogs) { 173 | const oplog = await OpLog.create(storage) 174 | await fromOplog._dangerousCopyInto(oplog) 175 | db.oplogs.add(oplog) 176 | } 177 | } else { 178 | db.oplogs.add(await OpLog.create(storage)) 179 | } 180 | 181 | if (!opts.from) { 182 | await db._writeInitBlocks(opts.contract?.source) 183 | } else if (opts.contract?.source) { 184 | await db._onContractCodeChange(opts.contract.source) 185 | } 186 | 187 | await db.open() 188 | return db 189 | } 190 | 191 | async _open (opts?: DatabaseOpts|DatabaseCreateOpts) { 192 | if (this.isExecutor) { 193 | if (typeof opts?.executorBehavior === 'number') { 194 | if (opts.executorBehavior === ExecutorBehavior.DISABLED) { 195 | // don't instantiate 196 | } else { 197 | this.executor = new TestContractExecutor(this, opts.executorBehavior) 198 | await this.executor.open() 199 | } 200 | } else { 201 | this.executor = new ContractExecutor(this) 202 | await this.executor.open() 203 | } 204 | } else { 205 | this._watchOplogs() 206 | } 207 | if (this.executor) { 208 | // start the VM next tick so that error handlers can be registered 209 | process.nextTick(async () => { 210 | if (this.closing || this.closed) return 211 | try { 212 | await this._startVM() 213 | } catch (e) { 214 | this.emit('error', e) 215 | } 216 | }) 217 | } 218 | } 219 | 220 | async _close () { 221 | this.executor?.close() 222 | this.vm?.close() 223 | this._indexWatcher?.return(undefined) 224 | await Promise.all([ 225 | this.index.close(), 226 | this.oplogs.removeAll() 227 | ]) 228 | await this._swarm?.destroy() 229 | await this.storage.close() 230 | } 231 | 232 | private async _watchOplogs () { 233 | this._indexWatcher = this.index.history({gt: this.index.length, live: true}) 234 | for await (const entry of this._indexWatcher) { 235 | if (entry.path.startsWith(PARTICIPANT_PATH_PREFIX)) { 236 | const pubkey = (entry.value as InputSchema).pubkey 237 | const active = (entry.value as InputSchema).active 238 | if (entry.type === 'put' && active) { 239 | if (!this.oplogs.has(o => o.pubkey.equals(pubkey))) { 240 | this.oplogs.add(new OpLog(await this.storage.getHypercore(pubkey))) 241 | } 242 | } else if (entry.type === 'del' || (entry.type === 'put' && !active)) { 243 | const i = this.oplogs.findIndex(o => o.pubkey.equals(pubkey)) 244 | if (i !== -1) { 245 | this.oplogs.removeAt(i) 246 | } 247 | } 248 | } 249 | } 250 | } 251 | 252 | // networking 253 
| // = 254 | 255 | async swarm (opts: {local: boolean} = {local: false}) { 256 | if (this._swarm) return 257 | this._swarm = opts.local ? new Hyperswarm(await getOrCreateLocalDHT()) : new Hyperswarm() 258 | this._swarm.on('connection', (connection: any) => { 259 | this.storage.corestore.replicate(connection) 260 | }) 261 | this._swarm.join(this.index.core.discoveryKey as Buffer) 262 | for (const oplog of this.oplogs) this._swarm.join(oplog.core.discoveryKey as Buffer) 263 | } 264 | 265 | async unswarm () { 266 | if (!this._swarm) return 267 | await this._swarm.destroy() 268 | this._swarm = undefined 269 | } 270 | 271 | async syncLatest () { 272 | if (!this._swarm) throw new Error(`Can't sync latest: not connected to the swarm`) 273 | await this._swarm?.flush() 274 | await Promise.all([ 275 | this.index.syncLatest(), 276 | ...this.oplogs.map(oplog => oplog.syncLatest()) 277 | ]) 278 | } 279 | 280 | async syncFullHistory () { 281 | if (!this._swarm) throw new Error(`Can't sync history: not connected to the swarm`) 282 | await this._swarm?.flush() 283 | await Promise.all([ 284 | this.index.syncFullHistory(), 285 | ...this.oplogs.map(oplog => oplog.syncFullHistory()) 286 | ]) 287 | } 288 | 289 | async whenConnected () { 290 | do { 291 | await this.syncLatest() 292 | } while (this._swarm?.peers.size === 0) 293 | } 294 | 295 | // transactions 296 | // = 297 | 298 | async call (methodName: string, params: Record): Promise { 299 | if (methodName === 'process' || methodName === 'apply') { 300 | throw new Error(`Cannot call "${methodName}" directly`) 301 | } 302 | await this._startVM() 303 | return await this.vmManager.use(async () => { 304 | if (this.vm) { 305 | const res = await this.vm.contractCall(methodName, params) 306 | let ops: Operation[] = [] 307 | if (res.ops?.length) { 308 | if (!this.localOplog) { 309 | throw new Error('Unable to execute transaction: not a writer') 310 | } 311 | ops = await this.localOplog.dangerousAppend(res.ops) 312 | } 313 | return new Transaction(this, methodName, params, res.result, ops) 314 | } else { 315 | throw new Error('Contract VM not instantiated') 316 | } 317 | }) 318 | } 319 | 320 | // monitoring 321 | // = 322 | 323 | async verify () { 324 | const monitor = new ContractMonitor(this) 325 | await monitor.open() 326 | const results = await monitor.verify() 327 | await monitor.close() 328 | return results 329 | } 330 | 331 | async monitor (): Promise { 332 | const monitor = new ContractMonitor(this) 333 | await monitor.open() 334 | monitor.watch() 335 | return monitor 336 | } 337 | 338 | // vm 339 | // = 340 | 341 | async _readContractCode (): Promise { 342 | const src = await this.index.get(CONTRACT_SOURCE_PATH) 343 | if (!src) throw new Error('No contract sourcecode found') 344 | if (Buffer.isBuffer(src.value)) return src.value.toString('utf8') 345 | if (typeof src.value === 'string') return src.value 346 | throw new Error(`Invalid contract sourcecode entry; must be a string or a buffer containing utf-8.`) 347 | } 348 | 349 | async _startVM () { 350 | const release = await this.lock('_startVM') 351 | try { 352 | if (this.vm) return 353 | await this.vmManager.pause() 354 | const source = await this._readContractCode() 355 | this.vm = new VM(this, source) 356 | this.vm.on('error', (error: any) => this.emit('error', error)) 357 | await this.vm.open() 358 | this.vmManager.unpause() 359 | } finally { 360 | release() 361 | } 362 | } 363 | 364 | private async _restartVM () { 365 | if (!this.vm) return 366 | await this.vmManager.pause() 367 | await this.vm.close() 
368 | this.vm = undefined 369 | this.vmManager.unpause() 370 | await this._startVM() 371 | } 372 | 373 | async _onContractCodeChange (source: string) { 374 | await this.vmManager.pause() 375 | if (this.vm) await this.vm.close() 376 | this.vm = new VM(this, source) 377 | this.vm.on('error', (error: any) => this.emit('error', error)) 378 | await this.vm.open() 379 | this.vmManager.unpause() 380 | } 381 | 382 | // execution 383 | // = 384 | 385 | private async _writeInitBlocks (source?: string) { 386 | assert.ok(this.index.length === 0, 'Cannot write init blocks: index log already has entries') 387 | assert.ok(typeof source === 'string', 'Contract source must be provided') 388 | assert.ok(this.oplogs.length > 0, 'Oplogs must be created before writing init blocks') 389 | const batch: IndexBatchEntry[] = [ 390 | {type: 'put', path: CONTRACT_SOURCE_PATH, value: source} 391 | ] 392 | for (const oplog of this.oplogs) { 393 | const pubkey = keyToBuf(oplog.pubkey) 394 | batch.push({ 395 | type: 'put', 396 | path: genParticipantPath(oplog.pubkey), 397 | value: {pubkey, active: true} 398 | }) 399 | } 400 | batch.push({type: 'put', path: GENESIS_ACK_PATH, value: {}}) 401 | await this.index.dangerousBatch(batch) 402 | } 403 | 404 | _mapApplyActionsToBatch (actions: ApplyActions): IndexBatchEntry[] { 405 | // NOTE This function is called by the executor *and* the monitor and therefore 406 | // it must be a pure function. Any outside state will cause validation to fail. 407 | // (It might be a good idea to move this out of the contract class.) 408 | // -prf 409 | return Object.entries(actions) 410 | .map(([path, action]): IndexBatchEntry => { 411 | if (path.startsWith('/.sys/')) { 412 | if (action.type === 'addOplog') { 413 | const pubkeyBuf = keyToBuf(action.value.pubkey) 414 | return {type: 'put', path: genParticipantPath(action.value.pubkey), value: {pubkey: pubkeyBuf, active: true}} 415 | } else if (action.type === 'removeOplog') { 416 | const pubkeyBuf = keyToBuf(action.value.pubkey) 417 | return {type: 'put', path: genParticipantPath(action.value.pubkey), value: {pubkey: pubkeyBuf, active: false}} 418 | } else if (action.type === 'setContractSource') { 419 | return {type: 'put', path: CONTRACT_SOURCE_PATH, value: action.value.code} 420 | } 421 | } 422 | return {path, type: action.type, value: action.value} 423 | }) 424 | .sort((a, b) => a.path.localeCompare(b.path)) 425 | } 426 | 427 | async _executeApplyBatch (batch: IndexBatchEntry[]): Promise { 428 | if (this.closing || this.closed) return 429 | 430 | // complete writes 431 | await this.index.dangerousBatch(batch) 432 | 433 | // react to config changes 434 | for (const batchEntry of batch) { 435 | if (batchEntry.path === CONTRACT_SOURCE_PATH) { 436 | await this._onContractCodeChange(batchEntry.value) 437 | } else if (batchEntry.path.startsWith(PARTICIPANT_PATH_PREFIX)) { 438 | await this._onOplogChange(batchEntry.value) 439 | } 440 | } 441 | } 442 | 443 | async _onOplogChange (entry: InputSchema): Promise { 444 | const pubkeyBuf = entry.pubkey 445 | const oplogIndex = this.oplogs.findIndex(oplog => oplog.pubkey.equals(pubkeyBuf)) 446 | if (oplogIndex === -1 && entry.active) { 447 | await this.oplogs.add(new OpLog(await this.storage.getHypercore(pubkeyBuf))) 448 | } else if (oplogIndex !== -1 && !entry.active) { 449 | await this.oplogs.removeAt(oplogIndex) 450 | } 451 | } 452 | 453 | // helpers 454 | // = 455 | 456 | async _fetchOpAck (op: Operation): Promise { 457 | if (this.closing || this.closed) return 458 | const pubkey = op.oplog.pubkey 459 
| const seq = op.proof.blockSeq 460 | const ack = (await this.index.get(genAckPath(pubkey, seq)))?.value 461 | return ack ? (ack as AckSchema) : undefined 462 | } 463 | 464 | async _fetchOpResults (op: Operation): Promise { 465 | if (this.closing || this.closed) return 466 | const pubkey = op.oplog.pubkey 467 | const seq = op.proof.blockSeq 468 | const ackEntry = await this.index.get(genAckPath(pubkey, seq)) 469 | if (ackEntry && ackEntry.seq) { 470 | const results: OperationResults = Object.assign(ackEntry.value, {changes: []}) 471 | if (ackEntry.value.success) { 472 | for (let i = ackEntry.seq + 1; i <= ackEntry.seq + results.numChanges; i++) { 473 | const node = await this.index.bee.getBlock(i, {}) 474 | const nodeObj = node.final() 475 | results.changes.push({ 476 | type: node.isDeletion() ? 'del' : 'put', 477 | seq: nodeObj.seq, 478 | path: `/${beekeyToPath(nodeObj.key)}`, 479 | value: nodeObj.value 480 | }) 481 | } 482 | } 483 | return results 484 | } 485 | } 486 | } 487 | -------------------------------------------------------------------------------- /src/core/errors.ts: -------------------------------------------------------------------------------- 1 | export class InvalidBlockInclusionProofError extends Error { 2 | name: string 3 | constructor (message: string) { 4 | super(message) 5 | this.name = this.constructor.name 6 | } 7 | } 8 | 9 | export class BlocksNotAvailableError extends Error { 10 | name: string 11 | constructor (public logPubkey: Buffer, public neededSeq: number, public availableSeq: number) { 12 | super(`Not enough blocks have been synced to verify inclusion. Needed: ${neededSeq}. Available: ${availableSeq}. Log: ${logPubkey.toString('hex')}`) 13 | this.name = this.constructor.name 14 | } 15 | } 16 | 17 | export class ContractParseError extends Error { 18 | name: string 19 | constructor (public parseErrorName: string, public parseErrorMessage: string) { 20 | super(`The contract failed to compile with "${parseErrorName}: ${parseErrorMessage}"`) 21 | this.name = this.constructor.name 22 | } 23 | } 24 | 25 | export class ContractRuntimeError extends Error { 26 | name: string 27 | constructor (public runtimeErrorName: string, public runtimeErrorMessage: string) { 28 | super(`The contract failed to execute with "${runtimeErrorName}: ${runtimeErrorMessage}"`) 29 | this.name = this.constructor.name 30 | } 31 | 32 | static isa (name: string) { 33 | return ['ReferenceError', 'InternalError', 'RangeError'].includes(name) 34 | } 35 | } -------------------------------------------------------------------------------- /src/core/executor.ts: -------------------------------------------------------------------------------- 1 | import { EventIterator } from 'event-iterator' 2 | import { Resource } from '../util/resource.js' 3 | // @ts-ignore no types available -prf 4 | import assert from 'assert' 5 | // @ts-ignore no types available -prf 6 | import AggregateError from 'core-js-pure/actual/aggregate-error.js' 7 | import * as msgpackr from 'msgpackr' 8 | import { IndexBatchEntry, keyToStr } from '../types.js' 9 | import { AckSchema, ACK_PATH_PREFIX, genAckPath } from '../schemas.js' 10 | import { Database } from './database.js' 11 | import { OpLog, ReadStream } from './log.js' 12 | import { ContractRuntimeError } from './errors.js' 13 | 14 | const OPLOG_WATCH_RETRY_TIMEOUT = 5e3 15 | 16 | interface WatchEvent { 17 | event: 'added'|'removed'|'op-executed' 18 | oplog: OpLog 19 | seq?: number 20 | op?: any 21 | } 22 | 23 | export class ContractExecutor extends Resource { 24 | protected 
_oplogsWatcher: AsyncGenerator<[string, OpLog]>|undefined 25 | protected _lastExecutedSeqs: Map = new Map() 26 | protected _oplogReadStreams: Map = new Map() 27 | constructor (public db: Database) { 28 | super() 29 | } 30 | 31 | // public api 32 | // = 33 | 34 | [Symbol.for('nodejs.util.inspect.custom')] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 35 | let indent = '' 36 | if (opts.indentationLvl) { 37 | while (indent.length < opts.indentationLvl) indent += ' ' 38 | } 39 | return this.constructor.name + '(\n' + 40 | indent + ' key: ' + opts.stylize(keyToStr(this.db.pubkey), 'string') + '\n' + 41 | indent + ' opened: ' + opts.stylize(this.opened, 'boolean') + '\n' + 42 | indent + ')' 43 | } 44 | 45 | async _open () { 46 | if (!this.db.isExecutor) { 47 | throw new Error('Not the executor') 48 | } 49 | for (const log of this.db.oplogs) { 50 | this.watchOpLog(log) 51 | } 52 | ;(async () => { 53 | this._oplogsWatcher = this.db.oplogs.watch(false) 54 | for await (const [evt, log] of this._oplogsWatcher) { 55 | if (evt === 'added') this.watchOpLog(log) 56 | if (evt === 'removed') this.unwatchOpLog(log) 57 | } 58 | })() 59 | } 60 | 61 | async _close () { 62 | this._oplogsWatcher?.return(true) 63 | for (const readStream of this._oplogReadStreams.values()) { 64 | readStream.destroy() 65 | } 66 | } 67 | 68 | watch (): AsyncIterable { 69 | return new EventIterator(({push}) => { 70 | const onAdd = (oplog: OpLog) => push({event: 'added', oplog}) 71 | const onRemove = (oplog: OpLog) => push({event: 'removed', oplog}) 72 | const onOpExecuted = (oplog: OpLog, seq: number, op: any) => push({event: 'op-executed', oplog, seq, op}) 73 | this.db.oplogs.on('added', onAdd) 74 | this.db.oplogs.on('removed', onRemove) 75 | this.on('op-executed', onOpExecuted) 76 | return () => { 77 | this.db.oplogs.removeListener('added', onAdd) 78 | this.db.oplogs.removeListener('removed', onRemove) 79 | this.removeListener('op-executed', onOpExecuted) 80 | } 81 | }) 82 | } 83 | 84 | async sync () { 85 | await Promise.all(this.db.oplogs.map(oplog => oplog.core.update())) 86 | const state = this._captureLogSeqs() 87 | if (this._hasExecutedAllSeqs(state)) return 88 | for await (const evt of this.watch()) { 89 | const keystr = keyToStr(evt.oplog.pubkey) 90 | if (evt.event === 'removed') { 91 | state.delete(keystr) 92 | } 93 | if (this._hasExecutedAllSeqs(state)) return 94 | } 95 | } 96 | 97 | async watchOpLog (log: OpLog) { 98 | if (this.closing || this.closed) return 99 | const keystr = keyToStr(log.pubkey) 100 | const release = await this.db.lock(`watchOpLog:${keystr}`) 101 | try { 102 | if (this._oplogReadStreams.has(keystr)) return 103 | if (!this.db.isOplogParticipant(log)) return 104 | 105 | await this._readLastExecutedSeq(log) 106 | const start = this._getLastExecutedSeq(log) 107 | const s = log.createLogReadStream({start, live: true}) 108 | this._oplogReadStreams.set(keystr, s) 109 | 110 | s.on('data', (entry: {seq: number, value: any}) => this._executeOp(log, entry.seq, msgpackr.unpack(entry.value))) 111 | s.on('error', (err: any) => { 112 | this.db.emit('error', new AggregateError([err], `An error occurred while reading oplog ${keystr}`)) 113 | }) 114 | s.on('close', () => { 115 | this._oplogReadStreams.delete(keystr) 116 | if (!this.db.closing && !this.db.closed && this.db.isOplogParticipant(log)) { 117 | // try again 118 | setTimeout(() => { 119 | this.watchOpLog(log) 120 | }, OPLOG_WATCH_RETRY_TIMEOUT).unref() 121 | } 122 | }) 123 | } catch (e) { 124 | if (this.closing || this.closed) return // 
ignore 125 | throw e 126 | } finally { 127 | release() 128 | } 129 | } 130 | 131 | unwatchOpLog (log: OpLog) { 132 | const keystr = keyToStr(log.pubkey) 133 | this._lastExecutedSeqs.delete(keystr) 134 | const stream = this._oplogReadStreams.get(keystr) 135 | if (stream) { 136 | stream.destroy() 137 | this._oplogReadStreams.delete(keystr) 138 | } 139 | } 140 | 141 | // protected methods 142 | // = 143 | 144 | protected async _readLastExecutedSeq (oplog: OpLog) { 145 | let seq = -1 146 | const keystr = keyToStr(oplog.pubkey) 147 | const entries = await this.db.index.list(`${ACK_PATH_PREFIX}${keystr}`) 148 | for (const entry of entries) { 149 | seq = Math.max(Number(entry.name), seq) 150 | } 151 | if (seq !== -1) this._putLastExecutedSeq(oplog, seq) 152 | } 153 | 154 | protected _getLastExecutedSeq (oplog: OpLog, fallback = 0): number { 155 | const seq = this._lastExecutedSeqs.get(keyToStr(oplog.pubkey)) 156 | return typeof seq === 'number' ? seq : fallback 157 | } 158 | 159 | protected _putLastExecutedSeq (oplog: OpLog, seq: number) { 160 | this._lastExecutedSeqs.set(keyToStr(oplog.pubkey), seq) 161 | } 162 | 163 | protected _captureLogSeqs (): Map { 164 | const seqs = new Map() 165 | for (const log of this.db.oplogs) seqs.set(keyToStr(log.pubkey), log.length - 1) 166 | return seqs 167 | } 168 | 169 | protected _hasExecutedAllSeqs (seqs: Map): boolean { 170 | for (const [pubkey, seq] of seqs.entries()) { 171 | const executedSeq = this._lastExecutedSeqs.has(pubkey) ? (this._lastExecutedSeqs.get(pubkey) || 0) : -1 172 | if (executedSeq < seq) return false 173 | } 174 | return true 175 | } 176 | 177 | protected async _executeOp (log: OpLog, seq: number, opValue: any) { 178 | const assertStillOpen = () => { 179 | if (this.db.closing || this.db.closed) throw new DatabaseClosedError() 180 | } 181 | 182 | const last = this._getLastExecutedSeq(log, -1) 183 | if (last >= seq) return 184 | 185 | const release = await this.db.lock('_executeOp') 186 | try { 187 | assertStillOpen() 188 | if (!this.db.isOplogParticipant(log)) { 189 | console.error('Skipping op from non-participant') 190 | console.error(' Log:', log) 191 | console.error(' Op:', opValue) 192 | return 193 | } 194 | 195 | // create ack object 196 | const ack: AckSchema = { 197 | success: undefined, 198 | error: undefined, 199 | origin: keyToStr(log.pubkey), 200 | seq, 201 | ts: Date.now(), 202 | metadata: undefined, 203 | numChanges: 0 204 | } 205 | let applySuccess = undefined 206 | let batch: IndexBatchEntry[] = [] 207 | let applyError: any 208 | 209 | await this.db._startVM() 210 | await this.db.vmManager.use(async () => { 211 | assert(!!this.db.vm, 'Contract VM not initialized') 212 | 213 | // enter restricted mode 214 | await this.db.vm.restrict() 215 | assertStillOpen() 216 | 217 | // call process() if it exists 218 | let metadata = undefined 219 | try { 220 | const processRes = await this.db.vm.contractProcess(opValue) 221 | metadata = processRes.result 222 | } catch (e: any) { 223 | if (!e.toString().includes('Method not found: process') && e instanceof ContractRuntimeError) { 224 | this.db.emit('error', e) 225 | } 226 | } 227 | ack.metadata = metadata 228 | assertStillOpen() 229 | 230 | // call apply() 231 | try { 232 | const applyRes = await this.db.vm.contractApply(opValue, ack) 233 | batch = this.db._mapApplyActionsToBatch(applyRes.actions) 234 | applySuccess = true 235 | } catch (e: any) { 236 | if (e instanceof ContractRuntimeError) { 237 | this.db.emit('error', e) 238 | } 239 | applyError = e 240 | applySuccess = false 241 | } 
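// Descriptive note on the step below: at this point either `batch` holds the index
// changes proposed by the contract's apply() handler (applySuccess === true) or
// `applyError` records why the op failed. The ack record is then prepended to the
// batch and written to the index log, which is what lets a monitor replaying this
// oplog entry check the executor's result.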
242 | assertStillOpen() 243 | 244 | // leave restricted mode 245 | await this.db.vm.unrestrict() 246 | assertStillOpen() 247 | }) 248 | 249 | // write the result 250 | if (applySuccess) { 251 | ack.success = true 252 | ack.numChanges = batch.length 253 | } else { 254 | ack.success = false 255 | ack.error = applyError.toString() 256 | batch.length = 0 257 | } 258 | batch.unshift({ 259 | type: 'put', 260 | path: genAckPath(log.pubkey, seq), 261 | value: ack 262 | }) 263 | await this.db._executeApplyBatch(batch) 264 | this._putLastExecutedSeq(log, seq) 265 | 266 | this.emit('op-executed', log, seq, opValue) 267 | } catch (e: any) { 268 | if (e instanceof DatabaseClosedError) { 269 | // ignore, just need to abort handling 270 | } else { 271 | throw e 272 | } 273 | } finally { 274 | release() 275 | } 276 | } 277 | } 278 | 279 | class DatabaseClosedError extends Error {} -------------------------------------------------------------------------------- /src/core/fraud-proofs.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import util from 'util' 3 | import { BlockInclusionProof } from './inclusion-proofs.js' 4 | 5 | export class FraudProof extends Error { 6 | name: string 7 | constructor (message: string) { 8 | super(message) 9 | this.name = this.constructor.name 10 | } 11 | 12 | toJSON () { 13 | throw new Error('Must be overridden') 14 | } 15 | } 16 | 17 | export class LogForkFraudProof extends FraudProof { 18 | constructor (public logPubkey: Buffer, public forkNumber: number, public blockSeq: number, public rootHashAtBlock: Buffer, public rootHashSignature: Buffer) { 19 | super('The log created a fork using the truncate() API, which is not allowed in the union protocol') 20 | } 21 | 22 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 23 | let indent = '' 24 | if (opts.indentationLvl) { 25 | while (indent.length < opts.indentationLvl) indent += ' ' 26 | } 27 | return this.constructor.name + '(\n' + 28 | indent + ' The log created a fork using the truncate() API, which is not allowed in the union protocol.\n' + 29 | indent + ' logPubkey: ' + opts.stylize(this.logPubkey.toString('hex'), 'string') + '\n' + 30 | indent + ' forkNumber: ' + opts.stylize(this.forkNumber, 'number') + '\n' + 31 | indent + ' blockSeq: ' + opts.stylize(this.blockSeq, 'number') + '\n' + 32 | indent + ' rootHashAtBlock: ' + opts.stylize(this.rootHashAtBlock.toString('hex'), 'string') + '\n' + 33 | indent + ' rootHashSignature: ' + opts.stylize(this.rootHashSignature.toString('hex'), 'string') + '\n' + 34 | indent + ')' 35 | } 36 | 37 | toJSON () { 38 | return { 39 | vitraLogForkFraudProof: 1, 40 | logPubkey: this.logPubkey.toString('hex'), 41 | forkNumber: this.forkNumber, 42 | blockSeq: this.blockSeq, 43 | rootHashAtBlock: this.rootHashAtBlock.toString('hex'), 44 | rootHashSignature: this.rootHashSignature.toString('hex') 45 | } 46 | } 47 | 48 | static fromJSON (obj: any): LogForkFraudProof { 49 | assert(obj.vitraLogForkFraudProof >= 1, 'Invalid schema version') 50 | assert(typeof obj.logPubkey === 'string' && obj.logPubkey.length === 64, 'Invalid logPubkey') 51 | assert(typeof obj.forkNumber === 'number', 'Invalid forkNumber') 52 | assert(typeof obj.blockSeq === 'number', 'Invalid blockSeq') 53 | assert(typeof obj.rootHashAtBlock === 'string', 'Invalid rootHashAtBlock') 54 | assert(typeof obj.rootHashSignature === 'string', 'Invalid rootHashSignature') 55 | return new LogForkFraudProof( 56 | 
Buffer.from(obj.logPubkey, 'hex'),
57 | obj.forkNumber,
58 | obj.blockSeq,
59 | Buffer.from(obj.rootHashAtBlock, 'hex'),
60 | Buffer.from(obj.rootHashSignature, 'hex')
61 | )
62 | }
63 | }
64 | 
65 | export class BlockRewriteFraudProof extends FraudProof {
66 | constructor (message: string, public givenInclusionProof: BlockInclusionProof, public violatingInclusionProof: BlockInclusionProof) {
67 | super(message || 'Conflicting inclusion proofs indicate that the log unpublished a message after publishing it')
68 | }
69 | 
70 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) {
71 | let indent = ''
72 | if (opts.indentationLvl) {
73 | while (indent.length < opts.indentationLvl) indent += ' '
74 | }
75 | return this.constructor.name + '(\n' +
76 | indent + ' Conflicting inclusion proofs indicate that the log unpublished a message after publishing it.\n' +
77 | indent + ' description: ' + opts.stylize(this.message, 'string') + '\n' +
78 | indent + ' givenInclusionProof: ' + this.givenInclusionProof[util.inspect.custom](0, Object.assign({}, opts, {indentationLvl: (opts.indentationLvl||0) + 2})) + '\n' +
79 | indent + ' violatingInclusionProof: ' + this.violatingInclusionProof[util.inspect.custom](0, Object.assign({}, opts, {indentationLvl: (opts.indentationLvl||0) + 2})) + '\n' +
80 | indent + ')'
81 | }
82 | 
83 | toJSON () {
84 | return {
85 | vitraBlockInclusionFraudProof: 1,
86 | description: this.message,
87 | givenInclusionProof: this.givenInclusionProof.toJSON(),
88 | violatingInclusionProof: this.violatingInclusionProof.toJSON(),
89 | }
90 | }
91 | 
92 | static fromJSON (obj: any): BlockRewriteFraudProof {
93 | assert(obj.vitraBlockInclusionFraudProof >= 1, 'Invalid schema version')
94 | return new BlockRewriteFraudProof(
95 | obj.description && typeof obj.description === 'string' ?
obj.description : '', 96 | BlockInclusionProof.fromJSON(obj.givenInclusionProof), 97 | BlockInclusionProof.fromJSON(obj.violatingInclusionProof) 98 | ) 99 | } 100 | } 101 | 102 | export class ContractFraudProof extends FraudProof { 103 | constructor (public indexStateProof: BlockInclusionProof, public details: ContractFraudProofDetails) { 104 | super('The executor has violated the contract.') 105 | } 106 | 107 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 108 | let indent = '' 109 | if (opts.indentationLvl) { 110 | while (indent.length < opts.indentationLvl) indent += ' ' 111 | } 112 | return this.constructor.name + '(\n' + 113 | indent + ' The executor has violated the contract.\n' + 114 | indent + ' indexStateProof: ' + this.indexStateProof[util.inspect.custom](depth, Object.assign({}, opts, {indentationLvl: (opts.indentationLvl||0) + 2})) + '\n' + 115 | indent + ' details: ' + this.details[util.inspect.custom](depth, Object.assign({}, opts, {indentationLvl: (opts.indentationLvl||0) + 2})) + '\n' + 116 | indent + ')' 117 | } 118 | 119 | toJSON () { 120 | return { 121 | vitraContractFraudProof: 1, 122 | indexStateProof: this.indexStateProof.toJSON(), 123 | details: this.details 124 | } 125 | } 126 | 127 | static fromJSON (obj: any): ContractFraudProof { 128 | assert(obj.vitraContractFraudProof >= 1, 'Invalid schema version') 129 | assert(typeof obj.details?.description === 'string', 'Invalid details.description') 130 | return new ContractFraudProof( 131 | BlockInclusionProof.fromJSON(obj.indexStateProof), 132 | new ContractFraudProofDetails(obj.details.description, obj.details.details, obj.details.code) 133 | ) 134 | } 135 | } 136 | 137 | export class ContractFraudProofDetails { 138 | code: string 139 | constructor (public description: string, public data?: any, code?: string) { 140 | this.code = code || this.constructor.name 141 | } 142 | 143 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 144 | let indent = '' 145 | if (opts.indentationLvl) { 146 | while (indent.length < opts.indentationLvl) indent += ' ' 147 | } 148 | return this.constructor.name + '(\n' + 149 | indent + ' description: ' + opts.stylize(this.description, 'string') + '\n' + 150 | indent + ' data: ' + util.inspect(this.data || {}) + '\n' + 151 | indent + ')' 152 | } 153 | } -------------------------------------------------------------------------------- /src/core/inclusion-proofs.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import util from 'util' 3 | import { Database } from './database.js' 4 | import { OpLog } from './log.js' 5 | 6 | export interface VerifyInclusionProofOpts { 7 | database?: Database 8 | oplog?: OpLog 9 | } 10 | 11 | export async function verifyInclusionProof (proof: object|BlockInclusionProof, opts?: VerifyInclusionProofOpts) { 12 | const p: BlockInclusionProof = proof instanceof BlockInclusionProof ? 
proof : BlockInclusionProof.fromJSON(proof) 13 | let oplog: OpLog|undefined 14 | if (opts?.oplog && opts.oplog.pubkey.equals(p.logPubkey)) { 15 | oplog = opts.oplog 16 | } else if (opts?.database && opts.database.isOplogParticipant(p.logPubkey)) { 17 | oplog = opts.database.getParticipant(p.logPubkey) as OpLog 18 | } else { 19 | throw new Error('TODO: fetch oplog from network') 20 | } 21 | await oplog.verifyBlockInclusionProof(p) 22 | } 23 | 24 | export class BlockInclusionProof { 25 | constructor (public logPubkey: Buffer, public blockSeq: number, public rootHashAtBlock: Buffer, public rootHashSignature: Buffer) { 26 | } 27 | 28 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 29 | let indent = '' 30 | if (opts.indentationLvl) { 31 | while (indent.length < opts.indentationLvl) indent += ' ' 32 | } 33 | return this.constructor.name + '(\n' + 34 | indent + ' logPubkey: ' + opts.stylize(this.logPubkey.toString('hex'), 'string') + '\n' + 35 | indent + ' blockSeq: ' + opts.stylize(this.blockSeq, 'number') + '\n' + 36 | indent + ' rootHashAtBlock: ' + opts.stylize(this.rootHashAtBlock.toString('hex'), 'string') + '\n' + 37 | indent + ' rootHashSignature: ' + opts.stylize(this.rootHashSignature.toString('hex'), 'string') + '\n' + 38 | indent + ')' 39 | } 40 | 41 | toJSON () { 42 | return { 43 | vitraBlockInclusionProof: 1, 44 | logPubkey: this.logPubkey.toString('hex'), 45 | blockSeq: this.blockSeq, 46 | rootHashAtBlock: this.rootHashAtBlock.toString('hex'), 47 | rootHashSignature: this.rootHashSignature.toString('hex') 48 | } 49 | } 50 | 51 | static fromJSON (obj: any): BlockInclusionProof { 52 | assert(obj.vitraBlockInclusionProof >= 1, 'Invalid schema version') 53 | assert(typeof obj.logPubkey === 'string' && obj.logPubkey.length === 64, 'Invalid logPubkey') 54 | assert(typeof obj.blockSeq === 'number', 'Invalid blockSeq') 55 | assert(typeof obj.rootHashAtBlock === 'string', 'Invalid rootHashAtBlock') 56 | assert(typeof obj.rootHashSignature === 'string', 'Invalid rootHashSignature') 57 | return new BlockInclusionProof( 58 | Buffer.from(obj.logPubkey, 'hex'), 59 | obj.blockSeq, 60 | Buffer.from(obj.rootHashAtBlock, 'hex'), 61 | Buffer.from(obj.rootHashSignature, 'hex') 62 | ) 63 | } 64 | } -------------------------------------------------------------------------------- /src/core/log.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import EventEmitter from 'events' 3 | // @ts-ignore no types available -prf 4 | import AggregateError from 'core-js-pure/actual/aggregate-error.js' 5 | import Hypercore from 'hypercore' 6 | import Hyperbee from 'hyperbee' 7 | import * as msgpackr from 'msgpackr' 8 | import { Readable } from 'streamx' 9 | import { Operation } from './transactions.js' 10 | import { BlockInclusionProof } from './inclusion-proofs.js' 11 | import { BlockRewriteFraudProof, LogForkFraudProof } from './fraud-proofs.js' 12 | import { InvalidBlockInclusionProofError, BlocksNotAvailableError } from './errors.js' 13 | import { 14 | OpLogEntry, 15 | IndexLogListOpts, 16 | IndexLogEntry, 17 | IndexBatchEntry, 18 | IndexHistoryOpts, 19 | IndexHistoryEntry, 20 | Key, 21 | keyToStr 22 | } from '../types.js' 23 | import { 24 | PARTICIPANT_PATH_PREFIX, 25 | } from '../schemas.js' 26 | import { beeShallowList, pathToBeekey, beekeyToPath } from '../util/hyper.js' 27 | import { Storage } from './storage.js' 28 | // @ts-ignore no types available -prf 29 | import * as c from 
'compact-encoding' 30 | // @ts-ignore no types available -prf 31 | import * as toIterable from 'stream-to-it' 32 | 33 | interface InternalIndexOpts { 34 | checkout?: number|undefined 35 | } 36 | 37 | export class Log extends EventEmitter { 38 | core: Hypercore 39 | 40 | constructor (core: Hypercore) { 41 | super() 42 | this.core = core 43 | } 44 | 45 | [Symbol.for('nodejs.util.inspect.custom')] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 46 | let indent = '' 47 | if (opts.indentationLvl) { 48 | while (indent.length < opts.indentationLvl) indent += ' ' 49 | } 50 | return this.constructor.name + '(\n' + 51 | indent + ' key: ' + opts.stylize(keyToStr(this.pubkey), 'string') + '\n' + 52 | indent + ' opened: ' + opts.stylize(this.core.opened, 'boolean') + '\n' + 53 | indent + ' length: ' + opts.stylize(this.core.length, 'number') + '\n' + 54 | indent + ')' 55 | } 56 | 57 | get pubkey () { 58 | return this.core.key 59 | } 60 | 61 | get length () { 62 | return this.core.length 63 | } 64 | 65 | get writable () { 66 | return this.core.writable 67 | } 68 | 69 | async open () { 70 | } 71 | 72 | async close () { 73 | return this.core.close() 74 | } 75 | 76 | equals (log: Log) { 77 | return this.pubkey.equals(log.pubkey) 78 | } 79 | 80 | get latestProof () { 81 | if (!this.core?.core?.tree) throw new Error('Hypercore not initialized') 82 | const tree = this.core.core.tree 83 | const seq = this.core.length 84 | const hash = tree.crypto.tree(tree.roots) 85 | return new BlockInclusionProof(this.pubkey, seq, hash, tree.signature) 86 | } 87 | 88 | async syncLatest () { 89 | await this.core.update() 90 | } 91 | 92 | async syncFullHistory () { 93 | await this.core.update() 94 | await this.core.download({start: 0, end: this.core.length - 1}) 95 | } 96 | 97 | async generateBlockInclusionProof (seq: number): Promise { 98 | if (!this.core?.core?.tree) throw new Error('Hypercore not initialized') 99 | const tree = this.core.core.tree 100 | 101 | const roots = await tree.getRoots(seq + 1) 102 | const hash = tree.crypto.tree(roots) 103 | const signableHash = signable(hash, seq + 1, 0) 104 | const signature = this.core.sign(signableHash) 105 | if (tree.fork !== 0) { 106 | throw new LogForkFraudProof(this.pubkey, tree.fork, seq, hash, signature) 107 | } 108 | return new BlockInclusionProof(this.pubkey, seq, hash, signature) 109 | } 110 | 111 | async verifyBlockInclusionProof (proof: BlockInclusionProof): Promise { 112 | if (!this.core?.core?.tree) throw new Error('Hypercore not initialized') 113 | const tree = this.core.core.tree 114 | 115 | if (tree.fork !== 0) { 116 | const seq = this.core.length 117 | const hash = tree.crypto.tree(tree.roots) 118 | throw new LogForkFraudProof(this.pubkey, tree.fork, seq, hash, tree.signature) 119 | } 120 | 121 | if ((this.core.length - 1) < proof.blockSeq) { 122 | throw new BlocksNotAvailableError(this.pubkey, proof.blockSeq, this.core.length) 123 | } 124 | 125 | const roots = await tree.getRoots(proof.blockSeq + 1) 126 | const hash = tree.crypto.tree(roots) 127 | 128 | const signableHash = signable(proof.rootHashAtBlock, proof.blockSeq + 1, 0) 129 | if (!tree.crypto.verify(signableHash, proof.rootHashSignature, this.pubkey)) { 130 | throw new InvalidBlockInclusionProofError('Invalid signature') 131 | } 132 | 133 | if (Buffer.compare(proof.rootHashAtBlock, hash) !== 0) { 134 | const violatingProof = await this.generateBlockInclusionProof(proof.blockSeq) 135 | throw new BlockRewriteFraudProof('Checksums do not match', proof, violatingProof) 136 | } 137 | } 
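// Usage sketch (illustrative; `oplog` and `seq` stand for any OpLog instance and a
// block sequence number within it): a participant can capture a proof for a block it
// has appended and later re-verify it against the log:
//
//   const proof = await oplog.generateBlockInclusionProof(seq)
//   await oplog.verifyBlockInclusionProof(proof)
//
// verifyBlockInclusionProof() throws InvalidBlockInclusionProofError on a bad
// signature, BlocksNotAvailableError when the log has not synced far enough, and a
// LogForkFraudProof or BlockRewriteFraudProof when the log was truncated or rewritten.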
138 | 139 | createLogReadStream (opts: {start?: number, end?: number, snapshot?: boolean, live?: boolean} = {}) { 140 | return new ReadStream(this, opts) 141 | } 142 | } 143 | 144 | export class OpLog extends Log { 145 | constructor (core: Hypercore) { 146 | super(core) 147 | } 148 | 149 | [Symbol.for('nodejs.util.inspect.custom')] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 150 | let indent = '' 151 | if (opts.indentationLvl) { 152 | while (indent.length < opts.indentationLvl) indent += ' ' 153 | } 154 | return this.constructor.name + '(\n' + 155 | indent + ' key: ' + opts.stylize(keyToStr(this.pubkey), 'string') + '\n' + 156 | indent + ' opened: ' + opts.stylize(this.core.opened, 'boolean') + '\n' + 157 | indent + ')' 158 | } 159 | 160 | static async create (storage: Storage): Promise { 161 | const core = await storage.createHypercore() 162 | return new OpLog(core) 163 | } 164 | 165 | async get (seq: number): Promise { 166 | const value = await this.core.get(seq) 167 | return { 168 | seq, 169 | value: msgpackr.unpack(value) 170 | } 171 | } 172 | 173 | async dangerousAppend (values: any[]): Promise { 174 | const ops = [] 175 | const baseSeq = await this.core.append(values.map(v => msgpackr.pack(v))) 176 | for (let i = 0; i < values.length; i++) { 177 | const seq = baseSeq + i 178 | const value = values[i] 179 | const proof = await this.generateBlockInclusionProof(seq) 180 | ops.push(new Operation(this, proof, value)) 181 | } 182 | return ops 183 | } 184 | 185 | async _dangerousCopyInto (target: OpLog) { 186 | assert(!target.length, 'Cannot copy into a log that isnt empty') 187 | const blocks: Buffer[] = [] 188 | for (let i = 0; i < this.length; i++) { 189 | blocks.push(await this.core.get(i)) 190 | } 191 | await this.core.append(blocks) 192 | } 193 | } 194 | 195 | export class IndexLog extends Log { 196 | bee: Hyperbee 197 | 198 | constructor (core: Hypercore) { 199 | super(core) 200 | this.bee = new Hyperbee(this.core, { 201 | keyEncoding: 'utf-8', 202 | valueEncoding: { 203 | encode: (v: any) => msgpackr.pack(v), 204 | encodingLength: (v: any) => msgpackr.pack(v).length, 205 | decode: (v: any) => msgpackr.unpack(v) 206 | } 207 | }) 208 | } 209 | 210 | static async create (storage: Storage): Promise { 211 | const core = await storage.createHypercore() 212 | return new IndexLog(core) 213 | } 214 | 215 | async list (prefix = '/', opts?: IndexLogListOpts, internalOpts?: InternalIndexOpts): Promise { 216 | let bee = this.bee 217 | if (typeof internalOpts?.checkout === 'number') { 218 | bee = bee.checkout(internalOpts?.checkout) 219 | } 220 | 221 | let arr = await beeShallowList(bee, prefix.split('/').filter(Boolean)) 222 | if (opts?.reverse) arr.reverse() 223 | if (opts?.offset && opts?.limit) { 224 | arr = arr.slice(opts.offset, opts.offset + opts.limit) 225 | } else if (opts?.offset) { 226 | arr = arr.slice(opts.offset) 227 | } else if (opts?.limit) { 228 | arr = arr.slice(0, opts.limit) 229 | } 230 | return arr 231 | } 232 | 233 | async get (path: string, internalOpts?: InternalIndexOpts): Promise { 234 | let bee = this.bee 235 | if (typeof internalOpts?.checkout === 'number') { 236 | bee = bee.checkout(internalOpts?.checkout) 237 | } 238 | 239 | const entry = await bee.get(pathToBeekey(path)) 240 | if (!entry) return undefined 241 | const pathSegs = entry.key.split(`\x00`).filter(Boolean) 242 | return { 243 | seq: entry.seq, 244 | container: false, 245 | name: pathSegs[pathSegs.length - 1], 246 | path: `/${pathSegs.join('/')}`, 247 | value: entry.value 248 | } 
249 | } 250 | 251 | async dangerousBatch (batch: IndexBatchEntry[]) { 252 | if (!this.bee) throw new Error('Hyperbee not initialized') 253 | const b = this.bee.batch() 254 | for (const entry of batch) { 255 | assert(typeof entry.path === 'string' && entry.path.length, 'Invalid batch entry path') 256 | assert(entry.path !== '/', 'Invalid batch entry path (cannot write to /)') 257 | const key = pathToBeekey(entry.path) 258 | if (entry.type === 'put') { 259 | await b.put(key, entry.value) 260 | } else if (entry.type === 'del') { 261 | await b.del(key) 262 | } else { 263 | throw new Error(`Invalid batch entry type: "${entry.type}"`) 264 | } 265 | } 266 | await b.flush() 267 | } 268 | 269 | async listOplogs (): Promise<{pubkey: Key}[]> { 270 | const entries = await this.list(PARTICIPANT_PATH_PREFIX) 271 | const oplogs = [] 272 | for (const entry of entries) { 273 | try { 274 | if (!entry.value.active) continue 275 | oplogs.push({ 276 | pubkey: entry.value.pubkey 277 | }) 278 | } catch (e: any) { 279 | this.emit('warning', new AggregateError([e], `Invalid entry under ${PARTICIPANT_PATH_PREFIX}, name=${entry.name}`)) 280 | } 281 | } 282 | return oplogs 283 | } 284 | 285 | async* history (opts?: IndexHistoryOpts): AsyncGenerator { 286 | for await (const entry of toIterable.source(this.bee.createHistoryStream(opts))) { 287 | const path = `/${beekeyToPath(entry.key)}` 288 | yield { 289 | type: entry.type, 290 | seq: entry.seq, 291 | path, 292 | name: path.split('/').filter(Boolean).pop() || '', 293 | value: entry.value 294 | } 295 | } 296 | } 297 | 298 | async _dangerousCopyInto (target: IndexLog) { 299 | assert(!target.length, 'Cannot copy into a log that isnt empty') 300 | for await (const entry of toIterable.source(this.bee.createReadStream())) { 301 | await target.bee.put(entry.key, entry.value) 302 | } 303 | } 304 | } 305 | 306 | export class ReadStream extends Readable { 307 | start: number 308 | end: number 309 | snapshot: boolean 310 | live: boolean 311 | constructor (public log: Log, opts: {start?: number, end?: number, snapshot?: boolean, live?: boolean} = {}) { 312 | super() 313 | this.start = opts.start || 0 314 | this.end = typeof opts.end === 'number' ? opts.end : -1 315 | this.snapshot = !opts.live && opts.snapshot !== false 316 | this.live = !!opts.live 317 | } 318 | 319 | _open (cb: any) { 320 | this._openP().then(cb, cb) 321 | } 322 | 323 | _read (cb: any) { 324 | this._readP().then(cb, cb) 325 | } 326 | 327 | async _openP () { 328 | if (this.end === -1) await this.log.core.update() 329 | else await this.log.core.ready() 330 | if (this.snapshot && this.end === -1) this.end = this.log.core.length 331 | } 332 | 333 | async _readP () { 334 | const end = this.live ? -1 : (this.end === -1 ? 
this.log.core.length : this.end) 335 | if (end >= 0 && this.start >= end) { 336 | this.push(null) 337 | return 338 | } 339 | 340 | const nextSeq = this.start++ 341 | try { 342 | const nextValue = await this.log.core.get(nextSeq) 343 | this.push({seq: nextSeq, value: nextValue}) 344 | } catch (e) { 345 | this.push(null) 346 | } 347 | } 348 | } 349 | 350 | function signable (hash: Buffer, length: number, fork: number): Buffer { 351 | const state = { start: 0, end: 48, buffer: Buffer.alloc(48) } 352 | c.raw.encode(state, hash) 353 | c.uint64.encode(state, length) 354 | c.uint64.encode(state, fork) 355 | return state.buffer 356 | } 357 | 358 | -------------------------------------------------------------------------------- /src/core/monitor.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import util from 'util' 3 | import { Resource } from '../util/resource.js' 4 | import { AwaitLock } from '../util/lock.js' 5 | import { Database } from './database.js' 6 | import { OpLog } from './log.js' 7 | import { VM } from './vm.js' 8 | import { ContractFraudProof, ContractFraudProofDetails } from './fraud-proofs.js' 9 | import { IndexHistoryEntry, OpLogEntry, IndexBatchEntry, Key, keyToStr, keyToBuf } from '../types.js' 10 | import { 11 | CONTRACT_SOURCE_PATH, 12 | PARTICIPANT_PATH_PREFIX, 13 | ACK_PATH_PREFIX, 14 | GENESIS_ACK_PATH, 15 | InputSchema, 16 | AckSchema 17 | } from '../schemas.js' 18 | import _isEqual from 'lodash.isequal' 19 | 20 | enum MonitorState { 21 | VALIDATING_GENESIS_SOURCE, 22 | VALIDATING_GENESIS_INPUTS, 23 | AWAITING_TX, 24 | VALIDATING_TX 25 | } 26 | 27 | interface QueuedEffect { 28 | effect: 'set-vm'|'add-input'|'remove-input' 29 | value: any 30 | } 31 | 32 | export class ContractMonitor extends Resource { 33 | expectedSeq = 1 34 | expectedChanges: IndexBatchEntry[] = [] 35 | state: MonitorState = MonitorState.VALIDATING_GENESIS_SOURCE 36 | inputs: Set = new Set() 37 | vm: VM|undefined 38 | verifying = false 39 | 40 | private _oplogs: Map = new Map() 41 | private _oplogProcessedSeqs: Map = new Map() 42 | private _loadOplogLock = new AwaitLock() 43 | private _historyGenerator: AsyncGenerator|undefined 44 | private _queuedEffects: QueuedEffect[] = [] 45 | 46 | constructor (public db: Database) { 47 | super() 48 | } 49 | 50 | get verifiedLength () { 51 | return this.expectedSeq - 1 52 | } 53 | 54 | async _close () { 55 | for (const oplog of this._oplogs.values()) { 56 | await oplog.close() 57 | } 58 | this.vm?.close() 59 | this._historyGenerator?.return(undefined) 60 | this.verifying = false 61 | } 62 | 63 | async verify () { 64 | assert(!this.verifying, 'Monitor already running verification') 65 | this.reset() 66 | this.verifying = true 67 | for await (const entry of this.db.index.history()) { 68 | await this.validate(entry) 69 | } 70 | this.verifying = false 71 | } 72 | 73 | watch () { 74 | assert(!this.verifying, 'Monitor already running verification') 75 | this.reset() 76 | this.verifying = true 77 | ;(async () => { 78 | this._historyGenerator = this.db.index.history({live: true}) 79 | for await (const entry of this._historyGenerator) { 80 | try { 81 | await this.validate(entry) 82 | } catch (e) { 83 | this.emit('violation', e) 84 | return 85 | } 86 | } 87 | })() 88 | } 89 | 90 | private reset () { 91 | this.expectedSeq = 1 92 | this.expectedChanges.length = 0 93 | this.state = MonitorState.VALIDATING_GENESIS_SOURCE 94 | this.inputs = new Set() 95 | this._queuedEffects.length = 0 96 | 
this._oplogProcessedSeqs = new Map() 97 | } 98 | 99 | private async transition (state: MonitorState) { 100 | this.state = state 101 | if (state === MonitorState.AWAITING_TX) { 102 | await this.applyQueuedEffects() 103 | } 104 | } 105 | 106 | private async validate (entry: IndexHistoryEntry) { 107 | if (!this.verifying) return 108 | this.assert(entry.seq === this.expectedSeq, new UnexpectedSeqError({entry, expectedSeq: this.expectedSeq})) 109 | if (this.vm) { 110 | this.vm.checkoutIndexAt(entry.seq) 111 | } 112 | switch (this.state) { 113 | case MonitorState.VALIDATING_GENESIS_SOURCE: { 114 | this.assert(entry.path === CONTRACT_SOURCE_PATH, new UnexpectedPathError({entry, expectedPath: CONTRACT_SOURCE_PATH})) 115 | this.validateContractSourceChange(entry) 116 | await this.transition(MonitorState.VALIDATING_GENESIS_INPUTS) 117 | break 118 | } 119 | case MonitorState.VALIDATING_GENESIS_INPUTS: { 120 | if (entry.path.startsWith(PARTICIPANT_PATH_PREFIX)) { 121 | this.validateInputChange(entry) 122 | } else if (entry.path === GENESIS_ACK_PATH) { 123 | await this.transition(MonitorState.AWAITING_TX) 124 | this.assert(this.inputs.size > 0, new NoGenesisInputsDeclaredError()) 125 | } else { 126 | throw new UnexpectedPathError({entry, expectedPath: `${GENESIS_ACK_PATH} or a child of ${PARTICIPANT_PATH_PREFIX}`}) 127 | } 128 | break 129 | } 130 | case MonitorState.AWAITING_TX: { 131 | this.assert(entry.path.startsWith(ACK_PATH_PREFIX), new ChangeNotProducedByMonitorError({entry})) 132 | this.validateAck(entry) 133 | 134 | const ackValue = entry.value as AckSchema 135 | const op = await this.fetchOp(ackValue.origin, ackValue.seq) 136 | this.assert(!!op, new CannotFetchOpError({entry})) 137 | 138 | const replayRes = await this.replayOp(ackValue, (op as OpLogEntry).value) 139 | if ('error' in replayRes) { 140 | this.assert(ackValue.success === false, new MonitorApplyFailedError({entry, errorMessage: replayRes.errorMessage})) 141 | } else { 142 | this.expectedChanges = (replayRes as IndexBatchEntry[]) 143 | } 144 | 145 | await this.transition(MonitorState.VALIDATING_TX) 146 | break 147 | } 148 | case MonitorState.VALIDATING_TX: { 149 | const expectedChange = this.expectedChanges.shift() as IndexBatchEntry 150 | this.assert(!entry.path.startsWith(ACK_PATH_PREFIX), new ChangeNotProducedByExecutorError({entry, expectedChange})) 151 | this.validateChange(entry, expectedChange) 152 | if (this.expectedChanges.length === 0){ 153 | await this.transition(MonitorState.AWAITING_TX) 154 | } 155 | break 156 | } 157 | } 158 | this.expectedSeq++ 159 | this.emit('validated', entry) 160 | } 161 | 162 | private validateAck (entry: IndexHistoryEntry) { 163 | this.assert(entry.value && typeof entry.value === 'object', new InvalidSchemaError({entry, description: 'value to be an object'})) 164 | const ackValue = entry.value as AckSchema 165 | this.assert(typeof ackValue.success === 'boolean', new InvalidSchemaError({entry, description: '.success to be a boolean'})) 166 | this.assert(typeof ackValue.origin === 'string' && ackValue.origin.length === 64, new InvalidSchemaError({entry, description: '.origin to be a 64-character utf-8 string'})) 167 | this.assert(typeof ackValue.seq === 'number', new InvalidSchemaError({entry, description: '.seq to be a number'})) 168 | this.assert(typeof ackValue.ts === 'number', new InvalidSchemaError({entry, description: '.ts to be a number'})) 169 | this.assert(this.inputs.has(ackValue.origin), new NonParticipantError({entry, oplogPubkey: ackValue.origin})) 170 | 
this.assert(this.getNextOplogSeqToProcess(ackValue.origin) === ackValue.seq, new ProcessedOutOfOrderError({entry, oplogPubkey: ackValue.origin, expectedSeq: this.getNextOplogSeqToProcess(ackValue.origin), executedSeq: ackValue.seq})) 171 | this.setOplogSeqProcessed(ackValue.origin, ackValue.seq) 172 | if (ackValue.success) { 173 | this.assert(typeof ackValue.numChanges === 'number', new InvalidSchemaError({entry, description: '.numChanges to be a number'})) 174 | } else { 175 | this.assert(typeof ackValue.error === 'string' || typeof ackValue.error === 'undefined', new InvalidSchemaError({entry, description: '.error to be a string or undefined'})) 176 | } 177 | } 178 | 179 | getNextOplogSeqToProcess (pubkey: string) { 180 | let lastProcessed = this._oplogProcessedSeqs.get(pubkey) 181 | if (typeof lastProcessed === 'undefined') lastProcessed = -1 182 | return lastProcessed + 1 183 | } 184 | 185 | setOplogSeqProcessed (pubkey: string, seq: number) { 186 | this._oplogProcessedSeqs.set(pubkey, seq) 187 | } 188 | 189 | private validateChange (entry: IndexHistoryEntry, expectedChange: IndexBatchEntry) { 190 | this.assert(entry.type === expectedChange.type, new ChangeMismatchError({entry, expectedChange, description: 'Change type is different.'})) 191 | this.assert(entry.path === expectedChange.path, new ChangeMismatchError({entry, expectedChange, description: 'Change path is different.'})) 192 | this.assert(_isEqual(entry.value, expectedChange.value), new ChangeMismatchError({entry, expectedChange, description: 'Change value is different.'})) 193 | if (entry.path === CONTRACT_SOURCE_PATH) this.validateContractSourceChange(entry) 194 | if (entry.path.startsWith(PARTICIPANT_PATH_PREFIX)) this.validateInputChange(entry) 195 | } 196 | 197 | private validateContractSourceChange (entry: IndexHistoryEntry) { 198 | this.assert(typeof entry.value === 'string' && entry.value.length, new InvalidSchemaError({entry, description: 'a utf-8 string'})) 199 | this._queuedEffects.push({effect: 'set-vm', value: entry.value}) 200 | } 201 | 202 | private validateInputChange (entry: IndexHistoryEntry) { 203 | this.assert(entry.value && typeof entry.value === 'object', new InvalidSchemaError({entry, description: 'value to be an object'})) 204 | const inputValue = entry.value as InputSchema 205 | this.assert(Buffer.isBuffer(inputValue.pubkey), new InvalidSchemaError({entry, description: '.pubkey to be a buffer'})) 206 | this.assert(inputValue.pubkey?.byteLength === 32, new InvalidSchemaError({entry, description: '.pubkey to be a buffer of 32 bytes'})) 207 | this.assert(typeof inputValue.active === 'boolean', new InvalidSchemaError({entry, description: '.active to be a boolean'})) 208 | if (inputValue.active) { 209 | this._queuedEffects.push({effect: 'add-input', value: keyToStr(inputValue.pubkey)}) 210 | } else { 211 | this._queuedEffects.push({effect: 'remove-input', value: keyToStr(inputValue.pubkey)}) 212 | } 213 | } 214 | 215 | private async applyQueuedEffects () { 216 | for (const effect of this._queuedEffects) { 217 | switch (effect.effect) { 218 | case 'set-vm': { 219 | if (this.vm) { 220 | await this.vm.close() 221 | } 222 | this.vm = new VM(this.db, effect.value) 223 | await this.vm.open() 224 | await this.vm.restrict() 225 | break 226 | } 227 | case 'add-input': 228 | this.inputs.add(effect.value) 229 | break 230 | case 'remove-input': 231 | this.inputs.delete(effect.value) 232 | break 233 | } 234 | } 235 | this._queuedEffects.length = 0 236 | } 237 | 238 | private async replayOp (ack: AckSchema, opValue: 
any): Promise { 239 | const release = await this.db.lock('replayOp') 240 | try { 241 | assert(!!this.vm, 'Contract VM not initialized') 242 | let applySuccess = undefined 243 | let applyError = undefined 244 | let batch: IndexBatchEntry[] = [] 245 | try { 246 | const applyRes = await this.vm.contractApply(opValue, ack) 247 | batch = this.db._mapApplyActionsToBatch(applyRes.actions) 248 | applySuccess = true 249 | } catch (e: any) { 250 | applySuccess = false 251 | applyError = e 252 | } 253 | if (!applySuccess) { 254 | return {error: true, errorMessage: applyError.toString()} 255 | } 256 | return batch 257 | } finally { 258 | release() 259 | } 260 | } 261 | 262 | private async fetchOplog (pubkey: Key): Promise { 263 | await this._loadOplogLock.acquireAsync() 264 | try { 265 | const pubkeyBuf = keyToBuf(pubkey) 266 | const pubkeyStr = keyToStr(pubkey) 267 | let log = this.db.oplogs.find(log => log.pubkey.equals(pubkeyBuf)) 268 | if (log) return log 269 | 270 | log = this._oplogs.get(pubkeyStr) 271 | if (log) return log 272 | 273 | log = new OpLog(await this.db.storage.getHypercore(pubkeyBuf)) 274 | this._oplogs.set(pubkeyStr, log) 275 | return log 276 | } finally { 277 | this._loadOplogLock.release() 278 | } 279 | } 280 | 281 | private async fetchOp (pubkey: Key, seq: number): Promise { 282 | const log = await this.fetchOplog(pubkey) 283 | return await log.get(seq) 284 | } 285 | 286 | private assert (cond: any, error: VerificationError|ContractFraudProofDetails) { 287 | if (!cond) { 288 | if (error instanceof VerificationError) { 289 | throw error 290 | } 291 | const fraudProof = new ContractFraudProof(this.db.index.latestProof, error as ContractFraudProofDetails) 292 | throw fraudProof 293 | } 294 | } 295 | } 296 | 297 | export class VerificationError extends Error { 298 | name: string 299 | data: any 300 | 301 | constructor (message: string, data?: any) { 302 | super(message) 303 | this.name = this.constructor.name 304 | this.data = data 305 | } 306 | 307 | [util.inspect.custom] (depth: number, opts: {indentationLvl: number, stylize: Function}) { 308 | let indent = '' 309 | if (opts.indentationLvl) { 310 | while (indent.length < opts.indentationLvl) indent += ' ' 311 | } 312 | return this.constructor.name + '(\n' + 313 | indent + ' An issue occurred during verification. This does not indicate that the contract was violated, but that verification failed to complete.\n' + 314 | indent + ' message: ' + opts.stylize(this.message, 'string') + '\n' + 315 | indent + ')' 316 | } 317 | } 318 | 319 | export class UnexpectedSeqError extends VerificationError { 320 | constructor ({entry, expectedSeq}: {entry: IndexHistoryEntry, expectedSeq: number}) { 321 | super(`Unexpected message seq. Expected ${expectedSeq}, received ${entry.seq}`, {entry, expectedSeq}) 322 | } 323 | } 324 | 325 | export class CannotFetchOpError extends VerificationError { 326 | constructor ({entry}: {entry: IndexHistoryEntry}) { 327 | super(`Failed to fetch op from ${entry.value.origin} at seq ${entry.value.seq}`, {entry}) 328 | } 329 | } 330 | 331 | export class NoGenesisInputsDeclaredError extends ContractFraudProofDetails { 332 | constructor () { 333 | super(`No input oplogs declared in genesis sequence`) 334 | } 335 | } 336 | 337 | export class UnexpectedPathError extends ContractFraudProofDetails { 338 | constructor ({entry, expectedPath}: {entry: IndexHistoryEntry, expectedPath: string}) { 339 | super(`Unexpected message path. 
Expected ${expectedPath}, received ${entry.path}`, {entry, expectedPath}) 340 | } 341 | } 342 | 343 | export class InvalidSchemaError extends ContractFraudProofDetails { 344 | constructor ({entry, description}: {entry: IndexHistoryEntry, description: string}) { 345 | super(`Unexpected message value. Expected ${description}`, {entry}) 346 | } 347 | } 348 | 349 | export class MonitorApplyFailedError extends ContractFraudProofDetails { 350 | constructor ({entry, errorMessage}: {entry: IndexHistoryEntry, errorMessage: string}) { 351 | super(`The monitor expected the operation to fail but the executor successfully processed it. ${errorMessage}`, {entry, errorMessage}) 352 | } 353 | } 354 | 355 | export class ChangeNotProducedByMonitorError extends ContractFraudProofDetails { 356 | constructor ({entry}: {entry: IndexHistoryEntry}) { 357 | super(`The executor produced a change which the monitor did not expect.`, {entry}) 358 | } 359 | } 360 | 361 | export class ChangeNotProducedByExecutorError extends ContractFraudProofDetails { 362 | constructor ({entry, expectedChange}: {entry: IndexHistoryEntry, expectedChange: IndexBatchEntry}) { 363 | super(`The executor did not produce a change which the monitor expected.`, {entry, expectedChange}) 364 | } 365 | } 366 | 367 | export class ChangeMismatchError extends ContractFraudProofDetails { 368 | constructor ({entry, expectedChange, description}: {entry: IndexHistoryEntry, expectedChange: IndexBatchEntry, description: string}) { 369 | super(`The executor produced a change which is different than the change expected by the monitor. ${description}`, {entry, expectedChange}) 370 | } 371 | } 372 | 373 | export class ProcessedOutOfOrderError extends ContractFraudProofDetails { 374 | constructor ({entry, oplogPubkey, expectedSeq, executedSeq}: {entry: IndexHistoryEntry, oplogPubkey: string, expectedSeq: number, executedSeq: number}) { 375 | super(`The executor processed an operation out of order. 
Expected to process ${expectedSeq} but actually processed ${executedSeq} for oplog ${oplogPubkey}`, {entry, oplogPubkey, expectedSeq, executedSeq}) 376 | } 377 | } 378 | 379 | export class NonParticipantError extends ContractFraudProofDetails { 380 | constructor ({entry, oplogPubkey}: {entry: IndexHistoryEntry, oplogPubkey: string}) { 381 | super(`The executor processed an operation from an oplog which is not a declared participant, oplog = ${oplogPubkey}`, {entry, oplogPubkey}) 382 | } 383 | } -------------------------------------------------------------------------------- /src/core/storage.ts: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import { Resource } from '../util/resource.js' 3 | import Hypercore from 'hypercore' 4 | import Corestore from 'corestore' 5 | // @ts-ignore types not available -prf 6 | import raf from 'random-access-file' 7 | // @ts-ignore types not available -prf 8 | import ram from 'random-access-memory' 9 | import { Key } from '../types.js' 10 | 11 | interface StoredKeyPair { 12 | publicKey: Buffer 13 | secretKey: Buffer|undefined 14 | } 15 | 16 | export class Storage extends Resource { 17 | corestore: Corestore 18 | constructor (public basePath: string) { 19 | super() 20 | this.basePath = path.resolve(basePath) 21 | this.corestore = new Corestore(this._getCorestoreOpts()) 22 | } 23 | 24 | protected _getCorestoreOpts (): any { 25 | // return this.basePath 26 | return (name: string) => { 27 | return raf(name, { directory: this.basePath }) 28 | } 29 | } 30 | 31 | async _open () { 32 | // await this.corestore.ready() 33 | } 34 | 35 | async _close () { 36 | await this.corestore.close() 37 | } 38 | 39 | async getHypercore (key: Key|string): Promise { 40 | let c 41 | if (typeof key === 'string') { 42 | c = this.corestore.get({name: key}) 43 | } else { 44 | c = this.corestore.get(key) 45 | } 46 | await c.ready() 47 | return c 48 | } 49 | 50 | async createHypercore (): Promise { 51 | return await this.getHypercore(genName()) 52 | } 53 | } 54 | 55 | export class StorageInMemory extends Storage { 56 | constructor () { 57 | super('') 58 | } 59 | 60 | protected _getCorestoreOpts (): any { 61 | return ram 62 | } 63 | } 64 | 65 | let lastNameName = 0 66 | function genName () { 67 | let num = Date.now() 68 | while (num <= lastNameName) { 69 | num++ 70 | } 71 | lastNameName = num 72 | return String(num) 73 | } -------------------------------------------------------------------------------- /src/core/testing/executor.ts: -------------------------------------------------------------------------------- 1 | import { Database } from '../database.js' 2 | import { ContractExecutor } from '../executor.js' 3 | import { OpLog } from '../log.js' 4 | import { IndexBatchEntry, keyToStr, ExecutorBehavior } from '../../types.js' 5 | import { AckSchema, genAckPath } from '../../schemas.js' 6 | 7 | export class TestContractExecutor extends ContractExecutor { 8 | private _testingCounter = 0 9 | 10 | constructor (public db: Database, public behavior: ExecutorBehavior) { 11 | super(db) 12 | } 13 | 14 | protected async _executeOp (log: OpLog, seq: number, opValue: any) { 15 | if (this.behavior === ExecutorBehavior.TEST_PROCESS_OP_MULTIPLE_TIMES) { 16 | await super._executeOp(log, seq, opValue) 17 | await super._executeOp(log, seq, opValue) 18 | } else if (this.behavior === ExecutorBehavior.TEST_SKIP_OPS) { 19 | if (++this._testingCounter % 2 === 0) { 20 | // skip 21 | } else { 22 | await super._executeOp(log, seq, opValue) 23 | } 24 | 
} else if (this.behavior === ExecutorBehavior.TEST_WRONG_OP_MUTATIONS) { 25 | return this._executeOpWrongMutations(log, seq, opValue) 26 | } else { 27 | await super._executeOp(log, seq, opValue) 28 | } 29 | } 30 | 31 | async _executeOpWrongMutations (log: OpLog, seq: number, opValue: any) { 32 | const ack: AckSchema = { 33 | success: true, 34 | error: undefined, 35 | origin: keyToStr(log.pubkey), 36 | seq, 37 | ts: Date.now(), 38 | metadata: undefined, 39 | numChanges: 0 40 | } 41 | const batch: IndexBatchEntry[] = [ 42 | { 43 | type: 'put', 44 | path: genAckPath(log.pubkey, seq), 45 | value: ack 46 | }, 47 | { 48 | type: 'put', 49 | path: '/wrong', 50 | value: {bad: 'data'} 51 | } 52 | ] 53 | await this.db._executeApplyBatch(batch) 54 | this._putLastExecutedSeq(log, seq) 55 | this.emit('op-executed', log, seq, opValue) 56 | } 57 | } -------------------------------------------------------------------------------- /src/core/testing/local-dht.ts: -------------------------------------------------------------------------------- 1 | // @ts-ignore no types available 2 | import HyperDHT from '@hyperswarm/dht' 3 | 4 | const bootstrappers: any = [] 5 | const bootstrap: any = [] 6 | const nodes: any = [] 7 | 8 | export async function getOrCreateLocalDHT () { 9 | while (bootstrappers.length < 3) { 10 | bootstrappers.push(new HyperDHT({ ephemeral: true, bootstrap: [] })) 11 | } 12 | 13 | for (const node of bootstrappers) { 14 | await node.ready() 15 | bootstrap.push({ host: '127.0.0.1', port: node.address().port }) 16 | } 17 | 18 | while (nodes.length < 3) { 19 | const node = new HyperDHT({ ephemeral: false, bootstrap }) 20 | await node.ready() 21 | nodes.push(node) 22 | } 23 | 24 | return {bootstrap} 25 | } -------------------------------------------------------------------------------- /src/core/transactions.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import { Database } from './database.js' 3 | import { OpLog } from './log.js' 4 | import { BlockInclusionProof } from './inclusion-proofs.js' 5 | 6 | export class Operation { 7 | constructor (public oplog: OpLog, public proof: BlockInclusionProof, public value: any) { 8 | } 9 | 10 | async verifyInclusion () { 11 | await this.oplog.verifyBlockInclusionProof(this.proof) 12 | } 13 | } 14 | 15 | export class Transaction { 16 | constructor (public db: Database, public methodName: string, public params: any, public response: any, public ops: Operation[]) { 17 | } 18 | 19 | get txId () { 20 | if (this.ops[0]) { 21 | return `${this.methodName}:${this.ops[0].oplog.pubkey.toString('hex').slice(0, 8)}:${this.ops[0].proof.blockSeq}` 22 | } 23 | return '' 24 | } 25 | 26 | async verifyInclusion () { 27 | await Promise.all(this.ops.map(op => op.verifyInclusion())) 28 | } 29 | 30 | async isProcessed () { 31 | const acks = await this.fetchAcks() 32 | return acks.reduce((acc, ack) => acc && !!ack, true) 33 | } 34 | 35 | async whenProcessed (opts: {timeout?: number} = {}): Promise { 36 | let isTimedOut = false 37 | if (opts.timeout) { 38 | setTimeout(() => { isTimedOut = true }, opts.timeout).unref() 39 | } 40 | let backoff = 5 41 | while (true) { 42 | if (isTimedOut) throw new Error('Timed out') 43 | if (await this.isProcessed()) return 44 | await new Promise(r => setTimeout(r, backoff)) 45 | backoff *= 10 46 | } 47 | } 48 | 49 | async fetchAcks () { 50 | assert(this.db, 'DB not loaded') 51 | const c = this.db as Database 52 | return await Promise.all(this.ops.map(op => c._fetchOpAck(op))) 53 | 
} 54 | 55 | async fetchResults () { 56 | assert(this.db, 'DB not loaded') 57 | const c = this.db as Database 58 | return await Promise.all(this.ops.map(op => c._fetchOpResults(op))) 59 | } 60 | 61 | async toJSON (opts?: {includeValues: boolean}) { 62 | const results = opts?.includeValues ? await this.fetchResults() : await this.fetchAcks() 63 | const isProcessed = results.reduce((acc, ack) => acc && !!ack, true) 64 | return { 65 | vitraTransaction: 1, 66 | databasePubkey: this.db.pubkey.toString('hex'), 67 | isProcessed, 68 | call: { 69 | method: this.methodName, 70 | params: this.params, 71 | }, 72 | response: opts?.includeValues ? this.response : undefined, 73 | operations: this.ops.map((op, i) => { 74 | let result = undefined 75 | if (results[i]) { 76 | const r = results[i] as any 77 | result = {success: r.success, error: r.error, processedAt: r.ts, changes: r.changes} 78 | } 79 | return { 80 | value: opts?.includeValues ? op.value : undefined, 81 | proof: op.proof.toJSON(), 82 | result 83 | } 84 | }) 85 | } 86 | } 87 | 88 | static fromJSON (db: Database, obj: any): Transaction { 89 | assert(db?.opened, 'DB must be opened') 90 | assert(obj.vitraTransaction >= 1, 'Invalid schema version') 91 | assert(typeof obj.databasePubkey === 'string' && obj.databasePubkey.length === 64, 'Invalid databasePubkey') 92 | assert(Array.isArray(obj.operations), 'Invalid operations') 93 | const ops = obj.operations.map((opObj: any, i: number) => { 94 | assert(opObj.proof && typeof opObj.proof === 'object', `Invalid operations[${i}].proof`) 95 | const proof = BlockInclusionProof.fromJSON(opObj.proof) 96 | const oplog = db.getParticipant(proof.logPubkey) 97 | if (!oplog) throw new Error(`Database oplog not found: ${proof.logPubkey.toString('hex')}`) 98 | return new Operation(oplog, proof, opObj.value) 99 | }) 100 | return new Transaction(db, obj.call?.method || 'unknown', obj.call?.params || {}, obj.response, ops) 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/core/vm.ts: -------------------------------------------------------------------------------- 1 | import assert from 'assert' 2 | import path from 'path' 3 | import { fileURLToPath } from 'url' 4 | import { Sandbox as ConfineSandbox } from 'confine-sandbox' 5 | import { Database } from './database.js' 6 | import { keyToStr, keyToBuf } from '../types.js' 7 | import { Resource } from '../util/resource.js' 8 | import { ContractParseError, ContractRuntimeError } from './errors.js' 9 | 10 | const __dirname = path.dirname(fileURLToPath(import.meta.url)) 11 | const NODE_MODULES_PATH = path.join(__dirname, '..', '..', 'node_modules') 12 | 13 | export class VM extends Resource { 14 | restricted = false 15 | private sandbox: ConfineSandbox|undefined 16 | private cid: number|undefined 17 | private indexCheckoutSeq: number|undefined 18 | 19 | constructor (public db: Database, public source: string) { 20 | super() 21 | } 22 | 23 | async _open () { 24 | this.sandbox = new ConfineSandbox({ 25 | runtime: 'vitra-confine-runtime', 26 | globals: this._createVMGlobals(), 27 | nodeModulesPath: NODE_MODULES_PATH, 28 | pipeStdout: true, 29 | pipeStderr: true 30 | }) 31 | this.sandbox.on('container-runtime-error', evt => { 32 | this.emit('error', new ContractRuntimeError(evt.error.name, evt.error.message)) 33 | }) 34 | await this.sandbox.init() 35 | try { 36 | const {cid} = await this.sandbox.execContainer({ 37 | source: this.source, 38 | env: { 39 | indexPubkey: keyToStr(this.db.pubkey), 40 | oplogPubkey: 
this.db.localOplog ? keyToStr(this.db.localOplog.pubkey) : undefined 41 | } 42 | }) 43 | this.cid = cid 44 | } catch (e: any) { 45 | if (e.details?.compileError) { 46 | this.emit('error', new ContractParseError(e.errorName, e.message)) 47 | } else { 48 | throw e 49 | } 50 | } 51 | } 52 | 53 | async _close () { 54 | if (this.sandbox) { 55 | await this.sandbox.teardown() 56 | } 57 | this.sandbox = undefined 58 | this.cid = undefined 59 | } 60 | 61 | async contractCall (methodName: string, params: Record): Promise { 62 | assert(!!this.sandbox, 'Contract VM not initialized') 63 | assert(!!this.cid, 'Contract VM not initialized') 64 | try { 65 | return await this.sandbox.handleAPICall(this.cid, methodName, [params]) 66 | } catch (e: any) { 67 | if (ContractRuntimeError.isa(e.errorName)) { 68 | throw new ContractRuntimeError(e.errorName, e.message) 69 | } else { 70 | throw e 71 | } 72 | } 73 | } 74 | 75 | async contractProcess (op: any): Promise { 76 | assert(!!this.sandbox, 'Contract VM not initialized') 77 | assert(!!this.cid, 'Contract VM not initialized') 78 | try { 79 | return await this.sandbox.handleAPICall(this.cid, 'process', [op]) 80 | } catch (e: any) { 81 | if (ContractRuntimeError.isa(e.errorName)) { 82 | throw new ContractRuntimeError(e.errorName, e.message) 83 | } else { 84 | throw e 85 | } 86 | } 87 | } 88 | 89 | async contractApply (op: any, ack: any): Promise { 90 | assert(!!this.sandbox, 'Contract VM not initialized') 91 | assert(!!this.cid, 'Contract VM not initialized') 92 | try { 93 | return await this.sandbox.handleAPICall(this.cid, 'apply', [op, ack]) 94 | } catch (e: any) { 95 | if (ContractRuntimeError.isa(e.errorName)) { 96 | throw new ContractRuntimeError(e.errorName, e.message) 97 | } else { 98 | throw e 99 | } 100 | } 101 | } 102 | 103 | async restrict () { 104 | assert(!!this.sandbox, 'Contract VM not initialized') 105 | assert(!!this.cid, 'Contract VM not initialized') 106 | if (this.restricted) return 107 | await this.sandbox.configContainer({cid: this.cid, opts: {restricted: true}}) 108 | this.restricted = true 109 | } 110 | 111 | async unrestrict () { 112 | assert(!!this.sandbox, 'Contract VM not initialized') 113 | assert(!!this.cid, 'Contract VM not initialized') 114 | if (!this.restricted) return 115 | await this.sandbox.configContainer({cid: this.cid, opts: {restricted: false}}) 116 | this.restricted = false 117 | } 118 | 119 | checkoutIndexAt (seq: number|undefined) { 120 | this.indexCheckoutSeq = seq 121 | } 122 | 123 | private _createVMGlobals (): any { 124 | return { 125 | console: { 126 | log: (...args: any[]) => this.emit('log', 'log', args), 127 | debug: (...args: any[]) => this.emit('log', 'debug', args), 128 | info: (...args: any[]) => this.emit('log', 'info', args), 129 | warn: (...args: any[]) => this.emit('log', 'warn', args), 130 | error: (...args: any[]) => this.emit('log', 'error', args) 131 | }, 132 | __internal__: { 133 | contract: { 134 | indexList: async (_pubkey: string, prefix: string, opts?: any) => { 135 | return await this.db.index.list(prefix, opts, {checkout: this.indexCheckoutSeq}) 136 | }, 137 | indexGet: async (_pubkey: string, key: string) => { 138 | return await this.db.index.get(key, {checkout: this.indexCheckoutSeq}) 139 | }, 140 | oplogGetLength: (pubkey: string) => { 141 | const pubkeyBuf = keyToBuf(pubkey) 142 | const oplog = this.db.oplogs.find(item => item.pubkey.equals(pubkeyBuf)) 143 | if (oplog) return oplog.length 144 | throw new Error(`OpLog is not a participant (key=${pubkey})`) 145 | }, 146 | oplogGet: async 
(pubkey: string, seq: number) => { 147 | const pubkeyBuf = keyToBuf(pubkey) 148 | const oplog = this.db.oplogs.find(item => item.pubkey.equals(pubkeyBuf)) 149 | if (oplog) return await oplog.get(seq) 150 | throw new Error(`OpLog is not a participant (key=${pubkey})`) 151 | } 152 | } 153 | } 154 | } 155 | } 156 | } -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | export * from './types.js' 2 | export * from './core/errors.js' 3 | export * from './core/transactions.js' 4 | export * from './core/log.js' 5 | export * from './core/storage.js' 6 | export * from './core/vm.js' 7 | export * from './core/executor.js' 8 | export * from './core/monitor.js' 9 | export * from './core/inclusion-proofs.js' 10 | export * from './core/fraud-proofs.js' 11 | export * from './core/database.js' -------------------------------------------------------------------------------- /src/schemas.ts: -------------------------------------------------------------------------------- 1 | import { Key, keyToStr } from './types.js' 2 | 3 | export const CONTRACT_SOURCE_PATH = '/.sys/contract/source' 4 | export const PARTICIPANT_PATH_PREFIX = '/.sys/inputs/' 5 | export const genParticipantPath = (pubkey: Key) => `${PARTICIPANT_PATH_PREFIX}${keyToStr(pubkey)}` 6 | export const ACK_PATH_PREFIX = '/.sys/acks/' 7 | export const GENESIS_ACK_PATH = '/.sys/acks/genesis' 8 | export const genAckPath = (pubkey: Key, seq: number) => `${ACK_PATH_PREFIX}${keyToStr(pubkey)}/${String(seq).padStart(15, '0')}` 9 | 10 | export interface InputSchema { 11 | pubkey: Buffer 12 | active: boolean 13 | } 14 | 15 | export interface AckSchema { 16 | success: boolean|undefined 17 | error: string|undefined 18 | origin: string 19 | seq: number 20 | ts: number 21 | metadata: any 22 | numChanges: number 23 | } -------------------------------------------------------------------------------- /src/server/config.ts: -------------------------------------------------------------------------------- 1 | import { keyToBuf, keyToStr } from '../types.js' 2 | 3 | export interface ConfigValues { 4 | pubkey: Buffer 5 | createdOplogPubkeys: Buffer[] 6 | monitor: boolean 7 | } 8 | 9 | export class Config { 10 | pubkey: Buffer 11 | createdOplogPubkeys: Buffer[] 12 | monitor: boolean 13 | 14 | constructor (values: ConfigValues) { 15 | this.pubkey = values.pubkey 16 | this.createdOplogPubkeys = values.createdOplogPubkeys 17 | this.monitor = values.monitor 18 | } 19 | 20 | toJSON () { 21 | return { 22 | vitraConfig: 1, 23 | pubkey: keyToStr(this.pubkey), 24 | createdOplogPubkeys: this.createdOplogPubkeys.map(buf => keyToStr(buf)), 25 | monitor: this.monitor 26 | } 27 | } 28 | 29 | static fromJSON (obj: any): Config { 30 | const pubkey = keyToBuf(obj.pubkey) 31 | const createdOplogPubkeys = Array.isArray(obj.createdOplogPubkeys) ? obj.createdOplogPubkeys.map((str: string) => keyToBuf(str)) : [] 32 | const monitor = typeof obj.monitor === 'boolean' ? 
obj.monitor : false
33 |     return new Config({pubkey, createdOplogPubkeys, monitor})
34 |   }
35 | }
--------------------------------------------------------------------------------
/src/server/data-directory.ts:
--------------------------------------------------------------------------------
1 | import { promises as fsp } from 'fs'
2 | import { join } from 'path'
3 | import { Config } from './config.js'
4 | import { FraudProof } from '../core/fraud-proofs.js'
5 | import { Transaction } from '../core/transactions.js'
6 | import EventEmitter from 'events'
7 |
8 | export interface DataDirectoryInfo {
9 |   exists: boolean
10 |   testSandbox: boolean
11 |   config?: Config
12 | }
13 |
14 | export interface FraudWatcher extends EventEmitter {
15 |   close(): void
16 | }
17 |
18 | export class DataDirectory {
19 |   constructor (public path: string) {
20 |   }
21 |
22 |   get configFilePath () {
23 |     return join(this.path, 'vitra.json')
24 |   }
25 |
26 |   get socketFilePath () {
27 |     return join(this.path, `server.sock`)
28 |   }
29 |
30 |   get coresPath () {
31 |     return join(this.path, 'cores')
32 |   }
33 |
34 |   get transactionsPath () {
35 |     return join(this.path, 'tx')
36 |   }
37 |
38 |   transactionFilePath (tx: Transaction|string) {
39 |     return join(this.transactionsPath, `${tx instanceof Transaction ? tx.txId : tx}.json`)
40 |   }
41 |
42 |   get fraudsPath () {
43 |     return join(this.path, 'fraud')
44 |   }
45 |
46 |   fraudFilePath (fraudId: string) {
47 |     return join(this.fraudsPath, `${fraudId}.json`)
48 |   }
49 |
50 |   async info (): Promise<DataDirectoryInfo> {
51 |     const st = await fsp.stat(this.path).catch(e => undefined)
52 |     if (!st?.isDirectory()) {
53 |       return {
54 |         exists: false,
55 |         testSandbox: false
56 |       }
57 |     }
58 |     const config = await this.readConfigFile().catch(e => undefined)
59 |     return {
60 |       exists: !!config,
61 |       testSandbox: false,
62 |       config
63 |     }
64 |   }
65 |
66 |   async destroy () {
67 |     await fsp.rm(this.path, {recursive: true})
68 |   }
69 |
70 |   async readConfigFile (): Promise<Config> {
71 |     const obj = JSON.parse(await fsp.readFile(this.configFilePath, 'utf-8'))
72 |     return Config.fromJSON(obj)
73 |   }
74 |
75 |   async writeConfigFile (cfg: Config) {
76 |     await fsp.writeFile(this.configFilePath, JSON.stringify(cfg.toJSON(), null, 2), 'utf-8')
77 |   }
78 |
79 |   async trackTransaction (tx: Transaction) {
80 |     if (!await this.writeTransaction(tx)) {
81 |       await tx.whenProcessed()
82 |       await this.writeTransaction(tx)
83 |     }
84 |   }
85 |
86 |   async writeTransaction (tx: Transaction): Promise<boolean> {
87 |     const filepath = this.transactionFilePath(tx)
88 |     const obj = await tx.toJSON({includeValues: true})
89 |     await fsp.mkdir(this.transactionsPath, {recursive: true}).catch(_ => undefined)
90 |     await fsp.writeFile(filepath, JSON.stringify(obj, null, 2))
91 |     return obj.isProcessed
92 |   }
93 |
94 |   async listTrackedTxIds (): Promise<string[]> {
95 |     const names = await fsp.readdir(this.transactionsPath).catch(_ => [])
96 |     return names.filter(name => name.endsWith('.json')).map(name => name.slice(0, name.length - 5))
97 |   }
98 |
99 |   async readTrackedTx (txId: string): Promise<any> {
100 |     try {
101 |       return JSON.parse(await fsp.readFile(this.transactionFilePath(txId), 'utf-8'))
102 |     } catch (e) {
103 |       return undefined
104 |     }
105 |   }
106 |
107 |   async watchFrauds (): Promise<FraudWatcher> {
108 |     await fsp.mkdir(this.fraudsPath, {recursive: true}).catch(_ => undefined)
109 |     return new FraudFolderWatcher(this.fraudsPath)
110 |   }
111 |
112 |   async writeFraud (fraudId: string, fraud: FraudProof) {
113 |     const filepath = this.fraudFilePath(fraudId)
114 |     const obj
= fraud.toJSON() 115 | await fsp.mkdir(this.fraudsPath, {recursive: true}).catch(_ => undefined) 116 | await fsp.writeFile(filepath, JSON.stringify(obj, null, 2)) 117 | } 118 | 119 | async listTrackedFraudIds (): Promise { 120 | const names = await fsp.readdir(this.fraudsPath).catch(_ => []) 121 | return names.filter(name => name.endsWith('.json')).map(name => name.slice(0, name.length - 5)) 122 | } 123 | 124 | async readTrackedFraud (fraudId: string): Promise { 125 | try { 126 | return JSON.parse(await fsp.readFile(this.fraudFilePath(fraudId), 'utf-8')) 127 | } catch (e) { 128 | return undefined 129 | } 130 | } 131 | } 132 | 133 | export class TestingSandboxDataDirectory extends DataDirectory { 134 | private txs: Map = new Map() 135 | private frauds: Map = new Map() 136 | private fraudEmitter: TestSandboxFraudWatcher|undefined 137 | private cfg: Config|undefined 138 | 139 | constructor () { 140 | super('/tmp/test-sandbox') 141 | } 142 | 143 | async info (): Promise { 144 | return { 145 | exists: true, 146 | testSandbox: true 147 | } 148 | } 149 | 150 | async destroy () { 151 | // do nothing 152 | } 153 | 154 | async readConfigFile (): Promise { 155 | return this.cfg as Config 156 | } 157 | 158 | async writeConfigFile (cfg: Config) { 159 | this.cfg = cfg 160 | } 161 | 162 | async trackTransaction (tx: Transaction) { 163 | this.txs.set(tx.txId, tx) 164 | } 165 | 166 | async writeTransaction (tx: Transaction): Promise { 167 | // do nothing 168 | return true 169 | } 170 | 171 | async listTrackedTxIds (): Promise { 172 | return Array.from(this.txs.keys()) 173 | } 174 | 175 | async readTrackedTx (txId: string): Promise { 176 | return this.txs.get(txId)?.toJSON() 177 | } 178 | 179 | async watchFrauds (): Promise { 180 | this.fraudEmitter = new TestSandboxFraudWatcher() 181 | return this.fraudEmitter 182 | } 183 | 184 | async writeFraud (fraudId: string, fraud: FraudProof) { 185 | this.frauds.set(fraudId, fraud) 186 | this.fraudEmitter?.emit('frauds', Array.from(this.frauds.keys())) 187 | } 188 | 189 | async listTrackedFraudIds (): Promise { 190 | return Array.from(this.frauds.keys()) 191 | } 192 | 193 | async readTrackedFraud (fraudId: string): Promise { 194 | return this.frauds.get(fraudId)?.toJSON() 195 | } 196 | } 197 | 198 | class FraudFolderWatcher extends EventEmitter { 199 | private watchAbort: AbortController 200 | 201 | constructor (public path: string) { 202 | super() 203 | this.watchAbort = new AbortController() 204 | const watcher = fsp.watch(path, {persistent: false, signal: this.watchAbort.signal}) 205 | this.readAndEmit() 206 | ;(async () => { 207 | try { 208 | for await (const event of watcher) { 209 | this.readAndEmit() 210 | } 211 | } catch (e: any) { 212 | if (e.name !== 'AbortError') this.emit('error', e) 213 | } 214 | })() 215 | } 216 | 217 | private async readAndEmit () { 218 | const names = await fsp.readdir(this.path).catch(_ => []) 219 | if (names.length > 0) { 220 | this.emit('frauds', names.filter(name => name.endsWith('.json')).map(name => name.slice(0, name.length - 5))) 221 | } 222 | } 223 | 224 | async close () { 225 | this.watchAbort.abort() 226 | } 227 | } 228 | 229 | class TestSandboxFraudWatcher extends EventEmitter { 230 | close() {} 231 | } -------------------------------------------------------------------------------- /src/server/process.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { join, dirname } from 'path' 3 | import { fileURLToPath } from 'url' 4 | import childProcess from 
'child_process' 5 | import { DataDirectory } from './data-directory.js' 6 | import { Server } from './server.js' 7 | import { bindServerSocket } from './rpc.js' 8 | 9 | const __dirname = join(dirname(fileURLToPath(import.meta.url))) 10 | const BIN_PATH = join(__dirname, '..', 'cli.js') 11 | 12 | export async function init (path: string) { 13 | try { 14 | acquirePidFile(join(path, 'server.pid')) 15 | } catch (e) { 16 | console.error('A server process is already active') 17 | process.exit(100) 18 | } 19 | 20 | const dir = new DataDirectory(path) 21 | const info = await dir.info() 22 | if (!info.exists) { 23 | console.error('No database has been configured at this directory') 24 | process.exit(101) 25 | } 26 | 27 | const server = await Server.load(dir) 28 | const netServer = await bindServerSocket(server) 29 | console.log('Initialized at', (new Date()).toLocaleDateString()) 30 | server.db.on('error', e => console.error(e)) 31 | process.on('SIGINT', async () => { 32 | netServer.close() 33 | await server.close() 34 | console.log('Shut down at', (new Date()).toLocaleDateString()) 35 | process.exit(0) 36 | }) 37 | return server 38 | } 39 | 40 | export function isActive (path: string) { 41 | return fs.existsSync(join(path, 'server.pid')) 42 | } 43 | 44 | export async function spawn (path: string) { 45 | const p = childProcess.spawn(process.execPath, [BIN_PATH, 'bg', path], { 46 | detached: true 47 | }) 48 | p.stdout.pipe(fs.createWriteStream(join(path, 'server.log'))) 49 | p.stderr.pipe(fs.createWriteStream(join(path, 'server.err'))) 50 | await new Promise((resolve, reject) => { 51 | p.on('spawn', resolve) 52 | p.on('error', reject) 53 | p.on('close', reject) 54 | }) 55 | await whenSocketFileExists(path) 56 | } 57 | 58 | export async function kill (path: string) { 59 | const pidStr = await fs.promises.readFile(join(path, 'server.pid'), 'utf-8') 60 | const pid = Number(pidStr) 61 | if (typeof pid !== 'number') throw new Error('Unable to read server pidfile') 62 | process.kill(pid, 'SIGINT') 63 | await whenIsntActive(path) 64 | } 65 | 66 | function acquirePidFile (path: string) { 67 | const pidBuf = Buffer.from(`${process.pid}\n`, 'utf-8') 68 | var fd = fs.openSync(path, 'wx') 69 | var offset = 0 70 | while (offset < pidBuf.length) { 71 | offset += fs.writeSync(fd, pidBuf, offset, pidBuf.length - offset) 72 | } 73 | fs.closeSync(fd) 74 | process.on('exit', () => fs.unlinkSync(path)) 75 | } 76 | 77 | async function whenSocketFileExists (path: string) { 78 | const sockPath = join(path, 'server.sock') 79 | const timeout = Date.now() + 15e3 80 | while (Date.now() < timeout) { 81 | try { 82 | const st = await fs.promises.stat(sockPath) 83 | if (st) return 84 | } catch (e) { 85 | // ignore 86 | } 87 | await new Promise(r => setTimeout(r, 1e3)) 88 | } 89 | throw new Error('Server failed to start') 90 | } 91 | 92 | async function whenIsntActive (path: string) { 93 | const pidPath = join(path, 'server.pid') 94 | const timeout = Date.now() + 15e3 95 | while (Date.now() < timeout) { 96 | try { 97 | const st = await fs.promises.stat(pidPath) 98 | if (!st) return 99 | } catch (e) { 100 | return 101 | } 102 | await new Promise(r => setTimeout(r, 1e3)) 103 | } 104 | throw new Error('Server failed to close') 105 | } -------------------------------------------------------------------------------- /src/server/rpc.ts: -------------------------------------------------------------------------------- 1 | import util from 'util' 2 | import net from 'net' 3 | import frame from 'frame-stream' 4 | import * as jsonrpc from 
'jsonrpc-lite'
5 | import * as msgpackr from 'msgpackr'
6 | import { Server } from './server.js'
7 | import { Transaction } from '../core/transactions.js'
8 | import { Log } from '../core/log.js'
9 | import { FraudProof } from '../core/fraud-proofs.js'
10 | import { listExportedMethods } from '../util/parser.js'
11 | import { keyToBuf, keyToStr } from '../types.js'
12 |
13 | interface LogInfo {
14 |   label: string
15 |   pubkey: string
16 |   length: number
17 |   writable: boolean
18 | }
19 |
20 | interface GetInfoResponse {
21 |   logs: LogInfo[]
22 |   detachedOplogPubkeys: string[]
23 |   numPeers: number
24 | }
25 |
26 | interface GetSourceResponse {
27 |   source: string
28 | }
29 |
30 | interface ListMethodsResponse {
31 |   methods: {name: string, args: string}[]
32 | }
33 |
34 | interface LogGetHistoryParams {
35 |   pubkey: string
36 | }
37 |
38 | interface LogGetHistoryResponse {
39 |   isIndex: boolean
40 |   entries: any[]
41 | }
42 |
43 | interface IndexListParams {
44 |   path: string
45 | }
46 |
47 | interface IndexListResponse {
48 |   entries: any[]
49 | }
50 |
51 | interface IndexGetParams {
52 |   path: string
53 | }
54 |
55 | type IndexGetResponse = any
56 |
57 | interface IndexDangerousWriteParams {
58 |   type: 'put'|'del'
59 |   path: string
60 |   value: any
61 | }
62 |
63 | interface TxListResponse {
64 |   txIds: string[]
65 | }
66 |
67 | interface TxGetParams {
68 |   txId: string
69 | }
70 |
71 | interface TxGetResponse {
72 |   vitraTransaction: number
73 |   databasePubkey: string
74 |   isProcessed: boolean
75 |   call: {
76 |     method: string
77 |     params: any[]
78 |   }
79 |   response: any
80 |   operations: {
81 |     value: any,
82 |     proof: {
83 |       logPubkey: string,
84 |       blockSeq: number,
85 |       rootHashAtBlock: string,
86 |       rootHashSignature: string
87 |     },
88 |     result: any
89 |   }[]
90 | }
91 |
92 | interface TxVerifyParams {
93 |   txId: string
94 | }
95 |
96 | interface FraudListResponse {
97 |   fraudIds: string[]
98 | }
99 |
100 | interface FraudGetParams {
101 |   fraudId: string
102 | }
103 |
104 | interface TxVerifyResponse {
105 |   success: boolean
106 |   fraudId?: string
107 |   fraudDescription?: string
108 | }
109 |
110 | interface DbCallParams {
111 |   method: string
112 |   args: any
113 | }
114 |
115 | interface DbCallResponse {
116 |   txId: string|undefined
117 |   response: any
118 | }
119 |
120 | interface DbSyncParams {
121 |   fullHistory?: boolean
122 | }
123 |
124 | interface DbVerifyResponse {
125 |   success: boolean
126 |   fraudId?: string
127 |   fraudDescription?: string
128 | }
129 |
130 | interface DbCreateOplogResponse {
131 |   pubkey: string
132 | }
133 |
134 | interface DbDeleteOplogParams {
135 |   pubkey: string
136 | }
137 |
138 | export interface Client {
139 |   getInfo (): Promise<GetInfoResponse>
140 |   getSource (): Promise<GetSourceResponse>
141 |   listMethods (): Promise<ListMethodsResponse>
142 |   logGetHistory (params: LogGetHistoryParams): Promise<LogGetHistoryResponse>
143 |   indexList (params: IndexListParams): Promise<IndexListResponse>
144 |   indexGet (params: IndexGetParams): Promise<IndexGetResponse>
145 |   indexDangerousWrite (params: IndexDangerousWriteParams): Promise<string>
146 |   txList (): Promise<TxListResponse>
147 |   txGet (params: TxGetParams): Promise<TxGetResponse>
148 |   txVerify (params: TxVerifyParams): Promise<TxVerifyResponse>
149 |   fraudList (): Promise<FraudListResponse>
150 |   fraudGet (params: FraudGetParams): Promise<any>
151 |   dbCall (params: DbCallParams): Promise<DbCallResponse>
152 |   dbVerify (): Promise<DbVerifyResponse>
153 |   dbSync (params: DbSyncParams): Promise<void>
154 |   dbStartMonitor (): Promise<void>
155 |   dbStopMonitor (): Promise<void>
156 |   dbCreateOplog (): Promise<DbCreateOplogResponse>
157 |   dbDeleteOplog (params: DbDeleteOplogParams): Promise<void>
158 | }
159 |
160 | function createClient (handler: Function): Client {
161 |   let id
= 1 162 | const request = async (method: string, params: any = undefined): Promise => { 163 | const req = jsonrpc.request(id++, method, [params]) 164 | const parsed = jsonrpc.parseObject(await handler(req)) 165 | if (parsed.type === 'error') { 166 | throw new Error(parsed.payload.error.message) 167 | } else if (parsed.type === 'success') { 168 | return parsed.payload.result 169 | } 170 | } 171 | 172 | return { 173 | getInfo (): Promise { 174 | return request('getInfo') 175 | }, 176 | 177 | getSource (): Promise { 178 | return request('getSource') 179 | }, 180 | 181 | listMethods (): Promise { 182 | return request('listMethods') 183 | }, 184 | 185 | logGetHistory (params: LogGetHistoryParams): Promise { 186 | return request('logGetHistory', params) 187 | }, 188 | 189 | indexList (params: IndexListParams): Promise { 190 | return request('indexList', params) 191 | }, 192 | 193 | indexGet (params: IndexGetParams): Promise { 194 | return request('indexGet', params) 195 | }, 196 | 197 | indexDangerousWrite (params: IndexDangerousWriteParams): Promise { 198 | return request('indexDangerousWrite', params) 199 | }, 200 | 201 | txList (): Promise { 202 | return request('txList') 203 | }, 204 | 205 | txGet (params: TxGetParams): Promise { 206 | return request('txGet', params) 207 | }, 208 | 209 | txVerify (params: TxVerifyParams): Promise { 210 | return request('txVerify', params) 211 | }, 212 | 213 | fraudList (): Promise { 214 | return request('fraudList') 215 | }, 216 | 217 | fraudGet (params: FraudGetParams): Promise { 218 | return request('fraudGet', params) 219 | }, 220 | 221 | dbCall (params: DbCallParams): Promise { 222 | return request('dbCall', params) 223 | }, 224 | 225 | dbVerify (): Promise { 226 | return request('dbVerify') 227 | }, 228 | 229 | dbSync (params: DbSyncParams): Promise { 230 | return request('dbSync', params) 231 | }, 232 | 233 | dbStartMonitor (): Promise { 234 | return request('dbStartMonitor') 235 | }, 236 | 237 | dbStopMonitor (): Promise { 238 | return request('dbStopMonitor') 239 | }, 240 | 241 | dbCreateOplog (): Promise { 242 | return request('dbCreateOplog') 243 | }, 244 | 245 | dbDeleteOplog (params: DbDeleteOplogParams): Promise { 246 | return request('dbDeleteOplog', params) 247 | } 248 | } 249 | } 250 | 251 | function createServer (server: Server) { 252 | const handlers: Record = { 253 | getInfo (): GetInfoResponse { 254 | const logs: LogInfo[] = [] 255 | const capture = (label: string, log: Log) => { 256 | logs.push({label, pubkey: log.pubkey.toString('hex'), length: log.length, writable: log.writable}) 257 | } 258 | capture('Index', server.db.index) 259 | for (let i = 0; i < server.db.oplogs.length; i++) { 260 | capture(`Oplog ${i}`, server.db.oplogs.at(i) as Log) 261 | } 262 | const detachedOplogPubkeys = [] 263 | for (const pubkey of server.cfg.createdOplogPubkeys) { 264 | if (!server.db.isOplogParticipant(pubkey)) { 265 | detachedOplogPubkeys.push(keyToStr(pubkey)) 266 | } 267 | } 268 | return {numPeers: server.db.numPeers, detachedOplogPubkeys, logs} 269 | }, 270 | 271 | async getSource (params: any) { 272 | const source = await server.db._readContractCode() 273 | return {source} 274 | }, 275 | 276 | async listMethods (params: any) { 277 | const source = await server.db._readContractCode() 278 | return { 279 | methods: listExportedMethods(source) 280 | } 281 | }, 282 | 283 | async logGetHistory (params: any) { 284 | let pubkeyBuf: Buffer 285 | try { 286 | if (!params?.pubkey) pubkeyBuf = server.db.index.pubkey 287 | else pubkeyBuf = 
keyToBuf(params?.pubkey) 288 | } catch (e: any) { 289 | throw new Error(`Invalid public key: ${e.message}`) 290 | } 291 | 292 | if (server.db.index.pubkey.equals(pubkeyBuf)) { 293 | const entries = [] 294 | for await (const entry of server.db.index.history()) { 295 | entries.push(entry) 296 | } 297 | return {isIndex: true, entries} 298 | } else { 299 | const oplog = server.db.oplogs.find(item => item.pubkey.equals(pubkeyBuf as Buffer)) 300 | if (!oplog) throw new Error(`Log not found`) 301 | 302 | const entries = [] 303 | for (let i = 0; i < oplog.length; i++) { 304 | entries.push((await oplog.get(i))?.value) 305 | } 306 | return {isIndex: false, entries} 307 | } 308 | }, 309 | 310 | async indexList (params: any) { 311 | const path = params?.path || '/' 312 | const entries = [] 313 | for (const entry of await server.db.index.list(path)) { 314 | entries.push(entry) 315 | } 316 | return {entries} 317 | }, 318 | 319 | async indexGet (params: any) { 320 | const entry = await server.db.index.get(params?.path) 321 | if (!entry) throw new Error(`No entry found`) 322 | return entry 323 | }, 324 | 325 | async indexDangerousWrite (params: IndexDangerousWriteParams) { 326 | await server.db.index.dangerousBatch([params]) 327 | return `${params.path} written.` 328 | }, 329 | 330 | async txList (params: any) { 331 | return {txIds: await server.dir.listTrackedTxIds()} 332 | }, 333 | 334 | async txGet (params: any) { 335 | return await server.dir.readTrackedTx(params?.txId) 336 | }, 337 | 338 | async txVerify (params: any) { 339 | const txInfo = await server.dir.readTrackedTx(params?.txId) 340 | if (!txInfo) throw new Error(`No transaction data found`) 341 | const tx = Transaction.fromJSON(server.db, txInfo) 342 | try { 343 | await tx.verifyInclusion() 344 | return {success: true} 345 | } catch (e: any) { 346 | if (e instanceof FraudProof) { 347 | const fraudId = String(Date.now()) 348 | server.dir.writeFraud(fraudId, e) 349 | return { 350 | success: false, 351 | fraudId, 352 | fraudDescription: util.inspect(e) 353 | } 354 | } 355 | throw e 356 | } 357 | }, 358 | 359 | async fraudList (): Promise { 360 | return {fraudIds: await server.dir.listTrackedFraudIds()} 361 | }, 362 | 363 | async fraudGet (params: FraudGetParams): Promise { 364 | return await server.dir.readTrackedFraud(params?.fraudId) 365 | }, 366 | 367 | async dbCall (params: any) { 368 | let txId = undefined 369 | const tx = await server.db.call(params?.method, params?.args) 370 | if (tx.ops.length) { 371 | txId = tx.txId 372 | server.dir.trackTransaction(tx) 373 | } 374 | return { 375 | txId, 376 | response: tx.response 377 | } 378 | }, 379 | 380 | async dbVerify (): Promise { 381 | try { 382 | await server.db.verify() 383 | return {success: true} 384 | } catch (e: any) { 385 | if (e instanceof FraudProof) { 386 | const fraudId = String(Date.now()) 387 | server.dir.writeFraud(fraudId, e) 388 | return { 389 | success: false, 390 | fraudId, 391 | fraudDescription: util.inspect(e) 392 | } 393 | } 394 | throw e 395 | } 396 | }, 397 | 398 | async dbSync (params: DbSyncParams): Promise { 399 | if (params.fullHistory) { 400 | await server.db.syncFullHistory() 401 | } else { 402 | await server.db.syncLatest() 403 | } 404 | }, 405 | 406 | async dbStartMonitor (): Promise { 407 | await server.startMonitor() 408 | }, 409 | 410 | async dbStopMonitor (): Promise { 411 | await server.stopMonitor() 412 | }, 413 | 414 | async dbCreateOplog (): Promise { 415 | return await server.createOplog() 416 | }, 417 | 418 | async dbDeleteOplog (params: 
DbDeleteOplogParams): Promise<void> {
419 |       return await server.deleteOplog(params)
420 |     }
421 |   }
422 |
423 |   return async (reqbuf: Buffer): Promise<Buffer> => {
424 |     const parsed = jsonrpc.parseObject(msgpackr.unpack(reqbuf))
425 |     if (parsed.type === 'error') {
426 |       return msgpackr.pack(parsed.payload)
427 |     } else if (parsed.type === 'request') {
428 |       try {
429 |         const param = Array.isArray(parsed.payload.params) ? parsed.payload.params[0] : []
430 |         const res = await handlers[parsed.payload.method](param)
431 |         return msgpackr.pack(jsonrpc.success(parsed.payload.id, typeof res !== 'undefined' ? res : 0))
432 |       } catch (e: any) {
433 |         const msg = e[util.inspect.custom] ? util.inspect(e) : (e.message || e.toString())
434 |         const rpcErr = new jsonrpc.JsonRpcError(msg, e.code || -32000, e.data)
435 |         return msgpackr.pack(jsonrpc.error(parsed.payload.id, rpcErr))
436 |       }
437 |     } else {
438 |       throw new Error('Unhandled object type')
439 |     }
440 |   }
441 | }
442 |
443 | export function createLoopbackClient (server: Server): Client {
444 |   const handleRPC = createServer(server)
445 |   return createClient(async (req: jsonrpc.RequestObject) => {
446 |     return msgpackr.unpack(await handleRPC(msgpackr.pack(req) as Buffer) as Buffer)
447 |   })
448 | }
449 |
450 | export function bindServerSocket (server: Server) {
451 |   const handleRPC = createServer(server)
452 |   const sockPath = server.dir.socketFilePath
453 |   const sockServer = net.createServer((c) => {
454 |     const encode = frame.encode()
455 |     encode.pipe(c)
456 |     c.pipe(frame.decode()).on('data', async (buf: Buffer) => {
457 |       encode.write(await handleRPC(buf))
458 |     })
459 |   });
460 |   sockServer.listen(sockPath, () => {
461 |     console.log(`Listening on ${sockPath}`)
462 |   })
463 |   return sockServer
464 | }
465 |
466 | export async function connectServerSocket (sockPath: string) {
467 |   const socket = net.connect(sockPath)
468 |   await new Promise((resolve, reject) => {
469 |     socket.on('connect', resolve)
470 |     socket.on('error', reject)
471 |   })
472 |
473 |   const pending: Map<number, (response: any) => void> = new Map()
474 |   const encode = frame.encode()
475 |   encode.pipe(socket)
476 |   socket.pipe(frame.decode()).on('data', (buf: Buffer) => {
477 |     const obj = msgpackr.unpack(buf)
478 |     const r = pending.get(obj.id)
479 |     if (r) {
480 |       pending.delete(obj.id)
481 |       r(obj)
482 |     } else {
483 |       console.error('Received a response for a non-pending request', obj)
484 |     }
485 |   })
486 |   return createClient((req: jsonrpc.RequestObject) => {
487 |     encode.write(msgpackr.pack(req))
488 |     return new Promise(r => { pending.set(Number(req.id), r) })
489 |   })
490 | }
--------------------------------------------------------------------------------
/src/server/server.ts:
--------------------------------------------------------------------------------
1 | import { Resource } from '../util/resource.js'
2 | import { DataDirectory } from './data-directory.js'
3 | import { Config } from './config.js'
4 | import { Database } from '../core/database.js'
5 | import { OpLog } from '../core/log.js'
6 | import { ContractMonitor } from '../core/monitor.js'
7 | import { FraudProof } from '../core/fraud-proofs.js'
8 | import { keyToBuf, keyToStr } from '../types.js'
9 |
10 | export class Server extends Resource {
11 |   monitor: ContractMonitor|undefined
12 |
13 |   constructor (public cfg: Config, public dir: DataDirectory, public db: Database) {
14 |     super()
15 |   }
16 |
17 |   static async createNew (dir: DataDirectory, contractSource: string): Promise<Server> {
18 |     const db = await Database.create(dir.coresPath, {contract:
{source: contractSource}}) 19 | await db.swarm() 20 | const cfg = new Config({pubkey: db.pubkey, createdOplogPubkeys: [], monitor: false}) 21 | await dir.writeConfigFile(cfg) 22 | const server = new Server(cfg, dir, db) 23 | await server.open() 24 | return server 25 | } 26 | 27 | static async createFromExisting (dir: DataDirectory, pubkey: Buffer): Promise { 28 | const db = await Database.load(dir.coresPath, pubkey) 29 | await db.swarm() 30 | const cfg = new Config({pubkey: db.pubkey, createdOplogPubkeys: [], monitor: false}) 31 | await dir.writeConfigFile(cfg) 32 | const server = new Server(cfg, dir, db) 33 | await server.open() 34 | return server 35 | } 36 | 37 | static async load (dir: DataDirectory): Promise { 38 | const cfg = await dir.readConfigFile() 39 | const db = await Database.load(dir.coresPath, cfg.pubkey) 40 | await db.swarm() 41 | const server = new Server(cfg, dir, db) 42 | await server.open() 43 | return server 44 | } 45 | 46 | static async createTestSandbox (dir: DataDirectory, contractSource: string): Promise { 47 | const db = await Database.createSandbox({contract: {source: contractSource}}) 48 | const cfg = new Config({pubkey: db.pubkey, createdOplogPubkeys: [], monitor: false}) 49 | await dir.writeConfigFile(cfg) 50 | const server = new Server(cfg, dir, db) 51 | await server.open() 52 | return server 53 | } 54 | 55 | async _open () { 56 | if (this.cfg.monitor) { 57 | await this.startMonitor() 58 | } 59 | } 60 | 61 | async _close () { 62 | await this.db.close() 63 | } 64 | 65 | async startMonitor () { 66 | if (this.monitor) return 67 | this.monitor = await this.db.monitor() 68 | this.monitor.on('violation', e => { 69 | if (e instanceof FraudProof) { 70 | this.dir.writeFraud(String(Date.now()), e) 71 | } else { 72 | console.error(`An error occurred during monitoring:`) 73 | console.error(e) 74 | } 75 | }) 76 | if (!this.cfg.monitor) { 77 | this.cfg.monitor = true 78 | await this.dir.writeConfigFile(this.cfg) 79 | } 80 | } 81 | 82 | async stopMonitor () { 83 | if (this.monitor) { 84 | await this.monitor.close() 85 | this.monitor = undefined 86 | this.cfg.monitor = false 87 | await this.dir.writeConfigFile(this.cfg) 88 | } 89 | } 90 | 91 | async createOplog () { 92 | const log = await OpLog.create(this.db.storage) 93 | this.cfg.createdOplogPubkeys.push(log.pubkey) 94 | await this.dir.writeConfigFile(this.cfg) 95 | return {pubkey: keyToStr(log.pubkey)} 96 | } 97 | 98 | async deleteOplog ({pubkey}: {pubkey: string}) { 99 | const pubkeyBuf = keyToBuf(pubkey) 100 | if (this.db.isOplogParticipant(pubkeyBuf)) { 101 | throw new Error(`Cannot delete oplog: still a participant in the database`) 102 | } 103 | let i = this.cfg.createdOplogPubkeys.findIndex(buf => buf.equals(pubkeyBuf)) 104 | if (i !== -1) { 105 | this.cfg.createdOplogPubkeys.splice(i, 1) 106 | await this.dir.writeConfigFile(this.cfg) 107 | // TODO: delete the folder 108 | } 109 | } 110 | } -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import { AckSchema } from './schemas.js' 2 | import { Database } from './core/database.js' 3 | 4 | export type Key = string|Buffer 5 | 6 | export interface ContractCode { 7 | source: string 8 | } 9 | 10 | export enum ExecutorBehavior { 11 | DISABLED, 12 | TEST_PROCESS_OP_MULTIPLE_TIMES, 13 | TEST_SKIP_OPS, 14 | TEST_WRONG_OP_MUTATIONS 15 | } 16 | 17 | export interface DatabaseOpts { 18 | executorBehavior?: ExecutorBehavior 19 | } 20 | 21 | export 
interface DatabaseCreateOpts extends DatabaseOpts { 22 | contract: ContractCode 23 | } 24 | 25 | export interface SandboxDatabaseCreateOpts { 26 | from?: Database 27 | contract?: ContractCode 28 | } 29 | 30 | export interface IndexChange { 31 | type: 'put'|'del' 32 | seq: number 33 | path: string 34 | value: any 35 | } 36 | 37 | export interface OperationResults extends AckSchema { 38 | changes: IndexChange[] 39 | } 40 | 41 | export interface IndexBatchEntry { 42 | type: string 43 | path: string 44 | value?: any 45 | } 46 | 47 | export interface OpLogEntry { 48 | seq: number 49 | value: any 50 | } 51 | 52 | export interface IndexLogListOpts { 53 | reverse?: boolean 54 | offset?: number 55 | limit?: number 56 | } 57 | 58 | export interface IndexLogEntry { 59 | container: boolean 60 | seq: number|undefined 61 | path: string 62 | name: string 63 | value: any 64 | } 65 | 66 | export interface IndexHistoryOpts { 67 | live?: boolean 68 | reverse?: boolean 69 | gte?: number 70 | gt?: number 71 | lte?: number 72 | lt?: number 73 | limit?: number 74 | } 75 | 76 | export interface IndexHistoryEntry { 77 | type: string 78 | seq: number 79 | path: string 80 | name: string 81 | value: any 82 | } 83 | 84 | export interface BaseApiCallRes { 85 | response: any 86 | ops: any[] 87 | } 88 | 89 | export type ApplyActions = Record 90 | 91 | export function keyToBuf (key: Key) { 92 | if (Buffer.isBuffer(key)) { 93 | if (key.byteLength !== 32) { 94 | throw new Error(`Invalid key size (${key.byteLength}), must be 32 bytes`) 95 | } 96 | return key 97 | } else if (typeof key === 'string') { 98 | if (key.length !== 64) { 99 | throw new Error(`Invalid key size (${key.length}), must be a 64-character hex string`) 100 | } 101 | return Buffer.from(key, 'hex') 102 | } 103 | throw new Error(`Not a key: ${key}`) 104 | } 105 | 106 | export function keyToStr (key: Key) { 107 | if (Buffer.isBuffer(key)) { 108 | if (key.byteLength !== 32) { 109 | throw new Error(`Invalid key size (${key.byteLength}), must be 32 bytes`) 110 | } 111 | return key.toString('hex') 112 | } else if (typeof key === 'string') { 113 | if (key.length !== 64) { 114 | throw new Error(`Invalid key size (${key.length}), must be a 64-character hex string`) 115 | } 116 | return key 117 | } 118 | throw new Error(`Not a key: ${key}`) 119 | } -------------------------------------------------------------------------------- /src/util/async.ts: -------------------------------------------------------------------------------- 1 | export async function timeout (n: number, p: Promise): Promise { 2 | const to = new Promise((resolve, reject) => { 3 | setTimeout(() => reject(new Error('Timed out')), n) 4 | }) 5 | return await Promise.race([p, to]) 6 | } -------------------------------------------------------------------------------- /src/util/hyper.ts: -------------------------------------------------------------------------------- 1 | import Hyperbee from 'hyperbee' 2 | // @ts-ignore no types available -prf 3 | import { Node } from 'hyperbee/lib/messages.js' 4 | import * as msgpackr from 'msgpackr' 5 | import { IndexLogEntry } from '../types.js' 6 | 7 | const SEP = `\x00` 8 | const MIN = `\x00` 9 | const MAX = `\xff` 10 | 11 | export function pathToBeekey (path: string): string { 12 | return path.split('/').filter(Boolean).join(SEP) 13 | } 14 | 15 | export function beekeyToPath (key: string): string { 16 | return key.split(SEP).filter(Boolean).join('/') 17 | } 18 | 19 | export async function beeShallowList (bee: Hyperbee, path: string[]): Promise { 20 | const arr: 
IndexLogEntry[] = [] 21 | const pathlen = path && path.length > 0 ? path.length : 0 22 | let bot = pathlen > 0 ? `${path.join(SEP)}${SEP}${MIN}` : MIN 23 | const top = pathlen > 0 ? `${path.join(SEP)}${SEP}${MAX}` : MAX 24 | 25 | let lastItem: IndexLogEntry|undefined = undefined 26 | do { 27 | const item = await bee.peek({gt: bot, lt: top}) 28 | if (!item) return arr 29 | 30 | const itemPath = item.key.split(SEP).filter(Boolean) 31 | if (itemPath.length > pathlen + 1) { 32 | const containerPath = itemPath.slice(0, pathlen + 1) 33 | const containerPathStr = `/${containerPath.join('/')}` 34 | if (lastItem && lastItem.path === containerPathStr) { 35 | lastItem.container = true 36 | } else { 37 | arr.push({ 38 | container: true, 39 | seq: undefined, 40 | name: containerPath[containerPath.length - 1], 41 | path: containerPathStr, 42 | value: undefined 43 | }) 44 | } 45 | bot = `${containerPath.join(SEP)}${SEP}${MAX}` 46 | } else { 47 | arr.push({ 48 | container: false, 49 | seq: item.seq, 50 | name: itemPath[itemPath.length - 1], 51 | path: `/${itemPath.join('/')}`, 52 | value: item.value 53 | }) 54 | lastItem = arr[arr.length - 1] 55 | bot = itemPath.join(SEP) 56 | } 57 | } while (true) 58 | } 59 | -------------------------------------------------------------------------------- /src/util/lock.ts: -------------------------------------------------------------------------------- 1 | /* 2 | await-lock 3 | NOTE copied into here because it struggles with compilation and it's too small to bother with anything else 4 | 5 | The MIT License (MIT) 6 | 7 | Copyright (c) 2015-present James Ide 8 | */ 9 | 10 | /** 11 | * A mutex lock for coordination across async functions 12 | */ 13 | export class AwaitLock { 14 | _acquired: boolean 15 | _waitingResolvers: ((value: unknown) => void)[] 16 | 17 | constructor() { 18 | this._acquired = false; 19 | this._waitingResolvers = []; 20 | } 21 | /** 22 | * Whether the lock is currently acquired or not. Accessing this property does not affect the 23 | * status of the lock. 24 | */ 25 | get acquired() { 26 | return this._acquired; 27 | } 28 | /** 29 | * Acquires the lock, waiting if necessary for it to become free if it is already locked. The 30 | * returned promise is fulfilled once the lock is acquired. 31 | * 32 | * After acquiring the lock, you **must** call `release` when you are done with it. 33 | */ 34 | acquireAsync() { 35 | if (!this._acquired) { 36 | this._acquired = true; 37 | return Promise.resolve(); 38 | } 39 | return new Promise((resolve) => { 40 | this._waitingResolvers.push(resolve); 41 | }); 42 | } 43 | /** 44 | * Acquires the lock if it is free and otherwise returns immediately without waiting. Returns 45 | * `true` if the lock was free and is now acquired, and `false` otherwise, 46 | */ 47 | tryAcquire() { 48 | if (!this._acquired) { 49 | this._acquired = true; 50 | return true; 51 | } 52 | return false; 53 | } 54 | /** 55 | * Releases the lock and gives it to the next waiting acquirer, if there is one. Each acquirer 56 | * must release the lock exactly once. 
57 | */ 58 | release() { 59 | if (!this._acquired) { 60 | throw new Error(`Cannot release an unacquired lock`); 61 | } 62 | if (this._waitingResolvers.length > 0) { 63 | const resolve = this._waitingResolvers.shift(); 64 | if (resolve) { 65 | resolve(undefined); 66 | } 67 | } 68 | else { 69 | this._acquired = false; 70 | } 71 | } 72 | } 73 | 74 | // wraps await-lock in a simpler interface, with many possible locks 75 | interface LocksMap { 76 | [key: string]: AwaitLock 77 | } 78 | var locks: LocksMap = {} 79 | 80 | /** 81 | * Create a new lock 82 | * @example 83 | * var lock = require('./lock') 84 | * async function foo () { 85 | * var release = await lock('bar') 86 | * // ... 87 | * release() 88 | * } 89 | */ 90 | export default async function (key: string): Promise<() => void> { 91 | if (!(key in locks)) locks[key] = new AwaitLock() 92 | 93 | var lock = locks[key] 94 | await lock.acquireAsync() 95 | return lock.release.bind(lock) 96 | }; -------------------------------------------------------------------------------- /src/util/parser.ts: -------------------------------------------------------------------------------- 1 | import { parse } from '@babel/parser' 2 | 3 | export interface ExportedMethod { 4 | name: string 5 | args: string|undefined 6 | } 7 | 8 | export function listExportedMethods (source: string) { 9 | const parsed = parse(source, { 10 | sourceType: 'module', 11 | attachComment: false 12 | }) 13 | const methods: ExportedMethod[] = [] 14 | for (const node of parsed.program.body) { 15 | if (node.type !== 'ExportNamedDeclaration') continue 16 | if (node.declaration?.type !== 'FunctionDeclaration') continue 17 | 18 | let args = undefined 19 | const arg0 = node.declaration.params[0] 20 | if (arg0?.type === 'ObjectPattern') { 21 | args = `{${arg0.properties.map((node: any) => node.key.name).join(', ')}}` 22 | } else if (arg0?.type === 'Identifier') { 23 | args = arg0.name 24 | } 25 | methods.push({ 26 | name: node.declaration.id.name, 27 | args 28 | }) 29 | } 30 | return methods 31 | } -------------------------------------------------------------------------------- /src/util/resource.ts: -------------------------------------------------------------------------------- 1 | import { EventEmitter } from 'events' 2 | import { AwaitLock } from './lock.js' 3 | 4 | const lock = Symbol('lock') 5 | const reopen = Symbol('allow reopen') 6 | const init = Symbol('init()') 7 | 8 | export class Resource extends EventEmitter { 9 | opening = false 10 | opened = false 11 | closing = false 12 | closed = false 13 | ;[reopen] = false 14 | ;[lock] = new AwaitLock() 15 | 16 | constructor (opts?: {reopen: boolean}) { 17 | super() 18 | this[init]() 19 | this[reopen] = opts?.reopen || false 20 | } 21 | 22 | [init] () { 23 | this.opening = false 24 | this.opened = false 25 | this.closing = false 26 | this.closed = false 27 | } 28 | 29 | async open (opts?: any) { 30 | await this[lock].acquireAsync() 31 | try { 32 | if (this.closed) { 33 | if (!this[reopen]) { 34 | throw new Error('Resource is closed') 35 | } 36 | this[init]() 37 | } 38 | if (this.opened) return 39 | this.opening = true 40 | await this._open(opts) 41 | this.opening = false 42 | this.opened = true 43 | this.emit('opened') 44 | } finally { 45 | this[lock].release() 46 | } 47 | } 48 | 49 | async close () { 50 | await this[lock].acquireAsync() 51 | try { 52 | if (this.closed) return 53 | if (!this.opened) { 54 | this.closed = true 55 | return 56 | } 57 | this.closing = true 58 | await this._close() 59 | this.closing = false 60 | this.closed = true 
61 |       this.emit('closed')
62 |     } finally {
63 |       this[lock].release()
64 |     }
65 |   }
66 | 
67 |   async _open (opts?: any) {}
68 |   async _close () {}
69 | }
--------------------------------------------------------------------------------
/src/util/resources-manager.ts:
--------------------------------------------------------------------------------
1 | import EventEmitter from 'events'
2 | 
3 | interface Resource {
4 |   open (): Promise<void>
5 |   close (): Promise<void>
6 |   equals (other: Resource): boolean
7 | }
8 | 
9 | export class ResourcesManager<T extends Resource> extends EventEmitter {
10 |   items: T[] = []
11 | 
12 |   get length () {
13 |     return this.items.length
14 |   }
15 | 
16 |   *[Symbol.iterator] () {
17 |     for (const item of this.items) {
18 |       yield item
19 |     }
20 |   }
21 | 
22 |   async* watch (emitExisting = true): AsyncGenerator<[string, T]> {
23 |     if (emitExisting) {
24 |       for (const item of this.items) {
25 |         yield ['added', item]
26 |       }
27 |     }
28 |     while (true) {
29 |       yield await new Promise(r => {
30 |         this.once('changed', (evt, item) => r([evt, item]))
31 |       })
32 |     }
33 |   }
34 | 
35 |   at (index: number): T|undefined {
36 |     return this.items[index]
37 |   }
38 | 
39 |   find (item: T|((item: T) => boolean)): T|undefined {
40 |     if (typeof item === 'function') return this.items.find(item)
41 |     return this.items.find(item2 => item2.equals(item))
42 |   }
43 | 
44 |   findIndex (item: T|((item: T) => boolean)): number {
45 |     if (typeof item === 'function') return this.items.findIndex(item)
46 |     return this.items.findIndex(item2 => item2.equals(item))
47 |   }
48 | 
49 |   has (item: T|((item: T) => boolean)): boolean {
50 |     if (typeof item === 'function') return !!this.items.find(item)
51 |     return !!this.items.find(item2 => item2.equals(item))
52 |   }
53 | 
54 |   map (fn: (item: T, index: number)=>any): any[] {
55 |     return this.items.map(fn)
56 |   }
57 | 
58 |   async add (item: T) {
59 |     if (!this.find(item)) {
60 |       this.items.push(item)
61 |       await item.open()
62 |       this.emit('added', item)
63 |       this.emit('changed', 'added', item)
64 |     }
65 |   }
66 | 
67 |   async remove (item: T) {
68 |     const index = this.findIndex(item)
69 |     if (index !== -1) {
70 |       await this.removeAt(index)
71 |     }
72 |   }
73 | 
74 |   async removeAt (index: number) {
75 |     const item = this.items[index]
76 |     this.items.splice(index, 1)
77 |     await item.close()
78 |     this.emit('removed', item)
79 |     this.emit('changed', 'removed', item)
80 |   }
81 | 
82 |   async removeAll () {
83 |     await Promise.all(this.items.map(item => item.close()))
84 |     this.items.length = 0
85 |   }
86 | }
--------------------------------------------------------------------------------
/src/util/usage-manager.ts:
--------------------------------------------------------------------------------
1 | import assert from 'assert'
2 | 
3 | export class UsageManager {
4 |   public actives = 0
5 | 
6 |   private _pausePromise: Promise<void>|undefined = undefined
7 |   private _pauseResolve: ((v:any)=>void)|undefined = undefined
8 |   private _onPausable: ((v:any)=>void)|undefined = undefined
9 | 
10 |   async use<T> (fn: ()=>Promise<T>): Promise<T> {
11 |     if (this.paused) await this._pausePromise
12 |     this.actives++
13 |     try {
14 |       return await fn()
15 |     } finally {
16 |       this.actives--
17 |       if (this.actives === 0) {
18 |         this._onPausable?.(undefined)
19 |       }
20 |     }
21 |   }
22 | 
23 |   get paused () {
24 |     return !!this._pausePromise
25 |   }
26 | 
27 |   async pause (): Promise<void> {
28 |     assert(!this._pausePromise, 'Already paused')
29 |     if (this.actives > 0) {
30 |       await new Promise(resolve => {
31 |         this._onPausable = resolve
32 |       })
33 |     }
34 |     this._pausePromise =
new Promise(resolve => { 35 | this._pauseResolve = resolve 36 | }) 37 | } 38 | 39 | unpause () { 40 | assert(this._pausePromise, 'Not paused') 41 | assert(this._pauseResolve, 'Not paused') 42 | const resolve = this._pauseResolve 43 | this._pausePromise = undefined 44 | this._pauseResolve = undefined 45 | resolve(undefined) 46 | } 47 | } -------------------------------------------------------------------------------- /test/base.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, IndexHistoryEntry } from '../src/index.js' 3 | 4 | const SIMPLE_CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export async function get ({path}) { 9 | assert(typeof path === 'string' && path.length > 0) 10 | return await index.get(path) 11 | } 12 | 13 | export function put ({path, value}, emit) { 14 | assert(typeof path === 'string' && path.length > 0) 15 | emit({op: 'PUT', path, value}) 16 | } 17 | 18 | export const apply = { 19 | PUT (tx, op, ack) { 20 | assert(typeof op.path === 'string' && op.path.length > 0) 21 | tx.put(op.path, op.value) 22 | } 23 | } 24 | ` 25 | 26 | ava('simple full db run', async t => { 27 | const db = await Database.create(new StorageInMemory(), { 28 | contract: {source: SIMPLE_CONTRACT} 29 | }) 30 | 31 | t.truthy(db.opened) 32 | t.falsy(db.opening) 33 | t.falsy(db.closing) 34 | t.falsy(db.closed) 35 | t.truthy(db.pubkey) 36 | t.truthy(db.isExecutor) 37 | t.truthy(db.isParticipant) 38 | t.truthy(db.localOplog) 39 | 40 | const res1 = await db.call('get', {path: '/foo'}) 41 | const res2 = await db.call('put', {path: '/foo', value: 'hello world'}) 42 | await db.executor?.sync() 43 | const res3 = await db.call('get', {path: '/foo'}) 44 | t.falsy(res1.response) 45 | t.deepEqual(res2.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world' }) 46 | t.is(res3.response.value, 'hello world') 47 | await db.close() 48 | }) 49 | 50 | ava('successfully runs loaded version', async t => { 51 | const storage = new StorageInMemory() 52 | const db = await Database.create(storage, { 53 | contract: {source: SIMPLE_CONTRACT} 54 | }) 55 | const pubkey = db.pubkey 56 | await db.call('put', {path: '/foo', value: 'hello world'}) 57 | await db.call('put', {path: '/bar', value: 'hello world'}) 58 | await db.executor?.sync() 59 | 60 | const contract2 = await Database.load(storage, pubkey) 61 | const res = await contract2.call('get', {path: '/foo'}) 62 | t.is(res.response.value, 'hello world') 63 | await db.call('put', {path: '/foo', value: 'hello world!!'}) 64 | await db.executor?.sync() 65 | const res2 = await contract2.call('get', {path: '/foo'}) 66 | t.is(res2.response.value, 'hello world!!') 67 | 68 | await db.close() 69 | await contract2.close() 70 | }) 71 | 72 | ava('simple db run with verification', async t => { 73 | const db = await Database.create(new StorageInMemory(), { 74 | contract: {source: SIMPLE_CONTRACT} 75 | }) 76 | 77 | const res1 = await db.call('get', {path: '/foo'}) 78 | const res2 = await db.call('put', {path: '/foo', value: 'hello world'}) 79 | await db.executor?.sync() 80 | const res3 = await db.call('put', {path: '/foo', value: 'hello world!'}) 81 | await db.executor?.sync() 82 | const res4 = await db.call('get', {path: '/foo'}) 83 | t.falsy(res1.response) 84 | t.deepEqual(res2.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world' }) 85 | t.deepEqual(res3.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world!' 
}) 86 | t.is(res4.response.value, 'hello world!') 87 | 88 | await db.verify() 89 | 90 | await db.close() 91 | }) 92 | 93 | ava('simple db run with active monitoring', async t => { 94 | const db = await Database.create(new StorageInMemory(), { 95 | contract: {source: SIMPLE_CONTRACT} 96 | }) 97 | 98 | const monitor = await db.monitor() 99 | const validationEvents: IndexHistoryEntry[] = [] 100 | const whenValidated = new Promise(resolve => { 101 | monitor.on('validated', (evt: IndexHistoryEntry) => { 102 | validationEvents.push(evt) 103 | if (monitor.verifiedLength === 5) resolve(undefined) 104 | }) 105 | }) 106 | 107 | const res1 = await db.call('get', {path: '/foo'}) 108 | const res2 = await db.call('put', {path: '/foo', value: 'hello world'}) 109 | await db.executor?.sync() 110 | const res3 = await db.call('get', {path: '/foo'}) 111 | t.falsy(res1.response) 112 | t.deepEqual(res2.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world' }) 113 | t.is(res3.response.value, 'hello world') 114 | 115 | await whenValidated 116 | t.is(validationEvents[0].type, 'put') 117 | t.is(validationEvents[1].type, 'put') 118 | t.is(validationEvents[2].type, 'put') 119 | t.is(validationEvents[2].path, '/.sys/acks/genesis') 120 | t.is(validationEvents[3].type, 'put') 121 | t.is(validationEvents[4].type, 'put') 122 | t.is(validationEvents[4].path, '/foo') 123 | 124 | await monitor.close() 125 | await db.close() 126 | }) -------------------------------------------------------------------------------- /test/contract-code.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, IndexHistoryEntry } from '../src/index.js' 3 | 4 | const CONTRACT_V1 = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export async function get ({path}) { 9 | return await index.get(path) 10 | } 11 | 12 | export function put ({path, value}, emit) { 13 | emit({op: 'PUT', path, value}) 14 | } 15 | 16 | export function setSource ({code}, emit) { 17 | emit({op: 'SET_SOURCE', code}) 18 | } 19 | 20 | export const apply = { 21 | PUT (tx, op) { 22 | tx.put(op.path, op.value) 23 | }, 24 | SET_SOURCE (tx, op) { 25 | tx.setContractSource({code: op.code}) 26 | } 27 | } 28 | ` 29 | 30 | const CONTRACT_V2 = ` 31 | import assert from 'assert' 32 | import { index } from 'contract' 33 | 34 | export async function getValue ({path}) { 35 | return await index.get(path) 36 | } 37 | 38 | export function putValue ({path, value}, emit) { 39 | emit({op: 'PUT', path, value}) 40 | } 41 | 42 | export const apply = { 43 | PUT (tx, op) { 44 | tx.put(op.path, op.value) 45 | } 46 | } 47 | ` 48 | 49 | ava('change contract source during execution', async t => { 50 | const db = await Database.create(new StorageInMemory(), { 51 | contract: {source: CONTRACT_V1} 52 | }) 53 | 54 | // execution 55 | 56 | const res1 = await db.call('get', {path: '/foo'}) 57 | const res2 = await db.call('put', {path: '/foo', value: 'hello world'}) 58 | await res2.whenProcessed() 59 | const res3 = await db.call('get', {path: '/foo'}) 60 | t.falsy(res1.response) 61 | t.deepEqual(res2.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world' }) 62 | t.is(res3.response.value, 'hello world') 63 | 64 | const res4 = await db.call('setSource', {code: CONTRACT_V2}) 65 | await res4.whenProcessed() 66 | const res4Results = await res4.fetchResults() 67 | t.is(res4Results[0]?.changes[0]?.path, '/.sys/contract/source') 68 | t.is(res4Results[0]?.changes[0]?.value, CONTRACT_V2) 
69 | 70 | const res5 = await db.call('putValue', {path: '/foo', value: 'hello world!'}) 71 | await res5.whenProcessed() 72 | const res6 = await db.call('getValue', {path: '/foo'}) 73 | t.deepEqual(res5.ops[0].value, { op: 'PUT', path: '/foo', value: 'hello world!' }) 74 | t.is(res6.response.value, 'hello world!') 75 | 76 | // verification 77 | 78 | await db.verify() 79 | 80 | await db.close() 81 | }) -------------------------------------------------------------------------------- /test/debugging.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, ContractParseError, ContractRuntimeError } from '../src/index.js' 3 | 4 | const SIMPLE_CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export async function get ({path}) { 9 | assert(typeof path === 'string' && path.length > 0) 10 | return await index.get(path) 11 | } 12 | 13 | export function put ({path, value}, emit) { 14 | assert(typeof path === 'string' && path.length > 0) 15 | emit({op: 'PUT', path, value}) 16 | } 17 | 18 | export const apply = { 19 | PUT (tx, op, ack) { 20 | assert(typeof op.path === 'string' && op.path.length > 0) 21 | tx.put(op.path, op.value) 22 | } 23 | } 24 | ` 25 | 26 | const SIMPLE_CONTRACT_MODIFIED = ` 27 | import assert from 'assert' 28 | import { index } from 'contract' 29 | 30 | export async function get ({path}) { 31 | assert(typeof path === 'string' && path.length > 0) 32 | return await index.get(path) 33 | } 34 | 35 | export function put ({path, value}, emit) { 36 | assert(typeof path === 'string' && path.length > 0) 37 | emit({op: 'PUT', path, value}) 38 | } 39 | 40 | export const apply = { 41 | PUT (tx, op, ack) { 42 | assert(typeof op.path === 'string' && op.path.length > 0) 43 | tx.put(op.path, op.value.toUpperCase()) 44 | } 45 | } 46 | ` 47 | 48 | ava.only('can create test sandboxes of active contracts without affecting the active deployment', async t => { 49 | const db = await Database.create(new StorageInMemory(), { 50 | contract: {source: SIMPLE_CONTRACT} 51 | }) 52 | 53 | await db.call('put', {path: '/foo', value: 'hello world'}) 54 | await db.executor?.sync() 55 | 56 | const sbxDb = await Database.createSandbox({from: db}) 57 | 58 | const res1 = await sbxDb.call('get', {path: '/foo'}) 59 | await sbxDb.call('put', {path: '/foo', value: 'hello sandbox'}) 60 | await sbxDb.executor?.sync() 61 | 62 | const sbxDb2 = await Database.createSandbox({from: db, contract: {source: SIMPLE_CONTRACT_MODIFIED}}) 63 | 64 | await sbxDb2.call('put', {path: '/foo', value: 'hello sandbox 2'}) 65 | await sbxDb2.executor?.sync() 66 | 67 | const res2 = await db.call('get', {path: '/foo'}) 68 | const res3 = await sbxDb.call('get', {path: '/foo'}) 69 | const res4 = await sbxDb2.call('get', {path: '/foo'}) 70 | t.is(res1.response?.value, 'hello world') 71 | t.is(res2.response?.value, 'hello world') 72 | t.is(res3.response?.value, 'hello sandbox') 73 | t.is(res4.response?.value, 'HELLO SANDBOX 2') 74 | 75 | await db.close() 76 | }) 77 | 78 | ava('parsing errors in contract code', async t => { 79 | const PARSE_ERROR = `!@#$IASDFklj;14li3kjzs` 80 | const db = await Database.create(new StorageInMemory(), { 81 | contract: {source: PARSE_ERROR} 82 | }) 83 | let err: any 84 | db.on('error', _err => { err = _err }) 85 | await db._startVM() // trigger vm start 86 | t.truthy(err instanceof ContractParseError) 87 | await db?.close() 88 | }) 89 | 90 | ava('runtime errors in contract code', async t => { 
91 | const RUNTIME_ERROR = `foo()` 92 | const db = await Database.create(new StorageInMemory(), { 93 | contract: {source: RUNTIME_ERROR} 94 | }) 95 | let err: any 96 | db.on('error', _err => { err = _err }) 97 | await db._startVM() // trigger vm start 98 | t.truthy(err instanceof ContractRuntimeError) 99 | await db.close() 100 | }) 101 | 102 | ava('runtime errors in a contract call', async t => { 103 | const RUNTIME_ERROR = `export function test () { foo() }` 104 | const db = await Database.create(new StorageInMemory(), { 105 | contract: {source: RUNTIME_ERROR} 106 | }) 107 | try { 108 | await db.call('test', {}) 109 | t.fail('Should have thrown') 110 | } catch (e) { 111 | t.truthy(e instanceof ContractRuntimeError) 112 | } 113 | await db.close() 114 | }) 115 | 116 | ava('runtime errors in apply()', async t => { 117 | const RUNTIME_ERROR = ` 118 | export function normalError (_, emit) { emit({op: 'NORMAL_ERROR'}) } 119 | export function runtimeError (_, emit) { emit({op: 'RUNTIME_ERROR'}) } 120 | export const apply = { 121 | NORMAL_ERROR () { 122 | throw new Error('Expected') 123 | }, 124 | RUNTIME_ERROR () { 125 | foo() 126 | } 127 | } 128 | ` 129 | const db = await Database.create(new StorageInMemory(), { 130 | contract: {source: RUNTIME_ERROR} 131 | }) 132 | 133 | let err: any 134 | db.on('error', _err => { err = _err }) 135 | 136 | const tx1 = await db.call('normalError', {}) 137 | await tx1.whenProcessed() 138 | const tx2 = await db.call('runtimeError', {}) 139 | await tx2.whenProcessed() 140 | 141 | const tx1res = await tx1.fetchResults() 142 | const tx2res = await tx2.fetchResults() 143 | 144 | t.is(tx1res[0]?.error, 'Error: Expected') 145 | t.is(tx2res[0]?.error, 'ContractRuntimeError: The contract failed to execute with "ReferenceError: foo is not defined"') 146 | t.truthy(err instanceof ContractRuntimeError) 147 | 148 | await db.close() 149 | }) 150 | 151 | ava('runtime errors in process()', async t => { 152 | const RUNTIME_ERROR = ` 153 | export function normalError (_, emit) { emit({op: 'NORMAL_ERROR'}) } 154 | export function runtimeError (_, emit) { emit({op: 'RUNTIME_ERROR'}) } 155 | export function process (op) { 156 | if (op.op === 'NORMAL_ERROR') { 157 | throw new Error('Expected') 158 | } else { 159 | foo() 160 | } 161 | } 162 | export const apply = { 163 | NORMAL_ERROR () { 164 | // do nothing 165 | }, 166 | RUNTIME_ERROR () { 167 | // do nothing 168 | } 169 | } 170 | ` 171 | const db = await Database.create(new StorageInMemory(), { 172 | contract: {source: RUNTIME_ERROR} 173 | }) 174 | 175 | let err: any 176 | db.on('error', _err => { err = _err }) 177 | 178 | const tx1 = await db.call('normalError', {}) 179 | await tx1.whenProcessed() 180 | const tx2 = await db.call('runtimeError', {}) 181 | await tx2.whenProcessed() 182 | 183 | t.truthy(err instanceof ContractRuntimeError) 184 | 185 | await db.close() 186 | }) -------------------------------------------------------------------------------- /test/index.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, IndexLog, IndexLogEntry } from '../src/index.js' 3 | 4 | ava('batch modifications', async t => { 5 | const idx = await IndexLog.create(new StorageInMemory()) 6 | await idx.dangerousBatch([ 7 | {type: 'put', path: '/foo', value: 1}, 8 | {type: 'put', path: 'bar', value: 2}, 9 | {type: 'put', path: '/baz/buz', value: 3}, 10 | {type: 'del', path: 'nothing'} 11 | ]) 12 | t.is(idx.length, 4) 13 | await t.throwsAsync(() => 
idx.dangerousBatch([{type: 'wrong', path: '/foo', value: 100}])) 14 | await t.throwsAsync(() => idx.dangerousBatch([{type: 'put', path: '', value: 100}])) 15 | }) 16 | 17 | ava('list', async t => { 18 | const testOutput = (res: IndexLogEntry[], desc: string[]) => { 19 | t.is(res.length, desc.length) 20 | for (let i = 0; i < res.length; i++) { 21 | const itemDesc = desc[i] 22 | if (itemDesc.endsWith('/')) { 23 | t.truthy(res[i].container) 24 | t.is(res[i].path, itemDesc.slice(0, -1)) 25 | } else { 26 | t.falsy(res[i].container) 27 | t.is(res[i].path, itemDesc) 28 | } 29 | } 30 | } 31 | const idx = await IndexLog.create(new StorageInMemory()) 32 | await idx.dangerousBatch([ 33 | {type: 'put', path: '/a', value: '/a'}, 34 | {type: 'put', path: '/a/a', value: '/a/a'}, 35 | {type: 'put', path: '/a/b', value: '/a/b'}, 36 | {type: 'put', path: '/a/c', value: '/a/c'}, 37 | {type: 'put', path: '/a/c/a', value: '/a/c/a'}, 38 | {type: 'put', path: '/a/d/a', value: '/a/d/a'}, 39 | {type: 'put', path: '/a/d/b', value: '/a/d/b'}, 40 | {type: 'put', path: '/a/e', value: '/a/e'}, 41 | {type: 'put', path: '/b', value: '/b'}, 42 | {type: 'put', path: '/c', value: '/c'}, 43 | {type: 'put', path: '/d', value: '/d'}, 44 | {type: 'put', path: '/e/a/a/a', value: '/e/a/a/a'}, 45 | {type: 'put', path: '/e/a/a/b', value: '/e/a/a/b'}, 46 | {type: 'put', path: '/e/a/a/c', value: '/e/a/a/c'}, 47 | ]) 48 | testOutput(await idx.list('/'), ['/a/', '/b', '/c', '/d', '/e/']) 49 | testOutput(await idx.list(''), ['/a/', '/b', '/c', '/d', '/e/']) 50 | testOutput(await idx.list('/a'), ['/a/a', '/a/b', '/a/c/', '/a/d/', '/a/e']) 51 | testOutput(await idx.list('/a/'), ['/a/a', '/a/b', '/a/c/', '/a/d/', '/a/e']) 52 | testOutput(await idx.list('/a/b'), []) 53 | testOutput(await idx.list('/a/b/'), []) 54 | testOutput(await idx.list('/a/c'), ['/a/c/a']) 55 | testOutput(await idx.list('/a/c/'), ['/a/c/a']) 56 | testOutput(await idx.list('/a/d'), ['/a/d/a', '/a/d/b']) 57 | testOutput(await idx.list('/e'), ['/e/a/']) 58 | testOutput(await idx.list('/e/a'), ['/e/a/a/']) 59 | testOutput(await idx.list('/e/a/a'), ['/e/a/a/a','/e/a/a/b','/e/a/a/c']) 60 | testOutput(await idx.list('/', {offset: 1}), ['/b', '/c', '/d', '/e/']) 61 | testOutput(await idx.list('/', {limit: 4}), ['/a/', '/b', '/c', '/d']) 62 | testOutput(await idx.list('/', {offset: 1, limit: 3}), ['/b', '/c', '/d']) 63 | testOutput(await idx.list('/', {reverse: true}), ['/e/', '/d', '/c', '/b', '/a/']) 64 | testOutput(await idx.list('/', {reverse: true, offset: 1}), ['/d', '/c', '/b', '/a/']) 65 | testOutput(await idx.list('/', {reverse: true, limit: 4}), ['/e/', '/d', '/c', '/b']) 66 | testOutput(await idx.list('/', {reverse: true, offset: 1, limit: 3}), ['/d', '/c', '/b']) 67 | }) 68 | 69 | ava('get', async t => { 70 | const testOutput = (res: IndexLogEntry|undefined, name: string, path: string) => { 71 | t.truthy(res) 72 | if (res) { 73 | t.is(res.name, name) 74 | t.is(res.path, path) 75 | t.deepEqual(res.value, path) 76 | } 77 | } 78 | const idx = await IndexLog.create(new StorageInMemory()) 79 | await idx.dangerousBatch([ 80 | {type: 'put', path: '/a', value: '/a'}, 81 | {type: 'put', path: '/a/a', value: '/a/a'}, 82 | {type: 'put', path: '/a/b', value: '/a/b'}, 83 | {type: 'put', path: '/a/c', value: '/a/c'}, 84 | {type: 'put', path: '/a/c/a', value: '/a/c/a'} 85 | ]) 86 | testOutput(await idx.get('/a'), 'a', '/a') 87 | testOutput(await idx.get('a'), 'a', '/a') 88 | testOutput(await idx.get('/a/'), 'a', '/a') 89 | testOutput(await idx.get('a/'), 'a', '/a') 90 | 
testOutput(await idx.get('/a/a'), 'a', '/a/a') 91 | testOutput(await idx.get('/a/b'), 'b', '/a/b') 92 | testOutput(await idx.get('/a/c'), 'c', '/a/c') 93 | testOutput(await idx.get('/a/c/a'), 'a', '/a/c/a') 94 | }) -------------------------------------------------------------------------------- /test/networking.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database } from '../src/index.js' 3 | 4 | const SIMPLE_CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export async function get ({path}) { 9 | assert(typeof path === 'string' && path.length > 0) 10 | return await index.get(path) 11 | } 12 | 13 | export function put ({path, value}, emit) { 14 | assert(typeof path === 'string' && path.length > 0) 15 | emit({op: 'PUT', path, value}) 16 | } 17 | 18 | export const apply = { 19 | PUT (tx, op, ack) { 20 | assert(typeof op.path === 'string' && op.path.length > 0) 21 | tx.put(op.path, op.value) 22 | } 23 | } 24 | ` 25 | 26 | ava('can load and read over the network', async t => { 27 | const db1 = await Database.create(new StorageInMemory(), { 28 | contract: {source: SIMPLE_CONTRACT} 29 | }) 30 | await db1.swarm({local: true}) 31 | const tx1 = await db1.call('put', {path: '/foo', value: 'hello world'}) 32 | await tx1.whenProcessed() 33 | 34 | const db2 = await Database.load(new StorageInMemory(), db1.pubkey) 35 | await db2.swarm({local: true}) 36 | await db2.whenConnected() 37 | 38 | const tx2 = await db2.call('get', {path: '/foo'}) 39 | t.is(tx2.response.value, 'hello world') 40 | 41 | await db1.close() 42 | await db2.close() 43 | }) 44 | -------------------------------------------------------------------------------- /test/oplogs.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, ExecutorBehavior, OpLog } from '../src/index.js' 3 | 4 | const CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export function get ({key}, emit) { 9 | assert(typeof key === 'string' && key.length > 0) 10 | return index.get(key) 11 | } 12 | 13 | export function put ({key, value}, emit) { 14 | assert(typeof key === 'string' && key.length > 0) 15 | emit({op: 'PUT', key, value}) 16 | } 17 | 18 | export function addOplog ({pubkey}, emit) { 19 | assert(typeof pubkey === 'string' && pubkey.length === 64) 20 | emit({op: 'ADD_OPLOG', pubkey}) 21 | } 22 | 23 | export function addOplogs ({pubkeys}, emit) { 24 | assert(Array.isArray(pubkeys)) 25 | for (const pubkey of pubkeys) { 26 | assert(typeof pubkey === 'string' && pubkey.length === 64) 27 | } 28 | emit({op: 'ADD_OPLOGS', pubkeys}) 29 | } 30 | 31 | export function removeOplog ({pubkey}, emit) { 32 | assert(typeof pubkey === 'string' && pubkey.length === 64) 33 | emit({op: 'REMOVE_OPLOG', pubkey}) 34 | } 35 | 36 | export function removeOplogs ({pubkeys}, emit) { 37 | assert(Array.isArray(pubkeys)) 38 | for (const pubkey of pubkeys) { 39 | assert(typeof pubkey === 'string' && pubkey.length === 64) 40 | } 41 | emit({op: 'REMOVE_OPLOGS', pubkeys}) 42 | } 43 | 44 | export const apply = { 45 | PUT (tx, op) { 46 | tx.put(op.key, op.value) 47 | }, 48 | ADD_OPLOG (tx, op) { 49 | tx.addOplog({pubkey: op.pubkey}) 50 | }, 51 | ADD_OPLOGS (tx, op) { 52 | for (const pubkey of op.pubkeys) { 53 | tx.addOplog({pubkey}) 54 | } 55 | }, 56 | REMOVE_OPLOG (tx, op) { 57 | tx.removeOplog({pubkey: op.pubkey}) 58 | }, 59 | 
REMOVE_OPLOGS (tx, op) { 60 | for (const pubkey of op.pubkeys) { 61 | tx.removeOplog({pubkey}) 62 | } 63 | } 64 | } 65 | ` 66 | 67 | ava('add oplogs', async t => { 68 | const storage1 = new StorageInMemory() 69 | const storage2 = new StorageInMemory() 70 | const db = await Database.create(storage1, { 71 | contract: {source: CONTRACT} 72 | }) 73 | const dbPubkey = db.pubkey 74 | 75 | const oplogCores: any[] = [] 76 | for (let i = 0; i < 10; i++) { 77 | oplogCores.push(await storage2.createHypercore()) 78 | } 79 | 80 | await db.call('addOplog', {pubkey: oplogCores[0].key.toString('hex')}) 81 | await db.executor?.sync() 82 | t.is(db.oplogs.length, 2) 83 | t.is(db.oplogs.at(1)?.pubkey.toString('hex'), oplogCores[0].key.toString('hex')) 84 | 85 | await db.call('addOplog', {pubkey: oplogCores[1].key.toString('hex')}) 86 | await db.call('addOplog', {pubkey: oplogCores[2].key.toString('hex')}) 87 | await db.executor?.sync() 88 | t.is(db.oplogs.length, 4) 89 | t.is(db.oplogs.at(2)?.pubkey.toString('hex'), oplogCores[1].key.toString('hex')) 90 | t.is(db.oplogs.at(3)?.pubkey.toString('hex'), oplogCores[2].key.toString('hex')) 91 | 92 | await db.call('addOplogs', {pubkeys: oplogCores.slice(3).map(core => core.key.toString('hex'))}) 93 | await db.executor?.sync() 94 | t.is(db.oplogs.length, 11) 95 | for (let i = 3; i < 10; i++) { 96 | t.truthy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[i].key))) 97 | } 98 | 99 | // check read from fresh 100 | const db2 = await Database.load(storage1, dbPubkey, {executorBehavior: ExecutorBehavior.DISABLED}) 101 | t.is(db2.oplogs.length, 11) 102 | for (let i = 0; i < 10; i++) { 103 | t.truthy(!!db2.oplogs.find(o => o.pubkey.equals(oplogCores[i].key))) 104 | } 105 | 106 | await db.close() 107 | }) 108 | 109 | ava('remove oplogs', async t => { 110 | const storage1 = new StorageInMemory() 111 | const storage2 = new StorageInMemory() 112 | const db = await Database.create(storage1, { 113 | contract: {source: CONTRACT} 114 | }) 115 | const dbPubkey = db.pubkey 116 | 117 | const oplogCores: any[] = [] 118 | for (let i = 0; i < 10; i++) { 119 | oplogCores.push(await storage2.createHypercore()) 120 | } 121 | 122 | await db.call('addOplogs', {pubkeys: oplogCores.slice(0, 5).map(core => core.key.toString('hex'))}) 123 | await db.executor?.sync() 124 | t.is(db.oplogs.length, 6) 125 | for (let i = 0; i < 5; i++) { 126 | t.truthy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[i].key))) 127 | } 128 | 129 | await db.call('removeOplog', {pubkey: oplogCores[0].key.toString('hex')}) 130 | await db.executor?.sync() 131 | t.is(db.oplogs.length, 5) 132 | t.falsy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[0].key))) 133 | 134 | await db.call('removeOplog', {pubkey: oplogCores[1].key.toString('hex')}) 135 | await db.call('removeOplog', {pubkey: oplogCores[2].key.toString('hex')}) 136 | await db.executor?.sync() 137 | t.is(db.oplogs.length, 3) 138 | t.falsy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[1].key))) 139 | t.falsy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[2].key))) 140 | 141 | await db.call('addOplogs', {pubkeys: oplogCores.slice(5).map(core => core.key.toString('hex'))}) 142 | await db.executor?.sync() 143 | t.is(db.oplogs.length, 8) 144 | for (let i = 3; i < 10; i++) { 145 | t.truthy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[i].key))) 146 | } 147 | 148 | await db.call('removeOplogs', {pubkeys: oplogCores.slice(0, 9).map(core => core.key.toString('hex'))}) 149 | await db.executor?.sync() 150 | t.is(db.oplogs.length, 2) 151 | for (let i = 0; i < 9; i++) 
{ 152 | t.falsy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[i].key))) 153 | } 154 | t.truthy(!!db.oplogs.find(o => o.pubkey.equals(oplogCores[9].key))) 155 | 156 | // check read from fresh 157 | const db2 = await Database.load(storage1, dbPubkey, {executorBehavior: ExecutorBehavior.DISABLED}) 158 | t.is(db2.oplogs.length, 2) 159 | t.truthy(!!db2.oplogs.find(o => o.pubkey.equals(oplogCores[9].key))) 160 | 161 | await db.close() 162 | }) 163 | 164 | ava('execute ops on added oplogs', async t => { 165 | const storage = new StorageInMemory() 166 | const db = await Database.create(storage, { 167 | contract: {source: CONTRACT} 168 | }) 169 | 170 | const secondOplogCore = await storage.createHypercore() 171 | await db.call('addOplog', {pubkey: secondOplogCore.key.toString('hex')}) 172 | await db.executor?.sync() 173 | t.is(db.oplogs.length, 2) 174 | t.is(db.oplogs.at(1)?.pubkey.toString('hex'), secondOplogCore.key.toString('hex')) 175 | 176 | // execute transactions on the second oplog 177 | 178 | await db.setLocalOplog(db.oplogs.at(1)) 179 | 180 | const tx = await db.call('put', {key: 'foo', value: 'bar'}) 181 | await tx.whenProcessed() 182 | const tx2 = await db.call('get', {key: 'foo'}) 183 | t.is(tx2.response.value, 'bar') 184 | 185 | const tx3 = await db.call('put', {key: 'foo', value: 'baz'}) 186 | await tx3.whenProcessed() 187 | const tx4 = await db.call('get', {key: 'foo'}) 188 | t.is(tx4.response.value, 'baz') 189 | 190 | t.is(db.oplogs.at(0)?.length, 1) 191 | t.is(db.oplogs.at(1)?.length, 2) 192 | 193 | await db.close() 194 | }) 195 | 196 | ava('dont execute ops on removed oplogs', async t => { 197 | const storage = new StorageInMemory() 198 | const db = await Database.create(storage, { 199 | contract: {source: CONTRACT} 200 | }) 201 | 202 | const secondOplogCore = await storage.createHypercore() 203 | await db.call('addOplog', {pubkey: secondOplogCore.key.toString('hex')}) 204 | await db.executor?.sync() 205 | t.is(db.oplogs.length, 2) 206 | t.is(db.oplogs.at(1)?.pubkey.toString('hex'), secondOplogCore.key.toString('hex')) 207 | const secondOplogPubkey = db.oplogs.at(1)?.pubkey 208 | 209 | // execute transactions on the second oplog 210 | 211 | await db.setLocalOplog(db.oplogs.at(1)) 212 | 213 | const tx = await db.call('put', {key: 'foo', value: 'bar'}) 214 | await tx.whenProcessed() 215 | const tx2 = await db.call('get', {key: 'foo'}) 216 | t.is(tx2.response.value, 'bar') 217 | 218 | // remove the second oplog 219 | 220 | await db.setLocalOplog(db.oplogs.at(0)) 221 | 222 | const tx3 = await db.call('removeOplog', {pubkey: db.oplogs.at(1)?.pubkey.toString('hex')}) 223 | await tx3.whenProcessed() 224 | 225 | // try to execute another transaction (and fail) 226 | 227 | const secondOplog = new OpLog(await storage.getHypercore(secondOplogPubkey as Buffer)) 228 | await db.setLocalOplog(secondOplog) 229 | 230 | await db.call('put', {key: 'foo', value: 'baz'}) 231 | await db.executor?.sync() 232 | const tx4 = await db.call('get', {key: 'foo'}) 233 | 234 | // not mutated... 
235 | t.is(tx4.response.value, 'bar') 236 | // ...despite existence of second op 237 | t.is(secondOplog.length, 2) 238 | 239 | await db.close() 240 | }) -------------------------------------------------------------------------------- /test/proofs.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, verifyInclusionProof, Transaction } from '../src/index.js' 3 | 4 | const SIMPLE_CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export function put ({path, value}, emit) { 9 | emit({op: 'PUT', path, value}) 10 | return {done: true} 11 | } 12 | 13 | export const apply = { 14 | PUT (tx, op, ack) { 15 | assert(typeof op.path === 'string' && op.path.length > 0) 16 | tx.put(op.path, op.value) 17 | } 18 | } 19 | ` 20 | 21 | ava('op inclusion proof: valid, successful transaction', async t => { 22 | const db = await Database.create(new StorageInMemory(), { 23 | contract: {source: SIMPLE_CONTRACT} 24 | }) 25 | 26 | const tx = await db.call('put', {path: '/foo', value: 'hello world'}) 27 | 28 | t.is(tx.ops.length, 1) 29 | await tx.ops[0].verifyInclusion() 30 | const txOp0Proof = tx.ops[0].proof.toJSON() 31 | await verifyInclusionProof(txOp0Proof, {database: db}) 32 | 33 | await tx.whenProcessed() 34 | 35 | const txObj = await tx.toJSON({includeValues: true}) 36 | t.is(txObj.isProcessed, true) 37 | t.is(txObj.operations[0]?.result?.success, true) 38 | t.is(txObj.operations[0]?.result?.changes?.length, 1) 39 | const txClone = Transaction.fromJSON(db, txObj) 40 | await txClone.verifyInclusion() 41 | 42 | await db.close() 43 | }) 44 | 45 | ava('op inclusion proof: valid, unsuccessful transaction', async t => { 46 | const db = await Database.create(new StorageInMemory(), { 47 | contract: {source: SIMPLE_CONTRACT} 48 | }) 49 | 50 | const tx = await db.call('put', {value: 'hello world'}) 51 | 52 | t.is(tx.ops.length, 1) 53 | await tx.ops[0].verifyInclusion() 54 | const txOp0Proof = tx.ops[0].proof.toJSON() 55 | await verifyInclusionProof(txOp0Proof, {database: db}) 56 | 57 | await tx.whenProcessed() 58 | 59 | const txObj = await tx.toJSON({includeValues: true}) 60 | t.is(txObj.isProcessed, true) 61 | t.is(txObj.operations[0]?.result?.success, false) 62 | t.is(txObj.operations[0]?.result?.changes?.length, 0) 63 | const txClone = Transaction.fromJSON(db, txObj) 64 | await txClone.verifyInclusion() 65 | 66 | await db.close() 67 | }) 68 | 69 | ava('fraud proof: oplog forked away an operation after publishing', async t => { 70 | const db = await Database.create(new StorageInMemory(), { 71 | contract: {source: SIMPLE_CONTRACT} 72 | }) 73 | 74 | const tx = await db.call('put', {path: '/foo', value: 'hello world'}) 75 | 76 | t.is(tx.ops.length, 1) 77 | await tx.ops[0].verifyInclusion() 78 | const txOp0Proof = tx.ops[0].proof.toJSON() 79 | await verifyInclusionProof(txOp0Proof, {database: db}) 80 | await tx.whenProcessed() 81 | 82 | // use truncate() to remove the operation 83 | await db.localOplog?.core.truncate(0) 84 | 85 | try { 86 | await tx.ops[0].verifyInclusion() 87 | t.fail('Fraud not detected') 88 | } catch (e: any) { 89 | t.is(e.name, 'LogForkFraudProof') 90 | const obj: any = e.toJSON() 91 | t.is(obj.logPubkey, db.localOplog?.pubkey.toString('hex')) 92 | t.is(obj.forkNumber, 1) 93 | t.is(obj.blockSeq, 0) 94 | t.truthy(typeof obj.rootHashAtBlock, 'string') 95 | t.truthy(typeof obj.rootHashSignature, 'string') 96 | } 97 | 98 | await db.close() 99 | }) 100 | 101 | ava('failed 
validation: oplog removed operation and cannot verify', async t => { 102 | const db = await Database.create(new StorageInMemory(), { 103 | contract: {source: SIMPLE_CONTRACT} 104 | }) 105 | 106 | const tx = await db.call('put', {path: '/foo', value: 'hello world'}) 107 | 108 | t.is(tx.ops.length, 1) 109 | await tx.ops[0].verifyInclusion() 110 | const txOp0Proof = tx.ops[0].proof.toJSON() 111 | await verifyInclusionProof(txOp0Proof, {database: db}) 112 | await tx.whenProcessed() 113 | 114 | // mutate the log without using truncate() by replacing it with a log from separate, unsynced storage 115 | const storage2 = new StorageInMemory() 116 | // @ts-ignore keyPairs will exist on contract.storage 117 | storage2.keyPairs = db.storage.keyPairs 118 | const newCore = await storage2.getHypercore(db.localOplog?.pubkey as Buffer) 119 | // @ts-ignore at(0) will exist 120 | db.oplogs.at(0).core = newCore 121 | 122 | try { 123 | await tx.ops[0].verifyInclusion() 124 | t.fail('Fraud not detected') 125 | } catch (e: any) { 126 | t.is(e.name, 'BlocksNotAvailableError') 127 | } 128 | 129 | await db.close() 130 | }) 131 | 132 | ava('fraud proof: oplog removed operation after publishing without forking', async t => { 133 | const db = await Database.create(new StorageInMemory(), { 134 | contract: {source: SIMPLE_CONTRACT} 135 | }) 136 | 137 | const tx = await db.call('put', {path: '/foo', value: 'hello world'}) 138 | 139 | t.is(tx.ops.length, 1) 140 | await tx.ops[0].verifyInclusion() 141 | const txOp0Proof = tx.ops[0].proof.toJSON() 142 | await verifyInclusionProof(txOp0Proof, {database: db}) 143 | await tx.whenProcessed() 144 | 145 | // mutate the log without using truncate() by replacing it with a log from separate, unsynced storage 146 | const storage2 = new StorageInMemory() 147 | const newCore = await storage2.getHypercore(db.localOplog?.pubkey as Buffer) 148 | // @ts-ignore override sign 149 | newCore.sign = db.oplogs.at(0).core.sign 150 | newCore.writable = true 151 | // @ts-ignore at(0) will exist 152 | db.oplogs.at(0).core = newCore 153 | await db.call('put', {path: '/foo', value: 'hello world!'}) 154 | 155 | try { 156 | await tx.ops[0].verifyInclusion() 157 | t.fail('Fraud not detected') 158 | } catch (e: any) { 159 | t.is(e.name, 'BlockRewriteFraudProof') 160 | const obj: any = e.toJSON() 161 | t.is(obj.givenInclusionProof.logPubkey, obj.violatingInclusionProof.logPubkey) 162 | t.is(obj.givenInclusionProof.blockSeq, obj.violatingInclusionProof.blockSeq) 163 | t.notDeepEqual(obj.givenInclusionProof.rootHashAtBlock, obj.violatingInclusionProof.rootHashAtBlock) 164 | t.notDeepEqual(obj.givenInclusionProof.rootHashSignature, obj.violatingInclusionProof.rootHashSignature) 165 | } 166 | 167 | await db.close() 168 | }) -------------------------------------------------------------------------------- /test/transactions.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database } from '../src/index.js' 3 | 4 | const CONTRACT = ` 5 | export async function succeed (_, emit) { 6 | emit({op: 'SUCCEED'}) 7 | } 8 | 9 | export async function fail (_, emit) { 10 | emit({op: 'FAIL'}) 11 | } 12 | 13 | export function process (op) { 14 | return {foo: 'bar'} 15 | } 16 | 17 | export const apply = { 18 | SUCCEED (tx, op, ack) { 19 | tx.put('/success', ack.metadata) 20 | }, 21 | FAIL (tx, op, ack) { 22 | throw new Error('TX failed') 23 | } 24 | } 25 | ` 26 | 27 | ava('process() metadata', async t => { 28 | const db = await 
Database.create(new StorageInMemory(), { 29 | contract: {source: CONTRACT} 30 | }) 31 | await db.call('succeed', {}) 32 | await db.executor?.sync() 33 | t.deepEqual((await db.index.get('/success'))?.value, {foo: 'bar'}) 34 | await db.close() 35 | }) 36 | 37 | ava('can await call results (success)', async t => { 38 | const db = await Database.create(new StorageInMemory(), { 39 | contract: {source: CONTRACT} 40 | }) 41 | const res1 = await db.call('succeed', {}) 42 | await res1.whenProcessed() 43 | const res1Results = await res1.fetchResults() 44 | t.is(res1Results.length, 1) 45 | if (res1Results[0]) { 46 | t.truthy(res1Results[0].success) 47 | t.falsy(res1Results[0].error) 48 | t.is(typeof res1Results[0].seq, 'number') 49 | t.is(typeof res1Results[0].ts, 'number') 50 | t.deepEqual(res1Results[0].metadata, {foo: 'bar'}) 51 | t.is(res1Results[0].numChanges, 1) 52 | t.is(res1Results[0].changes.length, 1) 53 | if (res1Results[0].changes[0]) { 54 | t.deepEqual(res1Results[0].changes[0], { 55 | type: 'put', 56 | seq: 5, 57 | path: '/success', 58 | value: { foo: 'bar' } 59 | }) 60 | } 61 | } 62 | await db.close() 63 | }) 64 | 65 | ava('can await call results (failure)', async t => { 66 | const db = await Database.create(new StorageInMemory(), { 67 | contract: {source: CONTRACT} 68 | }) 69 | const res1 = await db.call('fail', {}) 70 | await res1.whenProcessed() 71 | const res1Results = await res1.fetchResults() 72 | t.is(res1Results.length, 1) 73 | if (res1Results[0]) { 74 | t.falsy(res1Results[0].success) 75 | t.is(res1Results[0].error, 'Error: TX failed') 76 | t.is(typeof res1Results[0].seq, 'number') 77 | t.is(typeof res1Results[0].ts, 'number') 78 | t.deepEqual(res1Results[0].metadata, {foo: 'bar'}) 79 | t.is(res1Results[0].numChanges, 0) 80 | t.is(res1Results[0].changes.length, 0) 81 | } 82 | await db.close() 83 | }) -------------------------------------------------------------------------------- /test/verification.ts: -------------------------------------------------------------------------------- 1 | import ava from 'ava' 2 | import { StorageInMemory, Database, OpLog, ContractFraudProof, ExecutorBehavior } from '../src/index.js' 3 | 4 | const SIMPLE_CONTRACT = ` 5 | import assert from 'assert' 6 | import { index } from 'contract' 7 | 8 | export async function get ({path}) { 9 | return await index.get(path) 10 | } 11 | 12 | export function put ({path, value}, emit) { 13 | emit({op: 'PUT', path, value}) 14 | } 15 | 16 | export const apply = { 17 | PUT (tx, op) { 18 | tx.put(op.path, op.value) 19 | } 20 | } 21 | ` 22 | 23 | ava('verification failure: executor processed an op multiple times', async t => { 24 | const db = await Database.create(new StorageInMemory(), { 25 | contract: {source: SIMPLE_CONTRACT}, 26 | executorBehavior: ExecutorBehavior.TEST_PROCESS_OP_MULTIPLE_TIMES 27 | }) 28 | 29 | const monitor = await db.monitor() 30 | const violations: ContractFraudProof[] = [] 31 | const whenViolated = new Promise(resolve => { 32 | monitor.on('violation', (evt: ContractFraudProof) => { 33 | violations.push(evt) 34 | resolve(undefined) 35 | }) 36 | }) 37 | 38 | await db.call('put', {path: '/foo', value: 'hello world'}) 39 | await db.executor?.sync() 40 | 41 | await whenViolated 42 | t.is(violations.length, 1) 43 | t.is(violations[0].details.code, 'ProcessedOutOfOrderError') 44 | t.is(violations[0].details.data.expectedSeq as number, 1) 45 | t.is(violations[0].details.data.executedSeq as number, 0) 46 | 47 | try { 48 | await db.verify() 49 | } catch (violation: any) { 50 | 
t.is(violation.details.code, 'ProcessedOutOfOrderError') 51 | t.is(violation.details.data.expectedSeq as number, 1) 52 | t.is(violation.details.data.executedSeq as number, 0) 53 | } 54 | 55 | await monitor.close() 56 | await db.close() 57 | }) 58 | 59 | ava('verification failure: executor skipped an operation', async t => { 60 | const db = await Database.create(new StorageInMemory(), { 61 | contract: {source: SIMPLE_CONTRACT}, 62 | executorBehavior: ExecutorBehavior.TEST_SKIP_OPS 63 | }) 64 | 65 | const monitor = await db.monitor() 66 | const violations: ContractFraudProof[] = [] 67 | const whenViolated = new Promise(resolve => { 68 | monitor.on('violation', (evt: ContractFraudProof) => { 69 | violations.push(evt) 70 | resolve(undefined) 71 | }) 72 | }) 73 | 74 | await db.call('put', {path: '/foo', value: 'hello world'}) 75 | await db.call('put', {path: '/bar', value: 'hello world!'}) 76 | await db.call('put', {path: '/baz', value: 'hello world!!'}) 77 | await db.executor?.sync() 78 | 79 | await whenViolated 80 | t.is(violations.length, 1) 81 | t.is(violations[0].details.code, 'ProcessedOutOfOrderError') 82 | t.is(violations[0].details.data.expectedSeq as number, 1) 83 | t.is(violations[0].details.data.executedSeq as number, 2) 84 | 85 | try { 86 | await db.verify() 87 | } catch (violation: any) { 88 | t.is(violation.details.code, 'ProcessedOutOfOrderError') 89 | t.is(violation.details.data.expectedSeq as number, 1) 90 | t.is(violation.details.data.executedSeq as number, 2) 91 | } 92 | 93 | await monitor.close() 94 | await db.close() 95 | }) 96 | 97 | ava('verification failure: executor op-changes do not match contract', async t => { 98 | const db = await Database.create(new StorageInMemory(), { 99 | contract: {source: SIMPLE_CONTRACT}, 100 | executorBehavior: ExecutorBehavior.TEST_WRONG_OP_MUTATIONS 101 | }) 102 | 103 | const monitor = await db.monitor() 104 | const violations: ContractFraudProof[] = [] 105 | const whenViolated = new Promise(resolve => { 106 | monitor.on('violation', (evt: ContractFraudProof) => { 107 | violations.push(evt) 108 | resolve(undefined) 109 | }) 110 | }) 111 | 112 | await db.call('put', {path: '/foo', value: 'hello world'}) 113 | await db.call('put', {path: '/bar', value: 'hello world!'}) 114 | await db.call('put', {path: '/baz', value: 'hello world!!'}) 115 | await db.executor?.sync() 116 | 117 | await whenViolated 118 | t.is(violations.length, 1) 119 | t.is(violations[0].details.code, 'ChangeMismatchError') 120 | t.deepEqual(violations[0].details.data.expectedChange as any, { path: '/foo', type: 'put', value: 'hello world' }) 121 | 122 | try { 123 | await db.verify() 124 | } catch (violation: any) { 125 | t.is(violation.details.code, 'ChangeMismatchError') 126 | t.deepEqual(violation.details.data.expectedChange as any, { path: '/foo', type: 'put', value: 'hello world' }) 127 | } 128 | 129 | await monitor.close() 130 | await db.close() 131 | }) 132 | 133 | ava('verification failure: executor processed an op from a non-participant', async t => { 134 | const db = await Database.create(new StorageInMemory(), { 135 | contract: {source: SIMPLE_CONTRACT}, 136 | executorBehavior: ExecutorBehavior.TEST_WRONG_OP_MUTATIONS 137 | }) 138 | 139 | const evilOplog = await OpLog.create(db.storage) 140 | db.oplogs.add(evilOplog) 141 | await db.setLocalOplog(evilOplog) 142 | 143 | const monitor = await db.monitor() 144 | const violations: ContractFraudProof[] = [] 145 | const whenViolated = new Promise(resolve => { 146 | monitor.on('violation', (evt: ContractFraudProof) => { 
147 | violations.push(evt) 148 | resolve(undefined) 149 | }) 150 | }) 151 | 152 | await db.call('put', {path: '/foo', value: 'hello world'}) 153 | await db.executor?.sync() 154 | 155 | await whenViolated 156 | t.is(violations.length, 1) 157 | t.is(violations[0].details.code, 'NonParticipantError') 158 | 159 | try { 160 | await db.verify() 161 | } catch (violation: any) { 162 | t.is(violation.details.code, 'NonParticipantError') 163 | } 164 | 165 | await monitor.close() 166 | await db.close() 167 | }) 168 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "include": ["src/**/*"], 4 | "compilerOptions": { 5 | "outDir": "dist", 6 | "typeRoots": [ "./types", "./node_modules/@types"], 7 | "lib": ["es2020"], 8 | "module": "es2020", 9 | "moduleResolution": "node", 10 | "target": "es2020", 11 | "declaration": true, 12 | 13 | "checkJs": true, 14 | "strict": true, 15 | "esModuleInterop": true, 16 | "skipLibCheck": true, 17 | "forceConsistentCasingInFileNames": true 18 | } 19 | } -------------------------------------------------------------------------------- /types/corestore/index.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'corestore' { 2 | type EventEmitter = import('events').EventEmitter 3 | type Hypercore = import('hypercore') 4 | type NoiseSecretStream = import('@hyperswarm/secret-stream') 5 | 6 | declare class Corestore extends EventEmitter { 7 | constructor (opts: any) 8 | get (opts: any): Hypercore 9 | replicate (opts: any): NoiseSecretStream 10 | namespace (ns: string): Corestore 11 | ready (): Promise 12 | close (): Promise 13 | } 14 | 15 | export = Corestore 16 | } -------------------------------------------------------------------------------- /types/hyperbee/index.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'hyperbee' { 2 | type Hypercore = import('hypercore') 3 | 4 | declare class Batch { 5 | async put (key: string, value: any) 6 | async del (key: string) 7 | async flush () 8 | } 9 | 10 | declare interface HyperBeeNodeObj { 11 | seq: number 12 | key: any 13 | value: any 14 | } 15 | 16 | declare class HyperBeeNode { 17 | isDeletion(): boolean 18 | final(): HyperBeeNodeObj 19 | } 20 | 21 | export default class HyperBee { 22 | constructor(feed: any, opts?: {}); 23 | keyEncoding: any; 24 | valueEncoding: any; 25 | sep: any; 26 | readonly: boolean; 27 | prefix: any; 28 | get feed(): Hypercore; 29 | ready(): Promise; 30 | get version(): number; 31 | update(): any; 32 | peek(opts: any): Promise; 33 | createReadStream(opts?: any): any; 34 | createHistoryStream(opts?: any): any; 35 | createDiffStream(right: any, opts?: any): any; 36 | get(key: any, opts?: any): Promise; 37 | getBlock(seq: number, opts: any): Promise 38 | put(key: any, value: any, opts?: any): Promise; 39 | batch(opts?: any): Batch; 40 | del(key: any, opts?: any): Promise; 41 | checkout(version: any): HyperBee; 42 | snapshot(): HyperBee; 43 | sub(prefix: any, opts?: {}): HyperBee; 44 | } 45 | interface BlockEntry { 46 | seq: any; 47 | key: any; 48 | value: any; 49 | } 50 | } -------------------------------------------------------------------------------- /types/hypercore-crypto/index.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'hypercore-crypto' { 2 | 
export declare interface KeyPair { 3 | publicKey: Buffer 4 | secretKey: Buffer 5 | } 6 | export declare interface MerkleTreeNode { 7 | index: number 8 | size: number 9 | hash: Buffer 10 | } 11 | export function keyPair (): KeyPair 12 | export function validateKeyPair (kp: KeyPair): boolean 13 | export function sign (message: Buffer, secretKey: Buffer): Buffer 14 | export function verify (message: Buffer, signature: Buffer, publicKey: Buffer): boolean 15 | export function data (data: Buffer): Buffer 16 | export function parent (a: Buffer, b: Buffer): Buffer 17 | export function tree (roots: MerkleTreeNode, out?: Buffer): Buffer 18 | export function randomBytes (n: number): Buffer 19 | export function discoveryKey (publicKey: Buffer): Buffer 20 | export function free (secureBuf: Buffer): void 21 | } -------------------------------------------------------------------------------- /types/hypercore/index.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'hypercore' { 2 | type EventEmitter = import('events').EventEmitter 3 | type NoiseSecretStream = import('@hyperswarm/secret-stream') 4 | 5 | declare interface HypercorePeer { 6 | key: Buffer 7 | discoveryKey: Buffer 8 | } 9 | 10 | declare interface HypercoreSeekResult { 11 | // TODO 12 | } 13 | 14 | declare interface HypercoreRangeOpts { 15 | linear?: boolean 16 | blocks?: Set|number[] 17 | start?: number 18 | end?: number 19 | } 20 | 21 | declare interface HypercoreRangeResult { 22 | linear: boolean 23 | start: number 24 | end: number 25 | done: boolean 26 | contains ({index: number}): boolean 27 | downloaded (): Promise 28 | } 29 | 30 | declare interface HypercoreExtensionHandlers { 31 | onmessage: Function 32 | onremotesupports: Function 33 | } 34 | 35 | declare interface HypercoreExtension { 36 | // TODO 37 | } 38 | 39 | declare class MerkleTreeNode { 40 | index: number 41 | size: number 42 | hash: Buffer 43 | } 44 | 45 | declare interface HypercoreCrypto { 46 | // this is a partial declaration of the interface 47 | sign (message: Buffer, secretKey: Buffer): Buffer 48 | verify (message: Buffer, signature: Buffer, publicKey: Buffer): boolean 49 | tree (peaks: MerkleTreeNode[]): Buffer 50 | } 51 | 52 | declare class MerkleTree { 53 | // this is a partial declaration of the interface 54 | crypto: HypercoreCrypto 55 | fork: number 56 | signature: Buffer 57 | roots: MerkleTreeNode[] 58 | hash (): Buffer 59 | signedBy (key: Buffer): boolean 60 | get (seq: number, error = true): Promise 61 | getRoots (seq: number): Promise 62 | } 63 | 64 | declare class InnerHypercore { 65 | // this is a partial declaration of the interface 66 | tree: MerkleTree 67 | } 68 | 69 | declare interface HypercoreConstructorOpts { 70 | createIfMissing?: boolean 71 | overwrite?: boolean 72 | valueEncoding?: any 73 | encodeBatch?: (batch: any[]) => any[] 74 | keyPair?: {publicKey: Buffer, secretKey: Buffer} 75 | encryptionKey?: Buffer 76 | } 77 | 78 | declare class Hypercore extends EventEmitter { 79 | key: Buffer 80 | discoveryKey?: Buffer 81 | readable: boolean 82 | writable: boolean 83 | opened: boolean 84 | closed: boolean 85 | opening: Promise 86 | closing: Promise 87 | core?: InnerHypercore 88 | 89 | static createProtocolStream (isInitiator: boolean, opts?: any): NoiseSecretStream 90 | 91 | constructor (storage: any, key?: Buffer, opts?: HypercoreConstructorOpts) 92 | session (opts?: any): Hypercore 93 | close (): Promise 94 | replicate (isInitiator: boolean, opts?: any): NoiseSecretStream 95 | get length (): 
number 96 | get byteLength (): number 97 | get fork (): number 98 | get peers (): HypercorePeer[] 99 | get encryptionKey (): Buffer|undefined 100 | get padding (): number 101 | ready(): Promise 102 | setUserData (key: string, value: any): Promise 103 | getUserData (key: string): Promise 104 | update(): Promise 105 | seek (bytes: number): Promise 106 | has (index: number): Promise 107 | get (index: number, opts?: any): Promise 108 | createReadStream (opts?: any): ReadStream 109 | createWriteStream (opts?: any): WriteStream 110 | download (range: HypercoreRangeOpts): HypercoreRangeResult 111 | truncate (newLength = 0, fork = -1): Promise 112 | append (block: any): Promise 113 | registerExtension (name: string, handlers: HypercoreExtensionHandlers): HypercoreExtension 114 | sign (signable: Buffer): Buffer 115 | on(evt: string, handler: Function) 116 | } 117 | 118 | export = Hypercore 119 | } -------------------------------------------------------------------------------- /types/hyperswarm/index.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'hyperswarm' { 2 | type EventEmitter = import('events').EventEmitter 3 | 4 | declare class PeerDiscovery { 5 | // TODO 6 | } 7 | 8 | declare class PeerInfo { 9 | publicKey: Buffer 10 | relayAddresses: any[] 11 | reconnecting: boolean 12 | proven: boolean 13 | banned: boolean 14 | tried: boolean 15 | queued: boolean 16 | topics: string[] 17 | attempts: number 18 | priority: number 19 | server: boolean 20 | } 21 | 22 | declare class Hyperswarm extends EventEmitter { 23 | peers: Map 24 | 25 | constructor (opts?: any) 26 | join (topic: Buffer): PeerDiscovery 27 | leave (topic: Buffer): Promise 28 | flush (): Promise 29 | destroy (): Promise 30 | on(evt: string, handler: Function) 31 | } 32 | 33 | export = Hyperswarm 34 | } --------------------------------------------------------------------------------