├── output
│   └── .editorconfig
├── .gitignore
├── tests
│   ├── mocha.opts
│   ├── helpers.ts
│   └── index.ts
├── source
│   ├── models
│   │   ├── log.ts
│   │   ├── log-history.ts
│   │   ├── block-history.ts
│   │   ├── block.ts
│   │   ├── filters.ts
│   │   └── transaction.ts
│   ├── utilities.ts
│   ├── index.ts
│   ├── log-reconciler.ts
│   ├── block-reconciler.ts
│   └── block-and-log-streamer.ts
├── .travis.yml
├── typings
│   └── chai-immutable.d.ts
├── .npmignore
├── wallaby.json
├── .editorconfig
├── Dockerfile
├── .vscode
│   ├── tasks.json
│   └── launch.json
├── tsconfig.json
├── LICENSE
├── package.json
└── README.md
/output/.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | output/ 3 | coverage/ 4 | npm-debug.log 5 | -------------------------------------------------------------------------------- /tests/mocha.opts: -------------------------------------------------------------------------------- 1 | --timeout 3000 2 | --recursive 3 | --colors 4 | -u bdd 5 | -------------------------------------------------------------------------------- /source/models/log.ts: -------------------------------------------------------------------------------- 1 | export interface Log { 2 | readonly logIndex: string, 3 | readonly blockNumber: string, 4 | readonly blockHash: string, 5 | } 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | 3 | language: node_js 4 | 5 | node_js: 6 | - "8" 7 | 8 | before_script: 9 | - npm install 10 | 11 | script: 12 | - npm run test 13 | -------------------------------------------------------------------------------- /source/models/log-history.ts: -------------------------------------------------------------------------------- 1 | import { Log } from "./log"; 2 | import { List as ImmutableList } from "immutable"; 3 | 4 | export type LogHistory = ImmutableList; 5 | -------------------------------------------------------------------------------- /typings/chai-immutable.d.ts: -------------------------------------------------------------------------------- 1 | declare module "chai-immutable" { 2 | function chaiImmutable(chai: any, utils: any): void; 3 | namespace chaiImmutable { } 4 | export = chaiImmutable; 5 | } 6 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .git/ 3 | tests/ 4 | .vscode/ 5 | output/.editorconfig 6 | .editorconfig 7 | .gitignore 8 | .travis.yml 9 | Dockerfile 10 | tsconfig.json 11 | wallaby.json 12 | -------------------------------------------------------------------------------- /source/models/block-history.ts: -------------------------------------------------------------------------------- 1 | import { Block } from "./block"; 2 | import { List as ImmutableList } from "immutable"; 3 | 4 | export type BlockHistory = ImmutableList; 5 | -------------------------------------------------------------------------------- /source/models/block.ts: -------------------------------------------------------------------------------- 1 | import { Transaction } from "./transaction"; 2 | 3 | export interface Block { 4 | readonly number: string; 5 | readonly hash: string; 6 | readonly parentHash: 
string; 7 | } 8 | -------------------------------------------------------------------------------- /wallaby.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [ 3 | "source/**/*.ts", 4 | "tests/helpers.ts" 5 | ], 6 | "tests": [ 7 | "tests/index.ts" 8 | ], 9 | "env": { 10 | "type": "node" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | charset = utf-8 7 | 8 | [*.{js,json}] 9 | indent_style = space 10 | indent_size = 2 11 | 12 | [*.ts] 13 | indent_style = tab 14 | -------------------------------------------------------------------------------- /source/models/filters.ts: -------------------------------------------------------------------------------- 1 | export interface Filter { 2 | readonly address?: string; 3 | readonly topics?: (string | string[] | null)[]; 4 | } 5 | 6 | export interface FilterOptions extends Filter { 7 | readonly blockHash: string 8 | } 9 | -------------------------------------------------------------------------------- /source/utilities.ts: -------------------------------------------------------------------------------- 1 | export const parseHexInt = (value: string) => { 2 | const result = Number.parseInt(value, 16); 3 | if (!Number.isFinite(result)) throw new Error(`${value} is not a hex encoded integer, parsing returned ${result}.`); 4 | return result; 5 | } 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:6 2 | 3 | COPY package.json /app/package.json 4 | WORKDIR /app 5 | RUN npm install 6 | 7 | COPY tsconfig.json /app/tsconfig.json 8 | COPY source/ /app/source/ 9 | COPY tests/ /app/tests/ 10 | COPY typings/ /app/typings/ 11 | 12 | RUN npm run build 13 | 14 | ENTRYPOINT [ "npm", "test" ] 15 | -------------------------------------------------------------------------------- /.vscode/tasks.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=733558 3 | // for the documentation about the tasks.json format 4 | "version": "0.1.0", 5 | "command": "tsc", 6 | "isShellCommand": true, 7 | "args": ["-w", "-p", "."], 8 | "showOutput": "silent", 9 | "isBackground": true, 10 | "problemMatcher": "$tsc-watch" 11 | } 12 | -------------------------------------------------------------------------------- /source/models/transaction.ts: -------------------------------------------------------------------------------- 1 | export interface Transaction { 2 | readonly hash: string; 3 | readonly nonce: string; 4 | readonly blockHash: string; 5 | readonly blockNumber: string; 6 | readonly transactionIndex: string; 7 | readonly from: string; 8 | readonly to: string; 9 | readonly value: string; 10 | readonly gasPrice: string; 11 | readonly gas: string; 12 | readonly input: string; 13 | } 14 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "commonjs", 5 | "sourceMap": true, 6 | "outDir": "output", 7 | "rootDir": ".", 8 | "noEmitOnError": true, 9 | "noImplicitAny": true, 10 | "noImplicitThis": 
true, 11 | "noImplicitReturns": true, 12 | "emitDecoratorMetadata": true, 13 | "experimentalDecorators": true, 14 | "strictNullChecks": true, 15 | "declaration": true, 16 | "lib": [ "es6", "dom" ] 17 | }, 18 | "include": [ 19 | "source/**/*.ts", 20 | "typings/**/*.d.ts" 21 | ] 22 | } 23 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible Node.js debug attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Mocha Tests", 11 | "program": "${workspaceRoot}/node_modules/mocha/bin/_mocha", 12 | "args": [ "--require", "ts-node/register", "--timeout", "999999", "--recursive", "--colors", "${workspaceRoot}/tests/**/*.ts" ], 13 | "internalConsoleOptions": "openOnSessionStart" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /source/index.ts: -------------------------------------------------------------------------------- 1 | // NOTE -- 2 | // Commented out to avoid cross origin error produced when running against webpack 3 | // More research is necessary to resolve, as it may simply be a configuration issue. 4 | // Repro Steps: Checkout augur `new-contracts` branch + run `yarn dev` to start the dev server. 5 | // When accessing within a browser, attempts to get files via XHR produce cross origin errors due to the proto being `webpack-internal` 6 | // import * as sourceMapSupport from "source-map-support"; 7 | // sourceMapSupport.install(); 8 | 9 | export { Block } from "./models/block"; 10 | export { Log } from "./models/log"; 11 | export { Transaction } from "./models/transaction"; 12 | export { FilterOptions } from "./models/filters"; 13 | 14 | export { BlockAndLogStreamer } from "./block-and-log-streamer"; 15 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 The Augur Developers 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
8 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ethereumjs-blockstream", 3 | "version": "7.0.0", 4 | "description": "A library to turn an unreliable remote source of Ethereum blocks into a reliable stream of blocks with removals on re-orgs and backfills on skips.", 5 | "main": "output/source/index.js", 6 | "types": "output/source/index.d.ts", 7 | "scripts": { 8 | "build": "tsc", 9 | "prepublishOnly": "npm run test && tsc", 10 | "test": "mocha --require ts-node/register tests/**/*.ts" 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "git+https://github.com/ethereumjs/ethereumjs-blockstream.git" 15 | }, 16 | "author": "Micah Zoltu", 17 | "license": "CC0-1.0", 18 | "bugs": { 19 | "url": "https://github.com/ethereumjs/ethereumjs-blockstream/issues" 20 | }, 21 | "homepage": "https://github.com/ethereumjs/ethereumjs-blockstream#readme", 22 | "devDependencies": { 23 | "@types/chai": "4.1.4", 24 | "@types/chai-as-promised": "7.1.0", 25 | "@types/mocha": "5.2.2", 26 | "@types/source-map-support": "0.4.1", 27 | "@types/uuid": "3.4.3", 28 | "chai": "3.5.0", 29 | "chai-as-promised": "7.1.1", 30 | "chai-immutable": "1.6.0", 31 | "copyfiles": "2.0.0", 32 | "coveralls": "3.0.1", 33 | "istanbul": "0.4.5", 34 | "mocha": "5.2.0", 35 | "ts-node": "^6.1.1", 36 | "typescript": "2.9.2" 37 | }, 38 | "dependencies": { 39 | "immutable": "3.8.2", 40 | "source-map-support": "0.5.6", 41 | "uuid": "3.2.1" 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /tests/helpers.ts: -------------------------------------------------------------------------------- 1 | import { Block, Transaction, Log, FilterOptions } from "../source/index"; 2 | 3 | export function delay(milliseconds: number): Promise { return new Promise((resolve, reject) => setTimeout(resolve, milliseconds)); } 4 | 5 | export function getBlockByHashFactory(blocks: Block[] = []) { 6 | const blockMapping = blocks.reduce((mapping, block) => mapping.set(block.hash, block), new Map()); 7 | return async (hash: string): Promise => { 8 | await delay(0); 9 | const mappedBlock = blockMapping.get(hash); 10 | if (mappedBlock !== undefined) { 11 | return mappedBlock; 12 | } else { 13 | const blockNumber = parseInt(hash.substr(-4), 16); 14 | const fork = hash.substr(-8, 4); 15 | return new MockBlock(blockNumber, fork, fork); 16 | } 17 | }; 18 | } 19 | 20 | export function getLogsFactory(logsPerFilter: number, fork: string = "AAAA") { 21 | return async (filterOptions: FilterOptions): Promise => { 22 | await delay(0); 23 | if (!filterOptions) throw new Error("filter options are required"); 24 | const logs = []; 25 | let logIndex = 0; 26 | for (let i = 0; i < logsPerFilter; ++i) { 27 | const blockHash = filterOptions.blockHash; 28 | logs.push(new MockLog(blockHash, logIndex++, fork)); 29 | } 30 | return logs; 31 | }; 32 | } 33 | 34 | export class MockBlock implements Block { 35 | readonly hash: string; 36 | readonly parentHash: string; 37 | readonly number: string; 38 | readonly nonce: string = ""; 39 | readonly sha3Uncles: string = ""; 40 | readonly logsBloom: string = "string"; 41 | readonly transactionRoot: string = "string"; 42 | readonly stateRoot: string = "string"; 43 | readonly receiptsRoot: string = "string"; 44 | readonly miner: string = "string"; 45 | readonly difficulty: string = "string"; 46 | readonly totalDifficulty: string = "string"; 47 | readonly 
size: string = "string"; 48 | readonly gasLimit: string = "string"; 49 | readonly gasUsed: string = "string"; 50 | readonly timestamp: string = "string"; 51 | readonly transactions: string[] | Transaction[] = [] 52 | readonly uncles: string[] = []; 53 | 54 | constructor(number: number, fork: string = "AAAA", parentFork?: string) { 55 | if (!parentFork) parentFork = fork; 56 | const numberAsHex = number.toString(16); 57 | const parentNumberAsHex = (number - 1).toString(16); 58 | this.hash = `0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0c${fork}${`0000${numberAsHex}`.substring(numberAsHex.length)}`; 59 | this.parentHash = `0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0c${parentFork}${`0000${parentNumberAsHex}`.substring(parentNumberAsHex.length)}`; 60 | this.number = `0x${numberAsHex}`; 61 | } 62 | } 63 | 64 | export class MockLog implements Log { 65 | readonly logIndex: string; 66 | readonly blockNumber: string 67 | readonly blockHash: string 68 | readonly transactionHash: string = "0xbaadf00dbaadf00dbaadf00dbaadf00dbaadf00dbaadf00dbaadf00dbaadf00d"; 69 | readonly transactionIndex: string = "0x0"; 70 | readonly address: string = "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef"; 71 | readonly data: string = "0x0000000000000000000000000000000000000000000000000000000000000000"; 72 | readonly topics: string[] = []; 73 | 74 | constructor(blockHash: string, logIndex: number = 0x0, fork: string = "AAAA") { 75 | const blockNumber = parseInt(blockHash.substring(62), 16); 76 | this.blockNumber = `0x${blockNumber.toString(16)}`; 77 | this.blockHash = blockHash; 78 | this.logIndex = `0x${logIndex.toString(16)}`; 79 | this.transactionIndex = this.logIndex; 80 | } 81 | } 82 | -------------------------------------------------------------------------------- /source/log-reconciler.ts: -------------------------------------------------------------------------------- 1 | import { Block } from "./models/block"; 2 | import { Log } from "./models/log"; 3 | import { Filter, FilterOptions } from "./models/filters"; 4 | import { LogHistory } from "./models/log-history"; 5 | import { parseHexInt } from "./utilities"; 6 | 7 | export const reconcileLogHistoryWithAddedBlock = async ( 8 | getLogs: (filterOptions: FilterOptions) => Promise, 9 | logHistory: LogHistory | Promise>, 10 | newBlock: TBlock, 11 | onLogsAdded: (blockHash: string, logs: Array) => Promise, 12 | filters: Filter[] = [], 13 | historyBlockLength: number = 100, 14 | ): Promise> => { 15 | logHistory = await logHistory; 16 | const logs = await getFilteredLogs(getLogs, newBlock, filters); 17 | logHistory = await addNewLogsToHead(newBlock.hash, logHistory, logs, onLogsAdded); 18 | logHistory = await pruneOldLogs(logHistory, newBlock, historyBlockLength); 19 | return logHistory; 20 | } 21 | 22 | const getFilteredLogs = async (getLogs: (filterOptions: FilterOptions) => Promise>, newBlock: TBlock, filters: Array): Promise> => { 23 | const logPromises = filters 24 | .map(filter => ({ blockHash: newBlock.hash, address: filter.address, topics: filter.topics, })) 25 | .map(filter => getLogs(filter)); 26 | const nestedLogs = await Promise.all(logPromises); 27 | return nestedLogs.reduce((allLogs, logs) => allLogs.concat(logs), []); 28 | } 29 | 30 | const addNewLogsToHead = async (blockHash: string, logHistory: LogHistory, newLogs: Array, onLogsAdded: (blockHash: string, logs: Array) => Promise): Promise> => { 31 | const sortedLogs = newLogs.sort((logA, logB) => parseHexInt(logA.logIndex) - parseHexInt(logB.logIndex)); 32 | const addedLogs: Array = 
[] 33 | for (const logToAdd of sortedLogs) { 34 | // we may already have this log because two filters can return the same log 35 | if (logHistory.some(logInHistory => logInHistory!.blockHash === logToAdd.blockHash && logInHistory!.logIndex === logToAdd.logIndex)) continue; 36 | ensureOrder(logHistory.last(), logToAdd); 37 | logHistory = logHistory.push(logToAdd) 38 | addedLogs.push(logToAdd) 39 | } 40 | // CONSIDER: the user getting this notification won't have any visibility into the updated log history yet. should we announce new logs in a `setTimeout`? should we provide log history with new logs? 41 | await onLogsAdded(blockHash, addedLogs) 42 | return logHistory; 43 | } 44 | 45 | const pruneOldLogs = async (logHistory: LogHistory, newBlock: TBlock, historyBlockLength: number): Promise> => { 46 | // `log!` is required until the next major version of `immutable` is published to NPM (current version 3.8.2) which improves the type definitions 47 | return logHistory.skipUntil(log => parseHexInt(newBlock.number) - parseHexInt(log!.blockNumber) < historyBlockLength).toList(); 48 | } 49 | 50 | const ensureOrder = (headLog: TLog | undefined, newLog: TLog) => { 51 | if (headLog === undefined) return; 52 | const headBlockNumber = parseHexInt(headLog.blockNumber); 53 | const newLogBlockNumber = parseHexInt(newLog.blockNumber); 54 | if (headBlockNumber > newLogBlockNumber) throw new Error(`received log for a block (${newLogBlockNumber}) older than current head log's block (${headBlockNumber})`); 55 | if (headBlockNumber !== newLogBlockNumber) return; 56 | const headLogIndex = parseHexInt(headLog.logIndex); 57 | const newLogIndex = parseHexInt(newLog.logIndex); 58 | if (headLogIndex >= newLogIndex) throw new Error(`received log with same block number (${newLogBlockNumber}) but index (${newLogIndex}) is the same or older than previous index (${headLogIndex})`); 59 | } 60 | 61 | export const reconcileLogHistoryWithRemovedBlock = async ( 62 | logHistory: LogHistory|Promise>, 63 | removedBlock: TBlock, 64 | onLogsRemoved: (blockHash: string, logs: Array) => Promise, 65 | ): Promise> => { 66 | logHistory = await logHistory; 67 | 68 | const removedLogs = [] 69 | while (!logHistory.isEmpty() && logHistory.last().blockHash === removedBlock.hash) { 70 | removedLogs.push(logHistory.last()); 71 | logHistory = logHistory.pop(); 72 | } 73 | await onLogsRemoved(removedBlock.hash, removedLogs); 74 | 75 | // sanity check, no known way to trigger the error 76 | if (logHistory.some(log => log!.blockHash === removedBlock.hash)) throw new Error("found logs for removed block not at head of log history"); 77 | 78 | return logHistory; 79 | } 80 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | [![Build Status](https://travis-ci.org/ethereumjs/ethereumjs-blockstream.svg?branch=master)](https://travis-ci.org/ethereumjs/ethereumjs-blockstream) [![Coverage Status](https://coveralls.io/repos/ethereumjs/ethereumjs-blockstream/badge.svg?branch=master&service=github)](https://coveralls.io/github/ethereumjs/ethereumjs-blockstream?branch=master) [![npm version](https://badge.fury.io/js/ethereumjs-blockstream.svg)](https://badge.fury.io/js/ethereumjs-blockstream) 2 | 3 | > [!NOTE] 4 | > This package is not maintained by the EthereumJS ([EF JavaScript](https://github.com/ethereumjs/ethereumjs-monorepo)) core team (see "Contributors"). 
5 | --- 6 | 7 | A library to turn an unreliable remote source of Ethereum blocks into a reliable stream of blocks. Handles block and log removals on chain reorganization and block and log backfills on skipped blocks. 8 | 9 | # Requirements for supported Ethereum node 10 | Blockstream requires support for [EIP-234](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-234.md) in the configured Ethereum node. EIP-234 was merged Jul 28, 2018 and implemented in Geth and Parity shortly after. Versions that provide the needed functionality: 11 | - Parity: v2.1.0+ 12 | - geth: v1.8.13+ 13 | 14 | # Usage 15 | 16 | ## Full Example 17 | ```typescript 18 | // blockRetention is how many blocks of history to keep in memory. it defaults to 100 if not supplied 19 | const configuration = { blockRetention: 100 }; 20 | async function getBlockByHash(hash: string): Promise<Block | null> { 21 | const response = await fetch("http://localhost:8545", { 22 | method: "POST", 23 | headers: new Headers({"Content-Type": "application/json"}), 24 | body: JSON.stringify({ jsonrpc: "2.0", id: 1, method: "eth_getBlockByHash", params: [hash, false] }) 25 | }); 26 | return (await response.json()).result; 27 | } 28 | async function getLogs(filterOptions: FilterOptions): Promise<Log[]> { 29 | const response = await fetch("http://localhost:8545", { 30 | method: "POST", 31 | headers: new Headers({"Content-Type": "application/json"}), 32 | body: JSON.stringify({ jsonrpc: "2.0", id: 1, method: "eth_getLogs", params: [filterOptions] }) 33 | }); 34 | return (await response.json()).result; 35 | } 36 | async function getLatestBlock(): Promise<Block> { 37 | const response = await fetch("http://localhost:8545", { 38 | method: "POST", 39 | headers: new Headers({"Content-Type": "application/json"}), 40 | body: JSON.stringify({ jsonrpc: "2.0", id: 1, method: "eth_getBlockByNumber", params: ["latest", false] }) 41 | }); 42 | return (await response.json()).result; 43 | } 44 | const blockAndLogStreamer = new BlockAndLogStreamer(getBlockByHash, getLogs, error => console.error(error), configuration); 45 | const onBlockAddedSubscriptionToken = blockAndLogStreamer.subscribeToOnBlockAdded(block => console.log(block)); 46 | const onLogAddedSubscriptionToken = blockAndLogStreamer.subscribeToOnLogsAdded((blockHash, logs) => logs.forEach(log => console.log(log))); 47 | const onBlockRemovedSubscriptionToken = blockAndLogStreamer.subscribeToOnBlockRemoved(block => console.log(block)); 48 | const onLogRemovedSubscriptionToken = blockAndLogStreamer.subscribeToOnLogsRemoved((blockHash, logs) => logs.forEach(log => console.log(log))); 49 | const logFilterToken = blockAndLogStreamer.addLogFilter({address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", topics: ["0xbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbaadf00d"]}); 50 | blockAndLogStreamer.reconcileNewBlock(await getLatestBlock()); 51 | // you will get a callback for the block and any logs that match the filter here 52 | triggerBlockMining(); 53 | triggerBlockMining(); 54 | triggerBlockMining(); 55 | blockAndLogStreamer.reconcileNewBlock(await getLatestBlock()); 56 | // you will get a callback for all blocks and logs that match the filter that have been added to the chain since the previous call to reconcileNewBlock 57 | triggerChainReorg(); 58 | blockAndLogStreamer.reconcileNewBlock(await getLatestBlock()); 59 | // you will get a callback for block/log removals that occurred due to the chain re-org, followed by block/log additions 60 | blockAndLogStreamer.unsubscribeFromOnBlockAdded(onBlockAddedSubscriptionToken); 61 | blockAndLogStreamer.unsubscribeFromOnBlockRemoved(onBlockRemovedSubscriptionToken); 62 | blockAndLogStreamer.unsubscribeFromOnLogsAdded(onLogAddedSubscriptionToken); 63 | 
blockAndLogStreamer.unsubscribeFromOnLogsRemoved(onLogRemovedSubscriptionToken); 64 | blockAndLogStreamer.removeLogFilter(logFilterToken); 65 | console.log(blockAndLogStreamer.getLatestReconciledBlock()); 66 | ``` 67 | 68 | ## Signatures 69 | Note: if you have a TypeScript-aware editor this will all be available in the tooltip 70 | * [Filter/FilterOptions](https://github.com/ethereumjs/ethereumjs-blockstream/blob/master/source/models/filters.ts#L1-L10) - More details at [Parity JSON-RPC Wiki](https://wiki.parity.io/JSONRPC-eth-module#eth_newfilter) 71 | * [Block](https://github.com/ethereumjs/ethereumjs-blockstream/blob/master/source/models/block.ts#L3-L22) - More details at [Parity JSON-RPC Wiki](https://wiki.parity.io/JSONRPC-eth-module#eth_getblockbyhash) 72 | * [Log](https://github.com/ethereumjs/ethereumjs-blockstream/blob/master/source/models/log.ts#L1-L10) - More details at [Parity JSON-RPC Wiki](https://wiki.parity.io/JSONRPC-eth-module#eth_getfilterchanges) 73 | 74 | # Development 75 | 76 | ## Build 77 | ``` 78 | docker build -t blockstream . 79 | ``` 80 | or 81 | ``` 82 | npm run build 83 | ``` 84 | 85 | ## Test 86 | ``` 87 | docker run blockstream 88 | ``` 89 | or 90 | ``` 91 | npm run test 92 | ``` 93 | -------------------------------------------------------------------------------- /source/block-reconciler.ts: -------------------------------------------------------------------------------- 1 | import { Block } from "./models/block"; 2 | import { BlockHistory } from "./models/block-history"; 3 | import { parseHexInt } from "./utilities"; 4 | import { List as ImmutableList } from "immutable"; 5 | 6 | type GetBlockByHash = (hash: string) => Promise; 7 | 8 | export const reconcileBlockHistory = async ( 9 | getBlockByHash: GetBlockByHash, 10 | blockHistory: BlockHistory|Promise>, 11 | newBlock: TBlock, 12 | onBlockAdded: (block: TBlock) => Promise, 13 | onBlockRemoved: (block: TBlock) => Promise, 14 | blockRetention: number = 100, 15 | ): Promise> => { 16 | blockHistory = await blockHistory; 17 | if (isFirstBlock(blockHistory)) 18 | return await addNewHeadBlock(blockHistory, newBlock, onBlockAdded, blockRetention); 19 | 20 | if (isOlderThanOldestBlock(blockHistory, newBlock)) { 21 | blockHistory = await rollback(blockHistory, onBlockRemoved); 22 | return await addNewHeadBlock(blockHistory, newBlock, onBlockAdded, blockRetention); 23 | } 24 | 25 | if (isAlreadyInHistory(blockHistory, newBlock)) 26 | return blockHistory; 27 | 28 | if (isNewHeadBlock(blockHistory, newBlock)) 29 | return await addNewHeadBlock(blockHistory, newBlock, onBlockAdded, blockRetention); 30 | 31 | if (parentHashIsInHistory(blockHistory, newBlock)) { 32 | while (blockHistory.last().hash !== newBlock.parentHash) { 33 | blockHistory = await removeHeadBlock(blockHistory, onBlockRemoved); 34 | } 35 | return await addNewHeadBlock(blockHistory, newBlock, onBlockAdded, blockRetention); 36 | } 37 | 38 | return await backfill(getBlockByHash, blockHistory, newBlock, onBlockAdded, onBlockRemoved, blockRetention); 39 | } 40 | 41 | const rollback = async (blockHistory: BlockHistory, onBlockRemoved: (block: TBlock) => Promise): Promise> => { 42 | while (!blockHistory.isEmpty()) { 43 | // CONSIDER: if this throws an exception, removals may have been announced that are actually still in history since throwing will result in no history update. 
we can't catch errors here because there isn't a clear way to recover from them, the failure may be a downstream system telling us that the block removal isn't possible because they are in a bad state. we could try re-announcing the successfully added blocks, but there would still be a problem with the failed block (should it be re-announced?) and the addition announcements may also fail 44 | blockHistory = await removeHeadBlock(blockHistory, onBlockRemoved); 45 | } 46 | return blockHistory; 47 | } 48 | 49 | const backfill = async (getBlockByHash: GetBlockByHash, blockHistory: BlockHistory, newBlock: TBlock, onBlockAdded: (block: TBlock) => Promise, onBlockRemoved: (block: TBlock) => Promise, blockRetention: number): Promise> => { 50 | if (newBlock.parentHash === "0x0000000000000000000000000000000000000000000000000000000000000000") 51 | return await rollback(blockHistory, onBlockRemoved); 52 | const parentBlock = await getBlockByHash(newBlock.parentHash); 53 | if (parentBlock === null) throw new Error("Failed to fetch parent block."); 54 | if (parseHexInt(parentBlock.number) + blockRetention < parseHexInt(blockHistory.last().number)) 55 | return await rollback(blockHistory, onBlockRemoved); 56 | blockHistory = await reconcileBlockHistory(getBlockByHash, blockHistory, parentBlock, onBlockAdded, onBlockRemoved, blockRetention); 57 | return await reconcileBlockHistory(getBlockByHash, blockHistory, newBlock, onBlockAdded, onBlockRemoved, blockRetention); 58 | } 59 | 60 | const addNewHeadBlock = async (blockHistory: BlockHistory, newBlock: TBlock, onBlockAdded: (block: TBlock) => Promise, blockRetention: number): Promise> => { 61 | // this is here as a final sanity check, in case we somehow got into an unexpected state, there are no known (and should never be) ways to reach this exception 62 | if (!blockHistory.isEmpty() && blockHistory.last().hash !== newBlock.parentHash) throw new Error("New head block's parent isn't our current head."); 63 | // CONSIDER: the user getting this notification won't have any visibility into the updated block history yet. should we announce new blocks in a `setTimeout`? should we provide block history with new logs? an announcement failure will result in unwinding the stack and returning the original blockHistory, if we are in the process of backfilling we may have already announced previous blocks that won't actually end up in history (they won't get removed if a re-org occurs and may be re-announced). 
we can't catch errors thrown by the callback because it may be trying to signal to use that the block has become invalid and is un-processable 64 | await onBlockAdded(newBlock); 65 | blockHistory = blockHistory.push(newBlock); 66 | return blockHistory.takeLast(blockRetention).toList(); 67 | } 68 | 69 | const removeHeadBlock = async (blockHistory: BlockHistory, onBlockRemoved: (block: TBlock) => Promise): Promise> => { 70 | let removedBlock = blockHistory.last(); 71 | blockHistory = blockHistory.pop(); 72 | await onBlockRemoved(removedBlock); 73 | return blockHistory; 74 | } 75 | 76 | const isFirstBlock = (blockHistory: BlockHistory): boolean => { 77 | return blockHistory.isEmpty(); 78 | } 79 | 80 | const isOlderThanOldestBlock = (blockHistory: BlockHistory, newBlock: TBlock): boolean => { 81 | return parseHexInt(blockHistory.first().number) > parseHexInt(newBlock.number); 82 | } 83 | 84 | const isAlreadyInHistory = (blockHistory: BlockHistory, newBlock: TBlock): boolean => { 85 | // `block!` is required until the next version of `immutable` is published to NPM (current version 3.8.1) which improves the type definitions 86 | return blockHistory.some(block => block!.hash === newBlock.hash); 87 | } 88 | 89 | const isNewHeadBlock = (blockHistory: BlockHistory, newBlock: TBlock): boolean => { 90 | return blockHistory.last().hash === newBlock.parentHash; 91 | } 92 | 93 | const parentHashIsInHistory = (blockHistory: BlockHistory, newBlock: TBlock): boolean => { 94 | // `block!` is required until the next version of `immutable` is published to NPM (current version 3.8.1) which improves the type definitions 95 | return blockHistory.some(block => block!.hash === newBlock.parentHash); 96 | } 97 | -------------------------------------------------------------------------------- /source/block-and-log-streamer.ts: -------------------------------------------------------------------------------- 1 | import { Block } from "./models/block"; 2 | import { Log } from "./models/log"; 3 | import { Filter, FilterOptions } from "./models/filters"; 4 | import { BlockHistory } from "./models/block-history"; 5 | import { LogHistory } from "./models/log-history"; 6 | import { reconcileBlockHistory } from "./block-reconciler"; 7 | import { reconcileLogHistoryWithAddedBlock, reconcileLogHistoryWithRemovedBlock } from "./log-reconciler"; 8 | 9 | import { List as ImmutableList } from "immutable"; 10 | import * as createUuid from "uuid"; 11 | 12 | export interface Configuration { 13 | /** number of blocks to retain in history, defaults to 100 */ 14 | blockRetention?: number 15 | } 16 | 17 | export class BlockAndLogStreamer { 18 | private lastKnownGoodBlockHistory: BlockHistory = ImmutableList(); 19 | private blockHistory: Promise> = Promise.resolve(this.lastKnownGoodBlockHistory); 20 | private lastKnownGoodLogHistory: LogHistory = ImmutableList(); 21 | private logHistory: Promise> = Promise.resolve(this.lastKnownGoodLogHistory); 22 | private pendingCallbacks: Array<() => void> = []; 23 | 24 | private readonly blockRetention: number; 25 | 26 | private readonly getBlockByHash: (hash: string) => Promise; 27 | private readonly getLogs: (filterOptions: FilterOptions) => Promise; 28 | private readonly onError: (error: Error) => void = () => {}; 29 | 30 | private readonly logFilters: { [propName: string]: Filter } = {} 31 | private readonly onBlockAddedSubscribers: { [propName: string]: (block: TBlock) => void } = {}; 32 | private readonly onBlockRemovedSubscribers: { [propName: string]: (block: TBlock) => void } = {}; 33 | 
private readonly onLogsAddedSubscribers: { [propName: string]: (blockHash: string, logs: Array) => void } = {}; 34 | private readonly onLogsRemovedSubscribers: { [propName: string]: (blockHash: string, logs: Array) => void } = {}; 35 | 36 | /** 37 | * @param getBlockByHash async function that returns a block given a particular hash or null/throws if the block is not found 38 | * @param getLogs async function that returns the logs matching the given filter 39 | * @param onError called if a subscriber throws an error, the error will otherwise be swallowed 40 | * @param configuration additional optional configuration items 41 | */ 42 | constructor( 43 | getBlockByHash: (hash: string) => Promise, 44 | getLogs: (filterOptions: FilterOptions) => Promise, 45 | onError: (error: Error) => void, 46 | configuration?: Configuration, 47 | ) { 48 | if (getBlockByHash === undefined) throw new Error(`getBlockByHash must be provided`); 49 | this.getBlockByHash = getBlockByHash; 50 | if (getLogs === undefined) throw new Error(`getLogs must be provided`); 51 | this.getLogs = getLogs; 52 | if (onError === undefined) throw new Error(`onError must be provided`); 53 | this.onError = onError; 54 | this.blockRetention = (configuration && configuration.blockRetention) ? configuration.blockRetention : 100; 55 | } 56 | 57 | public readonly reconcileNewBlock = async (block: TBlock): Promise => { 58 | try { 59 | this.blockHistory = reconcileBlockHistory(this.getBlockByHash, this.blockHistory, block, this.onBlockAdded, this.onBlockRemoved, this.blockRetention); 60 | const blockHistory = await this.blockHistory; 61 | const logHistory = await this.logHistory; 62 | // everything reconciled correctly, checkpoint state 63 | this.lastKnownGoodBlockHistory = blockHistory; 64 | this.lastKnownGoodLogHistory = logHistory; 65 | this.pendingCallbacks.forEach(callback => callback()); 66 | this.pendingCallbacks = []; 67 | } catch (error) { 68 | // NOTE: this catch block may be hit multiple times for a single failure root cause, thus we need to be careful to only do idempotent operations in here 69 | // something went wrong, rollback to last checkpoint 70 | this.blockHistory = Promise.resolve(this.lastKnownGoodBlockHistory); 71 | this.logHistory = Promise.resolve(this.lastKnownGoodLogHistory); 72 | this.pendingCallbacks = []; 73 | throw error; 74 | } 75 | }; 76 | 77 | private readonly onBlockAdded = async (block: TBlock): Promise => { 78 | Object.keys(this.onBlockAddedSubscribers) 79 | .map((key: string) => this.onBlockAddedSubscribers[key]) 80 | .map(callback => logAndSwallowWrapper(callback, this.onError)) 81 | .forEach(callback => this.pendingCallbacks.push(() => callback(block))); 82 | 83 | const logFilters = Object.keys(this.logFilters).map(key => this.logFilters[key]); 84 | this.logHistory = reconcileLogHistoryWithAddedBlock(this.getLogs, this.logHistory, block, this.onLogsAdded, logFilters, this.blockRetention); 85 | await this.logHistory; 86 | }; 87 | 88 | private readonly onBlockRemoved = async (block: TBlock): Promise => { 89 | this.logHistory = reconcileLogHistoryWithRemovedBlock(this.logHistory, block, this.onLogsRemoved); 90 | await this.logHistory; 91 | 92 | Object.keys(this.onBlockRemovedSubscribers) 93 | .map((key: string) => this.onBlockRemovedSubscribers[key]) 94 | .map(callback => logAndSwallowWrapper(callback, this.onError)) 95 | .forEach(callback => this.pendingCallbacks.push(() => callback(block))); 96 | }; 97 | 98 | private readonly onLogsAdded = async (blockHash: string, logs: Array): Promise => { 99 | 
Object.keys(this.onLogsAddedSubscribers) 100 | .map((key: string) => this.onLogsAddedSubscribers[key]) 101 | .map(callback => logAndSwallowWrapper(callback, this.onError)) 102 | .forEach(callback => this.pendingCallbacks.push(() => callback(blockHash, logs))); 103 | }; 104 | 105 | private readonly onLogsRemoved = async (blockHash: string, logs: Array): Promise => { 106 | Object.keys(this.onLogsRemovedSubscribers) 107 | .map((key: string) => this.onLogsRemovedSubscribers[key]) 108 | .map(callback => logAndSwallowWrapper(callback, this.onError)) 109 | .forEach(callback => this.pendingCallbacks.push(() => callback(blockHash, logs))); 110 | }; 111 | 112 | 113 | public readonly getLatestReconciledBlock = (): TBlock | null => { 114 | return this.lastKnownGoodBlockHistory.isEmpty() ? null : this.lastKnownGoodBlockHistory.last(); 115 | }; 116 | 117 | 118 | public readonly addLogFilter = (filter: Filter): string => { 119 | const uuid = `log filter token ${createUuid()}`; 120 | this.logFilters[uuid] = filter; 121 | return uuid; 122 | }; 123 | 124 | public readonly removeLogFilter = (token: string): void => { 125 | if (!token.startsWith("log filter token ")) throw new Error(`Expected a log filter token. Actual: ${token}`); 126 | delete this.logFilters[token]; 127 | }; 128 | 129 | 130 | public readonly subscribeToOnBlockAdded = (onBlockAdded: (block: TBlock) => void): string => { 131 | const uuid = `on block added token ${createUuid()}`; 132 | this.onBlockAddedSubscribers[uuid] = onBlockAdded; 133 | return uuid; 134 | }; 135 | 136 | public readonly unsubscribeFromOnBlockAdded = (token: string) => { 137 | if (!token.startsWith("on block added token ")) throw new Error(`Expected a block added subscription token. Actual: ${token}`); 138 | delete this.onBlockAddedSubscribers[token]; 139 | }; 140 | 141 | 142 | public readonly subscribeToOnBlockRemoved = (onBlockRemoved: (block: TBlock) => void): string => { 143 | const uuid = `on block removed token ${createUuid()}`; 144 | this.onBlockRemovedSubscribers[uuid] = onBlockRemoved; 145 | return uuid; 146 | }; 147 | 148 | public readonly unsubscribeFromOnBlockRemoved = (token: string) => { 149 | if (!token.startsWith("on block removed token ")) throw new Error(`Expected a block added subscription token. Actual: ${token}`); 150 | delete this.onBlockRemovedSubscribers[token]; 151 | }; 152 | 153 | 154 | public readonly subscribeToOnLogsAdded = (onLogsAdded: (blockHash: string, logs: Array) => void): string => { 155 | const uuid = `on log added token ${createUuid()}`; 156 | this.onLogsAddedSubscribers[uuid] = onLogsAdded; 157 | return uuid; 158 | }; 159 | 160 | public readonly unsubscribeFromOnLogsAdded = (token: string) => { 161 | if (!token.startsWith("on log added token ")) throw new Error(`Expected a log added subscription token. Actual: ${token}`); 162 | delete this.onLogsAddedSubscribers[token]; 163 | }; 164 | 165 | 166 | public readonly subscribeToOnLogsRemoved = (onLogsRemoved: (blockHash: string, logs: Array) => void): string => { 167 | const uuid = `on log removed token ${createUuid()}`; 168 | this.onLogsRemovedSubscribers[uuid] = onLogsRemoved; 169 | return uuid; 170 | }; 171 | 172 | public readonly unsubscribeFromOnLogsRemoved = (token: string) => { 173 | if (!token.startsWith("on log removed token ")) throw new Error(`Expected a log added subscription token. 
Actual: ${token}`); 174 | delete this.onLogsRemovedSubscribers[token]; 175 | }; 176 | } 177 | 178 | function logAndSwallowWrapper(callback: (arg1?: T, arg2?: U) => void, onError: (error: Error) => void): (arg1?: T, arg2?: U) => void { 179 | return function (parameter1, parameter2) { 180 | try { 181 | callback(parameter1, parameter2); 182 | } catch (error) { 183 | onError(error); 184 | } 185 | }; 186 | } 187 | -------------------------------------------------------------------------------- /tests/index.ts: -------------------------------------------------------------------------------- 1 | import { reconcileBlockHistory } from "../source/block-reconciler"; 2 | import { reconcileLogHistoryWithAddedBlock, reconcileLogHistoryWithRemovedBlock } from "../source/log-reconciler"; 3 | import { Block, Log, FilterOptions, BlockAndLogStreamer } from "../source/index"; 4 | import { MockBlock, MockLog, getBlockByHashFactory, getLogsFactory, delay } from "./helpers"; 5 | import { expect, use as chaiUse } from "chai"; 6 | import * as chaiAsPromised from "chai-as-promised"; 7 | import * as chaiImmutable from "chai-immutable"; 8 | import { List as ImmutableList, Record as ImmutableRecord, fromJS } from "immutable"; 9 | import "mocha"; 10 | 11 | chaiUse(chaiImmutable); 12 | chaiUse(chaiAsPromised); 13 | 14 | describe("reconcileBlockHistory", () => { 15 | let newBlockAnnouncements: Block[]; 16 | let blockRemovalAnnouncments: Block[]; 17 | const onBlockAdded = async (block: Block) => { await delay(0); newBlockAnnouncements.push(block); }; 18 | const onBlockRemoved = async (block: Block) => { await delay(0); blockRemovalAnnouncments.push(block); }; 19 | 20 | beforeEach(() => { 21 | newBlockAnnouncements = []; 22 | blockRemovalAnnouncments = []; 23 | }); 24 | 25 | it("announces new head when first block is added to history", async () => { 26 | const oldHistory = Promise.resolve(ImmutableList()); 27 | const newBlock = new MockBlock(0x7777); 28 | 29 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 30 | 31 | expect(newHistory.toJS()).to.deep.equal([newBlock]); 32 | expect(newBlockAnnouncements).to.deep.include(newBlock); 33 | expect(blockRemovalAnnouncments).to.be.empty; 34 | }); 35 | 36 | it("does not announce new block on repeat of current head", async () => { 37 | const oldHistory = Promise.resolve(ImmutableList([new MockBlock(0x7777)])); 38 | const newBlock = new MockBlock(0x7777); 39 | 40 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 41 | 42 | expect(newHistory).to.equal(await oldHistory); 43 | expect(newBlockAnnouncements).to.be.empty; 44 | expect(blockRemovalAnnouncments).to.be.empty; 45 | }); 46 | 47 | it("announces a new head when nth block is added to history", async () => { 48 | const oldHistory = Promise.resolve(ImmutableList([ 49 | new MockBlock(0x7777), 50 | new MockBlock(0x7778) 51 | ])); 52 | const newBlock = new MockBlock(0x7779); 53 | 54 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 55 | 56 | expect(newHistory.toJS()).to.deep.equal([ 57 | new MockBlock(0x7777), 58 | new MockBlock(0x7778), 59 | new MockBlock(0x7779) 60 | ]); 61 | expect(newBlockAnnouncements).to.deep.equal([newBlock]); 62 | expect(blockRemovalAnnouncments).to.be.empty; 63 | }); 64 | 65 | it("ignores blocks already in history", async () => { 66 | const oldHistory = Promise.resolve(ImmutableList([ 
67 | new MockBlock(0x7777), 68 | new MockBlock(0x7778), 69 | new MockBlock(0x7779) 70 | ])); 71 | const newBlock = new MockBlock(0x7778); 72 | 73 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 74 | 75 | expect(newHistory).to.equal(await oldHistory); 76 | expect(newBlockAnnouncements).to.be.empty; 77 | expect(blockRemovalAnnouncments).to.be.empty; 78 | }); 79 | 80 | it("does a multi-block rollback to attach new block to head", async () => { 81 | const oldHistory = Promise.resolve(ImmutableList([ 82 | new MockBlock(0x7777), 83 | new MockBlock(0x7778), 84 | new MockBlock(0x7779), 85 | new MockBlock(0x777A) 86 | ])); 87 | const newBlock = new MockBlock(0x7779, "BBBB", "AAAA"); 88 | 89 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 90 | 91 | expect(newHistory.toJS()).to.deep.equal([ 92 | new MockBlock(0x7777, "AAAA"), 93 | new MockBlock(0x7778, "AAAA"), 94 | new MockBlock(0x7779, "BBBB", "AAAA") 95 | ]); 96 | expect(newHistory.count()).to.equal(3); 97 | expect(newBlockAnnouncements).to.deep.equal([newBlock]); 98 | expect(blockRemovalAnnouncments).to.deep.equal([ 99 | new MockBlock(0x777A), 100 | new MockBlock(0x7779) 101 | ]); 102 | }); 103 | 104 | it("backfills missing blocks", async () => { 105 | const oldHistory = Promise.resolve(ImmutableList([ 106 | new MockBlock(0x7777), 107 | new MockBlock(0x7778) 108 | ])); 109 | const newBlock = new MockBlock(0x777B); 110 | 111 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 112 | 113 | expect(newHistory.toJS()).to.deep.equal([ 114 | new MockBlock(0x7777), 115 | new MockBlock(0x7778), 116 | new MockBlock(0x7779), 117 | new MockBlock(0x777A), 118 | new MockBlock(0x777B) 119 | ]); 120 | expect(newBlockAnnouncements).to.deep.equal([ 121 | new MockBlock(0x7779), 122 | new MockBlock(0x777A), 123 | new MockBlock(0x777B) 124 | ]); 125 | expect(blockRemovalAnnouncments).to.be.empty; 126 | }); 127 | 128 | it("rolls back and backfills if necessary", async () => { 129 | const oldHistory = Promise.resolve(ImmutableList([ 130 | new MockBlock(0x7777), 131 | new MockBlock(0x7778), 132 | new MockBlock(0x7779), 133 | new MockBlock(0x777A) 134 | ])); 135 | const newBlock = new MockBlock(0x777B, "BBBB", "BBBB"); 136 | const getBlockByHash = getBlockByHashFactory([ 137 | new MockBlock(0x777A, "BBBB", "BBBB"), 138 | new MockBlock(0x7779, "BBBB", "AAAA") 139 | ]); 140 | 141 | const newHistory = await reconcileBlockHistory(getBlockByHash, oldHistory, newBlock, onBlockAdded, onBlockRemoved); 142 | 143 | expect(newHistory.toJS()).to.deep.equal([ 144 | new MockBlock(0x7777, "AAAA", "AAAA"), 145 | new MockBlock(0x7778, "AAAA", "AAAA"), 146 | new MockBlock(0x7779, "BBBB", "AAAA"), 147 | new MockBlock(0x777A, "BBBB", "BBBB"), 148 | new MockBlock(0x777B, "BBBB", "BBBB"), 149 | ]); 150 | expect(newBlockAnnouncements).to.deep.equal([ 151 | new MockBlock(0x7779, "BBBB", "AAAA"), 152 | new MockBlock(0x777A, "BBBB", "BBBB"), 153 | new MockBlock(0x777B, "BBBB", "BBBB"), 154 | ]); 155 | expect(blockRemovalAnnouncments).to.deep.equal([ 156 | new MockBlock(0x777A, "AAAA", "AAAA"), 157 | new MockBlock(0x7779, "AAAA", "AAAA"), 158 | ]); 159 | }); 160 | 161 | it("resets history if reconciliation not possible", async () => { 162 | const oldHistory = Promise.resolve(ImmutableList([ 163 | new MockBlock(0x7777), 164 | new MockBlock(0x7778) 165 | ])); 166 | const 
newBlock = new MockBlock(0x7778, "BBBB", "BBBB"); 167 | 168 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved, 5); 169 | 170 | expect(newHistory.toJS()).to.deep.equal([ 171 | new MockBlock(0x7776, "BBBB", "BBBB"), 172 | new MockBlock(0x7777, "BBBB", "BBBB"), 173 | new MockBlock(0x7778, "BBBB", "BBBB"), 174 | ]); 175 | expect(newBlockAnnouncements).to.deep.equal([ 176 | new MockBlock(0x7776, "BBBB", "BBBB"), 177 | new MockBlock(0x7777, "BBBB", "BBBB"), 178 | new MockBlock(0x7778, "BBBB", "BBBB"), 179 | ]); 180 | expect(blockRemovalAnnouncments).to.deep.equal((await oldHistory).reverse().toJS()); 181 | }); 182 | 183 | it("throws if block fetching of parent during backfill fails", async () => { 184 | const getBlockByHash = async (hash: string): Promise => { await delay(0); return null; } 185 | const oldHistory = Promise.resolve(ImmutableList([ 186 | new MockBlock(0x7777), 187 | new MockBlock(0x7778) 188 | ])); 189 | const newBlock = new MockBlock(0x777B); 190 | 191 | const newHistoryPromise = reconcileBlockHistory(getBlockByHash, oldHistory, newBlock, onBlockAdded, onBlockRemoved); 192 | 193 | await expect(newHistoryPromise).to.eventually.be.rejectedWith(Error, "Failed to fetch parent block."); 194 | expect(newBlockAnnouncements).to.be.empty; 195 | expect(blockRemovalAnnouncments).to.be.empty; 196 | }); 197 | 198 | it("wipes out history if new block is older than oldest block in history", async () => { 199 | const oldHistory = Promise.resolve(ImmutableList([ 200 | new MockBlock(0x7777), 201 | new MockBlock(0x7778), 202 | new MockBlock(0x7779), 203 | new MockBlock(0x777A), 204 | ])); 205 | const newBlock = new MockBlock(0x7776); 206 | 207 | const newHistory = await reconcileBlockHistory(getBlockByHashFactory(), oldHistory, newBlock, onBlockAdded, onBlockRemoved); 208 | 209 | expect(newHistory.toJS()).to.deep.equal([ 210 | new MockBlock(0x7776), 211 | ]); 212 | expect(newBlockAnnouncements).to.deep.equal([ 213 | new MockBlock(0x7776), 214 | ]); 215 | expect(blockRemovalAnnouncments).to.deep.equal([ 216 | new MockBlock(0x777A), 217 | new MockBlock(0x7779), 218 | new MockBlock(0x7778), 219 | new MockBlock(0x7777), 220 | ]) 221 | }) 222 | }); 223 | 224 | describe("reconcileLogHistoryWithAddedBlock", async () => { 225 | let newLogAnnouncements: Array; 226 | const onLogsAdded = async (blockHash: string, logs: Array) => { await delay(0); logs.forEach(log => newLogAnnouncements.push(log)); }; 227 | 228 | beforeEach(() => { 229 | newLogAnnouncements = []; 230 | }); 231 | 232 | it("does not fetch logs if no filters are applied", async () => { 233 | let called = 0; 234 | const getLogs = async (filterOptions: FilterOptions): Promise => { ++called; return Promise.resolve([]); }; 235 | const newBlock = new MockBlock(0x7777); 236 | const oldLogHistory = Promise.resolve(ImmutableList()); 237 | 238 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded); 239 | 240 | expect(newLogHistory).to.deep.equal(ImmutableList()); 241 | expect(newLogAnnouncements).to.be.empty; 242 | expect(called).to.equal(0); 243 | }); 244 | 245 | it("adds block with no logs", async () => { 246 | const getLogs = async (filterOptions: FilterOptions) => Promise.resolve([]); 247 | const newBlock = new MockBlock(0x7777); 248 | const oldLogHistory = Promise.resolve(ImmutableList()); 249 | 250 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{}]); 251 | 
252 | expect(newLogHistory).to.deep.equal(ImmutableList()); 253 | expect(newLogAnnouncements).to.be.empty; 254 | }); 255 | 256 | it("adds block with logs", async () => { 257 | const getLogs = getLogsFactory(1); 258 | const newBlock = new MockBlock(0x7777); 259 | const oldLogHistory = Promise.resolve(ImmutableList()); 260 | 261 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{}]); 262 | 263 | // unfortunately, because we have an immutable list of a complex object with a nested list of a complex object in it, we can't do a normal equality comparison 264 | expect(newLogHistory.toJS()).to.deep.equal([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777')]); 265 | expect(newLogAnnouncements).to.deep.equal([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777')]); 266 | }); 267 | 268 | it("adds block with multiple logs", async () => { 269 | const getLogs = getLogsFactory(2); 270 | const newBlock = new MockBlock(0x7777); 271 | const oldLogHistory = Promise.resolve(ImmutableList()); 272 | 273 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{}]); 274 | 275 | // unfortunately, because we have an immutable list of a complex object with a nested list of a complex object in it, we can't do a normal equality comparison 276 | expect(newLogHistory.toJS()).to.deep.equal([ 277 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 278 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 279 | ]); 280 | expect(newLogAnnouncements).to.deep.equal([ 281 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 282 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 283 | ]); 284 | }); 285 | 286 | it("orders logs by index", async () => { 287 | const getLogs = async (filterOptions: FilterOptions) => Promise.resolve([ 288 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 289 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x2), 290 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 291 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x3), 292 | ]); 293 | const newBlock = new MockBlock(0x7777); 294 | const oldLogHistory = Promise.resolve(ImmutableList()); 295 | 296 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{}]); 297 | 298 | expect(newLogHistory.toJS()).to.deep.equal([ 299 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 300 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 301 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x2), 302 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x3), 303 | ]); 304 | expect(newLogAnnouncements).to.deep.equal([ 305 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 306 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 307 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x2), 308 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x3), 309 | ]); 310 | 
}); 311 | 312 | it("fails if getLogs fails", async () => { 313 | const getLogs = async (filterOptions: FilterOptions) => { await delay(0); throw new Error("apple"); }; 314 | const newBlock = new MockBlock(0x7777); 315 | const oldLogHistory = Promise.resolve(ImmutableList()); 316 | 317 | const newLogHistoryPromise = reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{}]); 318 | 319 | await expect(newLogHistoryPromise).to.eventually.be.rejectedWith(Error, "apple"); 320 | expect(newLogAnnouncements).to.be.empty; 321 | }); 322 | 323 | it("fails if onNewLog fails", async () => { 324 | const getLogs = getLogsFactory(1); 325 | const failingOnLogAdded = async () => { await delay(0); throw new Error("banana"); }; 326 | const newBlock = new MockBlock(0x7777); 327 | const oldLogHistory = Promise.resolve(ImmutableList()); 328 | 329 | const newLogHistoryPromise = reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, failingOnLogAdded, [{}]); 330 | 331 | await expect(newLogHistoryPromise).to.eventually.rejectedWith(Error, "banana"); 332 | }); 333 | 334 | it("fails if old block with logs is added before new block with logs is removed", async () => { 335 | const getLogs = getLogsFactory(1); 336 | const firstBlock = new MockBlock(0x7777); 337 | const secondBlock = new MockBlock(0x7776); 338 | const oldLogHistory = Promise.resolve(ImmutableList()); 339 | 340 | const firstLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, firstBlock, onLogsAdded, [{}]); 341 | const secondLogHistoryPromise = reconcileLogHistoryWithAddedBlock(getLogs, firstLogHistory, secondBlock, onLogsAdded, [{}]); 342 | 343 | await expect(secondLogHistoryPromise).to.eventually.rejectedWith(Error, /received log for a block (.*?) older than current head log's block (.*?)/); 344 | // unfortunate reality 345 | expect(newLogAnnouncements).to.deep.equal([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777')]); 346 | }) 347 | 348 | it("dedupes logs with same blockhash and index from multiple filters", async () => { 349 | const getLogs = async (filterOptions: FilterOptions) => Promise.resolve([ 350 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 351 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 352 | ]); 353 | const newBlock = new MockBlock(0x7777); 354 | const oldLogHistory = Promise.resolve(ImmutableList()); 355 | 356 | const newLogHistory = await reconcileLogHistoryWithAddedBlock(getLogs, oldLogHistory, newBlock, onLogsAdded, [{},{}]); 357 | 358 | expect(newLogAnnouncements).to.deep.equal([ 359 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 360 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 361 | ]); 362 | }); 363 | }); 364 | 365 | describe("reconcileLogHistoryWithRemovedBlock", async () => { 366 | let removedLogAnnouncements: Array; 367 | const onLogsRemoved = async (blockHash: string, logs: Array) => { await delay(0); logs.forEach(log => removedLogAnnouncements.push(log)); }; 368 | 369 | beforeEach(() => { 370 | removedLogAnnouncements = []; 371 | }); 372 | 373 | it("returns empty log history when starting with null log", async () => { 374 | const removedBlock = new MockBlock(0x7777); 375 | const oldLogHistory = Promise.resolve(ImmutableList()); 376 | 377 | const newLogHistory = await reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 378 | 379 | 
expect(newLogHistory.toJS()).to.be.empty; 380 | expect(removedLogAnnouncements).to.be.empty; 381 | }); 382 | 383 | it("handles block removal with no associated logs", async () => { 384 | const removedBlock = new MockBlock(0x7777); 385 | const oldLogHistory = Promise.resolve(ImmutableList<Log>([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776')])); 386 | 387 | const newLogHistory = await reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 388 | 389 | expect(newLogHistory.toJS()).to.deep.equal([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776')]); 390 | expect(removedLogAnnouncements).to.be.empty; 391 | }); 392 | 393 | it("removes logs at head for given block", async () => { 394 | const removedBlock = new MockBlock(0x7777); 395 | const oldLogHistory = Promise.resolve(ImmutableList<Log>([ 396 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 397 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 398 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777'), 399 | ])); 400 | 401 | const newLogHistory = await reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 402 | 403 | expect(newLogHistory.toJS()).to.deep.equal([ 404 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 405 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 406 | ]); 407 | expect(removedLogAnnouncements).to.deep.equal([new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777')]); 408 | }); 409 | 410 | it("removes multiple logs in reverse order for same block", async () => { 411 | const removedBlock = new MockBlock(0x7777); 412 | // NOTE: log index sorting is handled on new block processing but not validated during removal process so out-of-order indexes are only possible by manually creating history 413 | const oldLogHistory = Promise.resolve(ImmutableList<Log>([ 414 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 415 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 416 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 417 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 418 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x2), 419 | ])); 420 | 421 | const newLogHistory = await reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 422 | 423 | expect(newLogHistory.toJS()).to.deep.equal([ 424 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 425 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 426 | ]); 427 | expect(removedLogAnnouncements).to.deep.equal([ 428 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x2), 429 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 430 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 431 | ]); 432 | }); 433 | 434 | it("throws if removed block is not at head", async () => { 435 | const removedBlock = new MockBlock(0x7776); 436 | const oldLogHistory = Promise.resolve(ImmutableList<Log>([ 437 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 438 | new 
MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 439 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777'), 440 | ])); 441 | 442 | const newLogHistoryPromise = reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 443 | 444 | await expect(newLogHistoryPromise).to.eventually.rejectedWith(Error, "found logs for removed block not at head of log history"); 445 | expect(removedLogAnnouncements).to.be.empty; 446 | }); 447 | 448 | it("removes head logs for block before throwing upon finding nonhead logs for block", async () => { 449 | const removedBlock = new MockBlock(0x7777); 450 | const oldLogHistory = Promise.resolve(ImmutableList<Log>([ 451 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7775'), 452 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777'), 453 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7776'), 454 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 455 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 456 | ])); 457 | 458 | const newLogHistoryPromise = reconcileLogHistoryWithRemovedBlock(oldLogHistory, removedBlock, onLogsRemoved); 459 | 460 | await expect(newLogHistoryPromise).to.eventually.rejectedWith(Error, "found logs for removed block not at head of log history"); 461 | expect(removedLogAnnouncements).to.deep.equal([ 462 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x1), 463 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0x0), 464 | ]); 465 | }); 466 | }); 467 | 468 | describe("BlockAndLogStreamer", async () => { 469 | let blockAndLogStreamer: BlockAndLogStreamer<Block, Log>; 470 | let announcements: {addition: boolean, item: Block|Log|Error}[]; 471 | const onBlockAdded = (block: Block) => announcements.push({addition: true, item: block}); 472 | const onBlockRemoved = (block: Block) => announcements.push({addition: false, item: block}); 473 | const onLogsAdded = (blockHash: string, logs: Array<Log>) => logs.forEach(log => announcements.push({addition: true, item: log})); 474 | const onLogsRemoved = (blockHash: string, logs: Array<Log>) => logs.forEach(log => announcements.push({addition: false, item: log})); 475 | const onError = (error: any) => announcements.push({addition: true, item: error}); 476 | 477 | const reinitialize = (getBlockByHash: (hash: string) => Promise<Block|null>, getLogs: (filterOptions: FilterOptions) => Promise<Log[]>) => { 478 | blockAndLogStreamer = new BlockAndLogStreamer<Block, Log>(getBlockByHash, getLogs, onError, { blockRetention: 5 }); 479 | blockAndLogStreamer.addLogFilter({}); 480 | blockAndLogStreamer.subscribeToOnBlockAdded(onBlockAdded); 481 | blockAndLogStreamer.subscribeToOnBlockRemoved(onBlockRemoved); 482 | blockAndLogStreamer.subscribeToOnLogsAdded(onLogsAdded); 483 | blockAndLogStreamer.subscribeToOnLogsRemoved(onLogsRemoved); 484 | announcements = []; 485 | } 486 | 487 | beforeEach(() => { 488 | reinitialize(getBlockByHashFactory(), getLogsFactory(1)); 489 | }); 490 | 491 | it("announces new blocks and logs", async () => { 492 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 493 | 494 | expect(announcements).to.deep.equal([ 495 | {addition: true, item: new MockBlock(0x7777)}, 496 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0)}, 497 | ]); 498 | }); 499 | 500 | it("announces 
removed blocks and logs", async () => { 501 | const logs = [ new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0, 'AAAA'), new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'AAAA'), new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'BBBB') ]; 502 | const getLogs = async (filterOptions: FilterOptions) => [logs.shift()!]; 503 | reinitialize(getBlockByHashFactory(), getLogs); 504 | 505 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777, "AAAA")); 506 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7778, "AAAA")); 507 | announcements = []; 508 | 509 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7778, "BBBB", "AAAA")); 510 | 511 | expect(announcements).to.deep.equal([ 512 | {addition: false, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'AAAA')}, 513 | {addition: false, item: new MockBlock(0x7778, "AAAA", "AAAA")}, 514 | {addition: true, item: new MockBlock(0x7778, "BBBB", "AAAA")}, 515 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'BBBB')}, 516 | ]); 517 | }); 518 | 519 | it("latest block is latest fully reconciled block", async () => { 520 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 521 | const promise = blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779)); 522 | 523 | expect(blockAndLogStreamer.getLatestReconciledBlock()).to.deep.equal(new MockBlock(0x7777)); 524 | await promise; 525 | expect(blockAndLogStreamer.getLatestReconciledBlock()).to.deep.equal(new MockBlock(0x7779)); 526 | }); 527 | 528 | it("adding multiple blocks in quick succession results in expected callbacks", async () => { 529 | const logs = [ 530 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0, 'AAAA'), 531 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'AAAA'), 532 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, 'AAAA'), 533 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, 'BBBB'), 534 | ]; 535 | const getLogs = async (filterOptions: FilterOptions) => [logs.shift()!]; 536 | reinitialize(getBlockByHashFactory(), getLogs); 537 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777, "AAAA", "AAAA")); 538 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "AAAA", "AAAA")); 539 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "BBBB", "AAAA")); 540 | 541 | expect(announcements).to.deep.equal([ 542 | {addition: true, item: new MockBlock(0x7777, "AAAA", "AAAA")}, 543 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0)}, 544 | {addition: true, item: new MockBlock(0x7778, "AAAA", "AAAA")}, 545 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0)}, 546 | {addition: true, item: new MockBlock(0x7779, "AAAA", "AAAA")}, 547 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0)}, 548 | {addition: false, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0)}, 549 | {addition: false, item: new MockBlock(0x7779, "AAAA", "AAAA")}, 550 | {addition: true, item: new MockBlock(0x7779, "BBBB", "AAAA")}, 551 | {addition: true, item: new 
MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, "BBBB")}, 552 | ]); 553 | }); 554 | 555 | it("swallows errors from callbacks", async () => { 556 | const logs = [ 557 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0, 'AAAA'), 558 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0, 'AAAA'), 559 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, 'AAAA'), 560 | new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, 'BBBB'), 561 | ]; 562 | const getLogs = async (filterOptions: FilterOptions) => [logs.shift()!]; 563 | reinitialize(getBlockByHashFactory(), getLogs); 564 | 565 | blockAndLogStreamer.subscribeToOnBlockAdded(block => { throw new Error("apple"); }); 566 | blockAndLogStreamer.subscribeToOnBlockRemoved(block => { throw new Error("banana"); }); 567 | blockAndLogStreamer.subscribeToOnLogsAdded(logs => { throw new Error("cherry"); }); 568 | blockAndLogStreamer.subscribeToOnLogsRemoved(logs => { throw new Error("durian") }); 569 | 570 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777, "AAAA", "AAAA")); 571 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "AAAA", "AAAA")); 572 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "BBBB", "AAAA")); 573 | 574 | expect(announcements).to.deep.equal([ 575 | {addition: true, item: new MockBlock(0x7777, "AAAA", "AAAA")}, 576 | {addition: true, item: new Error("apple")}, 577 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0)}, 578 | {addition: true, item: new Error("cherry")}, 579 | {addition: true, item: new MockBlock(0x7778, "AAAA", "AAAA")}, 580 | {addition: true, item: new Error("apple")}, 581 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0)}, 582 | {addition: true, item: new Error("cherry")}, 583 | {addition: true, item: new MockBlock(0x7779, "AAAA", "AAAA")}, 584 | {addition: true, item: new Error("apple")}, 585 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0)}, 586 | {addition: true, item: new Error("cherry")}, 587 | {addition: false, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0)}, 588 | {addition: true, item: new Error("durian")}, 589 | {addition: false, item: new MockBlock(0x7779, "AAAA", "AAAA")}, 590 | {addition: true, item: new Error("banana")}, 591 | {addition: true, item: new MockBlock(0x7779, "BBBB", "AAAA")}, 592 | {addition: true, item: new Error("apple")}, 593 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0, "BBBB")}, 594 | {addition: true, item: new Error("cherry")}, 595 | ]); 596 | }); 597 | 598 | it("unsubscribes correctly", async () => { 599 | const addBlockToken = blockAndLogStreamer.subscribeToOnBlockAdded(block => expect(true).to.be.false); 600 | blockAndLogStreamer.unsubscribeFromOnBlockAdded(addBlockToken); 601 | const removeBlockToken = blockAndLogStreamer.subscribeToOnBlockRemoved(block => expect(true).to.be.false); 602 | blockAndLogStreamer.unsubscribeFromOnBlockRemoved(removeBlockToken); 603 | const addLogToken = blockAndLogStreamer.subscribeToOnLogsAdded(block => expect(true).to.be.false); 604 | blockAndLogStreamer.unsubscribeFromOnLogsAdded(addLogToken); 605 | const removeLogToken = 
blockAndLogStreamer.subscribeToOnLogsRemoved(block => expect(true).to.be.false); 606 | blockAndLogStreamer.unsubscribeFromOnLogsRemoved(removeLogToken); 607 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777, "AAAA", "AAAA")); 608 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "AAAA", "AAAA")); 609 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, "AAAA", "BBBB")); 610 | }); 611 | 612 | it("throws if unsubscribing with invalid token", async () => { 613 | const addBlockToken = blockAndLogStreamer.subscribeToOnBlockAdded(_ => { }); 614 | const removeBlockToken = blockAndLogStreamer.subscribeToOnBlockRemoved(_ => { }); 615 | expect(() => blockAndLogStreamer.unsubscribeFromOnBlockAdded(removeBlockToken)).to.throw(Error); 616 | expect(() => blockAndLogStreamer.unsubscribeFromOnBlockRemoved(addBlockToken)).to.throw(Error); 617 | expect(() => blockAndLogStreamer.unsubscribeFromOnLogsAdded(addBlockToken)).to.throw(Error); 618 | expect(() => blockAndLogStreamer.unsubscribeFromOnLogsRemoved(addBlockToken)).to.throw(Error); 619 | }); 620 | 621 | it("calls getLogs multiple times for multiple filters", async () => { 622 | let getLogsCallCount = 0; 623 | const getLogs = async (filter: FilterOptions) => { 624 | ++getLogsCallCount; 625 | return []; 626 | } 627 | blockAndLogStreamer = new BlockAndLogStreamer<Block, Log>(getBlockByHashFactory(), getLogs, onError); 628 | blockAndLogStreamer.addLogFilter({ address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", topics: [] }); 629 | blockAndLogStreamer.addLogFilter({ address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", topics: ["0xbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbaadf00d"] }); 630 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 631 | 632 | expect(getLogsCallCount).to.equal(2); 633 | }); 634 | 635 | it("doesn't call getLogs if no filters are attached", async () => { 636 | let getLogsCallCount = 0; 637 | const getLogs = async (filter: FilterOptions) => { 638 | ++getLogsCallCount; 639 | return []; 640 | } 641 | blockAndLogStreamer = new BlockAndLogStreamer<Block, Log>(getBlockByHashFactory(), getLogs, onError); 642 | const filterAToken = blockAndLogStreamer.addLogFilter({ address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", topics: [] }); 643 | const filterBToken = blockAndLogStreamer.addLogFilter({ address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef", topics: ["0xbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbadf00dbaadf00d"] }); 644 | blockAndLogStreamer.removeLogFilter(filterAToken); 645 | blockAndLogStreamer.removeLogFilter(filterBToken); 646 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 647 | 648 | expect(getLogsCallCount).to.equal(0); 649 | }); 650 | 651 | it("does not announce or make changes to state if we can't fetch a parent block", async () => { 652 | const defaultGetBlockByHash = getBlockByHashFactory(); 653 | const getBlockByHash = async (hash: string) => (hash === '0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cBBBB7778') ? 
null : defaultGetBlockByHash(hash); 654 | reinitialize(getBlockByHash, getLogsFactory(1)); 655 | 656 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 657 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7778)); 658 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, 'BBBB', 'BBBB')).catch(() => {}); 659 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779)); 660 | 661 | expect(announcements).to.deep.equal([ 662 | {addition: true, item: new MockBlock(0x7777)}, 663 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7777', 0)}, 664 | {addition: true, item: new MockBlock(0x7778)}, 665 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7778', 0)}, 666 | {addition: true, item: new MockBlock(0x7779)}, 667 | {addition: true, item: new MockLog('0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cAAAA7779', 0)}, 668 | ]); 669 | }); 670 | 671 | it("non-awaited reconciliation failure will result in failure of following reconciliation", async () => { 672 | const defaultGetBlockByHash = getBlockByHashFactory(); 673 | const getBlockByHash = async (hash: string) => (hash === '0xbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cbl0cBBBB7778') ? null : defaultGetBlockByHash(hash); 674 | reinitialize(getBlockByHash, getLogsFactory(0)); 675 | 676 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7777)); 677 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7778)); 678 | blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779, 'BBBB', 'BBBB')).catch(() => {}); 679 | await blockAndLogStreamer.reconcileNewBlock(new MockBlock(0x7779)).catch(() => {}); 680 | 681 | expect(announcements).to.deep.equal([ 682 | {addition: true, item: new MockBlock(0x7777)}, 683 | {addition: true, item: new MockBlock(0x7778)}, 684 | ]); 685 | }); 686 | }); 687 | --------------------------------------------------------------------------------
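Usage note (illustrative; not a file in this repository): the tests above exercise the public surface of BlockAndLogStreamer, namely construction from a getBlockByHash/getLogs pair plus an onError callback and an optional { blockRetention } setting, filter management with addLogFilter/removeLogFilter, the four subscribe/unsubscribe pairs, and reconcileNewBlock as the single entry point for newly observed heads. The sketch below shows one way this might be wired to an Ethereum JSON-RPC endpoint; the rpc helper, the endpoint URL, the polling loop, and the import paths are assumptions made for the example, not part of the package.

import { BlockAndLogStreamer } from "./source/block-and-log-streamer";
import { Block } from "./source/models/block";
import { Log } from "./source/models/log";
import { FilterOptions } from "./source/models/filters";

// Hypothetical JSON-RPC transport; assumes a global fetch (Node 18+ or a polyfill) and a local node.
const rpc = async (method: string, params: any[]): Promise<any> => {
    const response = await fetch("http://localhost:8545", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ jsonrpc: "2.0", id: 1, method, params }),
    });
    return (await response.json()).result;
};

const getBlockByHash = (hash: string): Promise<Block|null> => rpc("eth_getBlockByHash", [hash, true]);
const getLogs = (filterOptions: FilterOptions): Promise<Log[]> => rpc("eth_getLogs", [filterOptions]);
const onError = (error: any) => console.error(error);

// Construction, filter registration, and subscriptions mirror the calls made in the tests above.
const streamer = new BlockAndLogStreamer<Block, Log>(getBlockByHash, getLogs, onError, { blockRetention: 100 });
streamer.addLogFilter({ address: "0xdeadbeefdeadbeefdeadbeefdeadbeefdeadbeef" });
streamer.subscribeToOnBlockAdded(block => console.log(`block added: ${block.hash}`));
streamer.subscribeToOnBlockRemoved(block => console.log(`block removed: ${block.hash}`));
streamer.subscribeToOnLogsAdded((blockHash, logs) => logs.forEach(log => console.log(`log added in ${blockHash}: ${log.logIndex}`)));
streamer.subscribeToOnLogsRemoved((blockHash, logs) => logs.forEach(log => console.log(`log removed in ${blockHash}: ${log.logIndex}`)));

// Hand every observed head to the streamer; per the tests above it walks parent hashes via
// getBlockByHash, announces removals and additions across reorgs, and calls getLogs once per
// attached filter for each block it adds.
setInterval(async () => {
    const latestBlock: Block = await rpc("eth_getBlockByNumber", ["latest", true]);
    await streamer.reconcileNewBlock(latestBlock);
}, 1000);

The "quick succession" and "non-awaited reconciliation failure" tests suggest that reconcileNewBlock chains reconciliations internally, so a failure in an unawaited call surfaces on the next one; awaiting each call, as in the sketch, keeps error handling straightforward.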