├── .eslintrc.yaml ├── .github └── workflows │ ├── audit.yaml │ └── lint-test.yaml ├── .gitignore ├── .npmignore ├── LICENSE ├── MAINTAINERS.md ├── README.md ├── docs └── application-checker.md ├── integration ├── .gitignore ├── basic-fabric.test.ts ├── basic.sh ├── cleanup.sh ├── copy-files.sh ├── peer.sh ├── prepare.sh └── var.inc.sh ├── jest.integration.js ├── jest.unit.js ├── package.json ├── src ├── bcverifier.ts ├── check │ ├── block-integrity.test.ts │ ├── block-integrity.ts │ ├── fabric-block-check.ts │ ├── fabric-transaction-check.ts │ ├── index.ts │ ├── multiple-ledgers.test.ts │ └── multiple-ledgers.ts ├── checkpoint.ts ├── cli.ts ├── common.ts ├── data │ └── fabric │ │ ├── fabric-bcv-checkpoint.test.ts │ │ ├── fabric-bcv-checkpoint.ts │ │ ├── fabric-common-block-decoder.d.ts │ │ ├── fabric-data.test.ts │ │ ├── fabric-data.ts │ │ ├── fabric-types.ts │ │ ├── fabric-utils.test.ts │ │ ├── fabric-utils.ts │ │ └── index.ts ├── index.ts ├── kvmanager.test.ts ├── kvmanager.ts ├── mock │ └── mock-block.ts ├── network-plugin.ts ├── network │ ├── fabric-block.test.ts │ ├── fabric-block.ts │ ├── fabric-query2.test.ts │ └── fabric-query2.ts ├── output │ ├── index.ts │ └── json.ts ├── provider.test.ts ├── provider.ts ├── result-set.ts └── samples │ └── fabcar.ts ├── test ├── asset-transfer-basic-2.4.7 │ ├── chains │ │ ├── chains │ │ │ └── mychannel │ │ │ │ └── blockfile_000000 │ │ └── index │ │ │ ├── 000001.log │ │ │ ├── CURRENT │ │ │ ├── LOCK │ │ │ ├── LOG │ │ │ └── MANIFEST-000000 │ ├── config.json │ └── pvtdataStore │ │ ├── 000001.log │ │ ├── CURRENT │ │ ├── LOCK │ │ ├── LOG │ │ └── MANIFEST-000000 ├── asset-transfer-private-data-2.4.7 │ ├── config.json │ ├── org1 │ │ ├── chains │ │ │ ├── chains │ │ │ │ └── mychannel │ │ │ │ │ └── blockfile_000000 │ │ │ └── index │ │ │ │ ├── 000001.log │ │ │ │ ├── CURRENT │ │ │ │ ├── LOCK │ │ │ │ ├── LOG │ │ │ │ └── MANIFEST-000000 │ │ └── pvtdataStore │ │ │ ├── 000001.log │ │ │ ├── CURRENT │ │ │ ├── LOCK │ │ │ ├── LOG │ │ │ └── MANIFEST-000000 │ └── org2 │ │ ├── chains │ │ ├── chains │ │ │ └── mychannel │ │ │ │ └── blockfile_000000 │ │ └── index │ │ │ ├── 000001.log │ │ │ ├── CURRENT │ │ │ ├── LOCK │ │ │ ├── LOG │ │ │ └── MANIFEST-000000 │ │ └── pvtdataStore │ │ ├── 000001.log │ │ ├── CURRENT │ │ ├── LOCK │ │ ├── LOG │ │ └── MANIFEST-000000 └── fabric-query2 │ ├── config.json │ ├── config.multiple.json │ ├── config.none.json │ ├── org1-ca-tls.pem │ ├── org2-ca-tls.pem │ ├── tls-user-cert.pem │ ├── tls-user-key.pem │ ├── user-cert.pem │ └── user-key.pem ├── tsconfig.json └── tsconfig.test.json /.eslintrc.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | root: true 3 | parser: "@typescript-eslint/parser" 4 | plugins: 5 | - "@typescript-eslint" 6 | extends: 7 | - eslint:recommended 8 | - plugin:@typescript-eslint/recommended 9 | rules: 10 | no-bitwise: error 11 | no-console: error 12 | object-shorthand: 13 | - error 14 | - never 15 | sort-imports: 16 | - error 17 | - ignoreDeclarationSort: true 18 | ignoreCase: true 19 | "@typescript-eslint/explicit-module-boundary-types": off 20 | "@typescript-eslint/no-explicit-any": off 21 | "@typescript-eslint/no-unused-vars": 22 | - error 23 | - argsIgnorePattern: "^_" 24 | varsIgnorePattern: "^_" 25 | indent: 26 | - error 27 | - 4 28 | - FunctionDeclaration: 29 | parameters: first 30 | FunctionExpression: 31 | parameters: first 32 | CallExpression: 33 | arguments: first 34 | ObjectExpression: first 35 | ImportDeclaration: first 36 | SwitchCase: 1 37 | 
  no-multiple-empty-lines: error
38 |   no-trailing-spaces: error
39 |   lines-between-class-members:
40 |     - error
41 |     - always
42 |     - exceptAfterSingleLine: true
43 | 
--------------------------------------------------------------------------------
/.github/workflows/audit.yaml:
--------------------------------------------------------------------------------
1 | name: "npm audit check"
2 | 
3 | on:
4 |   schedule:
5 |     - cron: "10 17 * * 0"
6 | 
7 | jobs:
8 |   npm-audit:
9 |     runs-on: ubuntu-latest
10 | 
11 |     steps:
12 |       - uses: actions/checkout@v3
13 |       - uses: actions/setup-node@v3
14 |         with:
15 |           node-version: "16"
16 |       - run: npm install
17 |       - run: npm audit --audit-level high
18 | 
--------------------------------------------------------------------------------
/.github/workflows/lint-test.yaml:
--------------------------------------------------------------------------------
1 | name: "Lint & Unit Tests"
2 | 
3 | on:
4 |   push:
5 |     branches:
6 |       - main
7 |   pull_request:
8 |     branches:
9 |       - main
10 | 
11 | jobs:
12 |   lint-unit-tests:
13 |     runs-on: ubuntu-latest
14 | 
15 |     steps:
16 |       - uses: actions/checkout@v3
17 |       - uses: actions/setup-node@v3
18 |         with:
19 |           node-version: "16"
20 |       - run: npm install
21 |       - run: npm run lint
22 |       - run: npm test
23 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | package-lock.json
3 | /build
4 | /coverage
5 | /.vscode
6 | /config
7 | /sample
8 | 
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | /config
2 | /coverage
3 | /node_modules
4 | /notes
5 | package-lock.json
6 | result.json
7 | /integration
8 | /sample
9 | /src
10 | /test
11 | tsconfig.test.json
12 | .vscode
13 | *.tgz
14 | *.map
15 | 
--------------------------------------------------------------------------------
/MAINTAINERS.md:
--------------------------------------------------------------------------------
1 | # Maintainers
2 | 
3 | ## Active Maintainers
4 | 
5 | | Name           | GitHub                                  | Discord      |
6 | |----------------|-----------------------------------------|--------------|
7 | | Taku Shimosawa | [@shimos](https://github.com/shimos)    | shimos#2760  |
8 | | Tatsuya Sato   | [@satota2](https://github.com/satota2)  | satota2#5505 |
9 | 
--------------------------------------------------------------------------------
/docs/application-checker.md:
--------------------------------------------------------------------------------
1 | # Application Checker
2 | 
3 | bcverifier supports check programs (modules) that users create for their specific applications.
4 | Two types of checks are supported: checks on keys (data) and checks on transactions.
5 | In the former, a check function is called for each key (data) in the latest state; in the latter, it is called for each transaction.
6 | 
7 | The sample check program presented below is written in TypeScript,
8 | but check programs may also be written in JavaScript.
9 | 
10 | ## Program Structure
11 | 
12 | A check program (module) should export one class as its default export.
13 | 
14 | ```typescript
15 | import { AppTransactionCheckLogic, CheckPlugin, AppTransaction } from "bcverifier";
16 | 
17 | export default class FabCarChecker extends CheckPlugin implements AppTransactionCheckLogic {
18 |     ...
19 | }
20 | ```
21 | 
22 | The class should implement one or both of the following interfaces.
23 | 
24 | | Interface Name             | Description                                                                                    |
25 | |----------------------------|------------------------------------------------------------------------------------------------|
26 | | `AppStateCheckLogic`       | A checker for data (the function will be called for each key that exists in the latest state) |
27 | | `AppTransactionCheckLogic` | A checker for transactions (the function will be called for each transaction)                 |
28 | 
29 | ## State Checker
30 | 
31 | The first type of checker inspects data (state). Checkers of this type should implement the `AppStateCheckLogic` interface:
32 | 
33 | ```typescript
34 | export interface AppStateCheckLogic {
35 |     probeStateCheck(kvState: KeyValueState): Promise<boolean>;
36 |     performStateCheck(kvState: KeyValueState, resultSet: ResultSet): Promise<void>;
37 | }
38 | ```
39 | 
40 | The `probeStateCheck` method is called to determine whether the checker is willing to inspect the state.
41 | The checker can use it to confirm that the state really belongs to the application it expects.
42 | 
43 | The `performStateCheck` method is called to perform the checks.
44 | Its first argument is the latest state, which consists of all the keys and values at the latest point.
45 | The results should be registered to `resultSet`.
46 | 
47 | ## Transaction Checker
48 | 
49 | The second type of checker inspects transactions. Checkers of this type should implement the `AppTransactionCheckLogic` interface:
50 | 
51 | ```typescript
52 | export interface AppTransactionCheckLogic {
53 |     probeTransactionCheck(tx: AppTransaction): Promise<boolean>;
54 |     performTransactionCheck(tx: AppTransaction, resultSet: ResultSet): Promise<void>;
55 | }
56 | ```
57 | 
58 | As with state checkers, the `probeTransactionCheck` method is called to determine whether the checker is willing to inspect the transaction,
59 | and the `performTransactionCheck` method is called to perform the checks.
60 | The methods are called for each transaction, first `probeTransactionCheck` and then `performTransactionCheck`.
61 | The results should be registered to `resultSet`. (A complete minimal sketch of a checker is shown at the end of this document.)
62 | 
63 | ## Classes
64 | 
65 | This section briefly describes the classes relevant to checkers.
66 | 
67 | ### KeyValueState
68 | 
69 | An instance of the `KeyValueState` class represents a snapshot of the key-value data in the ledger.
70 | 
71 | ```typescript
72 | export interface KeyValueState {
73 |     getKeys(): KeyValue[];
74 |     getValue(key: Buffer): KeyValue;
75 | }
76 | ```
77 | 
78 | The `getKeys` method returns the array of key-value pairs, and the `getValue` method returns the key-value pair for the specified key.
79 | 
80 | ### KeyValue
81 | 
82 | An instance of the `KeyValue` class represents a key-value pair stored in the ledger.
83 | 
84 | ```typescript
85 | export interface KeyValue {
86 |     getKey(): Buffer;
87 |     getValue(): Buffer;
88 |     getVersion(): Buffer;
89 |     getHistory(): Promise<KeyValue[]>;
90 |     getTransaction(): Promise<Transaction>;
91 | }
92 | ```
93 | 
94 | The methods in the class are:
95 | 
96 | - `getKey`
97 |   - Returns the key of the pair as a `Buffer` (binary); the `getValue` method likewise returns the value as a `Buffer`.
98 | - `getVersion`
99 |   - Returns the version of the value in binary.
100 | - `getHistory`
101 |   - Returns the array of the previous versions of the value as `KeyValue` objects.
102 | - `getTransaction`
103 |   - Returns the low-level `Transaction` object which created this version of the value.
104 | 
105 | ### AppTransaction
106 | 
107 | An instance of the `AppTransaction` class represents a transaction with its read set and write set populated with the values.
108 | 
109 | ```typescript
110 | export interface AppTransaction {
111 |     getInput(): KeyValuePair[];
112 |     getOutput(): KeyValuePair[];
113 |     getTransaction(): Transaction;
114 | }
115 | ```
116 | 
117 | The `getInput` method returns the read set of the transaction, and the `getOutput` method returns the write set.
118 | The read/write set is an array of `KeyValuePair` objects, each of which includes the key, the value, the version,
119 | and a flag (`isDelete`) that indicates whether the entry is a delete (*true*) or a write (*false*).
120 | The `getTransaction` method returns the low-level transaction object.
121 | 
122 | ### ResultSet
123 | 
124 | An instance of the `ResultSet` class contains the results. Its key methods are as follows:
125 | 
126 | ```typescript
127 | export class ResultSet {
128 |     ...
129 |     public pushTransactionResult(transaction: Transaction, result: CheckResult): void;
130 |     public pushStateResult(result: CheckResult): void;
131 |     ...
132 | }
133 | ```
134 | 
135 | A transaction checker should call `pushTransactionResult` for every check it performs on each transaction, while a state checker should call `pushStateResult`.
136 | 
137 | The `CheckResult` type is defined as:
138 | 
139 | ```typescript
140 | export type CheckResult = {
141 |     checkerID: string;
142 |     result: ResultCode.OK | ResultCode.ERROR;
143 |     predicate: ResultPredicate;
144 |     operands: ResultOperand[];
145 | } | {
146 |     checkerID: string;
147 |     result: ResultCode.SKIPPED;
148 |     skipReason: string;
149 | };
150 | ```
151 | 
152 | The following is an example of a result, recording that a check that two operands are equal succeeded:
153 | 
154 | ```typescript
155 | {
156 |     checkerID: CHECKER_ID,
157 |     result: ResultCode.OK,
158 |     predicate: ResultPredicate.EQ,
159 |     operands: [ { name: fabricTx.toString() + ".WriteSet.length", value: values.length },
160 |                 { name: "1", value: 1 } ]
161 | }
162 | ```
163 | 
164 | ## How to Execute
165 | 
166 | Run the CLI with the `-k` option.
167 | 
168 | For example:
169 | 
170 | ```sh
171 | $ node ./build/cli.js -n fabric-block -c test/fabcar-1.4.1/blockfile_000000 -k ./samples/fabcar start
172 | ```
173 | 
174 | ### fabcar sample
175 | 
176 | Running the command above will perform the checks implemented in [the sample fabcar checker](../src/samples/fabcar.ts)
177 | on [the ledger for testing](../test/fabcar-1.4.1).
178 | 
179 | You will see the following messages from the checker:
180 | 
181 | ```
182 | INFO: Transaction 1f3ae6fa8b555241ddc7b327b011db6a0be72d1c35939ceb2aabfdaca7f18f20: createCar is ok
183 | ERROR: CreateCar should not overwrite the existing car
184 | INFO: Transaction f0d88ed25bf0456d921d733d514a3aa566a4d7792f8cd0f20f6296b6ca3c5757: changeCarOwner is ok
185 | ```
186 | 
187 | And the summary shows:
188 | 
189 | ```
190 | Checked by fabric-block
191 |   Config: test/fabcar-1.4.1/blockfile_000000
192 | 
193 | Blocks:
194 |   Block Range: Block 0 to Block 7
195 | 
196 |   Checks performed: 38 (8 blocks)
197 |   Checks passed: 38 (8 blocks)
198 |   Checks failed: 0 (0 blocks)
199 |   Checks skipped: 0
200 | 
201 | Transactions:
202 |   Checks performed: 58 (8 transactions)
203 |   Checks passed: 54 (7 transactions)
204 |   Checks failed: 1 (1 transactions)
205 |   Checks skipped: 3
206 | 
207 | States:
208 |   Checks performed: 0
209 |   Checks passed: 0
210 |   Checks failed: 0
211 |   Checks skipped: 0
212 | 
213 | Some checks failed.
214 | ```
215 | 
216 | One failure is recorded for the transaction check.
217 | 
218 | ## TODO/Limitation
219 | 
220 | - Only the key-value data model is assumed and supported.
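
## Appendix: A Minimal Checker Sketch

The following transaction checker ties the pieces above together. It is a sketch, not part of the repository:
the class name, the checker ID, and the rule it enforces (every transaction writes exactly one key, in the spirit of
the fabcar example above) are illustrative, and it assumes that `ResultCode`, `ResultPredicate`, and `ResultSet`
are importable from the `bcverifier` package root alongside the interfaces used earlier.

```typescript
import {
    AppTransaction, AppTransactionCheckLogic, CheckPlugin,
    ResultCode, ResultPredicate, ResultSet
} from "bcverifier";

const CHECKER_ID = "SingleWriteChecker";

export default class SingleWriteChecker extends CheckPlugin implements AppTransactionCheckLogic {
    // Inspect only transactions that actually carry a write set.
    public async probeTransactionCheck(tx: AppTransaction): Promise<boolean> {
        return tx.getOutput().length > 0;
    }

    // Record one result per transaction: OK when the write set contains exactly one entry.
    public async performTransactionCheck(tx: AppTransaction, resultSet: ResultSet): Promise<void> {
        const writes = tx.getOutput();
        const transaction = tx.getTransaction();

        resultSet.pushTransactionResult(transaction, {
            checkerID: CHECKER_ID,
            result: writes.length === 1 ? ResultCode.OK : ResultCode.ERROR,
            predicate: ResultPredicate.EQ,
            operands: [
                { name: transaction.toString() + ".WriteSet.length", value: writes.length },
                { name: "1", value: 1 }
            ]
        });
    }
}
```

After compiling the module (for example with `tsc`), pass the path of the generated JavaScript file to the CLI
with the `-k` option, as shown in the "How to Execute" section above.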
221 | -------------------------------------------------------------------------------- /integration/.gitignore: -------------------------------------------------------------------------------- 1 | /bin 2 | /config 3 | /artifacts 4 | /output -------------------------------------------------------------------------------- /integration/basic.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2021 Hitachi, Ltd. 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | . "`dirname $0`/var.inc.sh" 8 | 9 | set -e 10 | 11 | # Initialize CC 12 | cd ${SAMPLES}/test-network 13 | ./network.sh deployCC -ccn basic -ccl go -ccp ../asset-transfer-basic/chaincode-go 14 | 15 | # Run transactions 16 | cd ${SAMPLES}/asset-transfer-basic/application-javascript 17 | npm install 18 | rm -rf wallet 19 | 20 | node app.js 21 | -------------------------------------------------------------------------------- /integration/cleanup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2021 Hitachi, Ltd. 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | . "`dirname $0`/var.inc.sh" 8 | 9 | if [ -d ${CURDIR}/fabric-samples/test-network ]; then 10 | cd ${CURDIR}/fabric-samples/test-network 11 | ./network.sh down || true 12 | fi 13 | 14 | cd ${CURDIR} 15 | 16 | rm -rf artifacts fabric-samples 17 | 18 | docker ps -aq | xargs docker rm -f > /dev/null 2>&1 19 | docker volume ls -q | xargs docker volume rm -f > /dev/null 2>&1 20 | 21 | -------------------------------------------------------------------------------- /integration/copy-files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2021 Hitachi, Ltd. 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | . "`dirname $0`/var.inc.sh" 8 | 9 | DEST=${CURDIR}/artifacts 10 | PEERS="peer0.org1.example.com peer0.org2.example.com" 11 | 12 | for P in ${PEERS}; do 13 | mkdir -p ${DEST}/${P} 14 | 15 | docker cp ${P}:/var/hyperledger/production - | tar -C ${DEST}/${P} -x --strip-components 1 16 | done 17 | -------------------------------------------------------------------------------- /integration/peer.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # 3 | # Copyright 2021 Hitachi, Ltd. 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | set -e 8 | 9 | . "`dirname $0`/var.inc.sh" 10 | 11 | if [ $# -lt 1 ]; then 12 | echo "Usage:" $0 "[-o]" "(org)" "[(arguments)...]" 1>&2 13 | exit 1 14 | fi 15 | 16 | if [ "$1" = "-o" ]; then 17 | shift 1 18 | WITH_ORDERER=t 19 | fi 20 | 21 | cd ${SAMPLES}/test-network 22 | . ./scripts/envVar.sh 23 | 24 | parsePeerConnectionParameters $1 25 | 26 | if [ "${WITH_ORDERER}" = "t" ]; then 27 | VARNAME=PEER0_ORG$1_CA 28 | 29 | ADDITIONAL_PARAMS="--tlsRootCertFile ${!VARNAME}" 30 | fi 31 | 32 | shift 1 33 | 34 | FABRIC_CFG_PATH=${SAMPLES}/config peer "$@" ${ADDITIONAL_PARAMS} 35 | -------------------------------------------------------------------------------- /integration/prepare.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # 3 | # Copyright 2021 Hitachi, Ltd. 4 | # 5 | # SPDX-License-Identifier: Apache-2.0 6 | 7 | set -e 8 | 9 | . 
"`dirname $0`/var.inc.sh" 10 | 11 | if [ $# -lt 2 ]; then 12 | echo "Usage:" $0 "(Fabric version)" "(Fabric CA version)" 1>&2 13 | exit 1 14 | fi 15 | 16 | cd ${CURDIR} 17 | curl -sSL https://raw.githubusercontent.com/hyperledger/fabric/main/scripts/install-fabric.sh | bash -s -- -f $1 -c $2 18 | 19 | cd ${SAMPLES}/test-network 20 | 21 | ./network.sh down || true 22 | ./network.sh up -ca -s couchdb 23 | ./network.sh createChannel -c mychannel 24 | -------------------------------------------------------------------------------- /integration/var.inc.sh: -------------------------------------------------------------------------------- 1 | # Copyright 2021 Hitachi, Ltd. 2 | # 3 | # SPDX-License-Identifier: Apache-2.0 4 | 5 | CURDIR=`dirname "$0" | xargs readlink -f` 6 | SAMPLES="${CURDIR}/fabric-samples" 7 | 8 | export PATH="$PATH:${SAMPLES}/bin" 9 | -------------------------------------------------------------------------------- /jest.integration.js: -------------------------------------------------------------------------------- 1 | // jest.config.js 2 | // Sync object 3 | /** @type {import('@jest/types').Config.InitialOptions} */ 4 | const config = { 5 | transform: { 6 | "^.+\\.tsx?$": [ 7 | "ts-jest", 8 | { 9 | tsconfig: "tsconfig.test.json" 10 | } 11 | ] 12 | }, 13 | testRegex: "integration/.*\.test\.ts$", 14 | moduleFileExtensions: [ 15 | "ts", 16 | "tsx", 17 | "js", 18 | "jsx", 19 | "json", 20 | "node" 21 | ], 22 | modulePathIgnorePatterns: [ 23 | "/integration/fabric-samples" 24 | ] 25 | }; 26 | 27 | module.exports = config; -------------------------------------------------------------------------------- /jest.unit.js: -------------------------------------------------------------------------------- 1 | // jest.config.js 2 | // Sync object 3 | /** @type {import('@jest/types').Config.InitialOptions} */ 4 | const config = { 5 | transform: { 6 | "^.+\\.tsx?$": [ 7 | "ts-jest", 8 | { 9 | tsconfig: "tsconfig.test.json" 10 | } 11 | ] 12 | }, 13 | testRegex: "(/__tests__/.*|(\\.|/)(test|spec))\\.(tsx?)$", 14 | moduleFileExtensions: [ 15 | "ts", 16 | "tsx", 17 | "js", 18 | "jsx", 19 | "json", 20 | "node" 21 | ], 22 | modulePathIgnorePatterns: [ 23 | "/integration/" 24 | ] 25 | }; 26 | 27 | module.exports = config; -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "blockchain-verifier", 3 | "version": "0.5.0-alpha", 4 | "description": "Blockchain Verifier", 5 | "main": "build/index.js", 6 | "types": "build/index.d.ts", 7 | "bin": { 8 | "bcverifier": "build/cli.js" 9 | }, 10 | "scripts": { 11 | "build": "tsc && chmod a+x build/cli.js", 12 | "test": "jest --coverage --config jest.unit.js", 13 | "integration-test": "jest --config jest.integration.js", 14 | "lint": "eslint src --ext .ts", 15 | "prepare": "tsc", 16 | "watch": "tsc -w" 17 | }, 18 | "author": "Taku Shimosawa ", 19 | "license": "Apache-2.0", 20 | "devDependencies": { 21 | "@types/jest": "^29.2.3", 22 | "@types/node": "^18.11.9", 23 | "@types/pem": "^1.9.6", 24 | "@typescript-eslint/eslint-plugin": "^5.44.0", 25 | "@typescript-eslint/parser": "^5.44.0", 26 | "eslint": "^8.28.0", 27 | "jest": "^29.3.1", 28 | "ts-jest": "^29.0.3", 29 | "typescript": "^4.9.3" 30 | }, 31 | "dependencies": { 32 | "asn1js": "^3.0.5", 33 | "commander": "^9.4.1", 34 | "fabric-ca-client": "^2.2.15", 35 | "fabric-common": "^2.2.15", 36 | "fabric-protos": "^2.2.15", 37 | "level": "^8.0.0", 38 | "pem": "^1.14.6" 39 | }, 40 | 
"engines": { 41 | "node": ">=10.13.0" 42 | }, 43 | "homepage": "https://github.com/hyperledger-labs/blockchain-verifier" 44 | } 45 | -------------------------------------------------------------------------------- /src/bcverifier.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | import { AppStateCheckLogic, AppTransactionCheckLogic, BlockCheckPlugin, TransactionCheckPlugin } from "./check"; 7 | import { BCVerifierError, BCVerifierNotImplemented, KeyValueTransaction, Transaction, 8 | VerificationConfig, VerificationResult } from "./common"; 9 | import { DataModelType, NetworkPlugin } from "./network-plugin"; 10 | import { BlockProvider, KeyValueBlockProvider, KeyValueProviderOptions } from "./provider"; 11 | import { ResultSet } from "./result-set"; 12 | import { BCVCheckpoint } from "./checkpoint"; 13 | 14 | import FabricBlock from "./network/fabric-block"; 15 | import FabricQuery2 from "./network/fabric-query2"; 16 | 17 | import GenericBlockChecker from "./check/block-integrity"; 18 | import FabricBlockChecker from "./check/fabric-block-check"; 19 | import FabricTransactionChecker from "./check/fabric-transaction-check"; 20 | import MultipleLedgerChecker from "./check/multiple-ledgers"; 21 | 22 | type NetworkPluginInfo = { pluginName: string, module: new (configString: string) => NetworkPlugin }; 23 | type BlockCheckPluginInfo = { pluginName: string, module: new (provider: BlockProvider, resultSet: ResultSet, checkpoint?: BCVCheckpoint) => BlockCheckPlugin }; 24 | type TransactionCheckPluginInfo = { pluginName: string, module: new (provider: BlockProvider, resultSet: ResultSet, checkpoint?: BCVCheckpoint) => TransactionCheckPlugin }; 25 | type MultipleLedgerCheckPluginInfo = { pluginName: string, module: new (provider: BlockProvider, others: BlockProvider[], resultSet: ResultSet) => BlockCheckPlugin }; 26 | 27 | const networkPlugins: NetworkPluginInfo[] = [ 28 | { pluginName: "fabric-block", module: FabricBlock }, 29 | { pluginName: "fabric-query2", module: FabricQuery2 } 30 | ]; 31 | 32 | const blockVerifiers: BlockCheckPluginInfo[] = [ 33 | { pluginName: "generic-block", module: GenericBlockChecker }, 34 | { pluginName: "fabric-block", module: FabricBlockChecker } 35 | ]; 36 | 37 | const txVerifiers: TransactionCheckPluginInfo[] = [ 38 | { pluginName: "fabric-transaction", module: FabricTransactionChecker } 39 | ]; 40 | 41 | const multipleLedgerVerifiers: MultipleLedgerCheckPluginInfo[] = [ 42 | { pluginName: "multiple-ledgers", module: MultipleLedgerChecker } 43 | ]; 44 | 45 | export class BCVerifier { 46 | public static getAvailableNetwork(): string[] { 47 | return networkPlugins.map((p) => p.pluginName); 48 | } 49 | 50 | private config: VerificationConfig; 51 | private network?: NetworkPlugin; 52 | private resultSet: ResultSet; 53 | private networkPlugin: NetworkPluginInfo; 54 | 55 | constructor(config: VerificationConfig) { 56 | this.config = config; 57 | 58 | const networkPlugin = networkPlugins.find((p) => p.pluginName === config.networkType); 59 | if (networkPlugin == null) { 60 | throw new BCVerifierError("No suitable network plugin found"); 61 | } else { 62 | this.networkPlugin = networkPlugin; 63 | } 64 | this.resultSet = new ResultSet(); 65 | } 66 | 67 | public async verify(): Promise { 68 | this.network = new this.networkPlugin.module(this.config.networkConfig); 69 | 70 | if (this.network == null) { 71 | throw new 
BCVerifierError("Failed to initialize network plugin"); 72 | } 73 | 74 | // Key-value processing can be skipped only if 75 | // 1) No application checker is specified AND 76 | // 2a) Checkpoint is not to be saved OR 2b) --skip-key-value option is specified 77 | const skipKV = this.config.applicationCheckers.length === 0 && (this.config.saveCheckpoint !== true || this.config.skipKeyValue === true); 78 | let checkpoint: BCVCheckpoint | undefined = undefined; 79 | let firstBlock = 0; 80 | if (this.config.checkpointToResume != null) { 81 | checkpoint = this.network.loadFromCheckpoint(this.config.checkpointToResume); 82 | firstBlock = checkpoint.getLastBlock() + 1; 83 | } 84 | 85 | const blockSource = await this.network.getPreferredBlockSource(); 86 | let blockProvider: BlockProvider; 87 | if (this.network.getDataModelType() === DataModelType.KeyValue && !skipKV) { 88 | const opts: KeyValueProviderOptions = {}; 89 | if (checkpoint != null) { 90 | opts.initialState = await checkpoint.getInitialKVState(); 91 | if (opts.initialState == null) { 92 | throw new BCVerifierError("Checkpoint does not contain key-value information"); 93 | } 94 | } 95 | 96 | blockProvider = new KeyValueBlockProvider(blockSource, opts); 97 | } else { 98 | blockProvider = new BlockProvider(blockSource); 99 | } 100 | 101 | const blockHeight = await blockSource.getBlockHeight(); 102 | let lastBlock = blockHeight - 1; 103 | if (firstBlock >= blockHeight) { 104 | throw new BCVerifierError("No block to inspect"); 105 | } 106 | if (this.config.endBlock != null) { 107 | if (lastBlock > this.config.endBlock) { 108 | lastBlock = this.config.endBlock; 109 | } 110 | } 111 | if (this.config.checkBlockCount != null && this.config.checkBlockCount > 0) { 112 | if (lastBlock - firstBlock + 1 >= this.config.checkBlockCount) { 113 | lastBlock = firstBlock + this.config.checkBlockCount - 1; 114 | } 115 | } 116 | 117 | await blockProvider.cacheBlockRange(firstBlock, lastBlock); 118 | 119 | const blockCheckPlugins: BlockCheckPlugin[] = []; 120 | for (const info of blockVerifiers) { 121 | if (!this.config.checkersToExclude.includes(info.pluginName)) { 122 | blockCheckPlugins.push(new info.module(blockProvider, this.resultSet, checkpoint)); 123 | } 124 | } 125 | const txCheckPlugins: TransactionCheckPlugin[] = []; 126 | for (const info of txVerifiers) { 127 | if (!this.config.checkersToExclude.includes(info.pluginName)) { 128 | txCheckPlugins.push(new info.module(blockProvider, this.resultSet, checkpoint)); 129 | } 130 | } 131 | 132 | const preferredProvider = blockProvider; 133 | const allSources = await this.network.getBlockSources(); 134 | const dataModelType = this.network.getDataModelType(); 135 | const otherProviders = allSources.filter((s) => s.getSourceID() !== preferredProvider.getSourceID()) 136 | .map((s) => { 137 | if (dataModelType === DataModelType.KeyValue && !skipKV) { 138 | return new KeyValueBlockProvider(s); 139 | } else { 140 | return new BlockProvider(s); 141 | } 142 | }); 143 | 144 | const multipleBlockCheckPlugins: BlockCheckPlugin[] = []; 145 | for (const info of multipleLedgerVerifiers) { 146 | if (!this.config.checkersToExclude.includes(info.pluginName)) { 147 | multipleBlockCheckPlugins.push(new info.module(preferredProvider, otherProviders, this.resultSet)); 148 | } 149 | } 150 | 151 | const appStateCheckers: AppStateCheckLogic[] = []; 152 | const appTxCheckers: AppTransactionCheckLogic[] = []; 153 | for (const modName of this.config.applicationCheckers) { 154 | const checkerModule = await import(modName); 155 | const 
checkerObject: AppStateCheckLogic & AppTransactionCheckLogic 156 | = new checkerModule.default(blockProvider, this.resultSet); 157 | 158 | if (checkerObject.probeStateCheck != null) { 159 | appStateCheckers.push(checkerObject); 160 | } 161 | if (checkerObject.probeTransactionCheck != null) { 162 | appTxCheckers.push(checkerObject); 163 | } 164 | } 165 | 166 | let lastTx: Transaction | null = null; 167 | for (let i = firstBlock; i <= lastBlock; i++) { 168 | const b = await blockProvider.getBlock(i); 169 | 170 | for (const v of blockCheckPlugins) { 171 | await v.performCheck(i); 172 | } 173 | 174 | for (const tx of b.getTransactions()) { 175 | for (const v of txCheckPlugins) { 176 | await v.performCheck(tx.getTransactionID()); 177 | } 178 | lastTx = tx; 179 | } 180 | 181 | if (otherProviders.length > 0) { 182 | for (const v of multipleBlockCheckPlugins) { 183 | await v.performCheck(i); 184 | } 185 | } 186 | } 187 | 188 | if (lastTx != null && this.network.getDataModelType() === DataModelType.KeyValue && !skipKV) { 189 | const kvProvider = blockProvider as KeyValueBlockProvider; 190 | const lastKeyValueTx = lastTx as KeyValueTransaction; 191 | try { 192 | const stateSet = await kvProvider.getKeyValueState(lastKeyValueTx); 193 | for (const v of appStateCheckers) { 194 | if (await v.probeStateCheck(stateSet)) { 195 | await v.performStateCheck(stateSet, this.resultSet); 196 | } 197 | } 198 | } catch (e) { 199 | if (!(e instanceof BCVerifierNotImplemented)) { 200 | throw e; 201 | } 202 | } 203 | 204 | for (let i = firstBlock; i <= lastBlock; i++) { 205 | const b = await blockProvider.getBlock(i); 206 | for (const tx of b.getTransactions()) { 207 | const appTx = kvProvider.getAppTransaction(tx.getTransactionID()); 208 | for (const v of appTxCheckers) { 209 | if (await v.probeTransactionCheck(appTx)) { 210 | await v.performTransactionCheck(appTx, this.resultSet); 211 | } 212 | } 213 | } 214 | } 215 | } 216 | 217 | const result: VerificationResult = { 218 | resultSet: this.resultSet 219 | }; 220 | 221 | if (this.config.saveCheckpoint === true && lastTx != null) { 222 | result.checkpointData = await this.network.createCheckpoint(blockProvider, lastTx); 223 | } 224 | 225 | return result; 226 | } 227 | } 228 | -------------------------------------------------------------------------------- /src/check/block-integrity.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { HashValueType, ResultCode, ResultPredicate } from "../common"; 8 | import { correctBlocks, MockBlock, MockSource } from "../mock/mock-block"; 9 | import { BlockProvider } from "../provider"; 10 | import { ResultSet } from "../result-set"; 11 | import GenericBlockIntegrityChecker from "./block-integrity"; 12 | 13 | const incorrectBlocks = [ 14 | // Block 0 : Block.Hash != Hash(Block.Hash) 15 | new MockBlock(0, Buffer.from("ABCD"), Buffer.from(""), Buffer.from("ABCDE"), Buffer.from("PABCD"), []), 16 | // Block 1 : Block.PrevHash != PrevHash(Block(0).Hash) 17 | new MockBlock(1, Buffer.from("XYZW"), Buffer.from("ABCD"), Buffer.from("XYZW"), Buffer.from("PABCD"), []) 18 | ]; 19 | 20 | test("Correct chain", async () => { 21 | const targetBlocks = correctBlocks; 22 | const provider = new BlockProvider(new MockSource("mock-source", "mock-org", targetBlocks)); 23 | const resultSet = new ResultSet(); 24 | const checker = new GenericBlockIntegrityChecker(provider, resultSet); 25 | 26 | for (let i = 0; i < correctBlocks.length; i++) { 27 | await checker.performCheck(i); 28 | } 29 | 30 | const blockResults = resultSet.getBlockResults(); 31 | for (let i = 0; i < targetBlocks.length; i++) { 32 | const blockResult = blockResults[i]; 33 | 34 | expect(blockResult).toBeDefined(); 35 | expect(blockResult.number).toBe(i); 36 | 37 | for (const checkResult of blockResult.results) { 38 | if (checkResult.checkerID === "GenericBlockIntegrityChecker.checkHash") { 39 | expect(checkResult.result).toBe(ResultCode.OK); 40 | if (checkResult.result === ResultCode.OK) { 41 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 42 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".Hash"); 43 | expect(checkResult.operands[0].value).toEqual(targetBlocks[i].getHashValue()); 44 | expect(checkResult.operands[1].value) 45 | .toEqual(targetBlocks[i].calcHashValue(HashValueType.HASH_FOR_SELF)); 46 | } 47 | } else if (checkResult.checkerID === "GenericBlockIntegrityChecker.checkPreviousHash") { 48 | expect(checkResult.result).toBe(ResultCode.OK); 49 | if (checkResult.result === ResultCode.OK) { 50 | if (i === 0) { 51 | expect(checkResult.predicate).toBe(ResultPredicate.EQ); 52 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".Number"); 53 | expect(checkResult.operands[0].value).toBe(i); 54 | expect(checkResult.operands[1].value).toBe(0); 55 | } else { 56 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 57 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".PreviousHash"); 58 | expect(checkResult.operands[0].value).toEqual(targetBlocks[i].getPrevHashValue()); 59 | expect(checkResult.operands[1].value) 60 | .toEqual(targetBlocks[i].calcHashValue(HashValueType.HASH_FOR_PREV)); 61 | } 62 | } 63 | } 64 | } 65 | } 66 | 67 | return; 68 | }); 69 | 70 | test("Incorrect chain", async () => { 71 | const targetBlocks = incorrectBlocks; 72 | const provider = new BlockProvider(new MockSource("mock-source", "mock-org", targetBlocks)); 73 | const resultSet = new ResultSet(); 74 | const checker = new GenericBlockIntegrityChecker(provider, resultSet); 75 | 76 | for (let i = 0; i < correctBlocks.length; i++) { 77 | await checker.performCheck(i); 78 | } 79 | 80 | const blockResults = resultSet.getBlockResults(); 81 | for (let i = 0; i < targetBlocks.length; i++) { 82 | const blockResult = blockResults[i]; 83 | 84 | expect(blockResult).toBeDefined(); 85 | expect(blockResult.number).toBe(i); 86 | 87 | for (const checkResult of 
blockResult.results) { 88 | if (checkResult.checkerID === "GenericBlockIntegrityChecker.checkHash") { 89 | if (i === 0) { 90 | expect(checkResult.result).toBe(ResultCode.ERROR); 91 | if (checkResult.result === ResultCode.ERROR) { 92 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 93 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".Hash"); 94 | expect(checkResult.operands[0].value).toEqual(targetBlocks[i].getHashValue()); 95 | expect(checkResult.operands[1].value) 96 | .toEqual(targetBlocks[i].calcHashValue(HashValueType.HASH_FOR_SELF)); 97 | } 98 | } else { 99 | expect(checkResult.result).toBe(ResultCode.OK); 100 | if (checkResult.result === ResultCode.OK) { 101 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 102 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".Hash"); 103 | expect(checkResult.operands[0].value).toEqual(targetBlocks[i].getHashValue()); 104 | expect(checkResult.operands[1].value) 105 | .toEqual(targetBlocks[i].calcHashValue(HashValueType.HASH_FOR_SELF)); 106 | } 107 | } 108 | } else if (checkResult.checkerID === "GenericBlockIntegrityChecker.checkPreviousHash") { 109 | if (i === 0) { 110 | expect(checkResult.result).toBe(ResultCode.OK); 111 | if (checkResult.result === ResultCode.OK) { 112 | expect(checkResult.predicate).toBe(ResultPredicate.EQ); 113 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".Number"); 114 | expect(checkResult.operands[0].value).toBe(i); 115 | expect(checkResult.operands[1].value).toBe(0); 116 | } 117 | } else { 118 | expect(checkResult.result).toBe(ResultCode.ERROR); 119 | if (checkResult.result === ResultCode.ERROR) { 120 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 121 | expect(checkResult.operands[0].name).toBe(targetBlocks[i] + ".PreviousHash"); 122 | expect(checkResult.operands[0].value).toEqual(targetBlocks[i].getPrevHashValue()); 123 | expect(checkResult.operands[1].value) 124 | .toEqual(targetBlocks[i].calcHashValue(HashValueType.HASH_FOR_PREV)); 125 | } 126 | } 127 | } 128 | } 129 | } 130 | 131 | return; 132 | }); 133 | -------------------------------------------------------------------------------- /src/check/block-integrity.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Generic block integrity check 3 | * 4 | * Copyright 2018 Hitachi America, Ltd. 
5 | * 6 | * SPDX-License-Identifier: Apache-2.0 7 | */ 8 | 9 | import { BlockCheckPlugin } from "."; 10 | import { Block, HashValueType, ResultPredicate } from "../common"; 11 | import { BlockProvider } from "../provider"; 12 | import { BlockResultPusher, ResultSet } from "../result-set"; 13 | 14 | export default class GenericBlockIntegrityChecker implements BlockCheckPlugin { 15 | public checkerName = "GenericBlockIntegrityChecker"; 16 | 17 | private provider: BlockProvider; 18 | private results: BlockResultPusher; 19 | 20 | constructor(provider: BlockProvider, resultSet: ResultSet) { 21 | this.provider = provider; 22 | this.results = new BlockResultPusher(this.checkerName, resultSet); 23 | } 24 | 25 | public async performCheck(blockNumber: number): Promise { 26 | const block = await this.provider.getBlock(blockNumber); 27 | let prevBlock: Block | null = null; 28 | 29 | this.results.setBlock(block); 30 | if (blockNumber > 0) { 31 | prevBlock = await this.provider.getBlock(blockNumber - 1); 32 | } 33 | 34 | this.checkPreviousHash(block, prevBlock); 35 | this.checkHash(block); 36 | } 37 | 38 | private checkPreviousHash(block: Block, prevBlock: Block | null): void { 39 | if (prevBlock == null) { 40 | this.results.addResult("checkPreviousHash", ResultPredicate.EQ, 41 | { name: block + ".Number", value: block.getBlockNumber() }, 42 | { name: "0", value: 0 }); 43 | return; 44 | } 45 | const prevHash = block.getPrevHashValue(); 46 | const prevCalcHash = prevBlock.calcHashValue(HashValueType.HASH_FOR_PREV); 47 | 48 | this.results.addResult("checkPreviousHash", ResultPredicate.EQBIN, 49 | { name : block + ".PreviousHash", value: prevHash }, 50 | { name : "HashForPrev(" + prevBlock + ")", value: prevCalcHash }); 51 | } 52 | 53 | private checkHash(block: Block): void { 54 | const currentHash = block.getHashValue(); 55 | const currentCalcHash = block.calcHashValue(HashValueType.HASH_FOR_SELF); 56 | 57 | this.results.addResult("checkHash", ResultPredicate.EQBIN, 58 | { name : block.toString() + ".Hash", value: currentHash }, 59 | { name : "HashForSelf(" + block + ")", value: currentCalcHash }); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/check/fabric-block-check.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Fabric block integrity check 3 | * 4 | * Copyright 2018-2020 Hitachi America, Ltd. 
5 | * 6 | * SPDX-License-Identifier: Apache-2.0 7 | */ 8 | import { common } from "fabric-protos"; 9 | import { BlockCheckPlugin } from "."; 10 | import { BCVCheckpoint } from ".."; 11 | import { ResultPredicate } from "../common"; 12 | import { FabricBlock, FabricConfigTransactionInfo, FabricMetaDataIndex, verifyMetadataSignature, verifySignatureHeader } from "../data/fabric"; 13 | import { FabricBCVCheckpoint } from "../data/fabric/fabric-bcv-checkpoint"; 14 | import { FabricConfigCache } from "../data/fabric/fabric-utils"; 15 | import { BlockProvider } from "../provider"; 16 | import { BlockResultPusher, ResultSet } from "../result-set"; 17 | 18 | export default class FabricBlockIntegrityChecker implements BlockCheckPlugin { 19 | public checkerName = "FabricBlockIntegrityChecker"; 20 | 21 | private provider: BlockProvider; 22 | private config: FabricConfigCache; 23 | private results: BlockResultPusher; 24 | 25 | constructor(provider: BlockProvider, resultSet: ResultSet, checkpoint?: BCVCheckpoint) { 26 | this.provider = provider; 27 | this.results = new BlockResultPusher(this.checkerName, resultSet); 28 | 29 | if (checkpoint != null) { 30 | const fabricCheckpoint = checkpoint as FabricBCVCheckpoint; 31 | this.config = FabricConfigCache.Init(provider, fabricCheckpoint); 32 | } else { 33 | this.config = FabricConfigCache.Init(provider); 34 | } 35 | } 36 | 37 | public async performCheck(blockNumber: number): Promise { 38 | const block = await this.provider.getBlock(blockNumber); 39 | 40 | this.results.setBlock(block); 41 | 42 | if (!(block instanceof FabricBlock)) { 43 | this.results.addSkipResult("performCheck", "Not FabricBlock"); 44 | return; 45 | } 46 | 47 | const configInfo = await this.checkLastConfig(block); 48 | 49 | await this.checkMetadataSignature(block, configInfo); 50 | } 51 | 52 | private checkLastConfigIndex(index: number, block: FabricBlock): void { 53 | this.results.addResult("checkLastConfigIndex", 54 | ResultPredicate.LE, 55 | { name: block + ".Metadata[1].LastConfig.Value", value: index }, 56 | { name: block + ".Number", value: block.getBlockNumber() }); 57 | } 58 | 59 | private async checkLastConfig(block: FabricBlock): Promise { 60 | const index = block.getLastConfigBlockIndex(); 61 | // XXX: Better to use raw value due to different implementation of encoding zero 62 | // https://github.com/protobufjs/protobuf.js/issues/1138 63 | const lastConfigObj: { index?: number } = {}; 64 | if (index !== 0) { 65 | lastConfigObj.index = index; 66 | } 67 | const lastConfigValue = common.LastConfig.encode(lastConfigObj).finish(); 68 | 69 | this.checkLastConfigIndex(index, block); 70 | 71 | const lastConfig = block.getMetaData(FabricMetaDataIndex.LAST_CONFIG); 72 | for (const i in lastConfig.signatures) { 73 | const signature = lastConfig.signatures[i]; 74 | 75 | this.results.addResult("CheckLastConfig", 76 | ResultPredicate.INVOKE, 77 | { name: "VerifyMetadataSignature", value: verifyMetadataSignature }, 78 | { name: block.toString(), value: block }, 79 | { name: block + ".Metadata[1].LastConfig", value: lastConfigValue }, 80 | { name: block + ".Metadata[1].Signature[" + i + "]", value: signature } 81 | ); 82 | } 83 | 84 | const configInfo = await this.config.getConfig(index); 85 | const ordererMSPs = configInfo.ordererMSPs; 86 | 87 | for (const i in lastConfig.signatures) { 88 | const signature = lastConfig.signatures[i]; 89 | 90 | // VerifySignatureHeader(signature.signature_header, ordererMSPs) 91 | await this.results.addAsyncResult("CheckLastConfig", 92 | 
ResultPredicate.INVOKE, 93 | { name: "VerifySignatureHeader", value: verifySignatureHeader }, 94 | { name: block + ".Metadata[1].Signature.Creator", 95 | value: signature.signature_header }, 96 | { name: configInfo.transactionId + ".Config.OrdererMSPs", value: ordererMSPs }); 97 | } 98 | 99 | return configInfo; 100 | } 101 | 102 | private async checkMetadataSignature(block: FabricBlock, configInfo: FabricConfigTransactionInfo): Promise { 103 | const signatures = block.getMetaData(FabricMetaDataIndex.SIGNATURES); 104 | const ordererMSPs = configInfo.ordererMSPs; 105 | 106 | for (const i in signatures.signatures) { 107 | const signature = signatures.signatures[i]; 108 | this.results.addResult("CheckMetadataSignature", 109 | ResultPredicate.INVOKE, 110 | { name: "VerifyMetadataSignature", value: verifyMetadataSignature }, 111 | { name: block.toString(), value: block }, 112 | { name: "None", value: Buffer.from(signatures.value) }, 113 | { name: block + ".Metadata[0].Signature[" + i + "]", value: signature } 114 | ); 115 | 116 | // VerifySignatureHeader(signature.signature_header, ordererMSPs) 117 | await this.results.addAsyncResult("CheckLastConfig", 118 | ResultPredicate.INVOKE, 119 | { name: "VerifySignatureHeader", value: verifySignatureHeader }, 120 | { name: block + ".Metadata[1].Signature.Creator", value: signature.signature_header }, 121 | { name: configInfo.transactionId + ".Config.OrdererMSPs", value: ordererMSPs } 122 | ); 123 | } 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/check/fabric-transaction-check.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { format } from "util"; 8 | 9 | import { TransactionCheckPlugin } from "."; 10 | import { BCVCheckpoint } from ".."; 11 | import { ResultPredicate } from "../common"; 12 | import { FabricAction, FabricConfigTransactionInfo, FabricMetaDataIndex, FabricPrivateRWSet, 13 | FabricTransaction, verifyIdentityMSP, verifySignature, verifySignatureHeader } from "../data/fabric"; 14 | import { FabricBCVCheckpoint } from "../data/fabric/fabric-bcv-checkpoint"; 15 | import { FabricConfigCache } from "../data/fabric/fabric-utils"; 16 | import { BlockProvider } from "../provider"; 17 | import { ResultSet, TransactionResultPusher } from "../result-set"; 18 | 19 | export default class FabricTransactionIntegrityChecker implements TransactionCheckPlugin { 20 | public checkerName = "FabricTransactionIntegrityChecker"; 21 | 22 | private provider: BlockProvider; 23 | private config: FabricConfigCache; 24 | private results: TransactionResultPusher; 25 | 26 | constructor(provider: BlockProvider, resultSet: ResultSet, checkpoint?: BCVCheckpoint) { 27 | this.provider = provider; 28 | this.results = new TransactionResultPusher(this.checkerName, resultSet); 29 | 30 | if (checkpoint != null) { 31 | const fabricCheckpoint = checkpoint as FabricBCVCheckpoint; 32 | this.config = FabricConfigCache.Init(provider, fabricCheckpoint); 33 | } else { 34 | this.config = FabricConfigCache.Init(provider); 35 | } 36 | } 37 | 38 | public async performCheck(transactionID: string): Promise { 39 | const transaction = await this.provider.getTransaction(transactionID); 40 | this.results.setTransaction(transaction); 41 | 42 | if (!(transaction instanceof FabricTransaction)) { 43 | this.results.addSkipResult("performCheck", "Transaction is not a Fabric 
Transaction"); 44 | return; 45 | } 46 | 47 | const metadataLastConfig = transaction.block.getMetaData(FabricMetaDataIndex.LAST_CONFIG).value?.index; 48 | const lastConfigBlock = metadataLastConfig == null ? 0 : metadataLastConfig; 49 | const configInfo = await this.config.getConfig(lastConfigBlock); 50 | 51 | if ((transaction.header.signature_header.creator.id_bytes as Buffer).byteLength === 0) { 52 | this.results.addSkipResult("performCheck", "No creator information"); 53 | return; 54 | } 55 | 56 | if (transaction.getTransactionType() === 1 || transaction.getTransactionType() === 2) { 57 | this.results.addResult("performCheck", 58 | ResultPredicate.INVOKE, 59 | { name: "VerifySignatureHeader", value: verifySignatureHeader }, 60 | { name: transaction + ".SignatureHeader", value: transaction.header.signature_header }, 61 | { name: configInfo.transactionId + ".Config.OrdererMSP", value: configInfo.ordererMSPs } 62 | ); 63 | } else { 64 | this.results.addResult("performCheck", 65 | ResultPredicate.INVOKE, 66 | { name: "VerifySignatureHeader", value: verifySignatureHeader }, 67 | { name: transaction + ".SignatureHeader", value: transaction.header.signature_header }, 68 | { name: configInfo.transactionId + ".Config.ApplicationMSP", value: configInfo.applicationMSPs } 69 | ); 70 | } 71 | 72 | this.results.addResult("performCheck", 73 | ResultPredicate.INVOKE, 74 | { name: "VerifySignature", value: verifySignature }, 75 | { name: transaction + ".Signature", value: transaction.signature }, 76 | { name: transaction + ".Payload", value: transaction.getPayloadBytes() }, 77 | { name: transaction + ".SignatureHeader.Creator", value: transaction.header.signature_header.creator } 78 | ); 79 | 80 | if (transaction.getTransactionType() === 3) { 81 | await this.checkNormalTransaction(transaction, configInfo); 82 | } else { 83 | this.results.addSkipResult("performCheck", 84 | "Transaction type (" + transaction.getTransactionType() + ") not supported"); 85 | } 86 | } 87 | 88 | private async checkNormalTransaction(transaction: FabricTransaction, 89 | configInfo: FabricConfigTransactionInfo): Promise { 90 | const actions = transaction.getActions(); 91 | 92 | for (const action of actions) { 93 | // Check Proposal 94 | this.results.addResult("checkNormalTransaction", 95 | ResultPredicate.INVOKE, 96 | { name: "VerifySignatureHeader", value: verifySignatureHeader }, 97 | { name: action + ".Header", value: action.decoded.header }, 98 | { name: configInfo.transactionId + ".Config.ApplicationMSP", value: configInfo.applicationMSPs } 99 | ); 100 | 101 | // Check Response 102 | const endorsements = action.getEndorsements(); 103 | 104 | for (const i in endorsements) { 105 | const endorsement = endorsements[i]; 106 | const endorsementStr = action + ".Endorsement[" + i + "]"; 107 | 108 | await this.results.addAsyncResult("checkNormalTransaction", 109 | ResultPredicate.INVOKE, 110 | { name: "VerifyIdentityMSP", value: verifyIdentityMSP }, 111 | { name: endorsementStr + ".Endorser.MspID", value: endorsement.endorser.mspid }, 112 | { name: endorsementStr + ".Endorser.Identity", value: endorsement.endorser.id_bytes }, 113 | { name: configInfo.transactionId + ".Config.ApplicationMSP", value: configInfo.applicationMSPs } 114 | ); 115 | 116 | this.results.addResult("checkNormalTransaction", 117 | ResultPredicate.INVOKE, 118 | { name: "VerifySignature", value: verifySignature }, 119 | { name: endorsementStr + ".Signature", value: transaction.signature }, 120 | { name: action + ".Response + " + endorsementStr + ".Endorser", 121 | 
value: transaction.getPayloadBytes() }, 122 | { name: endorsementStr + ".Endorser", value: transaction.header.signature_header.creator } 123 | ); 124 | } 125 | 126 | // Check Private Data 127 | const rwSets = action.getRWSets(); 128 | for (const i in rwSets) { 129 | const rwSet = rwSets[i]; 130 | 131 | if (rwSet.private_rwset != null && rwSet.private_rwset.length > 0) { 132 | this.checkPrivateData(action, parseInt(i, 10), rwSet); 133 | } 134 | } 135 | } 136 | } 137 | 138 | private checkPrivateData(action: FabricAction, index: number, rwSet: any) { 139 | for (const i in rwSet.collection_hashed_rwset) { 140 | if (rwSet.private_rwset[i] == null) { 141 | // No data in the private DB. Ignore. 142 | continue; 143 | } 144 | const hashedRWSet = rwSet.collection_hashed_rwset[i]; 145 | const privateRWSet = rwSet.private_rwset[i]; 146 | 147 | const rwsetName = format("%s.rwSet[%d].CollectionRWSet[%d]", action, index, i); 148 | 149 | this.results.addResult("checkPrivateData", 150 | ResultPredicate.EQBIN, 151 | { 152 | name: rwsetName + ".PvtRWSetHash", 153 | value: hashedRWSet.pvt_rwset_hash 154 | }, 155 | { 156 | name: "Hash(" + privateRWSet + ".RWSet)", 157 | value: FabricPrivateRWSet.calcHash(privateRWSet.rwSetBytes) 158 | } 159 | ); 160 | 161 | const privateRWSetData = privateRWSet.getRWSet(); 162 | this.results.addResult("checkPrivateData", 163 | ResultPredicate.EQ, 164 | { name: rwsetName + ".CollectionName", 165 | value: hashedRWSet.collection_name }, 166 | { name: privateRWSet + ".CollectionName", value: privateRWSetData.collection_name } 167 | ); 168 | 169 | // Check for Writes 170 | this.results.addResult("checkPrivateData", 171 | ResultPredicate.EQ, 172 | { name: rwsetName + ".RWSet.Writes.Length", 173 | value: hashedRWSet.hashed_rwset.hashed_writes.length }, 174 | { name: privateRWSet + ".RWSet.Length", value: privateRWSetData.rwset.writes.length } 175 | ); 176 | for (const k in hashedRWSet.hashed_rwset.hashed_writes) { 177 | const hashedWrite = hashedRWSet.hashed_rwset.hashed_writes[k]; 178 | const privWrite = privateRWSetData.rwset.writes[k]; 179 | 180 | const keyHash = FabricPrivateRWSet.calcHash(Buffer.from(privWrite.key)); 181 | const valueHash = FabricPrivateRWSet.calcHash(privWrite.value); 182 | 183 | this.results.addResult("checkPrivateData", ResultPredicate.EQBIN, 184 | { name: format("%s.RWSet.Writes[%d].KeyHash", rwsetName, k), 185 | value: hashedWrite.key_hash }, 186 | { name: format("Hash(%s.RWSet.Writes[%d].Key)", privateRWSet, k), 187 | value: keyHash } 188 | ); 189 | 190 | this.results.addResult("checkPrivateData", ResultPredicate.EQBIN, 191 | { name: format("%s.RWSet.Writes[%d].ValueHash", rwsetName, k), 192 | value: hashedWrite.value_hash }, 193 | { name: format("Hash(%s.RWSet.Writes[%d].Value)", privateRWSet, k), 194 | value: valueHash } 195 | ); 196 | } 197 | } 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /src/check/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-2020 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { AppTransaction, KeyValueState } from "../common"; 8 | import { BlockProvider } from "../provider"; 9 | import { ResultSet } from "../result-set"; 10 | 11 | export abstract class CheckPlugin { 12 | protected provider: BlockProvider; 13 | protected resultSet: ResultSet; 14 | 15 | public constructor(provider: BlockProvider, resultSet: ResultSet) { 16 | this.provider = provider; 17 | this.resultSet = resultSet; 18 | } 19 | } 20 | 21 | export interface BlockCheckPlugin { 22 | performCheck(blockNumber: number): Promise; 23 | } 24 | 25 | export interface TransactionCheckPlugin { 26 | performCheck(transactionID: string): Promise; 27 | } 28 | 29 | export interface AppStateCheckLogic { 30 | probeStateCheck(kvState: KeyValueState): Promise; 31 | performStateCheck(kvState: KeyValueState, resultSet: ResultSet): Promise; 32 | } 33 | 34 | export interface AppTransactionCheckLogic { 35 | probeTransactionCheck(tx: AppTransaction): Promise; 36 | performTransactionCheck(tx: AppTransaction, resultSet: ResultSet): Promise; 37 | } 38 | 39 | export abstract class MultipleLedgerCheckPlugin { 40 | protected preferredBlockProvider: BlockProvider; 41 | protected otherProviders: BlockProvider[]; 42 | protected resultSet: ResultSet; 43 | 44 | public constructor(preferredBlockProvider: BlockProvider, otherProviders: BlockProvider[], resultSet: ResultSet) { 45 | this.preferredBlockProvider = preferredBlockProvider; 46 | this.otherProviders = otherProviders; 47 | this.resultSet = resultSet; 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/check/multiple-ledgers.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import GenericMultipleLedgerBlockPlugin from "./multiple-ledgers"; 8 | 9 | import { correctBlocks, MockBlock, MockSource } from "../mock/mock-block"; 10 | import { BlockProvider } from "../provider"; 11 | import { ResultSet } from "../result-set"; 12 | import { ResultCode, ResultPredicate } from "../common"; 13 | 14 | test("correct ledgers", async () => { 15 | const preferredProvider = new BlockProvider(new MockSource("mock1", "mock-org1", correctBlocks)); 16 | const otherProviders = [new BlockProvider(new MockSource("mock2", "mock-org2", correctBlocks))]; 17 | 18 | const resultSet = new ResultSet(); 19 | const checker = new GenericMultipleLedgerBlockPlugin(preferredProvider, otherProviders, resultSet); 20 | 21 | for (let i = 0; i < correctBlocks.length; i++) { 22 | await checker.performCheck(i); 23 | } 24 | 25 | const blockResults = resultSet.getBlockResults(); 26 | for (let i = 0; i < correctBlocks.length; i++) { 27 | const blockResult = blockResults[i]; 28 | 29 | expect(blockResult).toBeDefined(); 30 | expect(blockResult.number).toBe(i); 31 | 32 | expect(blockResult.results).toHaveLength(1); 33 | expect(blockResult.results[0].checkerID).toBe("GenericMultipleLedgerBlockPlugin.blockHashComparisonWithOtherSource"); 34 | 35 | const checkResult = blockResult.results[0]; 36 | expect(checkResult.result).toBe(ResultCode.OK); 37 | if (checkResult.result === ResultCode.OK) { 38 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 39 | expect(checkResult.operands[0].name).toBe("mock1." 
+ correctBlocks[i] + ".Hash"); 40 | expect(checkResult.operands[0].value).toEqual(correctBlocks[i].getHashValue()); 41 | expect(checkResult.operands[1].name).toBe("mock2." + correctBlocks[i] + ".Hash"); 42 | expect(checkResult.operands[1].value).toEqual(correctBlocks[i].getHashValue()); 43 | } 44 | } 45 | }); 46 | 47 | export const incorrectBlocks = [ 48 | new MockBlock(0, Buffer.from("NNNN"), Buffer.from(""), Buffer.from("NNNN"), Buffer.from("PABCD"), 49 | [ { id: "Tx1", type: 1 }, { id: "Tx2", type: 2 }]), 50 | new MockBlock(1, Buffer.from("EFGH"), Buffer.from("PABCD"), Buffer.from("EFGH"), Buffer.from("PABCD"), 51 | [ { id: "Tx3", type: 3 }, { id: "Tx4", type: 1 }]) 52 | ]; 53 | 54 | test("incorrect ledgers", async () => { 55 | const preferredProvider = new BlockProvider(new MockSource("mock1", "mock-org1", correctBlocks)); 56 | const otherProviders = [new BlockProvider(new MockSource("mock2", "mock-org2", incorrectBlocks))]; 57 | 58 | const resultSet = new ResultSet(); 59 | const checker = new GenericMultipleLedgerBlockPlugin(preferredProvider, otherProviders, resultSet); 60 | 61 | for (let i = 0; i < correctBlocks.length; i++) { 62 | await checker.performCheck(i); 63 | } 64 | 65 | const blockResults = resultSet.getBlockResults(); 66 | for (let i = 0; i < correctBlocks.length; i++) { 67 | const blockResult = blockResults[i]; 68 | 69 | expect(blockResult).toBeDefined(); 70 | expect(blockResult.number).toBe(i); 71 | 72 | expect(blockResult.results).toHaveLength(1); 73 | expect(blockResult.results[0].checkerID).toBe("GenericMultipleLedgerBlockPlugin.blockHashComparisonWithOtherSource"); 74 | 75 | const checkResult = blockResult.results[0]; 76 | expect(checkResult.result).toBe(ResultCode.ERROR); 77 | if (checkResult.result === ResultCode.ERROR) { 78 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 79 | expect(checkResult.operands[0].name).toBe("mock1." + correctBlocks[i] + ".Hash"); 80 | expect(checkResult.operands[0].value).toEqual(correctBlocks[i].getHashValue()); 81 | expect(checkResult.operands[1].name).toBe("mock2." + correctBlocks[i] + ".Hash"); 82 | expect(checkResult.operands[1].value).toEqual(incorrectBlocks[i].getHashValue()); 83 | } 84 | } 85 | }); 86 | 87 | test("correct but imbalance ledgers", async () => { 88 | const preferredProvider = new BlockProvider(new MockSource("mock1", "mock-org1", correctBlocks)); 89 | const otherProviders = [new BlockProvider(new MockSource("mock2", "mock-org2", correctBlocks.slice(0, 1)))]; 90 | 91 | const resultSet = new ResultSet(); 92 | const checker = new GenericMultipleLedgerBlockPlugin(preferredProvider, otherProviders, resultSet); 93 | 94 | for (let i = 0; i < correctBlocks.length; i++) { 95 | await checker.performCheck(i); 96 | } 97 | 98 | const blockResults = resultSet.getBlockResults(); 99 | for (let i = 0; i < correctBlocks.length; i++) { 100 | const blockResult = blockResults[i]; 101 | 102 | if (i === 0) { 103 | expect(blockResult).toBeDefined(); 104 | expect(blockResult.number).toBe(i); 105 | expect(blockResult.results).toHaveLength(1); 106 | expect(blockResult.results[0].checkerID).toBe("GenericMultipleLedgerBlockPlugin.blockHashComparisonWithOtherSource"); 107 | 108 | const checkResult = blockResult.results[0]; 109 | expect(checkResult.result).toBe(ResultCode.OK); 110 | if (checkResult.result === ResultCode.OK) { 111 | expect(checkResult.predicate).toBe(ResultPredicate.EQBIN); 112 | expect(checkResult.operands[0].name).toBe("mock1." 
+ correctBlocks[i] + ".Hash"); 113 | expect(checkResult.operands[0].value).toEqual(correctBlocks[i].getHashValue()); 114 | expect(checkResult.operands[1].name).toBe("mock2." + correctBlocks[i] + ".Hash"); 115 | expect(checkResult.operands[1].value).toEqual(correctBlocks[i].getHashValue()); 116 | } 117 | } else { 118 | expect(blockResult).toBeUndefined(); 119 | } 120 | } 121 | }); 122 | -------------------------------------------------------------------------------- /src/check/multiple-ledgers.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Generic block checker plugin comparing with multiple ledgers 3 | * 4 | * Copyright 2021 Hitachi America, Ltd. 5 | * 6 | * SPDX-License-Identifier: Apache-2.0 7 | */ 8 | 9 | import { BlockCheckPlugin, MultipleLedgerCheckPlugin } from "."; 10 | import { ResultPredicate } from "../common"; 11 | import { BlockProvider } from "../provider"; 12 | import { BlockResultPusher, ResultSet } from "../result-set"; 13 | 14 | export default class GenericMultipleLedgerBlockPlugin extends MultipleLedgerCheckPlugin implements BlockCheckPlugin { 15 | public readonly checkerName = "GenericMultipleLedgerBlockPlugin"; 16 | 17 | public results: BlockResultPusher; 18 | 19 | public constructor(preferredBlockProvider: BlockProvider, otherProviders: BlockProvider[], resultSet: ResultSet) { 20 | super(preferredBlockProvider, otherProviders, resultSet); 21 | 22 | this.results = new BlockResultPusher(this.checkerName, resultSet); 23 | } 24 | 25 | public async performCheck(blockNumber: number): Promise { 26 | const baseBlock = await this.preferredBlockProvider.getBlock(blockNumber); 27 | const baseSourceId = this.preferredBlockProvider.getSourceID(); 28 | 29 | this.results.setBlock(baseBlock); 30 | 31 | for (const provider of this.otherProviders) { 32 | try { 33 | const block = await provider.getBlock(blockNumber); 34 | 35 | this.results.addResult("blockHashComparisonWithOtherSource", ResultPredicate.EQBIN, 36 | { 37 | name: `${baseSourceId}.${baseBlock}.Hash`, 38 | value: baseBlock.getHashValue() 39 | }, { 40 | name: `${provider.getSourceID()}.${block}.Hash`, 41 | value: block.getHashValue() 42 | }); 43 | } catch (e) { 44 | // Ignore error because some source might not have some block 45 | } 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/checkpoint.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2021 Hitachi, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { BCVerifierError, Block, KeyValueState, Transaction } from "./common"; 8 | import { KeyValueManagerInitialState } from "./kvmanager"; 9 | 10 | export interface BCVCheckpointData { 11 |     networkPlugin: string; 12 |     checkpointDataType: string; 13 | 14 |     lastBlock: number; 15 |     lastTransaction: string; 16 | 17 |     timestamp: number; 18 | 19 |     blockInformation?: any; 20 |     transactionInformation?: any; 21 |     stateInformation?: any; 22 |     additionalInformation?: any; 23 | } 24 | 25 | export interface BCVCheckpointContext { 26 |     block: Block; 27 |     transaction: Transaction; 28 |     timestamp: number | null; 29 |     state?: KeyValueState; 30 | } 31 | 32 | export abstract class BCVCheckpoint { 33 |     protected data: BCVCheckpointData; 34 | 35 |     public constructor(pluginName: string, dataType: string, Checkpoint: BCVCheckpointData | null, context?: BCVCheckpointContext) { 36 |         if (Checkpoint != null) { 37 |             if (dataType !== Checkpoint.checkpointDataType) { 38 |                 throw new BCVerifierError("Datatype does not match"); 39 |             } 40 |             this.data = Checkpoint; 41 |         } else if (context != null) { 42 |             this.data = { 43 |                 networkPlugin: pluginName, 44 |                 checkpointDataType: dataType, 45 | 46 |                 lastBlock: context.block.getBlockNumber(), 47 |                 lastTransaction: context.transaction.getTransactionID(), 48 | 49 |                 timestamp: context.timestamp == null ? Date.now() : context.timestamp, 50 |             }; 51 |         } else { 52 |             throw new BCVerifierError("Neither context nor Checkpoint is supplied"); 53 |         } 54 |     } 55 | 56 |     public getLastBlock() { 57 |         return this.data.lastBlock; 58 |     } 59 | 60 |     public abstract getInitialKVState(): Promise<KeyValueManagerInitialState | undefined>; 61 | 62 |     public abstract getCheckpoint(): Promise<BCVCheckpointData>; 63 | 64 |     public getCheckpointJSON(): Promise<string> { 65 |         return this.getCheckpoint().then((Checkpoint) => JSON.stringify(Checkpoint)); 66 |     } 67 | } 68 | -------------------------------------------------------------------------------- /src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | /* 3 | * Copyright 2018-2020 Hitachi America, Ltd.
4 | * 5 | * SPDX-License-Identifier: Apache-2.0 6 | */ 7 | /* eslint-disable no-console */ 8 | 9 | import { Command } from "commander"; 10 | import { readFileSync, writeFileSync } from "fs"; 11 | import { BCVerifier } from "./bcverifier"; 12 | import { BCVerifierError } from "./common"; 13 | import { JSONOutput } from "./output/json"; 14 | 15 | let cliCommand: string | null = null; 16 | 17 | const CLI_COMMANDS: { [commandName: string]: () => Promise } = { 18 | start : start 19 | }; 20 | 21 | function list(val: string): string[] { 22 | return val.split(","); 23 | } 24 | 25 | const program = new Command(); 26 | 27 | program.version("v0.4.0") 28 | .description("Blockchain Verifier CLI") 29 | .option("-n, --network-type ", "Network type") 30 | .option("-c, --network-config ", "Config for network") 31 | .option("-o, --output ", "Result file") 32 | .option("-k, --checkers ", "Checker module list", list) 33 | .option("-x, --exclude-checkers ", "Name of checkers to exclude", list) 34 | .option("-s, --save-checkpoint ", "Save checkpoint after checks") 35 | .option("-r, --resume-checkpoint ", "Resume checks from checkpoint") 36 | .option("-e, --end-block ", "Stop the checks at the specified block (inclusive)") 37 | .option("-b, --check-block-count ", "Maximum number of blocks to be checked") 38 | .option("-i, --skip-key-value", "Skip key value processing even if checkpoint is specified") 39 | .arguments("") 40 | .action((command) => { 41 | cliCommand = command; 42 | }) 43 | .parse(process.argv); 44 | 45 | if (cliCommand == null || CLI_COMMANDS[cliCommand] == null) { 46 | console.error("ERROR: Command is not specified or unknown."); 47 | program.outputHelp(); 48 | process.exit(1); 49 | } else { 50 | CLI_COMMANDS[cliCommand]() 51 | .then((retCode) => { 52 | process.exit(retCode); 53 | }) 54 | .catch((error) => { 55 | if (error instanceof BCVerifierError) { 56 | console.error("BCVerifier Error: %s", error.message); 57 | console.error(error.stack); 58 | } else if (error instanceof Error) { 59 | console.error("Runtime Error: (%s) %s", error.name, error.message); 60 | console.error(error.stack); 61 | } else { 62 | console.error("Exception during execution: %s", error); 63 | } 64 | process.exit(1); 65 | }); 66 | } 67 | 68 | async function start(): Promise { 69 | const opts = program.opts(); 70 | if (opts.networkType == null || opts.networkConfig == null) { 71 | console.error("ERROR: Network type and config must be specified."); 72 | program.outputHelp(); 73 | process.exit(1); 74 | } 75 | let applicationCheckers = []; 76 | if (opts.checkers != null) { 77 | applicationCheckers = opts.checkers; 78 | } 79 | let checkersToExclude = []; 80 | if (opts.excludeCheckers != null) { 81 | checkersToExclude = opts.excludeCheckers; 82 | } 83 | const saveCheckpoint = opts.saveCheckpoint == null ? false : true; 84 | let resumeData; 85 | if (opts.resumeCheckpoint != null) { 86 | resumeData = JSON.parse(readFileSync(opts.resumeCheckpoint).toString("utf-8")); 87 | } 88 | 89 | const bcv = new BCVerifier({ 90 | networkType: opts.networkType, 91 | networkConfig: opts.networkConfig, 92 | applicationCheckers: applicationCheckers, 93 | checkersToExclude: checkersToExclude, 94 | saveCheckpoint: saveCheckpoint, 95 | checkpointToResume: resumeData, 96 | endBlock: opts.endBlock == null ? undefined : parseInt(opts.endBlock), 97 | skipKeyValue: opts.skipKeyValue, 98 | checkBlockCount: opts.checkBlockCount == null ? 
undefined : parseInt(opts.checkBlockCount) 99 | }); 100 | 101 | const { resultSet, checkpointData } = await bcv.verify(); 102 | 103 | if (saveCheckpoint) { 104 | if (checkpointData == null) { 105 | console.warn("Checkpoint is not generated. Skipping saving the checkpoint..."); 106 | } else { 107 | writeFileSync(opts.saveCheckpoint, JSON.stringify(checkpointData)); 108 | } 109 | } 110 | 111 | if (opts.output) { 112 | const json = new JSONOutput(); 113 | console.log("Output the result to %s", opts.output); 114 | 115 | const buf = await json.convertResult(resultSet); 116 | writeFileSync(opts.output, buf); 117 | } 118 | 119 | const resultSummary = resultSet.getSummary(); 120 | console.log("Checked by %s", opts.networkType); 121 | console.log(" Config: %s", opts.networkConfig); 122 | console.log(""); 123 | console.log("Blocks:"); 124 | console.log(" Block Range: Block %d to Block %d", resultSummary.blockRange.start, resultSummary.blockRange.end); 125 | console.log(""); 126 | console.log(" Checks performed: %d (%d blocks)", resultSummary.blockChecks.total, resultSummary.blocks.total); 127 | console.log(" Checks passed: %d (%d blocks)", resultSummary.blockChecks.passed, resultSummary.blocks.passed); 128 | console.log(" Checks failed: %d (%d blocks)", resultSummary.blockChecks.failed, resultSummary.blocks.failed); 129 | console.log(" Checks skipped: %d ", resultSummary.blockChecks.skipped); 130 | console.log(""); 131 | console.log("Transactions:"); 132 | console.log(" Checks performed: %d (%d transactions)", 133 | resultSummary.transactionChecks.total, resultSummary.transactions.total); 134 | console.log(" Checks passed: %d (%d transactions)", 135 | resultSummary.transactionChecks.passed, resultSummary.transactions.passed); 136 | console.log(" Checks failed: %d (%d transactions)", 137 | resultSummary.transactionChecks.failed, resultSummary.transactions.failed); 138 | console.log(" Checks skipped: %d ", resultSummary.transactionChecks.skipped); 139 | console.log(""); 140 | console.log("States:"); 141 | console.log(" Checks performed: %d", resultSummary.stateChecks.total); 142 | console.log(" Checks passed: %d", resultSummary.stateChecks.passed); 143 | console.log(" Checks failed: %d", resultSummary.stateChecks.failed); 144 | console.log(" Checks skipped: %d", resultSummary.stateChecks.skipped); 145 | console.log(""); 146 | 147 | if (resultSummary.blockChecks.failed === 0 && resultSummary.transactionChecks.failed === 0 && 148 | resultSummary.stateChecks.failed === 0) { 149 | console.log("All checks finished successfully."); 150 | return 0; 151 | } else { 152 | console.log("Some checks failed."); 153 | return 2; 154 | } 155 | } 156 | -------------------------------------------------------------------------------- /src/common.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { ResultSet } from "./result-set"; 8 | import { BCVCheckpointData } from "./checkpoint"; 9 | 10 | export interface VerificationConfig { 11 | networkType: string; 12 | networkConfig: string; 13 | 14 | applicationCheckers: string[]; 15 | checkersToExclude: string[]; 16 | 17 | saveCheckpoint: boolean; 18 | skipKeyValue: boolean; 19 | checkpointToResume?: BCVCheckpointData; 20 | 21 | endBlock?: number; 22 | checkBlockCount?: number; 23 | } 24 | 25 | export interface VerificationResult { 26 | resultSet: ResultSet; 27 | checkpointData?: BCVCheckpointData; 28 | } 29 | 30 | export enum ResultCode { 31 | OK = 0, 32 | ERROR = 1, 33 | SKIPPED = 2 34 | } 35 | 36 | export enum ResultPredicate { 37 | EQ = 0, 38 | EQBIN = 1, 39 | INVOKE = 2, 40 | LT = 3, 41 | LE = 4, 42 | GT = 5, 43 | GE = 6, 44 | } 45 | 46 | export type ResultOperand = { 47 | name: string; 48 | value: any; 49 | }; 50 | 51 | export type CheckResult = { 52 | checkerID: string; 53 | result: ResultCode.OK | ResultCode.ERROR; 54 | predicate: ResultPredicate; 55 | operands: ResultOperand[]; 56 | } | { 57 | checkerID: string; 58 | result: ResultCode.SKIPPED; 59 | skipReason: string; 60 | }; 61 | 62 | export interface BlockResult { 63 | number: number; 64 | block: Block; 65 | results: CheckResult[]; 66 | } 67 | 68 | export interface TransactionResult { 69 | transactionID: string; 70 | blockNumber: number; 71 | index: number; 72 | results: CheckResult[]; 73 | } 74 | 75 | export interface StateResult { 76 | results: CheckResult[]; 77 | } 78 | 79 | export class BCVerifierError extends Error { 80 | } 81 | export class BCVerifierNotImplemented extends Error { 82 | } 83 | export class BCVerifierNotFound extends Error { 84 | } 85 | 86 | export enum HashValueType { 87 | HASH_FOR_SELF = 1, 88 | HASH_FOR_PREV = 2 89 | } 90 | 91 | export interface Block { 92 | getRaw(): Buffer; 93 | getBlockNumber(): number; 94 | 95 | getHashValue(): Buffer; 96 | getPrevHashValue(): Buffer; 97 | 98 | calcHashValue(hashType: HashValueType): Buffer; 99 | 100 | getTransactions(): Transaction[]; 101 | } 102 | 103 | export interface Transaction { 104 | getBlock(): Block; 105 | getIndexInBlock(): number; 106 | getTransactionID(): string; 107 | getTransactionType(): number; 108 | } 109 | 110 | export interface KeyValueBlock extends Block { 111 | getTransactions(): KeyValueTransaction[]; 112 | } 113 | export interface KeyValueTransaction extends Transaction { 114 | getReadSet(): KeyValuePairRead[]; 115 | getWriteSet(): KeyValuePair[]; 116 | } 117 | 118 | export type KeyValuePair = KeyValuePairWrite | KeyValuePairDelete; 119 | 120 | export interface KeyValuePairRead { 121 | key: Buffer; 122 | version: Buffer; 123 | } 124 | export interface KeyValuePairWrite { 125 | isDelete: false; 126 | key: Buffer; 127 | value: Buffer; 128 | version: Buffer; 129 | } 130 | export interface KeyValuePairDelete { 131 | isDelete: true; 132 | key: Buffer; 133 | version: Buffer; 134 | } 135 | 136 | export interface KeyValue { 137 | getKey(): Buffer; 138 | getValue(): Buffer; 139 | getVersion(): Buffer; 140 | getHistory(): Promise; 141 | getTransaction(): Promise; 142 | } 143 | 144 | export interface KeyValueState { 145 | getKeys(): KeyValue[]; 146 | getValue(key: Buffer): KeyValue; 147 | } 148 | 149 | export interface AppTransaction { 150 | getInput(): KeyValuePair[]; 151 | getOutput(): KeyValuePair[]; 152 | getState(): KeyValueState; 153 | getTransaction(): Transaction; 154 | } 155 | 
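The interfaces defined above (AppTransaction, KeyValueState and related key-value pair types) are the surface that application check modules program against. A minimal sketch of a custom transaction checker consuming them is shown below; the class name, the 64-byte value limit, and the omitted result reporting are invented for illustration and are not part of the package.

```typescript
import { AppTransaction, AppTransactionCheckLogic, CheckPlugin, ResultSet } from "bcverifier";

// Hypothetical example checker: flags transactions that write values larger
// than 64 bytes. The limit and the class itself are illustrative only.
export default class LargeValueChecker extends CheckPlugin implements AppTransactionCheckLogic {
    public async probeTransactionCheck(tx: AppTransaction): Promise<boolean> {
        // Only run the check for transactions that actually write something.
        return tx.getOutput().length > 0;
    }

    public async performTransactionCheck(tx: AppTransaction, _resultSet: ResultSet): Promise<void> {
        for (const pair of tx.getOutput()) {
            // A delete entry carries no value, so narrow on isDelete first.
            if (!pair.isDelete && pair.value.length > 64) {
                // Record the violation here; the reporting helpers on ResultSet
                // are not shown in this section, so they are omitted from the sketch.
            }
        }
    }
}
```

A module like this would be compiled and passed to the CLI through the -k/--checkers option alongside the built-in checkers.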
-------------------------------------------------------------------------------- /src/data/fabric/fabric-bcv-checkpoint.test.ts: -------------------------------------------------------------------------------- 1 | import { HashValueType } from "../../common"; 2 | import { FabricBCVCheckpoint, FabricBCVCheckpointContext } from "./fabric-bcv-checkpoint"; 3 | 4 | describe("FabricBCVCheckpoint", () => { 5 | const mockLastBlock = { 6 | calcHashValue: (type: HashValueType) => { 7 | if (type === HashValueType.HASH_FOR_PREV) { 8 | return Buffer.from("test-hash-for-prev"); 9 | } else { 10 | return Buffer.from("test-hash-for-self"); 11 | } 12 | }, 13 | getBlockNumber: () => 42 14 | }; 15 | const mockLastTransaction = { 16 | getBlock: () => mockLastBlock, 17 | getTransactionID: () => "tx-id-for-last" 18 | }; 19 | const mockLastConfigInfo = { 20 | blockNumber: 24, 21 | transactionId: "tx-id-for-configtx", 22 | applicationMSPs: [], 23 | ordererMSPs: [], 24 | }; 25 | const mockState = { 26 | getKeys: () => [{ 27 | getKey: () => Buffer.from("KEY0123"), 28 | getValue: () => Buffer.from("VALUE0123"), 29 | getVersion: () => Buffer.from("VERSION0123") 30 | }] 31 | }; 32 | const now = Date.now(); 33 | const context: FabricBCVCheckpointContext = { 34 | block: mockLastBlock as any, 35 | transaction: mockLastTransaction as any, 36 | configInfo: mockLastConfigInfo, 37 | state: mockState as any, 38 | timestamp: now 39 | }; 40 | 41 | test("constructor is initialized successfully with context", () => { 42 | new FabricBCVCheckpoint("test", null, context); 43 | }); 44 | 45 | test("constructor throws when neither data nor context is passed", () => { 46 | expect(() => { 47 | new FabricBCVCheckpoint("test", null); 48 | }).toThrowError(); 49 | }); 50 | 51 | test("getCheckpoint with context returns a valid checkpoint data", async () => { 52 | const checkpoint = new FabricBCVCheckpoint("test", null, context); 53 | const data = await checkpoint.getCheckpoint(); 54 | 55 | expect(data.lastBlock).toBe(42); 56 | expect(data.lastTransaction).toBe("tx-id-for-last"); 57 | expect(data.timestamp).toBe(now); 58 | expect(data.networkPlugin).toBe("test"); 59 | expect(data.checkpointDataType).toBe("fabric"); 60 | expect(Buffer.from(data.blockInformation.hashForPrev, "hex").toString("utf-8")).toBe("test-hash-for-prev"); 61 | expect(Buffer.from(data.blockInformation.hashForSelf, "hex").toString("utf-8")).toBe("test-hash-for-self"); 62 | expect(data.blockInformation.lastConfigBlock.blockNumber).toBe(24); 63 | 64 | expect(data).toHaveProperty("stateInformation"); 65 | if(data.stateInformation != null) { // Condition to satisfy the compiler 66 | expect(data.stateInformation).toHaveLength(1); 67 | expect(Buffer.from(data.stateInformation[0].key, "hex").toString("utf-8")).toBe("KEY0123"); 68 | expect(Buffer.from(data.stateInformation[0].value, "hex").toString("utf-8")).toBe("VALUE0123"); 69 | expect(Buffer.from(data.stateInformation[0].version, "hex").toString("utf-8")).toBe("VERSION0123"); 70 | } 71 | }); 72 | }); 73 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-bcv-checkpoint.ts: -------------------------------------------------------------------------------- 1 | import { deserializeConfigTxInfo, serializeConfigTxInfo } from "."; 2 | import { BCVerifierError, HashValueType } from "../../common"; 3 | import { KeyValueManagerInitialState } from "../../kvmanager"; 4 | import { BCVCheckpoint, BCVCheckpointContext, BCVCheckpointData } from "../../checkpoint"; 5 | import { FabricBlock, 
FabricConfigTransactionInfo, FabricTransaction } from "./fabric-data"; 6 | 7 | export interface FabricBCVCheckpointBlockInformation { 8 | hashForSelf: string; 9 | hashForPrev: string; 10 | lastConfigBlock: any; 11 | } 12 | export interface FabricBCVCheckpointKV { 13 | key: string; 14 | value: string; 15 | version: string; 16 | } 17 | export type FabricBCVCheckpointStateInformation = FabricBCVCheckpointKV[]; 18 | 19 | export interface FabricBCVCheckpointContext extends BCVCheckpointContext { 20 | block: FabricBlock; 21 | transaction: FabricTransaction; 22 | configInfo: FabricConfigTransactionInfo; 23 | } 24 | 25 | export interface FabricBCVCheckpointData extends BCVCheckpointData { 26 | blockInformation: FabricBCVCheckpointBlockInformation; 27 | stateInformation?: FabricBCVCheckpointStateInformation; 28 | } 29 | 30 | export class FabricBCVCheckpoint extends BCVCheckpoint { 31 | protected context: FabricBCVCheckpointContext | null; 32 | 33 | public constructor(pluginName: string, checkpointData: FabricBCVCheckpointData | null, context?: FabricBCVCheckpointContext) { 34 | super(pluginName, "fabric", checkpointData, context); 35 | 36 | if (checkpointData == null && context != null) { 37 | this.context = context; 38 | } else { 39 | this.context = null; 40 | } 41 | } 42 | 43 | public async getCheckpoint(): Promise { 44 | if (this.context == null) { 45 | throw new BCVerifierError("No context is set. Checkpoint cannot be generated"); 46 | } 47 | 48 | const data: FabricBCVCheckpointData = { 49 | ...this.data, 50 | blockInformation: { 51 | hashForSelf: this.context.block.calcHashValue(HashValueType.HASH_FOR_SELF).toString("hex"), 52 | hashForPrev: this.context.block.calcHashValue(HashValueType.HASH_FOR_PREV).toString("hex"), 53 | lastConfigBlock: serializeConfigTxInfo(this.context.configInfo) 54 | } 55 | }; 56 | 57 | if (this.context.state != null) { 58 | const keys = this.context.state.getKeys(); 59 | data.stateInformation = keys.map((keyValue) => ({ 60 | key: keyValue.getKey().toString("hex"), 61 | value: keyValue.getValue().toString("hex"), 62 | version: keyValue.getVersion().toString("hex") 63 | })); 64 | } 65 | 66 | return data; 67 | } 68 | 69 | public async getInitialKVState(): Promise { 70 | const state: FabricBCVCheckpointStateInformation = this.data.stateInformation; 71 | if (state == null) { 72 | return undefined; 73 | } 74 | 75 | return { 76 | lastBlockNumber: this.data.lastBlock, 77 | keyValueState: state.map((kv) => ({ 78 | isDelete: false, 79 | key: Buffer.from(kv.key, "hex"), 80 | value: Buffer.from(kv.value, "hex"), 81 | version: Buffer.from(kv.version, "hex") 82 | })) 83 | }; 84 | } 85 | 86 | public getLastConfigBlockInfo(): FabricConfigTransactionInfo { 87 | const info: FabricBCVCheckpointBlockInformation = this.data.blockInformation; 88 | if (info == null) { 89 | throw new Error("Checkpoint does not contain valid block information"); 90 | } 91 | return deserializeConfigTxInfo(info.lastConfigBlock); 92 | } 93 | } 94 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-common-block-decoder.d.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | declare module "fabric-common/lib/BlockDecoder" { 8 | class BlockDecoder { 9 | public static decode(block: Buffer): any; 10 | 11 | public static decodeBlock(blockProto: any): any; 12 | } 13 | export = BlockDecoder; 14 | } 15 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-data.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import * as path from "path"; 8 | 9 | import { HashValueType, KeyValuePairWrite } from "../../common"; 10 | import { FabricBlockSource } from "../../network/fabric-block"; 11 | import { FabricFunctionInfo, FabricTransactionType } from "./fabric-data"; 12 | 13 | const testDataPathBase = path.join(__dirname, "..", "..", "..", "test"); 14 | const testDataset: { [name: string]: string } = { 15 | "asset-transfer-basic-2.4.7": path.join(testDataPathBase, "asset-transfer-basic-2.4.7") 16 | }; 17 | 18 | describe("Fabric Data", () => { 19 | let assetTransferConfig: any; 20 | let assetTransferBlockSource: FabricBlockSource; 21 | 22 | beforeAll(async () => { 23 | const assetTransferPath = testDataset["asset-transfer-basic-2.4.7"]; 24 | assetTransferConfig = require(path.join(assetTransferPath, "config.json")); 25 | assetTransferBlockSource = await FabricBlockSource.createFromConfig({ 26 | ledgerStore: path.join(assetTransferPath, assetTransferConfig.ledgers[0].ledgerStore) 27 | }); 28 | }); 29 | 30 | test("Simple Transaction Block (basic:6)", async () => { 31 | const blockNumber = 6; 32 | 33 | const block = await assetTransferBlockSource.getBlock(blockNumber); 34 | expect(block.getBlockNumber()).toBe(blockNumber); 35 | expect(block.getHashValue().toString("hex")).toBe(assetTransferConfig.hashes[blockNumber]); 36 | expect(block.calcHashValue(HashValueType.HASH_FOR_SELF).toString("hex")).toBe(assetTransferConfig.hashes[blockNumber]); 37 | 38 | const transactions = block.getTransactions(); 39 | expect(transactions.length).toBe(1); 40 | 41 | const tx = transactions[0]; 42 | expect(tx.getTransactionType()).toBe(FabricTransactionType.ENDORSER_TRANSACTION); 43 | expect(tx.getTransactionTypeString()).toBe("ENDORSER_TRANSACTION"); 44 | expect(tx.validity).toBeTruthy(); 45 | 46 | const actions = tx.getActions(); 47 | expect(actions).toHaveLength(1); 48 | const funcInfo = actions[0].getFunction() as FabricFunctionInfo; 49 | expect(funcInfo).not.toBeNull(); 50 | expect(funcInfo.ccName).toBe("basic"); 51 | expect(funcInfo.funcName.toString()).toBe("InitLedger"); 52 | expect(funcInfo.args).toHaveLength(0); 53 | 54 | const set = tx.getWriteSet(); 55 | expect(set.length).toBe(6); 56 | expect(set[0].key.toString()).toBe("basic\0asset1"); 57 | expect(set[5].key.toString()).toBe("basic\0asset6"); 58 | 59 | expect(set[3].isDelete).toBeFalsy(); 60 | 61 | const pair = set[2] as KeyValuePairWrite; 62 | expect(JSON.parse(pair.value.toString())).toEqual({ 63 | docType: "asset", ID: "asset3", Color: "green", Size: 10, Owner: "Jin Soo", AppraisedValue: 500 64 | }); 65 | expect(set[5].version.toString()).toBe("6-0"); 66 | 67 | const readSet = tx.getReadSet(); 68 | expect(readSet.length).toBe(1); 69 | expect(readSet[0].key.toString()).toBe("_lifecycle\0namespaces/fields/basic/Sequence"); 70 | expect(readSet[0].version.toString()).toBe("5-0"); 71 | 72 | expect(() => block.getConfigTx()).toThrowError(); 73 | }); 74 | 75 | test("Simple 
Transaction Block with nontrivial readset (basic:8)", async () => { 76 | const blockNumber = 8; 77 | 78 | const block = await assetTransferBlockSource.getBlock(blockNumber); 79 | expect(block.getBlockNumber()).toBe(blockNumber); 80 | 81 | const transactions = block.getTransactions(); 82 | expect(transactions.length).toBe(1); 83 | const tx = transactions[0]; 84 | 85 | const actions = tx.getActions(); 86 | expect(actions).toHaveLength(1); 87 | const funcInfo = actions[0].getFunction() as FabricFunctionInfo; 88 | expect(funcInfo).not.toBeNull(); 89 | expect(funcInfo.ccName).toBe("basic"); 90 | expect(funcInfo.funcName.toString()).toBe("UpdateAsset"); 91 | expect(funcInfo.args).toHaveLength(5); 92 | expect(funcInfo.args[0].toString()).toBe("asset1"); 93 | expect(funcInfo.args[3].toString()).toBe("Tomoko"); 94 | 95 | const readSet = tx.getReadSet(); 96 | expect(readSet.length).toBe(2); 97 | expect(readSet[1].key.toString()).toBe("basic\0asset1"); 98 | expect(readSet[1].version.toString()).toBe("6-0"); // written by InitLedger (Block 6, Tx 0) 99 | 100 | const writeSet = tx.getWriteSet(); 101 | expect(writeSet.length).toBe(1); 102 | expect(writeSet[0].key.toString()).toBe("basic\0asset1"); 103 | expect(writeSet[0].isDelete).toBeFalsy(); 104 | expect(JSON.parse((writeSet[0] as KeyValuePairWrite).value.toString())).toEqual({ 105 | ID: "asset1", Color: "blue", Size: 5, Owner: "Tomoko", AppraisedValue: 350 106 | }); 107 | }); 108 | 109 | test("Config Block", async () => { 110 | const block = await assetTransferBlockSource.getBlock(0); 111 | const transactions = block.getTransactions(); 112 | expect(transactions.length).toBe(1); 113 | const configTx = block.getConfigTx(); 114 | expect(configTx.getTransactionType()).toBe(FabricTransactionType.CONFIG); 115 | 116 | const info = block.getConfigTxInfo(); 117 | expect(info.blockNumber).toBe(0); 118 | expect(info.transactionId).toBe(transactions[0].getTransactionID()); 119 | expect(info.applicationMSPs).toHaveLength(2); 120 | expect(info.applicationMSPs[0].name).toBe("Org1MSP"); 121 | expect(info.applicationMSPs[1].name).toBe("Org2MSP"); 122 | expect(info.ordererMSPs).toHaveLength(1); 123 | expect(info.ordererMSPs[0].name).toBe("OrdererMSP"); 124 | }); 125 | }); 126 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-types.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | export type BlockData = { 8 | header: { 9 | number: number; 10 | previous_hash: Buffer; 11 | data_hash: Buffer; 12 | }; 13 | data: { 14 | data: Buffer[]; 15 | }; 16 | metadata: { 17 | metadata: Buffer[]; 18 | }; 19 | }; 20 | 21 | export type KeyInfo = { 22 | key_identifier: string; 23 | key_material: Buffer; 24 | }; 25 | 26 | export type SigningIdentityInfo = { 27 | public_signer: Buffer[]; 28 | private_signer: KeyInfo; 29 | }; 30 | 31 | export type FabricOUIdentifier = { 32 | certificate: Buffer; 33 | organizational_unit_identifier: string; 34 | }; 35 | 36 | export type FabricNodeOUs = { 37 | enable: boolean; 38 | client_ou_identifier: FabricOUIdentifier; 39 | peer_ou_identifier: FabricOUIdentifier; 40 | }; 41 | 42 | export type FabricCryptoConfig = { 43 | signature_hash_family: string; 44 | identity_identifier_hash_function: string; 45 | }; 46 | 47 | // crypto_config and fabric_node_ous are missing in decodeFabricMSPConfig() in fabric-common/lib/BlockDecoder.js 48 | export type MSPConfig = { 49 | name: string; 50 | root_certs: Buffer[]; 51 | intermediate_certs: Buffer[]; 52 | admins: Buffer[]; 53 | revocation_list: Buffer[]; 54 | signing_identity: SigningIdentityInfo; 55 | organizational_unit_identifiers: FabricOUIdentifier[]; 56 | tls_root_certs: Buffer[]; 57 | tls_intermediate_certs: Buffer[]; 58 | }; 59 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-utils.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright 2021 Hitachi, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { FabricBlock, FabricConfigCache, FabricConfigTransactionInfo, FabricTransaction, MSPConfig } from "."; 8 | import { BCVerifierNotFound } from "../../common"; 9 | import { BlockProvider } from "../../provider"; 10 | import { FabricBCVCheckpoint } from "./fabric-bcv-checkpoint"; 11 | import * as utils from "./fabric-utils"; 12 | 13 | const configTx: FabricTransaction = { 14 | data: { 15 | config: { 16 | channel_group: { 17 | groups: { 18 | Orderer: { 19 | groups: { 20 | OrdererOrg: { 21 | values: { 22 | MSP: { 23 | value: { 24 | config: { 25 | name: "OrdererOrg", 26 | root_certs: [] 27 | } 28 | } 29 | } 30 | } 31 | } 32 | } 33 | }, 34 | Application: { 35 | groups: { 36 | Org1: { 37 | values: { 38 | MSP: { 39 | value: { 40 | config: { 41 | name: "Org1", 42 | root_certs: [] 43 | } 44 | } 45 | } 46 | } 47 | }, 48 | Org2MSP: { 49 | values: { 50 | MSP: { 51 | value: { 52 | config: { 53 | name: "Org2MSP", 54 | root_certs: [] 55 | } 56 | } 57 | } 58 | } 59 | } 60 | } 61 | } 62 | } 63 | } 64 | } 65 | } 66 | } as any; 67 | 68 | describe("getOrdererMSPs", () => { 69 | test("Returns an array of MSPs", () => { 70 | const result = utils.getOrdererMSPs(configTx); 71 | 72 | expect(result).toHaveLength(1); 73 | expect(result[0]).toHaveProperty("name", "OrdererOrg"); 74 | }); 75 | }); 76 | 77 | describe("getApplicationMSPs", () => { 78 | test("Returns an array of MSPs", () => { 79 | const result = utils.getApplicationMSPs(configTx); 80 | 81 | expect(result).toHaveLength(2); 82 | expect(result[0]).toHaveProperty("name", "Org1"); 83 | expect(result[1]).toHaveProperty("name", "Org2MSP"); 84 | }); 85 | }); 86 | 87 | describe("findMSPs", () => { 88 | const msps = utils.getApplicationMSPs(configTx); 89 | 90 | test("Returns a MSP when it is found", () => { 91 | const config = utils.findMSP("Org1", msps); 92 | 93 | 
expect(config).toBeDefined(); 94 | expect(config).toHaveProperty("name", "Org1"); 95 | }); 96 | 97 | test("Throws a BCVerifierNotFound exception when a name is not found", () => { 98 | expect(() => utils.findMSP("Org4", msps)).toThrowError(BCVerifierNotFound); 99 | }); 100 | }); 101 | 102 | describe("FabricConfigCache", () => { 103 | const mockBlock: FabricBlock = Object.create(FabricBlock.prototype); 104 | const mockConfigBlock = Object.assign(mockBlock, { 105 | getConfigTxInfo: jest.fn().mockReturnValue({ 106 | blockNumber: 15 107 | }) 108 | }); 109 | 110 | const mockProvider: BlockProvider = { 111 | getBlock: jest.fn().mockResolvedValue(mockConfigBlock) 112 | } as any; 113 | 114 | const mockCheckpoint: FabricBCVCheckpoint = { 115 | getLastConfigBlockInfo: jest.fn().mockReturnValue({ 116 | blockNumber: 10 117 | }) 118 | } as any; 119 | 120 | test("GetInstance() throws null for a first call", () => { 121 | expect(() => utils.FabricConfigCache.GetInstance()).toThrowError(); 122 | }); 123 | 124 | test("Init() returns an instance", () => { 125 | const cache = utils.FabricConfigCache.Init(mockProvider, mockCheckpoint); 126 | 127 | expect(cache).toBeInstanceOf(FabricConfigCache); 128 | }); 129 | 130 | test("GetInstance() now returns an instance after Init()", () => { 131 | const cache = utils.FabricConfigCache.GetInstance(); 132 | 133 | expect(cache).toBeInstanceOf(FabricConfigCache); 134 | }); 135 | 136 | test("Two Init()'s return the same instance", () => { 137 | const cache1 = utils.FabricConfigCache.Init(mockProvider, mockCheckpoint); 138 | const cache2 = utils.FabricConfigCache.Init(mockProvider, mockCheckpoint); 139 | 140 | expect(cache1).toBe(cache2); 141 | }); 142 | 143 | test("getConfig() returns a config block information for Block 15", async () => { 144 | const cache = utils.FabricConfigCache.GetInstance(); 145 | 146 | const info = await cache.getConfig(15); 147 | expect(info).toHaveProperty("blockNumber", 15); 148 | expect(mockProvider.getBlock).toHaveBeenCalled(); 149 | expect(mockProvider.getBlock).toHaveBeenCalledWith(15); 150 | }); 151 | 152 | test("getConfig() returns a config block information for Block 10 without getting the block", async () => { 153 | const cache = utils.FabricConfigCache.GetInstance(); 154 | (mockProvider.getBlock as jest.Mock).mockClear(); 155 | 156 | const info = await cache.getConfig(10); 157 | expect(info).toHaveProperty("blockNumber", 10); 158 | expect(mockProvider.getBlock).not.toHaveBeenCalled(); 159 | }); 160 | }); 161 | 162 | const mspConfig: MSPConfig = { 163 | name: "Org3MSP", 164 | root_certs: [ 165 | Buffer.from("Org3 Root Certificate") 166 | ], 167 | intermediate_certs: [ 168 | Buffer.from("Org3 Intermediate Certificate") 169 | ], 170 | admins: [ 171 | Buffer.from("Org3 Admin Certificate") 172 | ], 173 | revocation_list: [ 174 | Buffer.from("Org3 Revocation 1"), 175 | Buffer.from("Org3 Revocation 2") 176 | ], 177 | signing_identity: { 178 | public_signer: [ 179 | Buffer.from("Public Signer") 180 | ], 181 | private_signer: { 182 | key_identifier: "Key ID", 183 | key_material: Buffer.from("Private Key") 184 | } 185 | }, 186 | organizational_unit_identifiers: [{ 187 | certificate: Buffer.from("OU Certificate"), 188 | organizational_unit_identifier: "OU ID" 189 | }], 190 | tls_root_certs: [ 191 | Buffer.from("Org3 TLS Root Certificate") 192 | ], 193 | tls_intermediate_certs: [ 194 | Buffer.from("Org3 TLS Intermediate Certificate") 195 | ] 196 | }; 197 | 198 | const txInfo: FabricConfigTransactionInfo = { 199 | applicationMSPs: [mspConfig], 200 | 
blockNumber: 42, 201 | transactionId: "ConfigTx42", 202 | ordererMSPs: [mspConfig] 203 | }; 204 | 205 | describe("serializeConfigTxInfo()", () => { 206 | test("returns serialized object", () => { 207 | const obj = utils.serializeConfigTxInfo(txInfo); 208 | 209 | expect(obj).toHaveProperty("blockNumber", 42); 210 | expect(obj).toHaveProperty("transactionId", "ConfigTx42"); 211 | expect(obj.applicationMSPs).toHaveLength(1); 212 | expect(obj.applicationMSPs[0]).toHaveProperty("name", "Org3MSP"); 213 | const app = obj.applicationMSPs[0]; 214 | expect(app.root_certs[0]).toBe(mspConfig.root_certs[0].toString("base64")); 215 | expect(app.intermediate_certs[0]).toBe(mspConfig.intermediate_certs[0].toString("base64")); 216 | }); 217 | }); 218 | 219 | describe("deserializeConfigTxInfo()", () => { 220 | test("returns ConfigTxInfo from serialized one", () => { 221 | const obj = utils.serializeConfigTxInfo(txInfo); 222 | const deserializedTxInfo = utils.deserializeConfigTxInfo(obj); 223 | 224 | expect(deserializedTxInfo).toStrictEqual(txInfo); 225 | }); 226 | }); 227 | -------------------------------------------------------------------------------- /src/data/fabric/fabric-utils.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { createVerify } from "crypto"; 8 | import { common, msp } from "fabric-protos"; 9 | import { verifySigningChain } from "pem"; 10 | import { BCVerifierError, BCVerifierNotFound } from "../../common"; 11 | import { BlockProvider } from "../../provider"; 12 | import { FabricBCVCheckpoint } from "./fabric-bcv-checkpoint"; 13 | import { FabricBlock, FabricConfigTransactionInfo, FabricTransaction } from "./fabric-data"; 14 | import { FabricOUIdentifier, MSPConfig, SigningIdentityInfo } from "./fabric-types"; 15 | 16 | export function getOrdererMSPs(configTx: FabricTransaction): MSPConfig[] { 17 | const groups = configTx.data.config.channel_group.groups.Orderer.groups; 18 | const results: MSPConfig[] = []; 19 | 20 | for (const org in groups) { 21 | const group = groups[org]; 22 | const mspConfig: MSPConfig = group.values.MSP.value.config; 23 | 24 | results.push(mspConfig); 25 | } 26 | 27 | return results; 28 | } 29 | 30 | export function getApplicationMSPs(configTx: FabricTransaction): MSPConfig[] { 31 | const groups = configTx.data.config.channel_group.groups.Application.groups; 32 | const results: MSPConfig[] = []; 33 | 34 | for (const org in groups) { 35 | const group = groups[org]; 36 | const mspConfig: MSPConfig = group.values.MSP.value.config; 37 | 38 | results.push(mspConfig); 39 | } 40 | 41 | return results; 42 | } 43 | 44 | export function findMSP(mspName: string, mspConfigs: MSPConfig[]): MSPConfig { 45 | for (const config of mspConfigs) { 46 | if (config.name === mspName) { 47 | return config; 48 | } 49 | } 50 | throw new BCVerifierNotFound(); 51 | } 52 | 53 | export function verifyIdentityMSP(mspName: string, identity: string, mspConfigs: MSPConfig[]): Promise { 54 | try { 55 | const config = findMSP(mspName, mspConfigs); 56 | return new Promise((resolve, reject) => { 57 | verifySigningChain(identity, config.root_certs.map((cert) => cert.toString("utf-8")), (error, result) => { 58 | if (error != null) { 59 | reject(error); 60 | } else { 61 | resolve(result); 62 | } 63 | }); 64 | }); 65 | } catch (e) { 66 | if (e instanceof BCVerifierNotFound) { 67 | return Promise.resolve(false); 68 | } 69 | return Promise.reject(e); 
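// findMSP() throws BCVerifierNotFound when the named MSP is absent from the config; that case resolves to false, while any other synchronous error is surfaced to the caller as a rejected promise.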
70 | } 71 | } 72 | 73 | export async function verifySignatureHeader(signatureHeader: any, mspConfigs: MSPConfig[]): Promise { 74 | return await verifyIdentityMSP(signatureHeader.creator.mspid, signatureHeader.creator.id_bytes, mspConfigs); 75 | } 76 | 77 | export function verifySignature(signature: Buffer, data: Buffer, identity: any): boolean { 78 | // algorithm needs to conform to config. 79 | const verify = createVerify("sha256"); 80 | 81 | verify.update(data); 82 | 83 | return verify.verify(identity.id_bytes, signature); 84 | } 85 | 86 | export function verifyMetadataSignature(block: FabricBlock, data: Buffer, metadataSignature: any): boolean { 87 | const verify = createVerify("sha256"); 88 | 89 | const creator = msp.SerializedIdentity.encode({ 90 | mspid: metadataSignature.signature_header.creator.mspid, 91 | id_bytes: Buffer.from(metadataSignature.signature_header.creator.id_bytes) 92 | }).finish(); 93 | 94 | const sigHeader = common.SignatureHeader.encode({ 95 | creator: creator, 96 | nonce: metadataSignature.signature_header.nonce 97 | }).finish(); 98 | 99 | verify.update(Buffer.concat([data, sigHeader, block.getHeaderBytes()])); 100 | 101 | return verify.verify(metadataSignature.signature_header.creator.id_bytes, metadataSignature.signature); 102 | } 103 | 104 | export class FabricConfigCache { 105 | protected static instance: FabricConfigCache | null = null; 106 | 107 | private configMap: { [configBlockNumber: number]: FabricConfigTransactionInfo }; 108 | private provider: BlockProvider; 109 | 110 | public static Init(provider: BlockProvider, checkpoint?: FabricBCVCheckpoint) { 111 | if (FabricConfigCache.instance == null) { 112 | FabricConfigCache.instance = new FabricConfigCache(provider, checkpoint); 113 | } 114 | return FabricConfigCache.instance; 115 | } 116 | 117 | public static GetInstance() { 118 | if (FabricConfigCache.instance == null) { 119 | throw new Error("No FabricConfigCache is initialized"); 120 | } 121 | return FabricConfigCache.instance; 122 | } 123 | 124 | protected constructor(provider: BlockProvider, checkpoint?: FabricBCVCheckpoint) { 125 | this.configMap = {}; 126 | this.provider = provider; 127 | 128 | if (checkpoint != null) { 129 | const info = checkpoint.getLastConfigBlockInfo(); 130 | this.configMap[info.blockNumber] = info; 131 | } 132 | } 133 | 134 | public async getConfig(blockNumber: number): Promise { 135 | if (this.configMap[blockNumber] == null) { 136 | const configBlock = await this.provider.getBlock(blockNumber); 137 | 138 | if (!(configBlock instanceof FabricBlock)) { 139 | throw new BCVerifierError("Provider does not return FabricBlock"); 140 | } 141 | this.configMap[blockNumber] = configBlock.getConfigTxInfo(); 142 | } 143 | return this.configMap[blockNumber]; 144 | } 145 | } 146 | 147 | function serializeOUIdentifier(ouIdentifier: FabricOUIdentifier) { 148 | return { 149 | certificate: ouIdentifier.certificate.toString("base64"), 150 | organizational_unit_identifier: ouIdentifier.organizational_unit_identifier 151 | }; 152 | } 153 | function deserializeOUIdentifier(ouIdentifier: any): FabricOUIdentifier { 154 | return { 155 | certificate: Buffer.from(ouIdentifier.certificate, "base64"), 156 | organizational_unit_identifier: ouIdentifier.organizational_unit_identifier 157 | }; 158 | } 159 | 160 | function serializeSigningIdentity(signingIdentity: SigningIdentityInfo) { 161 | if (signingIdentity.public_signer == null || signingIdentity.private_signer == null) { 162 | return {}; 163 | } else { 164 | return { 165 | public_signer: 
signingIdentity.public_signer.map((cert) => cert.toString("base64")), 166 | private_signer: { 167 | key_identifier: signingIdentity.private_signer.key_identifier, 168 | key_material: signingIdentity.private_signer.key_material.toString("base64") 169 | } 170 | }; 171 | } 172 | } 173 | 174 | function deserializeSigningIdentity(signingIdentity: any): any { 175 | if (signingIdentity.public_signer == null || signingIdentity.private_signer == null) { 176 | return {}; 177 | } else { 178 | return { 179 | public_signer: signingIdentity.public_signer.map((cert: string) => Buffer.from(cert, "base64")), 180 | private_signer: { 181 | key_identifier: signingIdentity.private_signer.key_identifier, 182 | key_material: Buffer.from(signingIdentity.private_signer.key_material, "base64") 183 | } 184 | }; 185 | } 186 | } 187 | 188 | function serializeMSP(msp: MSPConfig) { 189 | return { 190 | name: msp.name, 191 | root_certs: msp.root_certs.map((cert) => cert.toString("base64")), 192 | intermediate_certs: msp.intermediate_certs.map((cert) => cert.toString("base64")), 193 | admins: msp.admins.map((cert) => cert.toString("base64")), 194 | revocation_list: msp.revocation_list.map((cert) => cert.toString("base64")), 195 | signing_identity: serializeSigningIdentity(msp.signing_identity), 196 | organizational_unit_identifiers: msp.organizational_unit_identifiers.map((ouIdentifier) => serializeOUIdentifier(ouIdentifier)), 197 | tls_root_certs: msp.tls_root_certs.map((cert) => cert.toString("base64")), 198 | tls_intermediate_certs: msp.tls_intermediate_certs.map((cert) => cert.toString("base64")) 199 | }; 200 | } 201 | 202 | function deserializeMSP(msp: any): MSPConfig { 203 | return { 204 | name: msp.name, 205 | root_certs: msp.root_certs.map((cert: string) => Buffer.from(cert, "base64")), 206 | intermediate_certs: msp.intermediate_certs.map((cert: string) => Buffer.from(cert, "base64")), 207 | admins: msp.admins.map((cert: string) => Buffer.from(cert, "base64")), 208 | revocation_list: msp.revocation_list.map((cert: string) => Buffer.from(cert, "base64")), 209 | signing_identity: deserializeSigningIdentity(msp.signing_identity), 210 | organizational_unit_identifiers: msp.organizational_unit_identifiers.map((ou: string) => deserializeOUIdentifier(ou)), 211 | tls_root_certs: msp.tls_root_certs.map((cert: string) => Buffer.from(cert, "base64")), 212 | tls_intermediate_certs: msp.tls_intermediate_certs.map((cert: string) => Buffer.from(cert, "base64")), 213 | }; 214 | } 215 | 216 | export function serializeConfigTxInfo(info: FabricConfigTransactionInfo): any { 217 | return { 218 | applicationMSPs: info.applicationMSPs.map((msp) => serializeMSP(msp)), 219 | blockNumber: info.blockNumber, 220 | transactionId: info.transactionId, 221 | ordererMSPs: info.ordererMSPs.map((msp) => serializeMSP(msp)) 222 | }; 223 | } 224 | 225 | export function deserializeConfigTxInfo(obj: any): FabricConfigTransactionInfo { 226 | return { 227 | applicationMSPs: obj.applicationMSPs.map((msp: any) => deserializeMSP(msp)), 228 | blockNumber: obj.blockNumber, 229 | transactionId: obj.transactionId, 230 | ordererMSPs: obj.ordererMSPs.map((msp: any) => deserializeMSP(msp)) 231 | }; 232 | } 233 | -------------------------------------------------------------------------------- /src/data/fabric/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | export * from "./fabric-data"; 8 | export { MSPConfig } from "./fabric-types"; 9 | export * from "./fabric-utils"; 10 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | export { BCVerifier } from "./bcverifier"; 8 | export * from "./check"; 9 | export * from "./common"; 10 | export * from "./data/fabric"; 11 | export * from "./result-set"; 12 | export * from "./checkpoint"; 13 | -------------------------------------------------------------------------------- /src/kvmanager.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { BCVerifierNotFound, KeyValuePairWrite, KeyValueTransaction } from "./common"; 8 | import { KeyValueManagerBlockNotSufficientError, SimpleKeyValueManager } from "./kvmanager"; 9 | import { correctKVBlocks } from "./mock/mock-block"; 10 | 11 | describe("SimpleKeyValueManager", () => { 12 | test("good path without initial state", async () => { 13 | const manager = new SimpleKeyValueManager(); 14 | 15 | expect(manager.getNextDesiredBlockNumber()).toBe(0); 16 | expect(manager.feedBlock(correctKVBlocks[0])).toBeTruthy(); 17 | expect(manager.getVersionsForKey(Buffer.from("key1")).length).toBe(1); 18 | expect(manager.getVersionsForKey(Buffer.from("key2")).length).toBe(0); 19 | 20 | expect(manager.feedBlock(correctKVBlocks[1])).toBeTruthy(); 21 | expect(manager.feedBlock(correctKVBlocks[2])).toBeTruthy(); 22 | expect(manager.getNextDesiredBlockNumber()).toBe(3); 23 | // key1: w, w, d 24 | expect(manager.getVersionsForKey(Buffer.from("key1")).length).toBe(3); 25 | // key2: w, w 26 | expect(manager.getVersionsForKey(Buffer.from("key2")).length).toBe(2); 27 | 28 | const key1 = manager.getVersionsForKey(Buffer.from("key1")); 29 | expect(key1[0].isDelete).toBeFalsy(); 30 | expect(key1[2].isDelete).toBeTruthy(); 31 | expect((key1[1].transaction as KeyValueTransaction).getTransactionID()).toBe("Tx5"); 32 | expect((key1[2].transaction as KeyValueTransaction).getTransactionID()).toBe("Tx6"); 33 | 34 | const key1Write = key1[0] as KeyValuePairWrite; 35 | expect(key1Write.value.toString()).toBe("A"); 36 | 37 | const state0 = manager.getState(correctKVBlocks[0]); 38 | expect(state0.getKeys().length).toBe(1); 39 | expect(state0.getValue(Buffer.from("key1")).getValue().toString()).toBe("A"); 40 | const state1 = manager.getState(correctKVBlocks[1]); 41 | expect(state1.getKeys().length).toBe(3); 42 | expect(state1.getValue(Buffer.from("key2")).getValue().toString()).toBe("1"); 43 | expect(state1.getValue(Buffer.from("key1")).getValue().toString()).toBe("A"); 44 | const state2 = manager.getState(correctKVBlocks[2]); 45 | expect(state2.getKeys().length).toBe(2); 46 | expect(state2.getValue(Buffer.from("key2")).getValue().toString()).toBe("3"); 47 | expect(() => state2.getValue(Buffer.from("key1"))).toThrow(BCVerifierNotFound); 48 | 49 | const value2 = state2.getValue(Buffer.from("key2")); 50 | const tx = await value2.getTransaction() as KeyValueTransaction; 51 | expect(tx).not.toBeNull(); 52 | expect(tx.getTransactionID()).toBe("Tx5"); 53 | expect(value2.getVersion().toString()).toBe("2*0"); 54 | 
expect(value2.getKey().toString()).toBe("key2"); 55 | const history2 = await value2.getHistory(); 56 | expect(history2.length).toBe(2); 57 | 58 | const value3 = state2.getValue(Buffer.from("key3")); 59 | const history3 = await value3.getHistory(); 60 | expect(history3.length).toBe(1); 61 | 62 | const tx1 = manager.getTransaction("Tx1"); 63 | const tx1WriteSet = tx1.getOutput(); 64 | const tx1ReadSet = tx1.getInput(); 65 | 66 | expect(tx1).not.toBeNull(); 67 | expect(tx1.getTransaction().getTransactionID()).toBe("Tx1"); 68 | expect(tx1WriteSet[0].isDelete).toBeFalsy(); 69 | expect(tx1WriteSet[0].key.toString()).toBe("key1"); 70 | expect((tx1WriteSet[0] as KeyValuePairWrite).value.toString()).toBe("A"); 71 | expect(tx1ReadSet).toHaveLength(0); 72 | expect(tx1.getState().getKeys()).toHaveLength(0); 73 | 74 | const tx4 = manager.getTransaction("Tx4"); 75 | const tx4WriteSet = tx4.getOutput(); 76 | const tx4ReadSet = tx4.getInput(); 77 | 78 | expect(tx4).not.toBeNull(); 79 | expect(tx4WriteSet[0].isDelete).toBeFalsy(); 80 | expect(tx4WriteSet[0].key.toString()).toBe("key2"); 81 | expect((tx4WriteSet[0] as KeyValuePairWrite).value.toString()).toBe("1"); 82 | expect(tx4ReadSet).toHaveLength(1); 83 | expect(tx4ReadSet[0].isDelete).toBeFalsy(); 84 | expect(tx4ReadSet[0].key.toString()).toBe("key1"); 85 | expect((tx4ReadSet[0] as KeyValuePairWrite).value.toString()).toBe("A"); 86 | 87 | const tx4State = tx4.getState(); 88 | expect(tx4State.getKeys()).toHaveLength(1); 89 | 90 | expect(() => manager.getTransaction("NonExistent")).toThrow(BCVerifierNotFound); 91 | }); 92 | 93 | test("error without initial state", async () => { 94 | const manager = new SimpleKeyValueManager(); 95 | 96 | expect(() => manager.getState(correctKVBlocks[0])).toThrow(KeyValueManagerBlockNotSufficientError); 97 | expect(manager.feedBlock(correctKVBlocks[1])).toBeFalsy(); 98 | }); 99 | 100 | test("good path with initial state", async () => { 101 | const initialState: KeyValuePairWrite[] = [{ 102 | key: Buffer.from("key1"), 103 | value: Buffer.from("1"), 104 | version: Buffer.from("1*1"), 105 | isDelete: false, 106 | }, { 107 | key: Buffer.from("key2"), 108 | value: Buffer.from("4"), 109 | version: Buffer.from("1*1"), 110 | isDelete: false 111 | }]; 112 | 113 | const manager = new SimpleKeyValueManager({ 114 | lastBlockNumber: 1, 115 | keyValueState: initialState 116 | }); 117 | 118 | expect(manager.getNextDesiredBlockNumber()).toBe(2); 119 | expect(manager.feedBlock(correctKVBlocks[1])).toBeFalsy(); 120 | expect(manager.feedBlock(correctKVBlocks[2])).toBeTruthy(); 121 | 122 | const version1 = manager.getVersionsForKey(Buffer.from("key1")); 123 | expect(version1.length).toBe(3); 124 | expect(version1[0].transaction).toBeNull(); 125 | expect((version1[1].transaction as KeyValueTransaction).getTransactionID()).toBe("Tx5"); 126 | expect((version1[2].transaction as KeyValueTransaction).getTransactionID()).toBe("Tx6"); 127 | 128 | const state = manager.getState(correctKVBlocks[2]); 129 | const value2 = state.getValue(Buffer.from("key2")); 130 | expect(value2.getValue().toString()).toBe("3"); 131 | const history2 = await value2.getHistory(); 132 | expect(history2.length).toBe(2); 133 | }); 134 | 135 | test("error with initial state", async () => { 136 | const initialState: KeyValuePairWrite[] = [{ 137 | key: Buffer.from("key1"), 138 | value: Buffer.from("10"), 139 | version: Buffer.from("10*1"), 140 | isDelete: false, 141 | }]; 142 | 143 | const manager = new SimpleKeyValueManager({ 144 | lastBlockNumber: 100, 145 | keyValueState: 
initialState 146 | }); 147 | 148 | expect(manager.getNextDesiredBlockNumber()).toBe(101); 149 | expect(() => manager.getState(correctKVBlocks[0])).toThrow(BCVerifierNotFound); 150 | }); 151 | }); 152 | -------------------------------------------------------------------------------- /src/kvmanager.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | import * as util from "util"; 7 | import { AppTransaction, BCVerifierError, BCVerifierNotFound, KeyValue, KeyValueBlock, KeyValuePair, KeyValuePairWrite, 8 | KeyValueState, KeyValueTransaction, Transaction } from "./common"; 9 | 10 | export class KeyValueManagerBlockNotSufficientError extends Error { 11 | // Need to feed more blocks 12 | } 13 | export interface KeyValueManager { 14 | getNextDesiredBlockNumber(): number; 15 | feedBlock(block: KeyValueBlock): boolean; 16 | getState(block: KeyValueBlock): KeyValueState; 17 | getTransaction(tx: string | KeyValueTransaction): AppTransaction; 18 | } 19 | 20 | export interface KeyValueManagerInitialState { 21 | lastBlockNumber: number; 22 | keyValueState: KeyValuePairWrite[]; 23 | } 24 | 25 | // Implementation of Simple Key-Value Manager 26 | type KeyValuePairWithTx = KeyValuePair & { 27 | transaction: Transaction | null; 28 | }; 29 | type KeyValuePairWriteWithTx = KeyValuePairWrite & { 30 | transaction: Transaction | null; 31 | }; 32 | 33 | type SimpleKeyValueHistory = { 34 | [key: string]: KeyValuePairWithTx[] 35 | }; 36 | 37 | class SimpleKeyValue implements KeyValue { 38 | protected keyValueManager: SimpleKeyValueManager; 39 | protected idxInHistory: number; 40 | protected pair: KeyValuePairWriteWithTx; 41 | 42 | public constructor(mgr: SimpleKeyValueManager, pair: KeyValuePairWriteWithTx) { 43 | this.pair = pair; 44 | this.keyValueManager = mgr; 45 | 46 | this.idxInHistory = this.keyValueManager.getVersionsForKey(this.pair.key).findIndex( 47 | (v) => v.version.compare(this.pair.version) === 0 48 | ); 49 | } 50 | 51 | public getKey(): Buffer { 52 | return this.pair.key; 53 | } 54 | 55 | public getValue(): Buffer { 56 | return this.pair.value; 57 | } 58 | 59 | public getVersion(): Buffer { 60 | return this.pair.version; 61 | } 62 | 63 | /* The following methods are defined as async just to return a promise. 
In this implementation, they can be sync */ 64 | public async getTransaction(): Promise { 65 | return this.pair.transaction; 66 | } 67 | 68 | public async getHistory(): Promise { 69 | let ret: SimpleKeyValue[] = []; 70 | 71 | this.keyValueManager.getVersionsForKey(this.pair.key) 72 | .slice(0, this.idxInHistory + 1).forEach((pair) => { 73 | if (pair.isDelete) { 74 | ret = []; 75 | } else { 76 | ret.push(new SimpleKeyValue(this.keyValueManager, pair)); 77 | } 78 | }); 79 | 80 | return ret; 81 | } 82 | } 83 | 84 | export class SimpleKeyValueState implements KeyValueState { 85 | protected values: { [key: string]: KeyValuePairWriteWithTx }; 86 | protected keyValueManager: SimpleKeyValueManager; 87 | 88 | public constructor(mgr: SimpleKeyValueManager, prev?: SimpleKeyValueState) { 89 | if (prev == null) { 90 | this.values = {}; 91 | } else { 92 | this.values = Object.assign({}, prev.values); 93 | } 94 | this.keyValueManager = mgr; 95 | } 96 | 97 | public addKeyValuePair(pair: KeyValuePairWithTx) { 98 | const keyName = pair.key.toString("hex"); 99 | if (pair.isDelete) { 100 | delete this.values[keyName]; 101 | } else { 102 | this.values[keyName] = pair; 103 | } 104 | } 105 | 106 | public getSnapshot(): SimpleKeyValueState { 107 | return new SimpleKeyValueState(this.keyValueManager, this); 108 | } 109 | 110 | public getValue(key: Buffer): KeyValue { 111 | const keyName = key.toString("hex"); 112 | if (this.values[keyName] != null) { 113 | return new SimpleKeyValue(this.keyValueManager, this.values[keyName]); 114 | } else { 115 | throw new BCVerifierNotFound(); 116 | } 117 | } 118 | 119 | public getKeys(): KeyValue[] { 120 | const keyValues = []; 121 | for (const key in this.values) { 122 | keyValues.push(new SimpleKeyValue(this.keyValueManager, this.values[key])); 123 | } 124 | return keyValues; 125 | } 126 | } 127 | 128 | export class AppKeyValueTransaction implements AppTransaction { 129 | protected transaction: KeyValueTransaction; 130 | protected readSet: KeyValuePair[]; 131 | protected writeSet: KeyValuePair[]; 132 | protected state: SimpleKeyValueState; 133 | 134 | public constructor(transaction: KeyValueTransaction, readSet: KeyValuePair[], writeSet: KeyValuePair[], 135 | state: SimpleKeyValueState) { 136 | this.transaction = transaction; 137 | this.readSet = readSet; 138 | this.writeSet = writeSet; 139 | this.state = state; 140 | } 141 | 142 | public getInput(): KeyValuePair[] { 143 | return this.readSet; 144 | } 145 | 146 | public getOutput(): KeyValuePair[] { 147 | return this.writeSet; 148 | } 149 | 150 | public getState(): SimpleKeyValueState { 151 | return this.state; 152 | } 153 | 154 | public getTransaction(): KeyValueTransaction { 155 | return this.transaction; 156 | } 157 | } 158 | 159 | export class SimpleKeyValueManager implements KeyValueManager { 160 | protected startBlock: number; 161 | protected nextBlock: number; 162 | 163 | // Latest Key-Value and Versions 164 | protected keyVersions: SimpleKeyValueHistory; 165 | protected snapshot: { [blockNumber: number]: SimpleKeyValueState }; 166 | protected transactions: { [transactionId: string]: AppKeyValueTransaction }; 167 | 168 | public constructor(initialState?: KeyValueManagerInitialState) { 169 | if (initialState != null) { 170 | this.nextBlock = initialState.lastBlockNumber + 1; 171 | 172 | this.keyVersions = {}; 173 | const newSnapshot = new SimpleKeyValueState(this); 174 | for (const value of initialState.keyValueState) { 175 | const pair = { 176 | ...value, 177 | transaction: null 178 | }; 179 | 
newSnapshot.addKeyValuePair(pair); 180 | const keyHex = value.key.toString("hex"); 181 | this.keyVersions[keyHex] = [ pair ]; 182 | } 183 | this.snapshot = { [initialState.lastBlockNumber]: newSnapshot }; 184 | } else { 185 | this.nextBlock = 0; 186 | this.keyVersions = {}; 187 | this.snapshot = {}; 188 | } 189 | this.transactions = {}; 190 | this.startBlock = this.nextBlock; 191 | } 192 | 193 | public getNextDesiredBlockNumber(): number { 194 | return this.nextBlock; 195 | } 196 | 197 | public getVersionsForKey(key: Buffer): KeyValuePairWithTx[] { 198 | const keyName = key.toString("hex"); 199 | if (this.keyVersions[keyName] == null) { 200 | return []; 201 | } else { 202 | return this.keyVersions[keyName]; 203 | } 204 | } 205 | 206 | public feedBlock(block: KeyValueBlock): boolean { 207 | if (this.nextBlock !== block.getBlockNumber()) { 208 | return false; 209 | } 210 | 211 | const blockNumber = block.getBlockNumber(); 212 | let newSnapshot; 213 | if (this.snapshot[blockNumber - 1] == null) { 214 | newSnapshot = new SimpleKeyValueState(this); 215 | } else { 216 | newSnapshot = new SimpleKeyValueState(this, this.snapshot[blockNumber - 1]); 217 | } 218 | 219 | for (const tx of block.getTransactions()) { 220 | const state = newSnapshot.getSnapshot(); 221 | 222 | const rSet = tx.getReadSet(); 223 | const readValues: KeyValuePair[] = []; 224 | for (const rPair of rSet) { 225 | const pair = newSnapshot.getValue(rPair.key); 226 | if (pair.getVersion().compare(rPair.version) !== 0) { 227 | throw new BCVerifierError("Read conflict detected in a valid transaction"); 228 | } 229 | readValues.push({ 230 | ...rPair, 231 | isDelete: false, 232 | value: pair.getValue() 233 | }); 234 | } 235 | 236 | const wSet = tx.getWriteSet(); 237 | const writeValues: KeyValuePair[] = []; 238 | for (const wPair of wSet) { 239 | const pair = { 240 | ...wPair, 241 | transaction: tx 242 | }; 243 | newSnapshot.addKeyValuePair(pair); 244 | const keyHex = wPair.key.toString("hex"); 245 | if (this.keyVersions[keyHex] == null) { 246 | this.keyVersions[keyHex] = [ pair ]; 247 | } else { 248 | this.keyVersions[keyHex].push(pair); 249 | } 250 | writeValues.push(wPair); 251 | } 252 | 253 | const txId = tx.getTransactionID(); 254 | this.transactions[txId] = new AppKeyValueTransaction(tx, readValues, writeValues, state); 255 | } 256 | this.snapshot[block.getBlockNumber()] = newSnapshot; 257 | 258 | this.nextBlock = block.getBlockNumber() + 1; 259 | return true; 260 | } 261 | 262 | public getState(block: KeyValueBlock): SimpleKeyValueState { 263 | if (block.getBlockNumber() >= this.nextBlock) { 264 | const msg = util.format("State for block %d requested. But only the blocks up to %d are fed to the manager", 265 | block.getBlockNumber(), this.nextBlock - 1); 266 | throw new KeyValueManagerBlockNotSufficientError(msg); 267 | } else if (block.getBlockNumber() < this.startBlock) { 268 | throw new BCVerifierNotFound(); 269 | } 270 | // 271 | return this.snapshot[block.getBlockNumber()]; 272 | } 273 | 274 | public getTransaction(tx: Transaction | string): AppKeyValueTransaction { 275 | const txId = typeof(tx) === "string" ? 
tx : tx.getTransactionID(); 276 | const kvtx = this.transactions[txId]; 277 | 278 | if (kvtx == null) { 279 | throw new BCVerifierNotFound(); 280 | } else { 281 | return kvtx; 282 | } 283 | } 284 | } 285 | -------------------------------------------------------------------------------- /src/mock/mock-block.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { format } from "util"; 8 | 9 | import { BCVerifierError, BCVerifierNotFound, BCVerifierNotImplemented, Block, 10 | HashValueType, KeyValueBlock, KeyValuePair, KeyValuePairRead, 11 | KeyValueState, KeyValueTransaction, Transaction, } from "../common"; 12 | import { BlockSource } from "../network-plugin"; 13 | 14 | export type TransactionIDAndType = { id: string, type: number }; 15 | export type KVTransactionIDAndType = TransactionIDAndType & { rwset: SampleRWSet }; 16 | 17 | export class MockTransaction implements Transaction { 18 | private transactionID: string; 19 | private transactionType: number; 20 | private block: Block; 21 | private index: number; 22 | 23 | constructor(block: Block, args: TransactionIDAndType, index: number) { 24 | this.transactionID = args.id; 25 | this.transactionType = args.type; 26 | this.block = block; 27 | this.index = index; 28 | } 29 | 30 | public getBlock(): Block { 31 | return this.block; 32 | } 33 | 34 | public getIndexInBlock(): number { 35 | return this.index; 36 | } 37 | 38 | public getTransactionID(): string { 39 | return this.transactionID; 40 | } 41 | 42 | public getTransactionType(): number { 43 | return this.transactionType; 44 | } 45 | 46 | public async getKeyValueState(): Promise { 47 | throw new BCVerifierNotImplemented(); 48 | } 49 | } 50 | 51 | export class MockBlock implements Block { 52 | private blockNumber: number; 53 | private hashSelf: Buffer; 54 | private hashPrev: Buffer; 55 | private calcHashSelf: Buffer; 56 | private calcHashPrev: Buffer; 57 | private transactions: Transaction[]; 58 | 59 | constructor(num: number, hashSelf: Buffer, hashPrev: Buffer, calcHashSelf: Buffer, calcHashPrev: Buffer, 60 | transactions: TransactionIDAndType[]) { 61 | this.blockNumber = num; 62 | this.hashSelf = hashSelf; 63 | this.hashPrev = hashPrev; 64 | this.calcHashSelf = calcHashSelf; 65 | this.calcHashPrev = calcHashPrev; 66 | 67 | this.transactions = []; 68 | for (const i in transactions) { 69 | const transaction = transactions[i]; 70 | this.transactions.push(new MockTransaction(this, transaction, parseInt(i, 10))); 71 | } 72 | } 73 | 74 | public getBlockNumber(): number { 75 | return this.blockNumber; 76 | } 77 | 78 | public getHashValue(): Buffer { 79 | return this.hashSelf; 80 | } 81 | 82 | public getPrevHashValue(): Buffer { 83 | return this.hashPrev; 84 | } 85 | 86 | public calcHashValue(hash: HashValueType) { 87 | switch (hash) { 88 | case HashValueType.HASH_FOR_PREV: 89 | return this.calcHashPrev; 90 | case HashValueType.HASH_FOR_SELF: 91 | return this.calcHashSelf; 92 | } 93 | } 94 | 95 | public getRaw(): Buffer { 96 | return Buffer.alloc(0); 97 | } 98 | 99 | public getTransactions(): Transaction[] { 100 | return this.transactions; 101 | } 102 | } 103 | 104 | export class MockSource implements BlockSource { 105 | private sourceID: string; 106 | private orgID: string; 107 | private blocks: Block[]; 108 | private useFindTransaction: boolean; 109 | 110 | constructor(sourceID: string, orgID: string, blocks: Block[], useFindTransaction?: 
boolean) { 111 | this.sourceID = sourceID; 112 | this.orgID = orgID; 113 | this.blocks = blocks; 114 | if (useFindTransaction == null || !useFindTransaction) { 115 | this.useFindTransaction = false; 116 | } else { 117 | this.useFindTransaction = true; 118 | } 119 | } 120 | 121 | public getSourceID() { 122 | return this.sourceID; 123 | } 124 | 125 | public getSourceOrganizationID() { 126 | return this.orgID; 127 | } 128 | 129 | public async getBlock(num: number): Promise { 130 | if (num < 0 || num >= this.blocks.length) { 131 | throw new BCVerifierError(format("Block %d not found", num)); 132 | } 133 | return this.blocks[num]; 134 | } 135 | 136 | public async getBlockHash(num: number): Promise { 137 | if (num < 0 || num >= this.blocks.length) { 138 | throw new BCVerifierError(format("Block %d not found", num)); 139 | } 140 | return this.blocks[num].getHashValue(); 141 | } 142 | 143 | public async getBlockHeight(): Promise { 144 | return this.blocks.length; 145 | } 146 | 147 | public async getBlockRange(start: number, end: number): Promise { 148 | return this.blocks.slice(start, end + 1); 149 | } 150 | 151 | public async findBlockByTransaction(transactionID: string): Promise { 152 | if (this.useFindTransaction) { 153 | for (const b of this.blocks) { 154 | const txs = b.getTransactions(); 155 | for (const tx of txs) { 156 | if (tx.getTransactionID() === transactionID) { 157 | return b; 158 | } 159 | } 160 | } 161 | throw new BCVerifierNotFound("Block not found"); 162 | } else { 163 | throw new BCVerifierNotImplemented(); 164 | } 165 | } 166 | } 167 | 168 | export class MockKVTransaction extends MockTransaction implements KeyValueTransaction { 169 | private writeSet: KeyValuePair[]; 170 | private readSet: KeyValuePairRead[]; 171 | 172 | constructor(block: KeyValueBlock, transaction: KVTransactionIDAndType, index: number) { 173 | super(block, transaction, index); 174 | 175 | this.readSet = []; 176 | for (const key in transaction.rwset.read) { 177 | const version = transaction.rwset.read[key]; 178 | 179 | this.readSet.push({ 180 | key: Buffer.from(key), 181 | version: Buffer.from(version) 182 | }); 183 | } 184 | 185 | this.writeSet = []; 186 | for (const key in transaction.rwset.write) { 187 | const value = transaction.rwset.write[key]; 188 | if (value != null) { 189 | this.writeSet.push({ 190 | isDelete: false, 191 | key: Buffer.from(key), 192 | value: Buffer.from(value), 193 | version: Buffer.from(block.getBlockNumber() + "*" + index) 194 | }); 195 | } else { 196 | this.writeSet.push({ 197 | isDelete: true, 198 | key: Buffer.from(key), 199 | version: Buffer.from(block.getBlockNumber() + "*" + index) 200 | }); 201 | } 202 | } 203 | } 204 | 205 | public getWriteSet() { 206 | return this.writeSet; 207 | } 208 | 209 | public getReadSet() { 210 | return this.readSet; 211 | } 212 | } 213 | 214 | export class MockKVBlock extends MockBlock implements KeyValueBlock { 215 | private kvTransactions: KeyValueTransaction[]; 216 | 217 | constructor(num: number, hashSelf: Buffer, hashPrev: Buffer, calcHashSelf: Buffer, calcHashPrev: Buffer, 218 | transactions: KVTransactionIDAndType[]) { 219 | super(num, hashSelf, hashPrev, calcHashSelf, calcHashPrev, transactions); 220 | 221 | this.kvTransactions = []; 222 | for (const transaction of transactions) { 223 | this.kvTransactions.push(new MockKVTransaction( 224 | this, transaction, this.kvTransactions.length 225 | )); 226 | } 227 | } 228 | 229 | public getTransactions(): KeyValueTransaction[] { 230 | return this.kvTransactions; 231 | } 232 | } 233 | 234 | export 
const correctBlocks = [ 235 | new MockBlock(0, Buffer.from("ABCD"), Buffer.from(""), Buffer.from("ABCD"), Buffer.from("PABCD"), 236 | [ { id: "Tx1", type: 1 }, { id: "Tx2", type: 2 }]), 237 | new MockBlock(1, Buffer.from("XYZW"), Buffer.from("PABCD"), Buffer.from("XYZW"), Buffer.from("PABCD"), 238 | [ { id: "Tx3", type: 3 }, { id: "Tx4", type: 1 }]) 239 | ]; 240 | 241 | interface SampleRWSet { 242 | read: { [key: string]: string }; 243 | write: { [key: string]: string | null }; 244 | } 245 | 246 | export const sampleRWSets: SampleRWSet[] = [ 247 | { read: {}, write: {} }, // 0 248 | { read: {}, write: { key1: "A" }}, // 1 (Used in 0-0) 249 | { read: { key1: "0*0" }, write: { key2: "1", key3: "foo" }}, // 2 (Used in 1-1) 250 | { read: { key2: "1*1" }, write: { key1: "B", key2: "3", key3: null }}, // 3 (Used in 2-0) 251 | { read: { key1: "2*0" }, write: { key1: null, key3: "bar" }} // 4 (Used in 2-1) 252 | ]; 253 | 254 | export const correctKVBlocks = [ 255 | new MockKVBlock(0, Buffer.from("ABCD"), Buffer.from(""), Buffer.from("ABCD"), Buffer.from("PABCD"), 256 | [ { id: "Tx1", type: 1, rwset: sampleRWSets[1] }, { id: "Tx2", type: 2, rwset: sampleRWSets[0] }]), 257 | new MockKVBlock(1, Buffer.from("XYZW"), Buffer.from("PABCD"), Buffer.from("XYZW"), Buffer.from("PABCD"), 258 | [ { id: "Tx3", type: 3, rwset: sampleRWSets[0] }, { id: "Tx4", type: 1, rwset: sampleRWSets[2] }]), 259 | new MockKVBlock(2, Buffer.from("EFGH"), Buffer.from("XYZW"), Buffer.from("EFGH"), Buffer.from("XYZW"), 260 | [ { id: "Tx5", type: 1, rwset: sampleRWSets[3] }, { id: "Tx6", type: 1, rwset: sampleRWSets[4] }]), 261 | ]; 262 | -------------------------------------------------------------------------------- /src/network-plugin.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { Block, Transaction } from "./common"; 8 | import { BlockProvider } from "./provider"; 9 | import { BCVCheckpoint, BCVCheckpointData } from "./checkpoint"; 10 | 11 | export interface BlockSource { 12 | getSourceID(): string; 13 | getSourceOrganizationID(): string; 14 | 15 | getBlock(blockNumber: number): Promise<Block>; 16 | getBlockRange(blockStart: number, blockEnd: number): Promise<Block[]>; 17 | getBlockHash(blockNumber: number): Promise<Buffer>; 18 | getBlockHeight(): Promise<number>; 19 | 20 | findBlockByTransaction(transactionID: string): Promise<Block>; 21 | } 22 | 23 | export enum DataModelType { 24 | Other = 0, 25 | KeyValue = 1, 26 | UTXO = 2 27 | } 28 | 29 | export interface NetworkPlugin { 30 | getBlockSources(): Promise<BlockSource[]>; 31 | getPreferredBlockSource(): Promise<BlockSource>; 32 | getDataModelType(): DataModelType; 33 | createCheckpoint(provider: BlockProvider, transaction: Transaction): Promise<BCVCheckpointData>; 34 | loadFromCheckpoint(data: BCVCheckpointData): BCVCheckpoint; 35 | } 36 | -------------------------------------------------------------------------------- /src/network/fabric-block.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019-2020 Hitachi America, Ltd.
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import * as path from "path"; 8 | 9 | import { BCVerifierError, BCVerifierNotImplemented } from "../common"; 10 | import { DataModelType } from "../network-plugin"; 11 | import FabricBlockPlugin, { FabricBlockSource } from "./fabric-block"; 12 | 13 | const testDataPathBase = path.join(__dirname, "..", "..", "test"); 14 | 15 | const testDataset: { [name: string]: string } = { 16 | "asset-transfer-basic-2.4.7": path.join(testDataPathBase, "asset-transfer-basic-2.4.7"), 17 | "asset-transfer-private-data-2.4.7": path.join(testDataPathBase, "asset-transfer-private-data-2.4.7") 18 | }; 19 | 20 | describe("FabricBlockSource", () => { 21 | for (const dataName in testDataset) { 22 | const dataPath = testDataset[dataName]; 23 | // eslint-disable-next-line @typescript-eslint/no-var-requires 24 | const dataConfig = require(path.join(dataPath, "config.json")); 25 | 26 | test("Create BlockSource - " + dataName, async () => { 27 | expect(FabricBlockSource.createFromConfig({ 28 | blockFile: path.join(dataPath, dataConfig.ledgers[0].blockFile) 29 | })).resolves.toBeDefined(); 30 | 31 | expect(FabricBlockSource.createFromConfig({ 32 | ledgerStore: path.join(dataPath, dataConfig.ledgers[0].ledgerStore) 33 | })).resolves.toBeDefined(); 34 | 35 | const source = await FabricBlockSource.createFromConfig({ 36 | ledgerStore: path.join(dataPath, dataConfig.ledgers[0].ledgerStore) 37 | }); 38 | expect(source.getBlockHeight()).resolves.toBe(dataConfig.blockHeight); 39 | expect(source.getSourceOrganizationID()).toBe("file"); 40 | expect(source.getSourceID()).toBe(path.join(dataPath, dataConfig.ledgers[0].ledgerStore)); 41 | expect(source.findBlockByTransaction("AAAA")).rejects.toBeInstanceOf(BCVerifierNotImplemented); 42 | 43 | expect((await source.getBlockHash(0)).toString("hex")).toBe(dataConfig.hashes[0]); 44 | expect(source.getBlock(0)).resolves.toBeDefined(); 45 | 46 | const block = await source.getBlock(0); 47 | expect(block.getHashValue().toString("hex")).toBe(dataConfig.hashes[0]); 48 | 49 | const block1 = await source.getBlock(1); 50 | expect(source.getBlockRange(0, 1)).resolves.toEqual([ block, block1 ]); 51 | 52 | const height = await source.getBlockHeight(); 53 | for (let i = 0; i < height; i++) { 54 | const b = await source.getBlock(i); 55 | expect(b.getHashValue().toString("hex")).toBe(dataConfig.hashes[i]); 56 | expect(b.getTransactions().length).toBe(dataConfig.numTransactions[i]); 57 | } 58 | 59 | // Try private data store if exists 60 | if (dataConfig.ledgers[0].privateDataStore == null) { 61 | return; 62 | } 63 | // 64 | const sourceWithPrivate = await FabricBlockSource.createFromConfig({ 65 | blockFile: path.join(dataPath, dataConfig.ledgers[0].blockFile), 66 | privateDataStore: path.join(dataPath, dataConfig.ledgers[0].privateDataStore) 67 | }); 68 | expect(sourceWithPrivate).toBeDefined(); 69 | await sourceWithPrivate.closePrivateDB(); 70 | }); 71 | } 72 | 73 | test("Create BlockSource - Non Existent", async () => { 74 | expect(FabricBlockSource.createFromConfig({})).rejects.toBeInstanceOf(BCVerifierError); 75 | expect(FabricBlockSource.createFromConfig({ 76 | blockFile: "/dev/null/non-existent" 77 | })).rejects.toThrowError(); 78 | }); 79 | }); 80 | 81 | describe("FabricBlockPlugin", () => { 82 | for (const dataName in testDataset) { 83 | const dataPath = testDataset[dataName]; 84 | // eslint-disable-next-line @typescript-eslint/no-var-requires 85 | const dataConfig = require(path.join(dataPath, "config.json")); 86 | 87 | // test for 
the first set only 88 | test("FabricBlockPlugin - " + dataName, async () => { 89 | const plugin1 = new FabricBlockPlugin(path.join(dataPath, dataConfig.ledgers[0].ledgerStore)); 90 | expect(plugin1.getDataModelType()).toBe(DataModelType.KeyValue); 91 | 92 | const sources = await plugin1.getBlockSources(); 93 | expect(sources.length).toBe(1); 94 | const preferred = await plugin1.getPreferredBlockSource(); 95 | expect(preferred).toBe(sources[0]); 96 | expect(preferred.getBlockHeight()).resolves.toBe(dataConfig.blockHeight); 97 | 98 | const plugin2 = new FabricBlockPlugin(path.join(dataPath, dataConfig.ledgers[0].blockFile)); 99 | const sources2 = await plugin2.getBlockSources(); 100 | expect(sources2.length).toBe(1); 101 | const preferred2 = await plugin2.getPreferredBlockSource(); 102 | expect(preferred2.getBlockHeight()).resolves.toBe(dataConfig.blockHeight); 103 | }); 104 | break; 105 | } 106 | }); 107 | -------------------------------------------------------------------------------- /src/network/fabric-block.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2022 Hitachi America, Ltd. & Hitachi, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { openSync, read, readFileSync, readSync, statSync } from "fs"; 8 | import { Level } from "level"; 9 | import * as path from "path"; 10 | import { format } from "util"; 11 | 12 | import { BCVerifierError, BCVerifierNotImplemented, Transaction } from "../common"; 13 | import { FabricBlock, FabricTransaction } from "../data/fabric"; 14 | import { FabricBCVCheckpoint, FabricBCVCheckpointContext, FabricBCVCheckpointData } from "../data/fabric/fabric-bcv-checkpoint"; 15 | import { FabricConfigCache } from "../data/fabric/fabric-utils"; 16 | import { BlockSource, DataModelType, NetworkPlugin } from "../network-plugin"; 17 | import { BlockProvider, KeyValueBlockProvider } from "../provider"; 18 | import { BCVCheckpoint, BCVCheckpointData } from "../checkpoint"; 19 | 20 | type FabricBlockConfigSet = FabricBlockConfig[]; 21 | 22 | interface FabricBlockConfig { 23 | name?: string; 24 | blockFile?: string; 25 | ledgerStore?: string; 26 | privateDataStore?: string; 27 | stateLevelDB?: string; 28 | } 29 | 30 | type FabricBlockFileInfo = FabricBlockPosition[]; 31 | 32 | interface FabricBlockPosition { 33 | file: number; 34 | offset: number; 35 | size: number; 36 | } 37 | 38 | function getConfig(config: string): FabricBlockConfigSet { 39 | /* 40 | * For compatibility, the config is assumed to be a path to the block file 41 | * unless it is a path to the json file (judged by the extension). 
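     *
     * As an illustration only (the paths below are hypothetical), a JSON config set
     * is an array of FabricBlockConfig entries such as:
     *   [
     *     { "name": "org1",
     *       "ledgerStore": "/path/to/ledgersData/chains/chains/mychannel",
     *       "privateDataStore": "/path/to/ledgersData/pvtdataStore" },
     *     { "name": "org2", "blockFile": "/path/to/blockfile_000000" }
     *   ]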
42 | */ 43 | if (config === "") { 44 | throw new BCVerifierError("fabric-block plugin: config should be a path to json or block file"); 45 | } else if (config.toLowerCase().endsWith(".json")) { 46 | return JSON.parse(readFileSync(config, { encoding: "utf-8" })); 47 | } else { 48 | const st = statSync(config); 49 | if (st.isDirectory()) { 50 | return [{ name: "blockDir", ledgerStore: config }]; 51 | } else { 52 | return [{ name: "block", blockFile: config }]; 53 | } 54 | } 55 | } 56 | 57 | function readVarInt(file: number, position: number): [number, number] { 58 | let ret = 0; 59 | let value = 0; 60 | const buf = Buffer.alloc(1); 61 | let numByte = 0; 62 | 63 | // eslint-disable-next-line no-constant-condition 64 | while (true) { 65 | if (readSync(file, buf, 0, 1, position) !== 1) { 66 | throw new BCVerifierError("Cannot read varint from a block file"); 67 | } 68 | value = buf.readUInt8(0); 69 | 70 | // eslint-disable-next-line no-bitwise 71 | ret |= (value & 0x7f) << (7 * numByte); 72 | numByte++; 73 | 74 | // eslint-disable-next-line no-bitwise 75 | if (!(value & 0x80)) { 76 | return [ret, numByte]; 77 | } 78 | 79 | position++; 80 | } 81 | } 82 | 83 | export class FabricBlockSource implements BlockSource { 84 | public static async createFromConfig(config: FabricBlockConfig): Promise { 85 | const blockInfo: FabricBlockFileInfo = []; 86 | 87 | let file; 88 | if (config.blockFile != null) { 89 | file = openSync(config.blockFile, "r"); 90 | } else if (config.ledgerStore != null) { 91 | file = openSync(path.join(config.ledgerStore, "blockfile_000000"), "r"); 92 | } else { 93 | throw new BCVerifierError("Cannot find ledger file"); 94 | } 95 | 96 | try { 97 | let position = 0; 98 | let size = 0; 99 | let len = 0; 100 | 101 | // eslint-disable-next-line no-constant-condition 102 | while (true) { 103 | [size, len] = readVarInt(file, position); 104 | if (size > 0) { 105 | blockInfo.push({ file: file, offset: position + len, size: size }); 106 | position += len + size; 107 | } else { 108 | break; 109 | } 110 | } 111 | } catch (e) { 112 | // Read until EOF. 
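            // Reaching EOF here is the normal end of the scan: the loop above assumes the
            // block file is a sequence of records, each a varint length followed by that
            // many bytes of serialized block. Illustration only (not real ledger data):
            // a 300-byte block stored at offset 0 is prefixed by the varint bytes 0xAC 0x02,
            // so its data occupies offsets 2..301 and the next record starts at offset 302.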
113 | } 114 | 115 | let privateDB = null; 116 | if (config.privateDataStore != null) { 117 | privateDB = new Level(config.privateDataStore, 118 | { createIfMissing: false, keyEncoding: "binary", valueEncoding: "binary" }); 119 | } 120 | 121 | return new FabricBlockSource(config, file, blockInfo, privateDB); 122 | } 123 | 124 | private file: number; 125 | private blockInfo: FabricBlockFileInfo; 126 | private config: FabricBlockConfig; 127 | private privateDB: Level | null; 128 | 129 | private constructor(config: FabricBlockConfig, file: number, blockInfo: FabricBlockFileInfo, privateDB: Level | null) { 130 | this.file = file; 131 | this.blockInfo = blockInfo; 132 | this.config = config; 133 | this.privateDB = privateDB; 134 | } 135 | 136 | public getBlock(blockNumber: number): Promise { 137 | const bi = this.blockInfo[blockNumber]; 138 | 139 | if (bi == null) { 140 | throw new BCVerifierError(format("Block %d not found", blockNumber)); 141 | } 142 | const buffer = Buffer.alloc(bi.size); 143 | 144 | return new Promise((resolve, reject) => { 145 | read(this.file, buffer, 0, bi.size, bi.offset, 146 | (err, bytesRead, bufferRead) => { 147 | if (err == null && bytesRead === bi.size) { 148 | const b = FabricBlock.fromFileBytes(bufferRead); 149 | if (this.privateDB != null) { 150 | b.addPrivateData(this.privateDB).then(() => { 151 | resolve(b); 152 | }, (error) => { reject(error); }); 153 | } else { 154 | resolve(b); 155 | } 156 | } else { 157 | reject(err); 158 | } 159 | } 160 | ); 161 | }); 162 | } 163 | 164 | public async getBlockHash(blockNumber: number): Promise { 165 | const block = await this.getBlock(blockNumber); 166 | 167 | return block.getHashValue(); 168 | } 169 | 170 | public async getBlockHeight(): Promise { 171 | return this.blockInfo.length; 172 | } 173 | 174 | public async getBlockRange(blockStart: number, blockEnd: number): Promise { 175 | let b = 0; 176 | const result: FabricBlock[] = []; 177 | if (blockEnd < blockStart) { 178 | throw new BCVerifierError(format("Block range invalid (start: %d, end %d)", blockStart, blockEnd)); 179 | } 180 | // No special method defined. Just get blocks one by one 181 | for (b = blockStart; b <= blockEnd; b++) { 182 | result.push(await this.getBlock(b)); 183 | } 184 | return result; 185 | } 186 | 187 | public getSourceID(): string { 188 | if (this.config.name != null) { 189 | return this.config.name; 190 | } else if (this.config.ledgerStore != null) { 191 | return this.config.ledgerStore; 192 | } else if (this.config.blockFile != null) { 193 | return this.config.blockFile; 194 | } else { 195 | return "block"; 196 | } 197 | } 198 | 199 | public getSourceOrganizationID(): string { 200 | return "file"; 201 | } 202 | 203 | public async findBlockByTransaction(_transactionId: string): Promise { 204 | // No special function for finding a transaction. 
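        // (On the caller side, BlockProvider.getTransaction in src/provider.ts catches
        // BCVerifierNotImplemented and falls back to caching every block and searching
        // its own transaction index.)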
205 | // Throw a not-implemented exception to make the provider perform the slow path 206 | throw new BCVerifierNotImplemented("findBlockByTransaction is not implemented"); 207 | } 208 | 209 | public closePrivateDB(): Promise<void> { 210 | if (this.privateDB) { 211 | return this.privateDB.close(); 212 | } else { 213 | return Promise.resolve(); 214 | } 215 | } 216 | } 217 | 218 | export default class FabricBlockPlugin implements NetworkPlugin { 219 | private sources: FabricBlockSource[] | undefined; 220 | private configSet: FabricBlockConfigSet; 221 | 222 | constructor(configString: string) { 223 | this.configSet = getConfig(configString); 224 | } 225 | 226 | public getDataModelType(): DataModelType { 227 | return DataModelType.KeyValue; 228 | } 229 | 230 | public async getBlockSources(): Promise<FabricBlockSource[]> { 231 | if (this.sources == null) { 232 | this.sources = []; 233 | for (const i in this.configSet) { 234 | const config = this.configSet[i]; 235 | if (config.name == null) { 236 | config.name = "Source " + i; 237 | } 238 | this.sources.push(await FabricBlockSource.createFromConfig(config)); 239 | } 240 | } 241 | return this.sources; 242 | } 243 | 244 | public async getPreferredBlockSource(): Promise<FabricBlockSource> { 245 | const sources = await this.getBlockSources(); 246 | if (sources.length === 0) { 247 | throw new BCVerifierError("No Block Source found"); 248 | } 249 | return sources[0]; 250 | } 251 | 252 | public async createCheckpoint(provider: BlockProvider, transaction: Transaction): Promise<BCVCheckpointData> { 253 | const kvProvider = provider as KeyValueBlockProvider; 254 | const fabricTransaction = transaction as FabricTransaction; 255 | 256 | const lastBlock = fabricTransaction.getBlock(); 257 | const configBlockIndex = lastBlock.getLastConfigBlockIndex(); 258 | const configInfo = await FabricConfigCache.GetInstance().getConfig(configBlockIndex); 259 | 260 | const context: FabricBCVCheckpointContext = { 261 | block: lastBlock, 262 | configInfo: configInfo, 263 | transaction: fabricTransaction, 264 | timestamp: Date.now(), 265 | }; 266 | 267 | if (kvProvider instanceof KeyValueBlockProvider) { 268 | const state = await kvProvider.getKeyValueState(fabricTransaction); 269 | context.state = state; 270 | } 271 | 272 | const checkpoint = new FabricBCVCheckpoint("fabric-block", null, context); 273 | 274 | return await checkpoint.getCheckpoint(); 275 | } 276 | 277 | public loadFromCheckpoint(data: BCVCheckpointData): BCVCheckpoint { 278 | const fabricCheckpointData = data as FabricBCVCheckpointData; 279 | 280 | return new FabricBCVCheckpoint("fabric-block", fabricCheckpointData); 281 | } 282 | } 283 | -------------------------------------------------------------------------------- /src/network/fabric-query2.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Hitachi America, Ltd.
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | import { Channel, Client, ConnectOptions, Endorser, IdentityContext, Query, User } from "fabric-common"; 7 | import { common } from "fabric-protos"; 8 | import fs from "fs"; 9 | import util from "util"; 10 | 11 | import { BCVerifierError, Transaction } from "../common"; 12 | import { FabricBlock, FabricTransaction } from "../data/fabric"; 13 | import { FabricBCVCheckpoint, FabricBCVCheckpointContext, FabricBCVCheckpointData } from "../data/fabric/fabric-bcv-checkpoint"; 14 | import { FabricConfigCache } from "../data/fabric/fabric-utils"; 15 | import { BlockSource, DataModelType, NetworkPlugin } from "../network-plugin"; 16 | import { BlockProvider, KeyValueBlockProvider } from "../provider"; 17 | import { BCVCheckpoint, BCVCheckpointData } from "../checkpoint"; 18 | 19 | const QUERY_SYSTEM_CHAINCODE = "qscc"; 20 | const FUNC_GET_BLOCK = "GetBlockByNumber"; 21 | const FUNC_GET_BLOCK_BY_TXID = "GetBlockByTxID"; 22 | const FUNC_GET_CHAIN_INFO = "GetChainInfo"; 23 | 24 | type FabricQuery2PluginDiscoveryConfig = { 25 | useDiscovery: false; 26 | }; 27 | 28 | interface FabricQuery2PluginPeerConfig { 29 | url: string; 30 | mspID: string; 31 | tlsCACertFile?: string; 32 | } 33 | 34 | export interface FabricQuery2PluginConfig { 35 | peer?: FabricQuery2PluginPeerConfig; 36 | peers?: FabricQuery2PluginPeerConfig[]; 37 | channel: string; 38 | client: { 39 | certFile: string; 40 | keyFile: string; 41 | mspID: string; 42 | mutualTLS?: { 43 | certFile: string; 44 | keyFile: string; 45 | } 46 | }; 47 | config: FabricQuery2PluginDiscoveryConfig; 48 | } 49 | 50 | export class FabricQuery2Source implements BlockSource { 51 | protected client: Client; 52 | protected channel: Channel; 53 | protected identity: IdentityContext; 54 | protected peer: Endorser | null; 55 | protected query: Query; 56 | protected config: FabricQuery2PluginConfig; 57 | protected peerConfig: FabricQuery2PluginPeerConfig; 58 | 59 | public constructor(config: FabricQuery2PluginConfig, peer: FabricQuery2PluginPeerConfig) { 60 | this.client = Client.newClient("peer"); 61 | this.channel = this.client.newChannel(config.channel); 62 | this.peer = null; 63 | this.query = this.channel.newQuery(QUERY_SYSTEM_CHAINCODE); 64 | this.config = config; 65 | this.peerConfig = peer; 66 | 67 | this.identity = this.client.newIdentityContext( 68 | User.createUser("user", "", 69 | this.config.client.mspID, 70 | fs.readFileSync(this.config.client.certFile).toString(), 71 | fs.readFileSync(this.config.client.keyFile).toString() 72 | )); 73 | } 74 | 75 | public async init() { 76 | this.peer = this.client.newEndorser("peer1"); 77 | 78 | const opts: ConnectOptions = { 79 | url: this.peerConfig.url 80 | }; 81 | if (this.peerConfig.tlsCACertFile != null) { 82 | opts.pem = fs.readFileSync(this.peerConfig.tlsCACertFile).toString(); 83 | } 84 | if (this.config.client.mutualTLS != null) { 85 | opts.clientCert = fs.readFileSync(this.config.client.mutualTLS.certFile).toString(); 86 | opts.clientKey = fs.readFileSync(this.config.client.mutualTLS.keyFile).toString(); 87 | } 88 | 89 | await this.peer.connect( 90 | this.client.newEndpoint(opts) 91 | ); 92 | } 93 | 94 | public getSourceID(): string { 95 | return util.format("%s", this.peerConfig.url); 96 | } 97 | 98 | public getSourceOrganizationID(): string { 99 | return this.peerConfig.mspID; 100 | } 101 | 102 | public async getBlock(blockNumber: number): Promise { 103 | const blockBytes = await this.queryChaincode(FUNC_GET_BLOCK, this.config.channel, 
blockNumber.toString()); 104 | 105 | return FabricBlock.fromQueryBytes(blockBytes); 106 | } 107 | 108 | public async getBlockRange(blockStart: number, blockEnd: number): Promise { 109 | const result: FabricBlock[] = []; 110 | if (blockEnd < blockStart) { 111 | throw new BCVerifierError(util.format("Block range invalid (start: %d, end %d)", blockStart, blockEnd)); 112 | } 113 | 114 | let b: number; 115 | for (b = blockStart; b <= blockEnd; b++) { 116 | result.push(await this.getBlock(b)); 117 | } 118 | return result; 119 | } 120 | 121 | public async getBlockHash(blockNumber: number): Promise { 122 | const block = await this.getBlock(blockNumber); 123 | 124 | return block.getHashValue(); 125 | } 126 | 127 | public async getBlockHeight(): Promise { 128 | const infoBytes = await this.queryChaincode(FUNC_GET_CHAIN_INFO, this.config.channel); 129 | const info = common.BlockchainInfo.decode(infoBytes); 130 | 131 | if (typeof(info.height) === "number") { 132 | return info.height; 133 | } else { 134 | return info.height.toNumber(); 135 | } 136 | } 137 | 138 | public async findBlockByTransaction(txID: string): Promise { 139 | const block = await this.queryChaincode(FUNC_GET_BLOCK_BY_TXID, this.config.channel, txID); 140 | 141 | return FabricBlock.fromQueryBytes(block); 142 | } 143 | 144 | protected async queryChaincode(func: string, ...args: string[]): Promise { 145 | if (this.peer == null) { 146 | throw new BCVerifierError("FabricQuery2Source not initialized"); 147 | } 148 | 149 | this.query.build(this.identity, { 150 | fcn: func, 151 | args: args 152 | }); 153 | this.query.sign(this.identity); 154 | const response = await this.query.send({ 155 | targets: [this.peer] 156 | }); 157 | 158 | if (response.queryResults.length < 1) { 159 | throw new BCVerifierError("Peer returned error: " + response.responses[0].response.message); 160 | } else { 161 | return response.queryResults[0]; 162 | } 163 | } 164 | } 165 | 166 | export default class FabricQuery2Plugin implements NetworkPlugin { 167 | private sources: FabricQuery2Source[] | null; 168 | private pluginConfig: FabricQuery2PluginConfig; 169 | 170 | public constructor(config: string) { 171 | if (config === "") { 172 | throw new BCVerifierError("fabric-query2 plugin: config should be the configuration file"); 173 | } 174 | this.pluginConfig = JSON.parse(fs.readFileSync(config).toString()); 175 | this.sources = null; 176 | } 177 | 178 | public getDataModelType(): DataModelType { 179 | return DataModelType.KeyValue; 180 | } 181 | 182 | public async getBlockSources(): Promise { 183 | if (this.sources == null) { 184 | if (this.pluginConfig.peers) { 185 | this.sources = []; 186 | for (const peer of this.pluginConfig.peers) { 187 | const blockSource = new FabricQuery2Source(this.pluginConfig, peer); 188 | await blockSource.init(); 189 | 190 | this.sources.push(blockSource); 191 | } 192 | } else if (this.pluginConfig.peer) { 193 | const blockSource = new FabricQuery2Source(this.pluginConfig, this.pluginConfig.peer); 194 | await blockSource.init(); 195 | 196 | this.sources = [blockSource]; 197 | } else { 198 | throw new BCVerifierError("fabric-query2 Plugin: No peer is specified in the config") 199 | } 200 | } 201 | return this.sources; 202 | } 203 | 204 | public async getPreferredBlockSource(): Promise { 205 | if (this.sources == null) { 206 | await this.getBlockSources(); 207 | } 208 | if (this.sources != null && this.sources.length > 0) { 209 | return this.sources[0]; 210 | } else { 211 | throw new BCVerifierError("fabric-query2 Plugin: Cannot find any 
source"); 212 | } 213 | } 214 | 215 | public async createCheckpoint(provider: BlockProvider, transaction: Transaction): Promise { 216 | const kvProvider = provider as KeyValueBlockProvider; 217 | const fabricTransaction = transaction as FabricTransaction; 218 | 219 | const lastBlock = fabricTransaction.getBlock(); 220 | const configBlockIndex = lastBlock.getLastConfigBlockIndex(); 221 | const configInfo = await FabricConfigCache.GetInstance().getConfig(configBlockIndex); 222 | 223 | const context: FabricBCVCheckpointContext = { 224 | block: lastBlock, 225 | configInfo: configInfo, 226 | transaction: fabricTransaction, 227 | timestamp: Date.now(), 228 | }; 229 | 230 | if (kvProvider instanceof KeyValueBlockProvider) { 231 | const state = await kvProvider.getKeyValueState(fabricTransaction); 232 | context.state = state; 233 | } 234 | 235 | const checkpoint = new FabricBCVCheckpoint("fabric-query2", null, context); 236 | 237 | return await checkpoint.getCheckpoint(); 238 | } 239 | 240 | public loadFromCheckpoint(data: BCVCheckpointData): BCVCheckpoint { 241 | const fabricCheckpointData = data as FabricBCVCheckpointData; 242 | 243 | return new FabricBCVCheckpoint("fabric-query2", fabricCheckpointData); 244 | } 245 | } 246 | -------------------------------------------------------------------------------- /src/output/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2019 Hitachi America, Ltd. 3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { ResultSet } from "../result-set"; 8 | 9 | export interface OutputPlugin { 10 | convertResult(resultSet: ResultSet): Promise; 11 | } 12 | -------------------------------------------------------------------------------- /src/output/json.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Hitachi America, Ltd. All rights reserved. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { format } from "util"; 8 | 9 | import { OutputPlugin } from "."; 10 | import { BCVerifierError, CheckResult, ResultCode } from "../common"; 11 | import { ResultSet } from "../result-set"; 12 | 13 | type JSONableCheckResult = { 14 | checkerID: string; 15 | result: string; 16 | predicate?: string; 17 | operands?: string[]; 18 | skipReason?: string; 19 | }; 20 | 21 | type JSONableBlockResult = { 22 | number: number; 23 | results: JSONableCheckResult[]; 24 | }; 25 | 26 | type JSONableTransactionResult = { 27 | id: string; 28 | blockNumber: number; 29 | results: JSONableCheckResult[]; 30 | }; 31 | 32 | type JSONableResultSet = { 33 | blocks: JSONableBlockResult[]; 34 | transactions: JSONableTransactionResult[]; 35 | }; 36 | 37 | const resultToStr = { 38 | 0 : "OK", 39 | 1 : "ERROR", 40 | 2 : "SKIPPED", 41 | }; 42 | const predicateToStr = { 43 | 0: "EQ", 44 | 1: "EQ", 45 | 2: "INVOKE", 46 | 3: "LT", 47 | 4: "LE", 48 | 5: "GT", 49 | 6: "GE", 50 | }; 51 | 52 | function resultToJSONable(r: CheckResult): JSONableCheckResult { 53 | if (r.result === ResultCode.OK || r.result === ResultCode.ERROR) { 54 | return { 55 | checkerID: r.checkerID, 56 | result: resultToStr[r.result], 57 | predicate: predicateToStr[r.predicate], 58 | operands: r.operands.map((o) => o.name) 59 | }; 60 | } else if (r.result === ResultCode.SKIPPED) { 61 | return { 62 | checkerID: r.checkerID, 63 | result: resultToStr[r.result], 64 | skipReason: r.skipReason 65 | }; 66 | } else { 67 | throw new BCVerifierError(format("Unexpected result code : %d", r.result)); 68 | } 69 | } 70 | 71 | function convertResultToJSONable(resultSet: ResultSet): JSONableResultSet { 72 | return { 73 | blocks: resultSet.getBlockResults().map((r) => { 74 | return { 75 | number: r.number, 76 | results: r.results.map((res) => resultToJSONable(res)) 77 | }; 78 | }), 79 | transactions: resultSet.getTransactionResults().map((r) => { 80 | return { 81 | id: r.transactionID, 82 | blockNumber: r.blockNumber, 83 | results: r.results.map((res) => resultToJSONable(res)) 84 | }; 85 | }) 86 | }; 87 | } 88 | 89 | export class JSONOutput implements OutputPlugin { 90 | public async convertResult(resultSet: ResultSet): Promise { 91 | const resultObj = convertResultToJSONable(resultSet); 92 | 93 | return Buffer.from(JSON.stringify(resultObj)); 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/provider.test.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { Block, KeyValueTransaction } from "./common"; 8 | import { KeyValueManagerBlockNotSufficientError, SimpleKeyValueManager } from "./kvmanager"; 9 | import { correctBlocks, MockSource } from "./mock/mock-block"; 10 | import { BlockProvider, KeyValueBlockProvider } from "./provider"; 11 | 12 | jest.mock("./kvmanager"); 13 | 14 | describe("BlockProvider", () => { 15 | const blockSource = new MockSource("mockSource", "mockOrg", correctBlocks); 16 | const blockSourceWithTx = new MockSource("mockSource", "mockOrg", correctBlocks, true); 17 | 18 | beforeEach(() => { 19 | // Casting is necessary for mocking to work with TS 20 | (SimpleKeyValueManager as any).mockClear(); 21 | }); 22 | 23 | test("Good path", async () => { 24 | const provider = new BlockProvider(blockSource); 25 | expect(provider.getSourceID()).toBe("mockSource"); 26 | 27 | await expect(provider.getBlock(1)).resolves.toBe(correctBlocks[1]); 28 | await expect(provider.getBlock(0)).resolves.toBe(correctBlocks[0]); 29 | 30 | await expect(provider.getBlockHash(0)).resolves.toEqual(correctBlocks[0].getHashValue()); 31 | 32 | const tx = await provider.getTransaction("Tx1"); 33 | expect(tx.getTransactionID()).toBe("Tx1"); 34 | expect(tx.getTransactionType()).toBe(1); 35 | 36 | const provider2 = new BlockProvider(blockSource); 37 | await expect(provider2.cacheBlockRange(0, 1)).resolves.toBeUndefined(); 38 | // cache again the cached block 39 | await expect(provider2.cacheBlockRange(1, 1)).resolves.toBeUndefined(); 40 | 41 | const provider3 = new BlockProvider(blockSource); 42 | await expect(provider3.cacheBlockRange(0, 0)).resolves.toBeUndefined(); 43 | await expect(provider3.cacheBlockRange(1, 1)).resolves.toBeUndefined(); 44 | 45 | // Find a transaction with no block cached 46 | const provider4 = new BlockProvider(blockSource); 47 | const tx2 = await provider4.getTransaction("Tx2"); 48 | expect(tx2.getTransactionID()).toBe("Tx2"); 49 | expect(tx2.getTransactionType()).toBe(2); 50 | 51 | // Find a transaction with BlockSource's findTransaction enabled 52 | const provider5 = new BlockProvider(blockSourceWithTx); 53 | const tx3 = await provider5.getTransaction("Tx3"); 54 | expect(tx3.getTransactionID()).toBe("Tx3"); 55 | expect(tx3.getTransactionType()).toBe(3); 56 | 57 | // Find a transaction with BlockSource's findTransaction enabled and blocks cached 58 | const provider6 = new BlockProvider(blockSourceWithTx); 59 | await expect(provider6.cacheBlockRange(0, 1)).resolves.toBeUndefined(); 60 | const tx4 = await provider6.getTransaction("Tx4"); 61 | expect(tx4.getTransactionID()).toBe("Tx4"); 62 | expect(tx4.getTransactionType()).toBe(1); 63 | }); 64 | 65 | test("Error path", async () => { 66 | const provider = new BlockProvider(blockSource); 67 | 68 | await expect(provider.getBlock(-1)).rejects.toThrowError(); 69 | await expect(provider.getBlock(correctBlocks.length)).rejects.toThrowError(); 70 | await expect(provider.getBlock(9999)).rejects.toThrowError(); 71 | await expect(provider.getBlockHash(-1)).rejects.toThrowError(); 72 | await expect(provider.getBlockHash(correctBlocks.length)).rejects.toThrowError(); 73 | await expect(provider.getBlockHash(9999)).rejects.toThrowError(); 74 | 75 | await expect(provider.cacheBlockRange(1, 0)).rejects.toThrowError(); 76 | await expect(provider.cacheBlockRange(9999, 10000)).rejects.toThrowError(); 77 | await expect(provider.cacheBlockRange(-9999, 9999)).rejects.toThrowError(); 78 | 79 | await 
expect(provider.getTransaction("TxNonExist")).rejects.toThrowError(); 80 | 81 | const provider2 = new BlockProvider(blockSourceWithTx); 82 | await expect(provider2.getTransaction("TxNonExist")).rejects.toThrowError(); 83 | }); 84 | 85 | class StrangeSource extends MockSource { 86 | public async findBlockByTransaction(_transactionID: string): Promise { 87 | return this.getBlock(1); 88 | } 89 | } 90 | 91 | test("Strange BlockSource", async () => { 92 | const provider = new BlockProvider(new StrangeSource("strange-source", "strange-org", correctBlocks)); 93 | 94 | await expect(provider.getTransaction("Tx1")).rejects.toThrowError(); 95 | }); 96 | 97 | test("KeyValueProvider", async () => { 98 | const provider = new KeyValueBlockProvider(new MockSource("mockSource", "mockOrg", correctBlocks)); 99 | 100 | expect(SimpleKeyValueManager).toHaveBeenCalledTimes(1); 101 | const mockKVM: any = (SimpleKeyValueManager as any).mock.instances[0]; 102 | 103 | const b = await provider.getBlock(0); 104 | expect(mockKVM.feedBlock).toHaveBeenCalledTimes(1); 105 | expect(mockKVM.feedBlock).toHaveBeenCalledWith(b); 106 | 107 | await provider.getKeyValueState(b.getTransactions()[0] as KeyValueTransaction); 108 | expect(mockKVM.getState).toHaveBeenCalledTimes(1); 109 | expect(mockKVM.getState).toHaveBeenCalledWith(b); 110 | }); 111 | test("KeyValueProvider without cache", async () => { 112 | const provider = new KeyValueBlockProvider(new MockSource("mockSource", "mockOrg", correctBlocks)); 113 | expect(SimpleKeyValueManager).toHaveBeenCalledTimes(1); 114 | 115 | const mockKVM: any = (SimpleKeyValueManager as any).mock.instances[0]; 116 | mockKVM.getState.mockImplementationOnce(() => { 117 | throw new KeyValueManagerBlockNotSufficientError(""); 118 | }).mockImplementationOnce(() => { 119 | return {}; 120 | }); 121 | let feedCount = 0; 122 | mockKVM.getNextDesiredBlockNumber.mockImplementation(() => feedCount); 123 | mockKVM.feedBlock.mockImplementation(() => { feedCount++; }); 124 | 125 | await provider.getKeyValueState(correctBlocks[1].getTransactions()[0] as KeyValueTransaction); 126 | expect(feedCount).toBe(2); 127 | expect(mockKVM.getState).toHaveBeenCalledTimes(2); 128 | }); 129 | test("KeyValueProvider getState error", async () => { 130 | const provider = new KeyValueBlockProvider(new MockSource("mockSource", "mockOrg", correctBlocks)); 131 | expect(SimpleKeyValueManager).toHaveBeenCalledTimes(1); 132 | 133 | const mockKVM: any = (SimpleKeyValueManager as any).mock.instances[0]; 134 | mockKVM.getState.mockImplementationOnce(() => { 135 | throw new Error("Other error"); 136 | }); 137 | 138 | expect(provider.getKeyValueState(correctBlocks[1].getTransactions()[0] as KeyValueTransaction)) 139 | .rejects.toThrowError(); 140 | }); 141 | }); 142 | -------------------------------------------------------------------------------- /src/provider.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2018-2020 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | import { BCVerifierError, BCVerifierNotImplemented, Block, 8 | KeyValueBlock, KeyValueState, KeyValueTransaction, Transaction } from "./common"; 9 | import { KeyValueManager, KeyValueManagerBlockNotSufficientError, KeyValueManagerInitialState, SimpleKeyValueManager } from "./kvmanager"; 10 | import { BlockSource } from "./network-plugin"; 11 | 12 | // Simple in-memory cacher 13 | export class BlockProvider { 14 | private source: BlockSource; 15 | private blocks: { [blockNumber: number]: Block }; 16 | private transactions: { [transactionID: string]: Transaction }; 17 | private transactionTypeIndex: { [transactionType: number]: Transaction[] }; 18 | 19 | constructor(source: BlockSource) { 20 | this.source = source; 21 | this.blocks = {}; 22 | this.transactions = {}; 23 | this.transactionTypeIndex = {}; 24 | } 25 | 26 | public async getBlock(blockNumber: number): Promise { 27 | if (this.blocks[blockNumber] != null) { 28 | return this.blocks[blockNumber]; 29 | } else { 30 | const block = await this.source.getBlock(blockNumber); 31 | 32 | this.blocks[blockNumber] = block; 33 | this.registerTransactions(block); 34 | 35 | return block; 36 | } 37 | } 38 | 39 | public async getBlockHash(blockNumber: number): Promise { 40 | if (this.blocks[blockNumber] != null) { 41 | return this.blocks[blockNumber].getHashValue(); 42 | } else { 43 | return await this.source.getBlockHash(blockNumber); 44 | } 45 | } 46 | 47 | public async cacheBlockRange(blockStart: number, blockEnd: number): Promise { 48 | if (blockStart > blockEnd) { 49 | throw new BCVerifierError("cacheBlockRange: invalid range"); 50 | } 51 | const bs = await this.source.getBlockRange(blockStart, blockEnd); 52 | for (let i = 0; i < blockEnd - blockStart + 1; i++) { 53 | if (this.blocks[blockStart + i] == null) { 54 | this.blocks[blockStart + i] = bs[i]; 55 | this.registerTransactions(bs[i]); 56 | } 57 | } 58 | } 59 | 60 | public async getTransaction(transactionId: string): Promise { 61 | if (this.transactions[transactionId] != null) { 62 | return this.transactions[transactionId]; 63 | } 64 | try { 65 | const b = await this.source.findBlockByTransaction(transactionId); 66 | 67 | this.registerTransactions(b); 68 | if (this.transactions[transactionId] != null) { 69 | return this.transactions[transactionId]; 70 | } else { 71 | throw new BCVerifierError("transaction is not found in a block where it should be in. " 72 | + "A possible bug in the block plugin."); 73 | } 74 | } catch (e) { 75 | if (!(e instanceof BCVerifierNotImplemented)) { 76 | throw e; 77 | } 78 | } 79 | // Fallback... get all blocks and inspect them. 
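        // registerTransactions() indexes every transaction of each block it caches, so after
        // caching blocks 0 .. height-1 the requested ID is either present in this.transactions
        // or it does not exist on this source at all. This fallback costs a full scan of the
        // ledger, which is why the findBlockByTransaction fast path above is tried first.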
80 | const height = await this.source.getBlockHeight(); 81 | await this.cacheBlockRange(0, height - 1); 82 | 83 | if (this.transactions[transactionId] != null) { 84 | return this.transactions[transactionId]; 85 | } else { 86 | throw new BCVerifierError("getTransaction: transaction is not found"); 87 | } 88 | } 89 | 90 | public getSourceID(): string { 91 | return this.source.getSourceID(); 92 | } 93 | 94 | protected registerTransactions(block: Block): void { 95 | const txs = block.getTransactions(); 96 | for (const tx of txs) { 97 | // Update transaction ID index 98 | this.transactions[tx.getTransactionID()] = tx; 99 | // Update transaction type index 100 | const type = tx.getTransactionType(); 101 | if (this.transactionTypeIndex[type] == null) { 102 | this.transactionTypeIndex[type] = []; 103 | } 104 | this.transactionTypeIndex[type].push(tx); 105 | } 106 | } 107 | } 108 | 109 | export interface KeyValueProviderOptions { 110 | initialState?: KeyValueManagerInitialState; 111 | } 112 | 113 | export class KeyValueBlockProvider extends BlockProvider { 114 | protected keyValueManager: KeyValueManager; 115 | 116 | constructor(source: BlockSource, opts?: KeyValueProviderOptions) { 117 | super(source); 118 | if (opts == null) { 119 | this.keyValueManager = new SimpleKeyValueManager(); 120 | } else { 121 | this.keyValueManager = new SimpleKeyValueManager(opts.initialState); 122 | } 123 | } 124 | 125 | public async getKeyValueState(tx: KeyValueTransaction): Promise { 126 | try { 127 | return this.keyValueManager.getState(tx.getBlock() as KeyValueBlock); 128 | } catch (e) { 129 | if (!(e instanceof KeyValueManagerBlockNotSufficientError)) { 130 | throw e; 131 | } 132 | } 133 | // Slow path: feed sufficient blocks 134 | const blockNum = tx.getBlock().getBlockNumber(); 135 | 136 | while (this.keyValueManager.getNextDesiredBlockNumber() <= blockNum) { 137 | const nextBlock = this.keyValueManager.getNextDesiredBlockNumber(); 138 | const block = await this.getBlock(nextBlock); 139 | 140 | this.keyValueManager.feedBlock(block as KeyValueBlock); 141 | } 142 | 143 | return this.keyValueManager.getState(tx.getBlock() as KeyValueBlock); 144 | } 145 | 146 | public getAppTransaction(transactionId: string) { 147 | return this.keyValueManager.getTransaction(transactionId); 148 | } 149 | 150 | protected registerTransactions(block: Block): void { 151 | super.registerTransactions(block); 152 | this.registerKeyValue(block as KeyValueBlock); 153 | } 154 | 155 | protected registerKeyValue(block: KeyValueBlock): void { 156 | this.keyValueManager.feedBlock(block); 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /src/samples/fabcar.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2020 Hitachi America, Ltd. 
3 | * 4 | * SPDX-License-Identifier: Apache-2.0 5 | */ 6 | 7 | /* eslint-disable no-console */ 8 | 9 | import { AppTransaction, AppTransactionCheckLogic, CheckPlugin, 10 | FabricFunctionInfo, FabricTransaction, ResultSet } from ".."; 11 | import { BCVerifierNotFound, ResultCode, ResultPredicate } from "../common"; 12 | 13 | export default class FabCarChecker extends CheckPlugin implements AppTransactionCheckLogic { 14 | public async probeTransactionCheck(appTx: AppTransaction): Promise { 15 | // Check if the transaction is a Hyperledger Fabric one 16 | if (!(appTx.getTransaction() instanceof FabricTransaction)) { 17 | return false; 18 | } 19 | const fabricTx = appTx.getTransaction() as FabricTransaction; 20 | const action = fabricTx.getActions()[0]; 21 | // Check if the transaction is a normal one (not config transaction) and contains some action 22 | if (fabricTx.getTransactionTypeString() !== "ENDORSER_TRANSACTION" || action == null) { 23 | return false; 24 | } 25 | const func = action.getFunction(); 26 | if (func == null || func.ccName !== "fabcar") { 27 | return false; 28 | } 29 | 30 | return true; 31 | } 32 | 33 | public async performTransactionCheck(appTx: AppTransaction, resultSet: ResultSet): Promise { 34 | const fabricTx = appTx.getTransaction() as FabricTransaction; 35 | const action = fabricTx.getActions()[0]; 36 | const func = action.getFunction() as FabricFunctionInfo; 37 | const funcNameStr = func.funcName.toString(); 38 | 39 | if (funcNameStr === "createCar") { 40 | // createCar(key, make, model, color, owner) 41 | const CHECKER_ID = "fabcar-createCar-checker"; 42 | 43 | const writeSet = fabricTx.getWriteSet(); 44 | const values = writeSet.filter((pair) => pair.key.toString().startsWith("fabcar\0")); 45 | 46 | if (values.length !== 1) { 47 | resultSet.pushTransactionResult(fabricTx, { 48 | checkerID: CHECKER_ID, 49 | result: ResultCode.ERROR, 50 | predicate: ResultPredicate.EQ, 51 | operands: [ { name: fabricTx.toString() + ".WriteSet.length", value: values.length }, 52 | { name: "1", value: 1 } ] 53 | }); 54 | console.error("ERROR: CreateCar should not write to more than one key"); 55 | console.debug(" Tx %s writes to keys %s", fabricTx.getTransactionID(), 56 | values.map((k) => k.key.toString()).join(",")); 57 | } else { 58 | resultSet.pushTransactionResult(fabricTx, { 59 | checkerID: CHECKER_ID, 60 | result: ResultCode.OK, 61 | predicate: ResultPredicate.EQ, 62 | operands: [ { name: fabricTx.toString() + ".WriteSet.length", value: values.length }, 63 | { name: "1", value: 1 } ] 64 | }); 65 | 66 | const v = values[0]; 67 | if (v.key.toString() !== "fabcar\0" + func.args[0].toString()) { 68 | resultSet.pushTransactionResult(fabricTx, { 69 | checkerID: CHECKER_ID, 70 | result: ResultCode.ERROR, 71 | predicate: ResultPredicate.EQ, 72 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].key", value: v.key.toString() }, 73 | { name: "fabcar\0" + func.args[0].toString(), 74 | value: "fabcar\0" + func.args[0].toString() } ] 75 | }); 76 | console.error("ERROR: CreateCar should not write to other key than %s", func.args[0].toString()); 77 | } else { 78 | resultSet.pushTransactionResult(fabricTx, { 79 | checkerID: CHECKER_ID, 80 | result: ResultCode.OK, 81 | predicate: ResultPredicate.EQ, 82 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].key", value: v.key.toString() }, 83 | { name: "fabcar\0" + func.args[0].toString(), 84 | value: "fabcar\0" + func.args[0].toString() } ] 85 | }); 86 | 87 | const state = appTx.getState(); 88 | 89 | try { 90 | state.getValue(v.key); 
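                            // getValue() returning normally means a car with this key already
                            // existed in the pre-transaction state, i.e. this createCar overwrote
                            // an existing car, which is reported as an error just below. The
                            // BCVerifierNotFound branch in the catch block is the expected case.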
91 | console.error("ERROR: CreateCar should not overwrite the existing car"); 92 | 93 | resultSet.pushTransactionResult(fabricTx, { 94 | checkerID: CHECKER_ID, 95 | result: ResultCode.ERROR, 96 | predicate: ResultPredicate.INVOKE, 97 | operands: [ { name: "getValue(" + v.key + ")", value: v.key.toString() } ] 98 | }); 99 | } catch (e) { 100 | if (e instanceof BCVerifierNotFound) { 101 | resultSet.pushTransactionResult(fabricTx, { 102 | checkerID: CHECKER_ID, 103 | result: ResultCode.OK, 104 | predicate: ResultPredicate.INVOKE, 105 | operands: [ { name: "getValue(" + v.key + ")", value: v.key.toString() } ] 106 | }); 107 | console.log("INFO: Transaction %s: createCar is ok", fabricTx.getTransactionID()); 108 | } else { 109 | console.error("ERROR: Error while checking: %s", e); 110 | } 111 | } 112 | } 113 | } 114 | } else if (funcNameStr === "changeCarOwner") { 115 | // changeCarOwner(key, newOwner) 116 | const CHECKER_ID = "fabcar-changeCarOwner-checker"; 117 | 118 | const writeSet = fabricTx.getWriteSet(); 119 | const values = writeSet.filter((pair) => pair.key.toString().startsWith("fabcar\0")); 120 | 121 | if (values.length !== 1) { 122 | resultSet.pushTransactionResult(fabricTx, { 123 | checkerID: CHECKER_ID, 124 | result: ResultCode.ERROR, 125 | predicate: ResultPredicate.EQ, 126 | operands: [ { name: fabricTx.toString() + ".WriteSet.length", value: values.length }, 127 | { name: "1", value: 1 } ] 128 | }); 129 | 130 | console.error("ERROR: changeCarOwner should not write to more than one key"); 131 | console.debug(" Tx %s writes to keys %s", fabricTx.getTransactionID(), 132 | values.map((k) => k.key.toString()).join(",")); 133 | } else { 134 | resultSet.pushTransactionResult(fabricTx, { 135 | checkerID: CHECKER_ID, 136 | result: ResultCode.OK, 137 | predicate: ResultPredicate.EQ, 138 | operands: [ { name: fabricTx.toString() + ".WriteSet.length", value: values.length }, 139 | { name: "1", value: 1 } ] 140 | }); 141 | 142 | const v = values[0]; 143 | if (v.key.toString() !== "fabcar\0" + func.args[0].toString()) { 144 | resultSet.pushTransactionResult(fabricTx, { 145 | checkerID: CHECKER_ID, 146 | result: ResultCode.ERROR, 147 | predicate: ResultPredicate.EQ, 148 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].key", value: v.key.toString() }, 149 | { name: "fabcar\0" + func.args[0].toString(), 150 | value: "fabcar\0" + func.args[0].toString() } ] 151 | }); 152 | 153 | console.error("ERROR: changeCarOwner should not write other cars than specified."); 154 | console.debug(" Tx %s writes to key %s", fabricTx.getTransactionID(), v.key.toString()); 155 | } else { 156 | resultSet.pushTransactionResult(fabricTx, { 157 | checkerID: CHECKER_ID, 158 | result: ResultCode.OK, 159 | predicate: ResultPredicate.EQ, 160 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].key", value: v.key.toString() }, 161 | { name: "fabcar\0" + func.args[0].toString(), 162 | value: "fabcar\0" + func.args[0].toString() } ] 163 | }); 164 | 165 | if (v.isDelete === true) { 166 | resultSet.pushTransactionResult(fabricTx, { 167 | checkerID: CHECKER_ID, 168 | result: ResultCode.ERROR, 169 | predicate: ResultPredicate.EQ, 170 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].isDelete", value: v.isDelete }, 171 | { name: "true", value: true } ] 172 | }); 173 | 174 | console.error("ERROR: changeCarOwner should not delete the key"); 175 | } else { 176 | resultSet.pushTransactionResult(fabricTx, { 177 | checkerID: CHECKER_ID, 178 | result: ResultCode.OK, 179 | predicate: ResultPredicate.EQ, 180 | 
operands: [ { name: fabricTx.toString() + ".WriteSet[0].isDelete", value: v.isDelete }, 181 | { name: "true", value: true } ] 182 | }); 183 | 184 | const newCar = JSON.parse(v.value.toString()); 185 | 186 | if (newCar.owner !== func.args[1].toString()) { 187 | resultSet.pushTransactionResult(fabricTx, { 188 | checkerID: CHECKER_ID, 189 | result: ResultCode.ERROR, 190 | predicate: ResultPredicate.EQ, 191 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].value.owner", 192 | value: v.key.toString() }, 193 | { name: func.args[1].toString(), 194 | value: func.args[1].toString() } ] 195 | }); 196 | 197 | console.error("ERROR: changeCarOwner changes the owner to another person: %s", 198 | newCar.owner); 199 | } else { 200 | resultSet.pushTransactionResult(fabricTx, { 201 | checkerID: CHECKER_ID, 202 | result: ResultCode.OK, 203 | predicate: ResultPredicate.EQ, 204 | operands: [ { name: fabricTx.toString() + ".WriteSet[0].value.owner", 205 | value: v.key.toString() }, 206 | { name: func.args[1].toString(), 207 | value: func.args[1].toString() } ] 208 | }); 209 | 210 | console.log("INFO: Transaction %s: changeCarOwner is ok", fabricTx.getTransactionID()); 211 | } 212 | } 213 | } 214 | } 215 | } 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/chains/mychannel/blockfile_000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/chains/chains/mychannel/blockfile_000000 -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/index/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/chains/index/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/index/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/index/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/chains/index/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/index/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 06:57:47.040962 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 06:57:47.049465 db@open opening 4 | 06:57:47.050550 version@stat F·[] S·0B[] Sc·[] 5 | 06:57:47.052560 db@janitor F·2 G·0 6 | 06:57:47.052581 db@open done T·2.397486ms 7 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/chains/index/MANIFEST-000000: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/chains/index/MANIFEST-000000 -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "blockHeight": 10, 3 | "hashes": [ 4 | "a8c363ad5cff83a41743ccfb8e9c3e48d0cade6bb187b1e29818f7a52ca69d4b", 5 | "4d4d654acea3d15ce11388d1409637d2c0e92f46a0c4301830600f01d6440c2c", 6 | "f2c9fc8790aa65ddb2e406203bc9d58124b47542fa178a6b66762a0d9842e9f6", 7 | "97742f411c050c7e31a6446ac966987d8cd9db6184f57005a519bc11a081cdcc", 8 | "519afdedb81f027284424e3e3c0078ab36948a8370fffd8a4645c1dd263bf687", 9 | "c1947444f6998cf40585013d78c0312e8e4f60412267c2ab48cc3419cf2cf561", 10 | "d246793aac8617e095ecc0fb5ca1f51d47ad7404c328be3ef769542e74f10a9d", 11 | "bd5e22dffd7841cea82f7dbe98b666d7759d2213360a101a83a4c9d0b876c565", 12 | "c29758264e887a4949b18e93f58d0a63cfbf9d1593e773ae971a47a8d14c7eb3", 13 | "1768daca2dbedd4bcdd29a7876128b8f4d1ba0d5f7b456ddf747c2237e6aa642" 14 | ], 15 | "numTransactions": [ 16 | 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 17 | ], 18 | "ledgers": [ 19 | { 20 | "name": "org1", 21 | "ledgerStore": "./chains/chains/mychannel", 22 | "blockFile": "./chains/chains/mychannel/blockfile_000000" 23 | } 24 | ] 25 | } 26 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/pvtdataStore/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/pvtdataStore/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/pvtdataStore/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/pvtdataStore/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/pvtdataStore/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/pvtdataStore/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 06:57:47.057760 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 06:57:47.070291 db@open opening 4 | 06:57:47.070601 version@stat F·[] S·0B[] Sc·[] 5 | 06:57:47.075578 db@janitor F·2 G·0 6 | 06:57:47.075611 db@open done T·5.294148ms 7 | -------------------------------------------------------------------------------- /test/asset-transfer-basic-2.4.7/pvtdataStore/MANIFEST-000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-basic-2.4.7/pvtdataStore/MANIFEST-000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/config.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "blockHeight": 11, 3 | "hashes": [ 4 | "6de05b3f69cf24991b981c552b691426bb54cfd5b42503f87a83b31acdc1fc88", 5 | "a689898c06fb509b5860dd17b01268c2459b7dc6e165a7140af86ebdc28fde58", 6 | "e57605cc9ddf1c46a2b5af72778b0b8298ee8dc73b905b5e1590fc577d897777", 7 | "da481feb15476e8778b1935be40597d3d45f5d474ef53b892c804c7950fc503d", 8 | "4e574529841a2df494d6eeb1f4c0a97b7f90f8104662475bbe214e9e31e6abe8", 9 | "3857cd0576679157640061f76f0dae50f9edbf30ac2762ee374b890e8cc36d29", 10 | "d795c8c2fe531e08d107ad20fac4664554c3e7764cc73a0f84723e806e2cd7f4", 11 | "1226ccd38d4c4bbd4907353be04f287335c46b866fc93441218cb655bc867889", 12 | "3ef170fa84fd43a7098c3cdf2448233678a99132ba3c9f38500bfbb1757ed9d9", 13 | "474ad3e4c7a051d95ba620d16b7b141e7d27bc511aa2b1238ca9025791333639", 14 | "8e5c12c605dcd02a5e5e0abd9bf64874c5f530f33b14e7ebd6bfea0dfbcb4086" 15 | ], 16 | "numTransactions": [ 17 | 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 18 | ], 19 | "ledgers": [ 20 | { 21 | "name": "org1", 22 | "ledgerStore": "./org1/chains/chains/mychannel", 23 | "blockFile": "./org1/chains/chains/mychannel/blockfile_000000", 24 | "privateDataStore": "./org1/pvtdataStore" 25 | }, 26 | { 27 | "name": "org2", 28 | "ledgerStore": "./org2/chains/chains/mychannel", 29 | "blockFile": "./org2/chains/chains/mychannel/blockfile_000000", 30 | "privateDataStore": "./org2/pvtdataStore" 31 | } 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/chains/mychannel/blockfile_000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/chains/chains/mychannel/blockfile_000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/index/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/chains/index/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/index/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/index/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/chains/index/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/index/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 07:01:33.428963 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 07:01:33.437968 db@open opening 4 | 07:01:33.438520 version@stat F·[] S·0B[] Sc·[] 5 | 07:01:33.443923 db@janitor F·2 G·0 6 | 07:01:33.443964 db@open done T·5.794662ms 7 | 
-------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/chains/index/MANIFEST-000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/chains/index/MANIFEST-000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 07:01:33.448521 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 07:01:33.456551 db@open opening 4 | 07:01:33.457299 version@stat F·[] S·0B[] Sc·[] 5 | 07:01:33.461912 db@janitor F·2 G·0 6 | 07:01:33.461949 db@open done T·5.378806ms 7 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/MANIFEST-000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org1/pvtdataStore/MANIFEST-000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/chains/mychannel/blockfile_000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/chains/chains/mychannel/blockfile_000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/index/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/chains/index/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/index/CURRENT: 
-------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/index/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/chains/index/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/index/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 07:01:33.403835 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 07:01:33.417113 db@open opening 4 | 07:01:33.417608 version@stat F·[] S·0B[] Sc·[] 5 | 07:01:33.421616 db@janitor F·2 G·0 6 | 07:01:33.421656 db@open done T·4.522907ms 7 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/chains/index/MANIFEST-000000: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/chains/index/MANIFEST-000000 -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/000001.log: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/000001.log -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/CURRENT: -------------------------------------------------------------------------------- 1 | MANIFEST-000000 2 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/LOCK: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/LOCK -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/LOG: -------------------------------------------------------------------------------- 1 | =============== Dec 2, 2022 (UTC) =============== 2 | 07:01:33.426574 log@legend F·NumFile S·FileSize N·Entry C·BadEntry B·BadBlock Ke·KeyError D·DroppedEntry L·Level Q·SeqNum T·TimeElapsed 3 | 07:01:33.436760 db@open opening 4 | 07:01:33.437176 version@stat F·[] S·0B[] Sc·[] 5 | 07:01:33.441858 db@janitor F·2 G·0 6 | 07:01:33.441886 db@open done T·5.10976ms 7 | -------------------------------------------------------------------------------- /test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/MANIFEST-000000: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/hyperledger-labs/blockchain-verifier/c8ee1f35c2895fd0d8b3d63fe0f897a458302208/test/asset-transfer-private-data-2.4.7/org2/pvtdataStore/MANIFEST-000000 -------------------------------------------------------------------------------- /test/fabric-query2/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "peer": { 3 | "url": "grpcs://localhost:7051", 4 | "mspID": "org1", 5 | "tlsCACertFile": "test/fabric-query2/org1-ca-tls.pem" 6 | }, 7 | "channel": "test-channel", 8 | "client": { 9 | "certFile": "test/fabric-query2/user-cert.pem", 10 | "keyFile": "test/fabric-query2/user-key.pem", 11 | "mspID": "user-org", 12 | "mutualTLS": { 13 | "certFile": "test/fabric-query2/tls-user-cert.pem", 14 | "keyFile": "test/fabric-query2/tls-user-key.pem" 15 | } 16 | }, 17 | "config": { 18 | "useDiscovery": false 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /test/fabric-query2/config.multiple.json: -------------------------------------------------------------------------------- 1 | { 2 | "peers": [ 3 | { 4 | "url": "grpcs://localhost:7051", 5 | "mspID": "org1", 6 | "tlsCACertFile": "test/fabric-query2/org1-ca-tls.pem" 7 | }, 8 | { 9 | "url": "grpcs://localhost:8051", 10 | "mspID": "org2", 11 | "tlsCACertFile": "test/fabric-query2/org2-ca-tls.pem" 12 | } 13 | ], 14 | "channel": "test-channel", 15 | "client": { 16 | "certFile": "test/fabric-query2/user-cert.pem", 17 | "keyFile": "test/fabric-query2/user-key.pem", 18 | "mspID": "user-org", 19 | "mutualTLS": { 20 | "certFile": "test/fabric-query2/tls-user-cert.pem", 21 | "keyFile": "test/fabric-query2/tls-user-key.pem" 22 | } 23 | }, 24 | "config": { 25 | "useDiscovery": false 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test/fabric-query2/config.none.json: -------------------------------------------------------------------------------- 1 | { 2 | "channel": "test-channel", 3 | "client": { 4 | "certFile": "test/fabric-query2/user-cert.pem", 5 | "keyFile": "test/fabric-query2/user-key.pem", 6 | "mspID": "user-org", 7 | "mutualTLS": { 8 | "certFile": "test/fabric-query2/tls-user-cert.pem", 9 | "keyFile": "test/fabric-query2/tls-user-key.pem" 10 | } 11 | }, 12 | "config": { 13 | "useDiscovery": false 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /test/fabric-query2/org1-ca-tls.pem: -------------------------------------------------------------------------------- 1 | org1-ca-tls.pem -------------------------------------------------------------------------------- /test/fabric-query2/org2-ca-tls.pem: -------------------------------------------------------------------------------- 1 | org2-ca-tls.pem -------------------------------------------------------------------------------- /test/fabric-query2/tls-user-cert.pem: -------------------------------------------------------------------------------- 1 | tls-user-cert.pem -------------------------------------------------------------------------------- /test/fabric-query2/tls-user-key.pem: -------------------------------------------------------------------------------- 1 | tls-user-key.pem -------------------------------------------------------------------------------- /test/fabric-query2/user-cert.pem: -------------------------------------------------------------------------------- 1 | user-cert.pem -------------------------------------------------------------------------------- 
/test/fabric-query2/user-key.pem: -------------------------------------------------------------------------------- 1 | user-key.pem -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Basic Options */ 4 | "target": "es2015", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */ 5 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 6 | // "lib": [], /* Specify library files to be included in the compilation. */ 7 | // "allowJs": true, /* Allow javascript files to be compiled. */ 8 | // "checkJs": true, /* Report errors in .js files. */ 9 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 10 | "declaration": true, /* Generates corresponding '.d.ts' file. */ 11 | "sourceMap": true, /* Generates corresponding '.map' file. */ 12 | // "outFile": "./", /* Concatenate and emit output to single file. */ 13 | "outDir": "./build", /* Redirect output structure to the directory. */ 14 | "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 15 | // "removeComments": true, /* Do not emit comments to output. */ 16 | // "noEmit": true, /* Do not emit outputs. */ 17 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 18 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 19 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 20 | 21 | /* Strict Type-Checking Options */ 22 | "strict": true, /* Enable all strict type-checking options. */ 23 | "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 24 | // "strictNullChecks": true, /* Enable strict null checks. */ 25 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 26 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 27 | "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 28 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 29 | 30 | /* Additional Checks */ 31 | "noUnusedLocals": false, /* Report errors on unused locals. */ 32 | "noUnusedParameters": false, /* Report errors on unused parameters. */ 33 | "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 34 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 35 | 36 | /* Module Resolution Options */ 37 | "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 38 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 39 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 40 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 41 | // "typeRoots": [], /* List of folders to include type definitions from. */ 42 | // "types": [], /* Type declaration files to be included in compilation. 
*/ 43 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 44 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 45 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 46 | 47 | /* Source Map Options */ 48 | // "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 49 | // "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */ 50 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 51 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 52 | 53 | /* Experimental Options */ 54 | "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */ 55 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 56 | }, 57 | "exclude": ["build", "node_modules", "**/*.test.ts", "integration"] 58 | } 59 | -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Basic Options */ 4 | "target": "es2015", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */ 5 | "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */ 6 | // "lib": [], /* Specify library files to be included in the compilation. */ 7 | // "allowJs": true, /* Allow javascript files to be compiled. */ 8 | // "checkJs": true, /* Report errors in .js files. */ 9 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 10 | "declaration": true, /* Generates corresponding '.d.ts' file. */ 11 | "sourceMap": true, /* Generates corresponding '.map' file. */ 12 | // "outFile": "./", /* Concatenate and emit output to single file. */ 13 | "outDir": "./build", /* Redirect output structure to the directory. */ 14 | "rootDir": "./src", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ 15 | // "removeComments": true, /* Do not emit comments to output. */ 16 | // "noEmit": true, /* Do not emit outputs. */ 17 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 18 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 19 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 20 | 21 | /* Strict Type-Checking Options */ 22 | "strict": true, /* Enable all strict type-checking options. */ 23 | "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 24 | // "strictNullChecks": true, /* Enable strict null checks. */ 25 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 26 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. 
*/ 27 | "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 28 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 29 | 30 | /* Additional Checks */ 31 | "noUnusedLocals": false, /* Report errors on unused locals. */ 32 | "noUnusedParameters": false, /* Report errors on unused parameters. */ 33 | "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 34 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 35 | 36 | /* Module Resolution Options */ 37 | "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 38 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 39 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 40 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 41 | // "typeRoots": [], /* List of folders to include type definitions from. */ 42 | // "types": [], /* Type declaration files to be included in compilation. */ 43 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 44 | "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 45 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 46 | 47 | /* Source Map Options */ 48 | // "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 49 | // "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */ 50 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 51 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 52 | 53 | /* Experimental Options */ 54 | "experimentalDecorators": true /* Enables experimental support for ES7 decorators. */ 55 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 56 | } 57 | } 58 | --------------------------------------------------------------------------------