├── .env.regtest ├── .gitignore ├── .gitmodules ├── .mocharc.yml ├── .travis.yml ├── .vscode ├── launch.json └── settings.json ├── LICENSE ├── README.md ├── assets └── slpdb_logo.png ├── bit.ts ├── cache.ts ├── config.ts ├── db.ts ├── example-filters.yml ├── example.env ├── examples └── addresses.js ├── filters.ts ├── graphmap.ts ├── index.ts ├── info.ts ├── interfaces.ts ├── migrate-mongo-config.js ├── migrations └── .gitkeep ├── notifications.ts ├── npm-shrinkwrap.json ├── package.json ├── patches └── 1.bitcoind-rpc.patch ├── prunestack.ts ├── query.ts ├── regtest ├── .dockerignore ├── Dockerfile.nodejs ├── README.md ├── _test.sh ├── bitcoind │ ├── Dockerfile │ ├── bitcoin.conf │ └── docker-entrypoint.sh ├── docker-compose.yml ├── slpdb │ ├── .env.regtest │ ├── Dockerfile │ ├── docker-entrypoint.sh │ └── filters.regtest.yml └── test.sh ├── rpc.ts ├── run-service.sh ├── slpgraphmanager.ts ├── slptokengraph.ts ├── status.ts ├── test ├── 1-token-type-1.spec.ts ├── 10-long-chain.spec.ts ├── 11-send-invalid-1.spec.ts ├── 2-genesis-double-spend.spec.ts ├── 2a-mint-double-spend.spec.ts ├── 3-send-double-spend.spec.ts ├── 4-fan-out-fan-in.spec.ts ├── 5-block-reorg-with-genesis.spec.ts ├── 5a-block-reorg-with-genesis.spec.ts ├── 5b-block-reorg-with-genesis.spec.ts ├── 5c-block-reorg-with-genesis.spec.ts ├── 6-burn-with-invalid-txn-fast.spec.ts ├── 7-burn-with-invalid-txn-slow.spec.ts ├── 8-burn-with-valid-txn-fast.spec.ts └── 9-burn-with-valid-txn-slow.spec.ts ├── tna.ts ├── tsconfig.json └── utxos.ts /.env.regtest: -------------------------------------------------------------------------------- 1 | rpc_protocol='http' 2 | rpc_user='bitcoin' 3 | rpc_pass='password' 4 | rpc_host='localhost' 5 | rpc_port='18443' 6 | rpc_limit='150' 7 | db_name='slpdb' 8 | db_url='mongodb://localhost:26017' 9 | core_from='543375' 10 | core_from_testnet='0' 11 | core_slp_mempool_ignore_length='1000000' 12 | zmq_incoming_host='0.0.0.0' 13 | zmq_incoming_port='29332' 14 | 
zmq_outgoing_host='0.0.0.0' 15 | zmq_outgoing_port='27339' 16 | zmq_outgoing_enable=1 17 | telemetry_host='status.slpdb.io' 18 | telemetry_advertised_host="James' MBP" 19 | telemetry_advertised_graph_search_host='' 20 | telemetry_advertised_slp_socket_host='' 21 | telemetry_secret='7da8a6ac0a1f03f24ae852d6769ac27e9c411ff3ba7d8ec1feae81f81924e0ac' 22 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | *.js 3 | *.js.map 4 | !migrate-mongo-config.js 5 | !migrations/* 6 | 7 | mongo-dev-net/data* 8 | regtest/mongo/db 9 | regtest/mongo/configdb 10 | _leveldb 11 | _leveldb_old 12 | _leveldb_testnet 13 | _mongo 14 | _mongo_old 15 | .env 16 | 17 | .DS_Store 18 | 19 | filters.yml 20 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "vendor/bignumber.js"] 2 | path = vendor/bignumber.js 3 | url = https://github.com/iamdoron/bignumber.js.git 4 | -------------------------------------------------------------------------------- /.mocharc.yml: -------------------------------------------------------------------------------- 1 | require: 2 | - 'ts-node/register' 3 | - 'source-map-support/register' 4 | recursive: true 5 | spec: 'test/**/*.spec.ts' 6 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | services: 2 | - docker 3 | sudo: false 4 | script: 5 | - sh -c 'cd ./regtest && ./test.sh' 6 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 
3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Start From Checkpoint (ts-node)", 9 | "type": "node", 10 | "request": "launch", 11 | "args": ["${workspaceFolder}/index.ts", "run"], 12 | "runtimeArgs": ["--nolazy", "-r", "ts-node/register"], 13 | "sourceMaps": true, 14 | "cwd": "${workspaceRoot}", 15 | "protocol": "inspector", 16 | "console": "integratedTerminal" 17 | }, 18 | { 19 | "type": "node", 20 | "request": "launch", 21 | //"preLaunchTask": "tsc", 22 | "name": "Run Mocha", 23 | "program": "${workspaceRoot}/node_modules/mocha/bin/_mocha", 24 | "args": ["--timeout", "1000000", "--exit", "--require", "mocha-steps"], 25 | "cwd": "${workspaceRoot}", 26 | "outFiles": [] 27 | }, 28 | { 29 | "type": "node", 30 | "request": "launch", 31 | "name": "Start From 574710", 32 | "program": "${workspaceFolder}/index.js", 33 | "args": ["run", "--startHeight", "574710"], 34 | "console": "externalTerminal" 35 | }, 36 | { 37 | "type": "node", 38 | "request": "launch", 39 | "name": "Reprocess token ID", 40 | "program": "${workspaceFolder}/index.js", 41 | "args": ["reprocess", "4de69e374a8ed21cbddd47f2338cc0f479dc58daa2bbe11cd604ca488eca0ddf"], 42 | "console": "externalTerminal" 43 | }, 44 | { 45 | "type": "node", 46 | "request": "launch", 47 | "name": "Start tokens from scratch", 48 | "program": "${workspaceFolder}/index.js", 49 | "args": ["run", "--reprocess"], 50 | "console": "externalTerminal" 51 | }, 52 | { 53 | "type": "node", 54 | "request": "launch", 55 | "name": "Fix (untested)", 56 | "program": "${workspaceFolder}/index.js", 57 | "args": ["fix", "580609"], 58 | "console": "integratedTerminal" 59 | }, 60 | { 61 | "type": "node", 62 | "request": "launch", 63 | "name": "Launch Current File", 64 | "program": "${file}", 65 | "outFiles": ["${workspaceFolder}/regtest/**/*.js"], 66 | "console": "integratedTerminal" 67 | }, 
68 | ] 69 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "git.ignoreLimitWarning": true, 3 | "files.associations": { 4 | "*.json": "jsonc" 5 | } 6 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /assets/slpdb_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simpleledger/SLPDB/9a85b1bd381a82e6d2094d6936774ee5a4503de3/assets/slpdb_logo.png -------------------------------------------------------------------------------- /cache.ts: -------------------------------------------------------------------------------- 1 | export class CacheSet { 2 | private set = new Set() 3 | private list: T[] = []; 4 | private maxSize: number; 5 | 6 | constructor(maxSize: number) { 7 | this.maxSize = maxSize; 8 | } 9 | 10 | [Symbol.iterator]() { 11 | return this.list.values(); 12 | } 13 | 14 | get length(): number { 15 | return this.list.length; 16 | } 17 | 18 | push(item: T) { 19 | this.set.add(item); 20 | if (this.maxSize > 0 && this.set.size > this.maxSize) { 21 | this.shift(); 22 | } 23 | return this.list.push(item); 24 | } 25 | 26 | has(item: T) { 27 | return this.set.has(item); 28 | } 29 | 30 | delete(item: T) { 31 | if (this.set.delete(item)) { 32 | this.list = this.list.filter(k => k !== item); 33 | } 34 | } 35 | 36 | toSet() { 37 | return this.set; 38 | } 39 | 40 | shift(): T | undefined { 41 | let item = this.list.shift(); 42 | if (item) { 43 | this.set.delete(item); 44 | } 45 | return item; 46 | } 47 | 48 | pop(): T | undefined { 49 | let item = this.list.pop(); 50 | if (item) { 51 | this.set.delete(item); 52 | } 53 | return item; 54 | } 55 | 56 | clear() { 57 | this.list = []; 58 | this.set.clear(); 59 | } 60 | } 61 | 62 | export class CacheMap { 63 | private map = new Map() 64 | private list: T[] = []; 65 | private maxSize: number; 66 | 67 | constructor(maxSize: number) { 68 | this.maxSize = maxSize; 69 | } 70 | 71 | [Symbol.iterator]() { 72 | return this.items(); 73 | } 74 | 75 | get length(): number { 76 | return this.list.length; 77 | } 78 | 79 | get size(): number { 80 | return this.list.length; 81 | } 82 | 83 
| values(): IterableIterator { 84 | let l: M[] = this.list.map(i => this.map.get(i)!); 85 | return l.values(); 86 | } 87 | 88 | keys(): IterableIterator { 89 | return this.list.values(); 90 | } 91 | 92 | items(): IterableIterator<[T,M]> { 93 | let l: [T,M][] = this.list.map(i => [i, this.map.get(i)!]); 94 | return l.values(); 95 | } 96 | 97 | set(key: T, item: M) { 98 | this.list.push(key); 99 | this.map.set(key, item); 100 | if(this.maxSize > 0 && this.map.size > this.maxSize) { 101 | this.shift(); 102 | } 103 | } 104 | 105 | get(key: T) { 106 | return this.map.get(key); 107 | } 108 | 109 | has(key: T) { 110 | return this.map.has(key); 111 | } 112 | 113 | delete(key: T) { 114 | if (this.map.delete(key)) { 115 | this.list = this.list.filter(k => k !== key); 116 | } 117 | } 118 | 119 | toMap() { 120 | return this.map; 121 | } 122 | 123 | private shift(): T | undefined { 124 | let key = this.list.shift(); 125 | if(key) { 126 | this.map.delete(key); 127 | } 128 | return key; 129 | } 130 | 131 | clear() { 132 | this.list = []; 133 | this.map.clear(); 134 | } 135 | } 136 | -------------------------------------------------------------------------------- /config.ts: -------------------------------------------------------------------------------- 1 | export interface DbConfig { 2 | name: string; 3 | name_testnet: string; 4 | url: string; 5 | lazy_loading: number; 6 | index: { [key: string]: { [key: string]: string[] } }; 7 | token_schema_version: number; 8 | confirmed_schema_version: number; 9 | } 10 | 11 | export type CollectionType = { keys: string[], fulltext: string[] } 12 | 13 | export interface RpcConfig { 14 | protocol: string; user: string; pass: string; host: string; port: string; limit: number; 15 | } 16 | 17 | export class Config { 18 | static rpc = { 19 | protocol: process.env.rpc_protocol ? process.env.rpc_protocol : 'http', 20 | user: process.env.rpc_user ? process.env.rpc_user : 'bitcoin', 21 | pass: process.env.rpc_pass ? 
process.env.rpc_pass : 'password', 22 | host: process.env.rpc_host ? process.env.rpc_host : '0.0.0.0', 23 | port: process.env.rpc_port ? process.env.rpc_port : '8332', 24 | limit: Number.parseInt(process.env.rpc_limit ? process.env.rpc_limit : "150"), 25 | rpcMaxRetries: Number.parseInt(process.env.rpc_max_retries ? process.env.rpc_max_retries : "2"), 26 | rpcRetryDelayMs: Number.parseInt(process.env.rpc_retry_delay ? process.env.rpc_retry_delay : "1000"), 27 | rpcTimeoutMs: Number.parseInt(process.env.rpc_timeout ? process.env.rpc_timeout : "30000"), 28 | skipInitialSyncCheck: process.env.skip_intial_sync_check ? ['1', 'true'].includes(process.env.skip_intial_sync_check) : false, 29 | } 30 | static grpc = { 31 | url: Boolean(process.env.grpc_url) ? process.env.grpc_url : undefined, 32 | certPath: Boolean(process.env.grpc_certPath) ? process.env.grpc_certPath : undefined, 33 | } 34 | static db: DbConfig = { 35 | name: process.env.db_name ? process.env.db_name : 'slpdb', 36 | name_testnet: process.env.db_name ? process.env.db_name + "_test" : 'slpdb_test', 37 | url: process.env.db_url ? process.env.db_url : 'mongodb://127.0.0.1:27017', 38 | confirmed_schema_version: 2, 39 | token_schema_version: 79, 40 | lazy_loading: process.env.lazy_loading ? 
Number.parseInt(process.env.lazy_loading) : 0, 41 | index: { 42 | tokens: { 43 | keys: [ 'tokenDetails.tokenIdHex', 'tokenDetails.name', 'tokenDetails.symbol', 'tokenStats.qty_token_circulating_supply', 'tokenStats.qty_token_burned', 'tokenStats.qty_token_minted' ], 44 | fulltext: [ 'tokenDetails.name', 'tokenDetails.symbol' ] 45 | }, 46 | graphs: { 47 | keys: [ 'tokenDetails.tokenIdHex', 'tokenDetails.nftGroupIdHex', 'graphTxn.txid', 'graphTxn.outputs.spendTxid'], 48 | fulltext: [ ] 49 | }, 50 | confirmed: { 51 | keys: [ 52 | 'tx.h', 'blk.i', 'blk.t', 'blk.h', 53 | 'in.e.a', 'in.e.h', 'in.e.i', 'in.i', 54 | 'out.e.a', 'out.e.i', 'out.e.v', 'out.i', 55 | 'in.b0', 'in.b1', 'in.b2', 'in.b3', //'in.b4', 'in.b5', 'in.b6', 'in.b7', 'in.b8', 'in.b9', 'in.b10', 'in.b11', 'in.b12', 'in.b13', 'in.b14', 'in.b15', 56 | 'out.b0', 'out.b1', 'out.b2', 'out.b3', 'out.b7', //'out.b4', 'out.b5', 'out.b6', 'out.b7', 'out.b8', 'out.b9', 'out.b10', 'out.b11', 'out.b12', 'out.b13', 'out.b14', 'out.b15', 'out.b16', 'out.b17', 'out.b18', 'out.b19', 57 | 'out.s0', 'out.s1', 'out.s2', 'out.s3', 'out.s4', //'out.s5' 58 | 'slp.detail.outputs.address', 'slp.detail.transactionType' 59 | ], 60 | fulltext: ['out.s0', 'out.s1', 'out.s2', 'out.s3']//, 'out.s4', 'out.s5'] 61 | }, 62 | unconfirmed: { 63 | keys: [ 64 | 'tx.h', 65 | 'in.e.a', 'in.e.h', 'in.e.i', 'in.i', 66 | 'out.e.a', 'out.e.i', 'out.e.v', 'out.i', 67 | 'in.b0', 'in.b1', 'in.b2', 'in.b3', //'in.b4', 'in.b5', 'in.b6', 'in.b7', 'in.b8', 'in.b9', 'in.b10', 'in.b11', 'in.b12', 'in.b13', 'in.b14', 'in.b15', 68 | 'out.b0', 'out.b1', 'out.b2', 'out.b3', 'out.b7', //'out.b4', 'out.b5', 'out.b6', 'out.b7', 'out.b8', 'out.b9', 'out.b10', 'out.b11', 'out.b12', 'out.b13', 'out.b14', 'out.b15', 'out.b16', 'out.b17', 'out.b18', 'out.b19', 69 | 'out.s0', 'out.s1', 'out.s2', 'out.s3', 'out.s4', //'out.s5' 70 | 'slp.detail.outputs.address', 'slp.detail.transactionType' 71 | ], 72 | fulltext: ['out.s0', 'out.s1', 'out.s2', 'out.s3'] //, 'out.s4', 
'out.s5'] 73 | } 74 | } 75 | } 76 | static zmq = { 77 | incoming: { 78 | host: process.env.zmq_incoming_host ? process.env.zmq_incoming_host : '0.0.0.0', 79 | port: process.env.zmq_incoming_port ? process.env.zmq_incoming_port : '28332', 80 | }, 81 | outgoing: { 82 | enable: process.env.zmq_outgoing_enable ? ['1', 'true'].includes(process.env.zmq_outgoing_enable) : true, 83 | host: process.env.zmq_outgoing_host ? process.env.zmq_outgoing_host : '0.0.0.0', 84 | port: process.env.zmq_outgoing_port ? process.env.zmq_outgoing_port : '28339', 85 | } 86 | } 87 | static core = { 88 | from: Number.parseInt(process.env.core_from ? process.env.core_from : "543375"), 89 | from_testnet: Number.parseInt(process.env.core_from_testnet ? process.env.core_from_testnet : "1253801"), 90 | slp_mempool_ignore_length: Number.parseInt(process.env.core_slp_mempool_ignore_length ? process.env.core_slp_mempool_ignore_length : "1000000"), 91 | } 92 | static telemetry = { 93 | enable: process.env.telemetry_enable ? ['1', 'true'].includes(process.env.telemetry_enable) : true, 94 | host: process.env.telemetry_host ? process.env.telemetry_host : 'status.slpdb.io', 95 | port: process.env.telemetry_port ? process.env.telemetry_port : 443, 96 | advertised_host: process.env.telemetry_advertised_host ? process.env.telemetry_advertised_host : '', 97 | secret: process.env.telemetry_secret ? 
process.env.telemetry_secret : '' 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /db.ts: -------------------------------------------------------------------------------- 1 | import { MongoClient, Db as MongoDb } from 'mongodb'; 2 | import { DbConfig } from './config'; 3 | import { TNATxn } from './tna'; 4 | import { GraphTxnDbo, TokenDBObject } from "./interfaces"; 5 | import { GraphMap } from './graphmap'; 6 | 7 | export class Db { 8 | db!: MongoDb; 9 | mongo!: MongoClient; 10 | dbUrl: string; 11 | dbName: string; 12 | config: DbConfig; 13 | 14 | constructor({ dbUrl, dbName, config }: { dbUrl: string, dbName: string, config: DbConfig }) { 15 | this.dbUrl = dbUrl; 16 | this.dbName = dbName; 17 | this.config = config; 18 | } 19 | 20 | private async checkClientStatus(): Promise { 21 | if (!this.mongo) { 22 | this.mongo = await MongoClient.connect(this.dbUrl, { useNewUrlParser: true, useUnifiedTopology: true }); 23 | this.db = this.mongo.db(this.dbName); 24 | return true; 25 | } 26 | return false; 27 | } 28 | 29 | async drop() { 30 | await this.db.dropDatabase(); 31 | } 32 | 33 | async exit() { 34 | await this.mongo.close(); 35 | } 36 | 37 | async statusUpdate(status: any) { 38 | await this.checkClientStatus(); 39 | await this.db.collection('statuses').deleteMany({ "context": status.context }); 40 | return await this.db.collection('statuses').insertOne(status); 41 | } 42 | 43 | async statusFetch(context: string) { 44 | await this.checkClientStatus(); 45 | return await this.db.collection('statuses').findOne({ "context": context }); 46 | } 47 | 48 | private async tokenInsertReplace(token: any) { 49 | await this.checkClientStatus(); 50 | await this.db.collection('tokens').replaceOne({ "tokenDetails.tokenIdHex": token.tokenDetails.tokenIdHex }, token, { upsert: true }); 51 | } 52 | 53 | async tokenDelete(tokenIdHex: string) { 54 | await this.checkClientStatus(); 55 | return await 
this.db.collection('tokens').deleteMany({ "tokenDetails.tokenIdHex": tokenIdHex }); 56 | } 57 | 58 | async tokenFetch(tokenIdHex: string): Promise { 59 | await this.checkClientStatus(); 60 | return await this.db.collection('tokens').findOne({ "tokenDetails.tokenIdHex": tokenIdHex }); 61 | } 62 | 63 | async tokenFetchAll(): Promise { 64 | await this.checkClientStatus(); 65 | return await this.db.collection('tokens').find({}).toArray(); 66 | } 67 | 68 | async tokenReset() { 69 | await this.checkClientStatus(); 70 | await this.db.collection('tokens').deleteMany({}) 71 | .catch(function(err) { 72 | console.log('[ERROR] token collection reset ERR ', err); 73 | throw err; 74 | }); 75 | } 76 | 77 | async graphItemsUpsert(graph: GraphMap) { 78 | await this.checkClientStatus(); 79 | console.time("ToDBO"); 80 | let { itemsToUpdate, tokenDbo, txidsToDelete } = GraphMap.toDbos(graph); 81 | console.timeEnd("ToDBO"); 82 | for (const i of itemsToUpdate) { 83 | if (txidsToDelete.includes(i.graphTxn.txid)) { 84 | continue; 85 | } 86 | let res = await this.db.collection("graphs").replaceOne({ "tokenDetails.tokenIdHex": i.tokenDetails.tokenIdHex, "graphTxn.txid": i.graphTxn.txid }, i, { upsert: true }); 87 | if (res.modifiedCount) { 88 | console.log(`[DEBUG] graphItemsUpsert - modified: ${i.graphTxn.txid}`); 89 | } else if (res.upsertedCount) { 90 | console.log(`[DEBUG] graphItemsUpsert - inserted: ${i.graphTxn.txid}`); 91 | } else { 92 | throw Error(`Graph record was not updated: ${i.graphTxn.txid} (token: ${i.tokenDetails.tokenIdHex})`); 93 | } 94 | } 95 | await this.tokenInsertReplace(tokenDbo); 96 | 97 | for (const txid of txidsToDelete) { 98 | await this.db.collection("graphs").deleteMany({ "graphTxn.txid": txid }); 99 | await this.db.collection("confirmed").deleteMany({ "tx.h": txid }); 100 | await this.db.collection("unconfirmed").deleteMany({ "tx.h": txid }); 101 | } 102 | } 103 | 104 | async graphDelete(tokenIdHex: string) { 105 | await this.checkClientStatus(); 106 | return 
await this.db.collection('graphs').deleteMany({ "tokenDetails.tokenIdHex": tokenIdHex }) 107 | } 108 | 109 | async graphItemDelete(txid: string) { 110 | await this.checkClientStatus(); 111 | return await this.db.collection('graphs').deleteMany({ "graphTxn.txid": txid }); 112 | } 113 | 114 | async graphFetch(tokenIdHex: string, lastPrunedHeight?: number): Promise { 115 | await this.checkClientStatus(); 116 | if (lastPrunedHeight) { 117 | return await this.db.collection('graphs').find({ 118 | "tokenDetails.tokenIdHex": tokenIdHex, 119 | "$or": [ { "graphTxn._pruneHeight": { "$gt": lastPrunedHeight } }, { "graphTxn._pruneHeight": null }, { "graphTxn.txid": tokenIdHex }] 120 | }).toArray(); 121 | } else { 122 | return await this.db.collection('graphs').find({ 123 | "tokenDetails.tokenIdHex": tokenIdHex 124 | }).toArray(); 125 | } 126 | } 127 | 128 | async graphTxnFetch(txid: string): Promise { 129 | await this.checkClientStatus(); 130 | return await this.db.collection('graphs').findOne({ "graphTxn.txid": txid }); 131 | } 132 | 133 | async graphReset() { 134 | await this.checkClientStatus(); 135 | await this.db.collection('graphs').deleteMany({}) 136 | .catch(function(err) { 137 | console.log('[ERROR] graphs collection reset ERR ', err) 138 | throw err; 139 | }) 140 | } 141 | 142 | async unconfirmedInsert(item: TNATxn) { 143 | await this.checkClientStatus(); 144 | console.log(`Added unconfirmed: ${item.tx.h}`); 145 | return await this.db.collection('unconfirmed').insertMany([item]); 146 | } 147 | 148 | async unconfirmedReset() { 149 | await this.checkClientStatus(); 150 | await this.db.collection('unconfirmed').deleteMany({}) 151 | .catch(function(err) { 152 | console.log('[ERROR] mempoolreset ERR ', err); 153 | throw err; 154 | }) 155 | } 156 | 157 | async unconfirmedTxids(): Promise { 158 | await this.checkClientStatus(); 159 | let res: TNATxn[] = await this.db.collection('unconfirmed').find({}).toArray(); 160 | return res.map(u => u.tx.h); 161 | } 162 | 163 | async 
unconfirmedFetch(txid: string): Promise { 164 | await this.checkClientStatus(); 165 | let res = await this.db.collection('unconfirmed').findOne({ "tx.h": txid }) as TNATxn; 166 | return res; 167 | } 168 | 169 | async unconfirmedDelete(txids: string[]): Promise { 170 | await this.checkClientStatus(); 171 | if (txids.length === 0) { 172 | return 0; 173 | } 174 | let res = (await this.db.collection('unconfirmed').deleteMany({ "$or": txids.map(txid => { return { "tx.h": txid }})})).deletedCount; 175 | return res; 176 | } 177 | 178 | async unconfirmedProcessedSlp(): Promise { 179 | await this.checkClientStatus(); 180 | return (await this.db.collection('unconfirmed').find().toArray()).filter((i:TNATxn) => i.slp); 181 | } 182 | 183 | async confirmedFetch(txid: string): Promise { 184 | await this.checkClientStatus(); 185 | return await this.db.collection('confirmed').findOne({ "tx.h": txid }) as TNATxn; 186 | } 187 | 188 | async confirmedDelete(txid: string): Promise { 189 | await this.checkClientStatus(); 190 | return await this.db.collection('confirmed').deleteMany({ "tx.h": txid }); 191 | } 192 | 193 | async confirmedFetchForReorg(blockIndex: number): Promise { 194 | await this.checkClientStatus(); 195 | return await this.db.collection('confirmed').find({ "blk.i": { "$gte": blockIndex }}).toArray(); 196 | } 197 | 198 | async confirmedDeleteForReorg(blockIndex: number): Promise { 199 | await this.checkClientStatus(); 200 | console.log(`[WARN] Deleting all transactions with block greater than or equal to ${blockIndex}.`) 201 | return await this.db.collection('confirmed').deleteMany({ "blk.i": { "$gte": blockIndex }}); 202 | } 203 | 204 | async confirmedReset() { 205 | await this.checkClientStatus(); 206 | await this.db.collection('confirmed').deleteMany({}).catch(function(err) { 207 | console.log('[ERROR] confirmedReset ERR ', err) 208 | throw err; 209 | }) 210 | } 211 | 212 | async confirmedReplace(items: TNATxn[], blockIndex: number) { 213 | await 
this.checkClientStatus(); 214 | 215 | if (items.filter(i => !i.blk).length > 0) { 216 | throw Error("Attempted to add items without BLK property."); 217 | } 218 | 219 | if (blockIndex) { 220 | console.log('[INFO] Updating block', blockIndex, 'with', items.length, 'items'); 221 | } 222 | 223 | for (let i=0; i < items.length; i++) { 224 | await this.db.collection('confirmed').replaceOne({ "tx.h": items[i].tx.h }, items[i], { upsert: true }); 225 | } 226 | } 227 | 228 | async confirmedIndex() { 229 | await this.checkClientStatus(); 230 | 231 | console.log('[INFO] * Indexing MongoDB...') 232 | console.time('TotalIndex') 233 | 234 | if (this.config.index) { 235 | let collectionNames = Object.keys(this.config.index) 236 | for(let j=0; j 0) { 262 | console.log('[INFO] Creating full text index...') 263 | let o: { [key:string]: string } = {} 264 | fulltext.forEach(function(key) { 265 | o[key] = 'text' 266 | }) 267 | console.time('Fulltext search for ' + collectionName) //,o) 268 | try { 269 | await this.db.collection(collectionName).createIndex(o, { name: 'fulltext' }) 270 | } catch (e) { 271 | console.log('[ERROR] blockindex error:', e) 272 | throw e; 273 | } 274 | console.timeEnd('Fulltext search for ' + collectionName) 275 | } 276 | } 277 | } 278 | 279 | //console.log('* Finished indexing MongoDB...') 280 | console.timeEnd('TotalIndex') 281 | 282 | try { 283 | let result = await this.db.collection('confirmed').indexInformation({ full: true }) // <- No MongoSession passed 284 | console.log('* Confirmed Index = ', result) 285 | result = await this.db.collection('unconfirmed').indexInformation({ full: true }) // <- No MongoSession passed 286 | console.log('* Unonfirmed Index = ', result) 287 | } catch (e) { 288 | console.log('[INFO] * Error fetching index info ', e) 289 | throw e; 290 | } 291 | } 292 | } -------------------------------------------------------------------------------- /example-filters.yml: 
-------------------------------------------------------------------------------- 1 | tokens: 2 | - name: USDH 3 | type: include-single 4 | info: c4b0d62156b3fa5c8f3436079b5394f7edc1bef5dc1cd2f9d0c4d46f82cca479 5 | - name: SPICE 6 | type: include-single 7 | info: 4de69e374a8ed21cbddd47f2338cc0f479dc58daa2bbe11cd604ca488eca0ddf 8 | -------------------------------------------------------------------------------- /example.env: -------------------------------------------------------------------------------- 1 | rpc_protocol='http' 2 | rpc_user='bitcoin' 3 | rpc_pass='password' 4 | rpc_host='0.0.0.0' 5 | rpc_port='8332' 6 | rpc_limit='150' 7 | grpc_url='localhost:8335' 8 | grpc_certPath='' 9 | db_name='slpdb' 10 | db_url='mongodb://localhost:27017' 11 | core_from='543375' 12 | core_from_testnet='1253801' 13 | core_slp_mempool_ignore_length='1000000' 14 | zmq_incoming_host='0.0.0.0' 15 | zmq_incoming_port='28332' 16 | zmq_outgoing_host='0.0.0.0' 17 | zmq_outgoing_port='28339' 18 | zmq_outgoing_enable=0 19 | enable_telemetry=0 20 | telemetry_host='' 21 | telemetry_advertised_url='' 22 | -------------------------------------------------------------------------------- /examples/addresses.js: -------------------------------------------------------------------------------- 1 | let mongo = require("mongodb"); 2 | let BigNumber = require('bignumber.js'); 3 | 4 | createBigNumber = function(e, c) { 5 | let a = new BigNumber(0); 6 | a.e = e; 7 | a.c = c; 8 | return a; 9 | } 10 | 11 | const queries = { 12 | async addresses(tokenId) { 13 | let client = await mongo.MongoClient.connect("mongodb://0.0.0.0:27017", { useNewUrlParser: true }); 14 | let db = client.db("bitdb"); 15 | let res = await db.collection('tokens').findOne({ "tokenDetails.tokenIdHex": tokenId }); 16 | let decimals = res.tokenDetails.decimals; 17 | let addresses = [] 18 | Object.keys(res.addresses).forEach((k, i, a) => addresses.push({ addr: k, tokens: createBigNumber(res.addresses[k].token_balance.e, 
res.addresses[k].token_balance.c).dividedBy(10**decimals).toString()})) 19 | console.log(addresses); 20 | return; 21 | } 22 | } 23 | 24 | queries.addresses("df808a41672a0a0ae6475b44f272a107bc9961b90f29dc918d71301f24fe92fb"); 25 | -------------------------------------------------------------------------------- /filters.ts: -------------------------------------------------------------------------------- 1 | import * as yaml from 'js-yaml'; 2 | import * as fs from 'fs'; 3 | 4 | class _TokenFilterRule { 5 | name: string; 6 | type: string; 7 | info: string; 8 | 9 | constructor({ name, type, info }: { name: string, type: string, info: string }) { 10 | this.name = name; 11 | this.type = type; 12 | this.info = info; 13 | } 14 | 15 | include(tokenId: string) { 16 | if(this.type === 'include-single') { 17 | if(tokenId === this.info) { 18 | return true; 19 | } else { 20 | return false; 21 | } 22 | } else if(this.type === 'exclude-single') { 23 | if(tokenId === this.info) { 24 | return false; 25 | } else { 26 | return true; 27 | } 28 | } 29 | } 30 | 31 | exclude(tokenId: string) { 32 | return !this.include(tokenId); 33 | } 34 | } 35 | 36 | class _TokenFilter { 37 | public static Instance() { 38 | return this._instance || (this._instance = new _TokenFilter()); 39 | } 40 | private static _instance: _TokenFilter; 41 | _rules = new Map(); 42 | _hasIncludeSingle = false; 43 | _hasExcludeSingle = false; 44 | 45 | constructor() { 46 | try { 47 | let o = yaml.safeLoad(fs.readFileSync('filters.yml', 'utf-8')) as { tokens: _TokenFilterRule[] }; 48 | o!.tokens.forEach((f: _TokenFilterRule) => { 49 | this.addRule(new _TokenFilterRule({ info: f.info, name: f.name, type: f.type })); 50 | console.log("[INFO] Loaded token filter:", f.name); 51 | }); 52 | } catch(e) { 53 | console.log("[INFO] No token filters loaded."); 54 | } 55 | } 56 | 57 | addRule(rule: _TokenFilterRule) { 58 | if(this._rules.has(rule.info)) 59 | return; 60 | if(rule.type === 'include-single') { 61 | 
if(this._hasExcludeSingle) 62 | throw Error('Invalid combination of filter rules. Filter already has exclude single rules added.'); 63 | this._hasIncludeSingle = true; 64 | } else if(rule.type === 'exclude-single') { 65 | if(this._hasIncludeSingle) 66 | throw Error('Invalid combination of filter rules. Filter already has include single rules added.'); 67 | this._hasIncludeSingle = true; 68 | } 69 | this._rules.set(rule.info, rule); 70 | } 71 | 72 | passesAllFilterRules(tokenId: string) { 73 | if(this._hasIncludeSingle) { 74 | let r = Array.from(this._rules).filter((v, i) => v[1].type === 'include-single'); 75 | for(let i = 0; i < r.length; i++) { 76 | if(r[i][1].type === 'include-single' && r[i][1].include(tokenId)) { 77 | return true; 78 | } 79 | } 80 | return false; 81 | } else if(this._hasExcludeSingle) { 82 | let r = Array.from(this._rules).filter((v, i) => v[1].type === 'exclude-single'); 83 | for(let i = 0; i < r.length; i++) { 84 | if(r[i][1].type === 'exclude-single' && r[i][1].exclude(tokenId)) { 85 | return false; 86 | } 87 | } 88 | return true; 89 | } else { 90 | return true; 91 | } 92 | } 93 | } 94 | 95 | // accessor to a singleton stack for filters 96 | export const TokenFilters = _TokenFilter.Instance; 97 | 98 | TokenFilters(); -------------------------------------------------------------------------------- /graphmap.ts: -------------------------------------------------------------------------------- 1 | import { SlpTokenGraph } from "./slptokengraph"; 2 | import { GraphTxnDbo, GraphTxnDetailsDbo, GraphTxnOutputDbo, TokenDBObject, 3 | GraphTxnInput, GraphTxnOutput, GraphTxn, SlpTransactionDetailsDbo, 4 | TokenPruneStateDbo } from "./interfaces"; 5 | import { Decimal128 } from "mongodb"; 6 | import { Config } from "./config"; 7 | import { RpcClient } from "./rpc"; 8 | import { SlpTransactionType, SlpTransactionDetails, SlpVersionType } from "slpjs"; 9 | import BigNumber from "bignumber.js"; 10 | 11 | import { slpUtxos } from './utxos'; 12 | const 
globalUtxoSet = slpUtxos(); 13 | 14 | export class GraphMap extends Map { 15 | private _pruned = new Map(); 16 | private _dirtyItems = new Set(); 17 | private _txidsToDelete = new Set(); // used for double spent transaction items and reorgs 18 | private _lastPruneHeight = 0; 19 | private _rootId: string; 20 | private _rootGraphTxn!: GraphTxn; 21 | private _container: SlpTokenGraph; 22 | private _prunedSendCount = 0; 23 | private _prunedMintCount = 0; 24 | 25 | constructor(graph: SlpTokenGraph) { 26 | super(); 27 | this._rootId = graph._tokenIdHex; 28 | this._container = graph; 29 | } 30 | 31 | get DirtyCount() { 32 | return this._dirtyItems.size; 33 | } 34 | 35 | get TotalTransactionCount() { 36 | return this._prunedSendCount + this._prunedMintCount + this.size - 1; 37 | } 38 | 39 | private setFromDb(txid: string, graphTxn: GraphTxn) { 40 | return super.set(txid, graphTxn); 41 | } 42 | 43 | // @ts-ignore 44 | public set(txid: string, graphTxn: GraphTxn) { 45 | throw Error("method is not implemented, use 'setDirty(txid, graphTxn)' instead"); 46 | } 47 | 48 | public setDirty(txid: string, graphTxn?: GraphTxn) { 49 | if (graphTxn && txid === this._rootId) { 50 | this._rootGraphTxn = graphTxn; 51 | } 52 | this._dirtyItems.add(txid); 53 | if (! 
graphTxn) { 54 | graphTxn = this.get(txid)!; 55 | } 56 | return super.set(txid, graphTxn); 57 | } 58 | 59 | public delete(txid: string) { 60 | this._txidsToDelete.add(txid); 61 | if (this.has(txid)) { 62 | return super.delete(txid); 63 | } 64 | return false; 65 | } 66 | 67 | public has(txid: string, includePrunedItems=false): boolean { 68 | if (includePrunedItems) { 69 | return super.has(txid) || this._pruned.has(txid); 70 | } 71 | return super.has(txid); 72 | } 73 | 74 | public get(txid: string, includePrunedItems=false): GraphTxn|undefined { 75 | if (txid === this._rootId && this._rootGraphTxn) { 76 | return this._rootGraphTxn; 77 | } 78 | if (includePrunedItems) { 79 | return super.get(txid) || this._pruned.get(txid); 80 | } 81 | return super.get(txid); 82 | } 83 | 84 | private prune(txid: string, pruneHeight: number) { 85 | if (txid === this._rootId) { 86 | return false; 87 | } 88 | this._lastPruneHeight = pruneHeight; 89 | if (this.has(txid)) { 90 | let gt = this.get(txid)!; 91 | if (! 
gt.prevPruneHeight || pruneHeight >= gt.prevPruneHeight) { 92 | this._pruned.set(txid, gt); 93 | this.delete(txid); 94 | console.log(`[INFO] Pruned ${txid} with prune height of ${pruneHeight}`); 95 | if (gt.details.transactionType === SlpTransactionType.SEND) { 96 | this._prunedSendCount++; 97 | } else if (gt.details.transactionType === SlpTransactionType.MINT) { 98 | this._prunedMintCount++; 99 | } 100 | return true; 101 | } else if (pruneHeight < gt.prevPruneHeight) { 102 | console.log(`[INFO] Pruning deferred until ${gt.prevPruneHeight}`); 103 | } 104 | } 105 | return false; 106 | } 107 | 108 | private _flush() { 109 | const txids = Array.from(this._pruned.keys()); 110 | this._pruned.forEach((i, txid) => { 111 | RpcClient.transactionCache.delete(txid); 112 | delete this._container._slpValidator.cachedRawTransactions[txid]; 113 | delete this._container._slpValidator.cachedValidations[txid]; 114 | }); 115 | this._txidsToDelete.clear(); 116 | this._pruned.clear(); 117 | this._dirtyItems.clear(); 118 | return txids; 119 | } 120 | 121 | public static toDbos(graph: GraphMap): { itemsToUpdate: GraphTxnDbo[], tokenDbo: TokenDBObject, txidsToDelete: string[] } { 122 | let tg = graph._container; 123 | let itemsToUpdate: GraphTxnDbo[] = []; 124 | 125 | for (const txid of graph._dirtyItems) { 126 | if (Array.from(graph._txidsToDelete).includes(txid)) { 127 | graph.delete(txid); 128 | continue; 129 | } 130 | let g = graph.get(txid); 131 | if (g) { 132 | let dbo: GraphTxnDbo = { 133 | tokenDetails: { tokenIdHex: graph._container._tokenIdHex }, 134 | graphTxn: { 135 | txid, 136 | details: GraphMap._mapTokenDetailsToDbo(g.details, tg._tokenDetails.decimals), 137 | outputs: GraphMap._txnOutputsToDbo(tg, g.outputs), 138 | inputs: g.inputs.map((i) => { 139 | return { 140 | address: i.address, 141 | txid: i.txid, 142 | vout: i.vout, 143 | bchSatoshis: i.bchSatoshis, 144 | slpAmount: Decimal128.fromString(i.slpAmount.dividedBy(10**tg._tokenDetails.decimals).toFixed()) 145 | } 146 | 
}), 147 | _blockHash: g.blockHash, 148 | _pruneHeight: g.prevPruneHeight 149 | } 150 | }; 151 | if (g.details.versionType === SlpVersionType.TokenVersionType1_NFT_Child) { 152 | dbo.tokenDetails.nftGroupIdHex = tg._nftParentId! 153 | } 154 | 155 | if ((g.details.transactionType === SlpTransactionType.SEND || 156 | g.details.transactionType === SlpTransactionType.MINT) && 157 | dbo.graphTxn.inputs.length === 0) { 158 | console.log(`[WARN] Cannot store a SEND or MINT transaction without any inputs (${txid})`); 159 | //throw Error("Cannot store a SEND or MINT transaction without any inputs"); 160 | } 161 | 162 | itemsToUpdate.push(dbo); 163 | } 164 | } 165 | 166 | let txidsToDelete = Array.from(graph._txidsToDelete); 167 | 168 | // Do the pruning here 169 | itemsToUpdate.forEach(dbo => { 170 | if (dbo.graphTxn._pruneHeight) { 171 | graph.prune(dbo.graphTxn.txid, dbo.graphTxn._pruneHeight); 172 | } 173 | }); 174 | graph._flush(); 175 | 176 | let tokenDbo = GraphMap._mapTokenToDbo(graph); 177 | return { itemsToUpdate, tokenDbo, txidsToDelete }; 178 | } 179 | 180 | public fromDbos(dag: GraphTxnDbo[], pruneState: TokenPruneStateDbo) { 181 | dag.forEach((item, idx) => { 182 | let gt = GraphMap.mapGraphTxnFromDbo(item, this._container._tokenDetails.decimals); 183 | gt.outputs.forEach(o => { 184 | globalUtxoSet.set(`${item.graphTxn.txid}:${o.vout}`, Buffer.from(this._rootId, "hex")); 185 | }); 186 | this.setFromDb(item.graphTxn.txid, gt); 187 | }); 188 | this._lastPruneHeight = pruneState.pruneHeight; 189 | this._prunedSendCount = pruneState.sendCount; 190 | this._prunedMintCount = pruneState.mintCount; 191 | } 192 | 193 | private static _mapTokenToDbo(graph: GraphMap): TokenDBObject { 194 | let tg = graph._container; 195 | let tokenDetails = GraphMap._mapTokenDetailsToDbo(tg._tokenDetails, tg._tokenDetails.decimals); 196 | 197 | let result: TokenDBObject = { 198 | schema_version: Config.db.token_schema_version, 199 | lastUpdatedBlock: tg._lastUpdatedBlock, 200 | 
tokenDetails: tokenDetails, 201 | mintBatonUtxo: tg._mintBatonUtxo, 202 | mintBatonStatus: tg._mintBatonStatus, 203 | tokenStats: { 204 | block_created: tg._blockCreated, 205 | approx_txns_since_genesis: graph.TotalTransactionCount 206 | }, 207 | _pruningState: { 208 | pruneHeight: graph._lastPruneHeight, 209 | sendCount: graph._prunedSendCount, 210 | mintCount: graph._prunedMintCount, 211 | } 212 | } 213 | if (tg._tokenDetails.versionType === SlpVersionType.TokenVersionType1_NFT_Child) { 214 | if (!tg._nftParentId) { 215 | throw Error("Missing NFT1 parent token Id."); 216 | } 217 | result.nftParentId = tg._nftParentId; 218 | } 219 | return result; 220 | } 221 | 222 | private static _txnOutputsToDbo(tokenGraph: SlpTokenGraph, outputs: GraphTxnOutput[]): GraphTxnOutputDbo[] { 223 | let mapped: GraphTxnDetailsDbo["outputs"] = []; 224 | outputs.forEach(o => { 225 | let m = Object.create(o); 226 | //console.log(m); 227 | try { 228 | m.slpAmount = Decimal128.fromString(m.slpAmount.dividedBy(10**tokenGraph._tokenDetails.decimals).toFixed()); 229 | } catch(_) { 230 | m.slpAmount = Decimal128.fromString("0"); 231 | } 232 | mapped.push(m); 233 | }) 234 | return mapped; 235 | } 236 | 237 | public static mapGraphTxnFromDbo(dbo: GraphTxnDbo, decimals: number): GraphTxn { 238 | dbo.graphTxn.outputs.map(o => { 239 | o.slpAmount = new BigNumber(o.slpAmount.toString()).multipliedBy(10**decimals) 240 | }); 241 | dbo.graphTxn.inputs.map(o => o.slpAmount = new BigNumber(o.slpAmount.toString()).multipliedBy(10**decimals)) 242 | let gt: GraphTxn = { 243 | details: SlpTokenGraph.MapDbTokenDetailsFromDbo(dbo.graphTxn.details, decimals), 244 | outputs: dbo.graphTxn.outputs as any as GraphTxnOutput[], 245 | inputs: dbo.graphTxn.inputs as any as GraphTxnInput[], 246 | blockHash: dbo.graphTxn._blockHash, 247 | prevPruneHeight: dbo.graphTxn._pruneHeight 248 | } 249 | return gt; 250 | } 251 | 252 | private static _mapTokenDetailsToDbo(details: SlpTransactionDetails, decimals: number): 
SlpTransactionDetailsDbo { 253 | let res: SlpTransactionDetailsDbo = { 254 | decimals: details.decimals, 255 | tokenIdHex: details.tokenIdHex, 256 | timestamp: details.timestamp ? details.timestamp : null, 257 | timestamp_unix: details.timestamp ? this.ConvertToUnixTime(details.timestamp) : null, 258 | transactionType: details.transactionType, 259 | versionType: details.versionType, 260 | documentUri: details.documentUri, 261 | documentSha256Hex: details.documentSha256 ? details.documentSha256.toString('hex') : null, 262 | symbol: details.symbol, 263 | name: details.name, 264 | batonVout: details.batonVout, 265 | containsBaton: details.containsBaton ? true : false, 266 | genesisOrMintQuantity: details.genesisOrMintQuantity ? Decimal128.fromString(details.genesisOrMintQuantity!.dividedBy(10**decimals).toFixed()) : null, 267 | sendOutputs: details.sendOutputs ? details.sendOutputs.map(o => Decimal128.fromString(o.dividedBy(10**decimals).toFixed())) : null 268 | } 269 | 270 | return res; 271 | } 272 | 273 | private static ConvertToUnixTime(Y_m_d_H_M_S: string): number|null { 274 | // timestamp is formatted as "%Y-%m-%d %H:%M:%S" 275 | if(Y_m_d_H_M_S) { 276 | let d = Y_m_d_H_M_S.split(" ")[0] + "T" + Y_m_d_H_M_S.split(" ")[1] + "Z"; 277 | return Date.parse(d)/1000; 278 | } 279 | return null; 280 | } 281 | 282 | } 283 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | import * as dotenv from 'dotenv'; 2 | dotenv.config() 3 | 4 | import { Bit } from './bit'; 5 | import { Db } from './db'; 6 | import { RpcClient } from './rpc'; 7 | import { Config } from './config'; 8 | import { SlpdbStatus } from './status'; 9 | import { Info, ChainSyncCheckpoint } from './info'; 10 | import { SlpGraphManager } from './slpgraphmanager'; 11 | import { TokenFilters } from './filters'; 12 | import { BlockchainInfoResult } from 'bitcoin-com-rest'; 13 | import { 
Query } from './query'; 14 | import { PruneStack } from './prunestack'; 15 | 16 | new RpcClient({ useGrpc: Boolean(Config.grpc.url) }); 17 | 18 | // init promise based resources 19 | const sp = require("synchronized-promise"); 20 | let getBlockchainInfoSync: () => BlockchainInfoResult = sp(RpcClient.getBlockchainInfo,{timeouts:Config.rpc.rpcTimeoutMs}); 21 | let setNetworkSync: (network: string) => void = sp(Info.setNetwork); 22 | let queryInitSync: () => void = sp(Query.init); 23 | let chain = getBlockchainInfoSync().chain; 24 | let network = chain === 'test' || chain === 'regtest' ? 'testnet' : 'mainnet'; 25 | setNetworkSync(network); 26 | queryInitSync(); 27 | 28 | let db = new Db({ 29 | dbName: network === 'mainnet' ? Config.db.name : Config.db.name_testnet, 30 | dbUrl: Config.db.url, 31 | config: Config.db 32 | }); 33 | let bit = new Bit(db); 34 | new SlpdbStatus(db, process.argv); 35 | 36 | let tokenManager: SlpGraphManager; 37 | 38 | const daemon = { 39 | run: async ({ startHeight }: { startHeight?: number } ) => { 40 | // persist updated SLPDB status every 10 minutes 41 | await SlpdbStatus.loadPreviousAttributes(); 42 | setInterval(async function() { 43 | await SlpdbStatus.saveStatus(); 44 | }, 60000); 45 | 46 | await bit.init(); 47 | 48 | // test RPC connection 49 | console.log("[INFO] Testing RPC connection..."); 50 | await RpcClient.getBlockCount(); 51 | console.log("[INFO] RPC is initialized."); 52 | 53 | // set start height override 54 | if (startHeight) { 55 | console.log("[WARN] Using the '--startHeight' option may result in missing data if the token schema is changed. 
Only use it on a one-off basis, if you know what you're doing."); 56 | await Info.updateBlockCheckpoint(startHeight, null); 57 | } 58 | 59 | await SlpdbStatus.saveStatus(); 60 | 61 | // check for confirmed collection schema update 62 | let schema = await Info.getConfirmedCollectionSchema(); 63 | if (!schema || schema !== Config.db.confirmed_schema_version) { 64 | await Info.setConfirmedCollectionSchema(Config.db.confirmed_schema_version); 65 | await Info.checkpointReset(); 66 | console.log("[INFO] Schema version for the confirmed collection was updated. Reseting block checkpoint reset to", (await Info.getBlockCheckpoint()).height) 67 | } 68 | 69 | let lastSynchronized = await Info.getBlockCheckpoint((await Info.getNetwork()) === 'mainnet' ? Config.core.from : Config.core.from_testnet); 70 | console.log("reprocessFrom: ", lastSynchronized.height); 71 | 72 | console.time('[PERF] Indexing Keys'); 73 | let from = (await Info.getNetwork()) === 'mainnet' ? Config.core.from : Config.core.from_testnet; 74 | if (lastSynchronized.height === from) { 75 | console.log('[INFO] Indexing MongoDB With Configured Keys...', new Date()); 76 | await db.confirmedIndex(); 77 | } 78 | console.timeEnd('[PERF] Indexing Keys'); 79 | 80 | console.log('[INFO] Starting to processing SLP Data.', new Date()); 81 | let currentHeight = await RpcClient.getBlockCount(); 82 | tokenManager = new SlpGraphManager(db, currentHeight, network, bit); 83 | bit._slpGraphManager = tokenManager; 84 | PruneStack(tokenManager._tokens); // call instantiates singleton 85 | 86 | console.log('[INFO] Synchronizing SLPDB with BCH blockchain data...', new Date()); 87 | console.time('[PERF] Initial Block Sync'); 88 | await SlpdbStatus.changeStateToStartupBlockSync({ 89 | network, 90 | getSyncdCheckpoint: async () => await Info.getBlockCheckpoint(), 91 | getSlpTokensCount: () => { return tokenManager._tokens.size; } 92 | }); 93 | 94 | // load token validation caches 95 | console.log("Init all tokens"); 96 | try { 97 | 
await tokenManager.initAllTokenGraphs(); 98 | } catch (err) { 99 | if (err.message === "DB schema does not match the current version.") { 100 | await db.drop(); 101 | await Info.checkpointReset(); 102 | throw Error("DB schema does not match the current version, so MongoDb and LevelDb have been reset, please resart SLPDB.") 103 | } else { 104 | throw err; 105 | } 106 | } 107 | console.log("Init all tokens Complete"); 108 | 109 | // sync with full node's block height 110 | await bit.processBlocksForSLP(); 111 | if (bit._exit) { 112 | return; 113 | } 114 | 115 | // sync with mempool and listen for wire notifications 116 | await db.unconfirmedReset(); 117 | await bit.processCurrentMempoolForSLP(); 118 | bit.listenToZmq(); 119 | console.timeEnd('[PERF] Initial Block Sync'); 120 | await bit.removeExtraneousMempoolTxns(); 121 | 122 | tokenManager._updatesQueue.start(); 123 | await SlpdbStatus.changeStateToRunning({ 124 | getSlpMempoolSize: () => tokenManager._bit.slpMempool.size 125 | }); 126 | } 127 | } 128 | 129 | const util = { 130 | reset_to_block: async (block_height: number) => { //592340 131 | let network = (await RpcClient.getBlockchainInfo())!.chain === 'test' ? 
'testnet' : 'mainnet'; 132 | await Info.setNetwork(network); 133 | await Info.updateBlockCheckpoint(block_height, null); 134 | console.log("[INFO] Reset block done."); 135 | process.exit(1); 136 | } 137 | } 138 | 139 | const start = async () => { 140 | let args = process.argv; 141 | if (args.length > 2) { 142 | if(args[2] === "run") { 143 | let options: any = {}; 144 | if(args.includes("--startHeight")) { 145 | let index = args.indexOf("--startHeight"); 146 | console.log("[INFO] Resync from startHeight:", index); 147 | options.startHeight = parseInt(args[index+1]); 148 | } 149 | await daemon.run(options); 150 | } 151 | else if(args[2] === "tip") { 152 | await util.reset_to_block(parseInt(process.argv[3])); 153 | } 154 | } else { 155 | throw Error("No command provided after 'node ./index.js'."); 156 | } 157 | } 158 | 159 | // @ts-ignore 160 | process.on('uncaughtException', async (err: any, origin: any) => { 161 | console.log("[ERROR] uncaughtException", err); 162 | var message; 163 | if(err.stack) 164 | message = `[${(new Date()).toUTCString()}] ${err.stack}`; 165 | else if(err.message) 166 | message = `[${(new Date()).toUTCString()}] ${err.message}`; 167 | else if(typeof message === 'string') 168 | message = `[${(new Date()).toUTCString()}] ${err}`; 169 | else if(typeof message === 'object') 170 | message = `[${(new Date()).toUTCString()}] ${JSON.stringify(err)}`; 171 | else 172 | message = `[${(new Date()).toUTCString()}] SLPDB exited for an unknown reason.` 173 | try { 174 | await SlpdbStatus.logExitReason(message); 175 | console.log(err); 176 | console.log('[INFO] Shutting down SLPDB...', new Date().toString()); 177 | await db.exit(); 178 | } catch(error) { 179 | console.log("[ERROR] Could not log to DB:", error); 180 | } finally { 181 | process.exit(0); 182 | } 183 | }); 184 | 185 | process.on('unhandledRejection', async (err: any, promise: any) => { 186 | console.log("[ERROR] unhandledRejection", err); 187 | var message; 188 | if(err.stack) 189 | message = 
`[${(new Date()).toUTCString()}] ${err.stack}`; 190 | else if(err.message) 191 | message = `[${(new Date()).toUTCString()}] ${err.message}`; 192 | else if(typeof message === 'string') 193 | message = `[${(new Date()).toUTCString()}] ${err}`; 194 | else if(typeof message === 'object') 195 | message = `[${(new Date()).toUTCString()}] ${JSON.stringify(err)}`; 196 | else 197 | message = `[${(new Date()).toUTCString()}] SLPDB exited for an unknown reason.` 198 | try { 199 | await SlpdbStatus.logExitReason(message); 200 | console.log(err); 201 | console.log('[INFO] Shutting down SLPDB...', new Date().toString()); 202 | await db.exit(); 203 | } catch(error) { 204 | console.log("[ERROR] Could not log to DB:", error); 205 | } finally { 206 | process.exit(0); 207 | } 208 | }); 209 | 210 | process.on('SIGINT', async () => { 211 | await shutdown('SIGINT'); 212 | }); 213 | 214 | process.on('SIGTERM', async () => { 215 | await shutdown('SIGTERM'); 216 | }); 217 | 218 | process.on('SIGQUIT', async () => { 219 | await shutdown('SIGQUIT'); 220 | }); 221 | 222 | let shutdown = async (signal: string) => { 223 | console.log(`[INFO] Got ${signal}. 
Graceful shutdown start ${new Date().toISOString()}`); 224 | 225 | try { 226 | bit._zmqItemQueue.pause(); 227 | console.log('[INFO] ZMQ processing stopped.'); 228 | } catch (_) {} 229 | 230 | try { 231 | await bit.stop(); 232 | console.log('[INFO] Block sync processing stopped.'); 233 | } catch(_) {} 234 | 235 | try { 236 | console.log('[INFO] Stopping Token graph processing.'); 237 | 238 | await tokenManager.stop(); 239 | for (let [tokenId, token] of tokenManager._tokens) { 240 | await token.stop() 241 | } 242 | console.log('[INFO] Token graph processing stopped.'); 243 | } catch (_) {} 244 | 245 | try { 246 | await SlpdbStatus.logExitReason(signal); 247 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 248 | await sleep(2000); 249 | console.log('[INFO] Final telemetry update complete.'); 250 | } catch(_) {} 251 | 252 | try { 253 | await db.exit(); 254 | console.log('[INFO] Closed mongo DB connection.'); 255 | } catch (_) {} 256 | 257 | console.log(`[INFO] Graceful shutdown completed ${new Date().toISOString()}`); 258 | process.exit(); 259 | } 260 | 261 | start(); 262 | -------------------------------------------------------------------------------- /info.ts: -------------------------------------------------------------------------------- 1 | const level = require('level'); 2 | var kv = level('./_leveldb'); 3 | import * as crypto from 'crypto'; 4 | 5 | import { Config } from './config'; 6 | 7 | /** 8 | * Return the last synchronized checkpoint 9 | */ 10 | 11 | export interface ChainSyncCheckpoint { 12 | height: number; 13 | hash: string|null; 14 | hadReorg?: boolean; 15 | } 16 | 17 | export module Info { 18 | 19 | export const setNetwork = async function(network: string): Promise { 20 | try { 21 | if(network === 'testnet') 22 | kv = level('./_leveldb_testnet'); 23 | await kv.put('network', network); 24 | } catch(_) { } 25 | } 26 | 27 | export const getNetwork = async function(): Promise { 28 | try { 29 | return await 
kv.get('network'); 30 | } catch(_) { 31 | throw Error("Cannot get network"); 32 | } 33 | } 34 | 35 | export const getTelemetryName = async function(): Promise { 36 | if(Config.telemetry.advertised_host) { 37 | return Config.telemetry.advertised_host; 38 | } else { 39 | try { 40 | return await kv.get('telname'); 41 | } catch(_) { 42 | let name = 'unknown-' + Math.floor(Math.random()*100000).toFixed(0); 43 | await kv.put('telname', name); 44 | return name; 45 | } 46 | } 47 | } 48 | 49 | export const setTelemetrySecret = async function(secret: string): Promise { 50 | if(Config.telemetry.secret) 51 | await kv.put('telsecret', Config.telemetry.secret); 52 | else if(secret) 53 | await kv.put('telsecret', secret); 54 | } 55 | 56 | export const getTelemetrySecret = async function(): Promise { 57 | try { 58 | return await kv.get('telsecret'); 59 | } catch(_) { 60 | return ''; 61 | } 62 | } 63 | 64 | export const getTelemetrySecretHash = async function(): Promise { 65 | let secret; 66 | if(Config.telemetry.secret) 67 | secret = Config.telemetry.secret; 68 | else { 69 | try { 70 | secret = await kv.get('telsecret'); 71 | } catch(_) { 72 | return null; 73 | } 74 | } 75 | let hash = crypto.createHash('sha256'); 76 | return hash.update(Buffer.from(secret, 'hex')).digest().toString('hex').substring(0, 40); 77 | } 78 | 79 | export const getBlockCheckpoint = async function(fallback_index?: number): Promise { 80 | let value: number|null, hash: string|null; 81 | try { 82 | value = parseInt(await kv.get('tip')); 83 | } catch(_) { value = null; } 84 | 85 | try { 86 | hash = await kv.get(value + '-hash'); 87 | } catch(_) { hash = null; } 88 | 89 | if (value !== null && hash) { 90 | console.log("[INFO] Block checkpoint retrieved: ", value, hash); 91 | return { height: value!, hash: hash } 92 | } else if (value !== null) { 93 | console.log("[INFO] Block checkpoint retrieved without block hash:", value); 94 | return { height: value!, hash: null } 95 | } else if(fallback_index !== undefined 
&& fallback_index >= 0) { 96 | console.log("[INFO] Block checkpoint not found, falling back to block", fallback_index); 97 | return { height: fallback_index, hash: null } 98 | } 99 | throw Error("Could not retrieve checkpoint from storage for block: " + value); 100 | } 101 | 102 | export const updateBlockCheckpoint = async function(index: number, hash: string|null): Promise { 103 | try { 104 | await kv.put('tip', index); 105 | if(hash) 106 | await kv.put(index + '-hash', hash); 107 | console.log("[INFO] Block checkpoint updated to:", index, hash); 108 | } catch (err) { 109 | console.log('[ERROR] updateBlockCheckpoint error:', err) 110 | } 111 | } 112 | 113 | export const checkpointReset = async function() { 114 | let start = (await Info.getNetwork()) === 'mainnet' ? Config.core.from : Config.core.from_testnet; 115 | await Info.updateBlockCheckpoint(start, null); 116 | } 117 | 118 | export const getCheckpointHash = async function(index: number) { 119 | try { 120 | return await kv.get(index + '-hash'); 121 | } catch(_) {} 122 | return null 123 | } 124 | 125 | export const getRecentBlocks = async (currentBlock: { hash: string, height: number }): Promise<{ hash: string, height: number }[]> => { 126 | let recentBlocks: { hash: string, height: number }[] = []; 127 | let tip = (await Info.getBlockCheckpoint()).height; 128 | let hash = await Info.getCheckpointHash(tip); 129 | while(hash && recentBlocks.length < 9) { 130 | recentBlocks.unshift({ hash, height: tip }); 131 | hash = await Info.getCheckpointHash(--tip); 132 | } 133 | recentBlocks.push({ hash: currentBlock.hash, height: currentBlock.height }); 134 | return recentBlocks; 135 | } 136 | 137 | // export const deleteTip = async function() { 138 | // try { 139 | // await kv.del('tip'); 140 | // console.log("[INFO] Block checkpoint deleted."); 141 | // } catch(err) { 142 | // console.log('[ERROR] deleteTip err', err) 143 | // } 144 | // } 145 | 146 | export const deleteBlockCheckpointHash = async function (index: 
number) { 147 | try { 148 | await kv.del(index + '-hash'); 149 | console.log("[INFO] Block hash record deleted for", index); 150 | } catch(err) { 151 | console.log('[ERROR] deleteTip err', err) 152 | } 153 | } 154 | 155 | export const getConfirmedCollectionSchema = async function(): Promise { 156 | try { 157 | return parseInt(await kv.get('confirmedSchemaVersion')); 158 | } catch(_) { } 159 | return null; 160 | } 161 | 162 | export const setConfirmedCollectionSchema = async function(version: number) { 163 | return await kv.put('confirmedSchemaVersion', version); 164 | } 165 | 166 | // Used for future lazy loading -- this is in commented code and not currently utilized 167 | export const getLastBlockSeen = async function(tokenId: string): Promise { 168 | try { 169 | return parseInt(await kv.get(`lastSeen-${tokenId}`)); 170 | } catch(_) { 171 | return null; 172 | } 173 | } 174 | 175 | // Used for future lazy loading -- this is in commented code and not currently utilized 176 | export const setLastBlockSeen = async function(tokenId: string, block: number) { 177 | return await kv.put(`lastSeen-${tokenId}`, block); 178 | } 179 | } -------------------------------------------------------------------------------- /interfaces.ts: -------------------------------------------------------------------------------- 1 | import { SlpTransactionType, SlpTransactionDetails } from 'slpjs'; 2 | import { Decimal128 } from 'mongodb'; 3 | import BigNumber from 'bignumber.js'; 4 | 5 | export type cashAddr = string; 6 | 7 | export interface GraphTxn { 8 | details: SlpTransactionDetails; 9 | outputs: GraphTxnOutput[]; 10 | inputs: GraphTxnInput[]; 11 | prevPruneHeight: number|null; 12 | blockHash: Buffer|null; 13 | } 14 | 15 | export interface GraphTxnOutput { 16 | address: string|null; 17 | vout: number; 18 | bchSatoshis: number|null; 19 | slpAmount: BigNumber; 20 | spendTxid: string | null; 21 | status: TokenUtxoStatus|BatonUtxoStatus; 22 | invalidReason: string | null; 23 | } 24 | 25 | 
/** One SLP input consumed by a graph transaction; slpAmount is in base units. */
export interface GraphTxnInput {
    txid: string;
    vout: number;
    slpAmount: BigNumber;
    address: string;
    bchSatoshis: number;
}

/** Aggregate token statistics persisted with each TokenDBObject. */
export interface TokenStatsDbo {
    block_created: number|null;
    approx_txns_since_genesis: number|null;
}

/** Token-level document stored in the tokens collection. */
export interface TokenDBObject {
    schema_version: number;
    tokenDetails: SlpTransactionDetailsDbo;
    tokenStats: TokenStatsDbo;
    mintBatonUtxo: string;
    mintBatonStatus: TokenBatonStatus;
    lastUpdatedBlock: number;
    nftParentId?: string;
    _pruningState: TokenPruneStateDbo;
}

/** Counters needed to resume pruning after a restart (see GraphMap). */
export interface TokenPruneStateDbo {
    pruneHeight: number;
    sendCount: number;
    mintCount: number;
}

/** Graph-transaction document stored in the graphs collection. */
export interface GraphTxnDbo {
    tokenDetails: {
        tokenIdHex: string;
        nftGroupIdHex?: string;
    };
    graphTxn: GraphTxnDetailsDbo;
}

// Db representation of SlpTransactionDetails; amounts are stored as
// Decimal128 display values (already divided by 10^decimals).
export interface SlpTransactionDetailsDbo {
    transactionType: SlpTransactionType;
    tokenIdHex: string;
    versionType: number;
    timestamp: string | null;
    timestamp_unix: number | null;
    symbol: string;
    name: string;
    documentUri: string;
    documentSha256Hex: string | null;
    decimals: number;
    containsBaton: boolean;
    batonVout: number | null;
    genesisOrMintQuantity: Decimal128 | null;
    sendOutputs: Decimal128[] | null;
}

export interface GraphTxnDetailsDbo {
    txid: string;
    details: SlpTransactionDetailsDbo;
    outputs: GraphTxnOutputDbo[];
    inputs: GraphTxnInputDbo[];
    _blockHash: Buffer | null;
    _pruneHeight: number | null;
}

export interface GraphTxnOutputDbo {
    address: string;
    vout: number;
    bchSatoshis: number;
    slpAmount: Decimal128;
    spendTxid: string | null;
    status: TokenUtxoStatus | BatonUtxoStatus;
    invalidReason: string | null;
}
export interface GraphTxnInputDbo {
    txid: string;
    vout: number;
    slpAmount: Decimal128;
    address: string;
    bchSatoshis: number;
}

/** Lifecycle status of a token-quantity UTXO. */
export enum TokenUtxoStatus {
    "UNSPENT" = "UNSPENT",
    "SPENT_SAME_TOKEN" = "SPENT_SAME_TOKEN",
    "SPENT_WRONG_TOKEN" = "SPENT_WRONG_TOKEN",
    "SPENT_NOT_IN_SEND" = "SPENT_NOT_IN_SEND",
    "SPENT_INVALID_SLP" = "SPENT_INVALID_SLP",
    "MISSING_BCH_VOUT" = "MISSING_BCH_VOUT",
    "EXCESS_INPUT_BURNED" = "EXCESS_INPUT_BURNED",
}

/** Lifecycle status of a minting-baton UTXO. */
export enum BatonUtxoStatus {
    "BATON_UNSPENT" = "BATON_UNSPENT",
    "BATON_SPENT_IN_MINT" = "BATON_SPENT_IN_MINT",
    "BATON_SPENT_NOT_IN_MINT" = "BATON_SPENT_NOT_IN_MINT",
    "BATON_SPENT_INVALID_SLP" = "BATON_SPENT_INVALID_SLP",
    "BATON_MISSING_BCH_VOUT" = "BATON_MISSING_BCH_VOUT"
}

/** Overall status of a token's minting baton. */
export enum TokenBatonStatus {
    "NEVER_CREATED" = "NEVER_CREATED",
    "ALIVE" = "ALIVE",
    "DEAD_BURNED" = "DEAD_BURNED",
    "DEAD_ENDED" = "DEAD_ENDED",
    "UNKNOWN" = "UNKNOWN"
}
--------------------------------------------------------------------------------
/migrate-mongo-config.js:
--------------------------------------------------------------------------------
// Configuration consumed by the migrate-mongo CLI (see package.json "migrate" script).
const Config = require('./config').Config;

const config = {
    mongodb: {
        url: Config.db.url,
        databaseName: Config.db.name,

        options: {
            useNewUrlParser: true // removes a deprecation warning when connecting
            // connectTimeoutMS: 3600000, // increase connection timeout to 1 hour
            // socketTimeoutMS: 3600000, // increase socket timeout to 1 hour
        }
    },

    // The migrations dir, can be a relative or absolute path. Only edit this when really necessary.
    migrationsDir: "migrations",

    // The mongodb collection where the applied changes are stored. Only edit this when really necessary.
19 | changelogCollectionName: "changelog" 20 | }; 21 | 22 | //Return the config as a promise 23 | module.exports = config; 24 | -------------------------------------------------------------------------------- /migrations/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/simpleledger/SLPDB/9a85b1bd381a82e6d2094d6936774ee5a4503de3/migrations/.gitkeep -------------------------------------------------------------------------------- /notifications.ts: -------------------------------------------------------------------------------- 1 | import { Config } from "./config"; 2 | import * as zmq from 'zeromq'; 3 | import { GrpcClient, TransactionNotification, BlockNotification } from "grpc-bchrpc-node"; 4 | import { ClientReadableStream } from "grpc"; 5 | 6 | export class Notifications { 7 | useGrpc: boolean | undefined; 8 | sock: any | undefined; 9 | grpc: GrpcClient | undefined; 10 | onRawTxnCb: Function; 11 | onBlockHashCb: Function; 12 | constructor({ onRawTxnCb, onBlockHashCb, useGrpc }: { onRawTxnCb: (message:Buffer)=>any, onBlockHashCb: (message:Buffer)=>any, useGrpc?: boolean }) { 13 | this.onRawTxnCb = onRawTxnCb; 14 | this.onBlockHashCb = onBlockHashCb; 15 | if(useGrpc) { 16 | this.useGrpc = useGrpc; 17 | if(Boolean(Config.grpc.url) && Config.grpc.certPath) 18 | this.grpc = new GrpcClient({ url: Config.grpc.url, rootCertPath: Config.grpc.certPath }); 19 | else 20 | this.grpc = new GrpcClient({ url: Config.grpc.url }); 21 | this.grpcSubscribe(); 22 | } else { 23 | this.sock = zmq.socket('sub'); 24 | this.sock.connect('tcp://' + Config.zmq.incoming.host + ':' + Config.zmq.incoming.port); 25 | this.sock.subscribe('rawtx'); 26 | this.sock.subscribe('hashblock'); 27 | this.sock.on('message', async function(topic: string, message: Buffer) { 28 | if (topic.toString() === 'rawtx') { 29 | await onRawTxnCb(message); 30 | } else if(topic.toString() === 'hashblock') { 31 | await onBlockHashCb(message); 32 
| } 33 | }) 34 | } 35 | } 36 | 37 | async grpcSubscribe() { 38 | let self = this; 39 | if(this.grpc) { 40 | let txnstream: ClientReadableStream; 41 | txnstream = await this.grpc.subscribeTransactions({ includeMempoolAcceptance: true, includeSerializedTxn: true }) 42 | txnstream.on('data', function(data: TransactionNotification) { 43 | self.onRawTxnCb(Buffer.from(data.getSerializedTransaction_asU8())) 44 | }) 45 | let blockstream: ClientReadableStream; // damnit.. i hate blockstream 46 | blockstream = await this.grpc.subscribeBlocks({}); 47 | blockstream.on('data', function(data: BlockNotification){ 48 | self.onBlockHashCb(Buffer.from(data.getBlockInfo()!.getHash_asU8().reverse())) 49 | }) 50 | } 51 | } 52 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "slpdb", 3 | "private": true, 4 | "version": "1.0.0", 5 | "description": "Indexer for the Simple Ledger Protocol with real-time validation notifications.", 6 | "main": "index.js", 7 | "scripts": { 8 | "test": "mocha --timeout 1000000 --exit --require mocha-steps", 9 | "start": "tsc && node --max_old_space_size=8192 index run", 10 | "lint": "./node_modules/.bin/eslint .", 11 | "migrate": "./node_modules/.bin/migrate-mongo" 12 | }, 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/simpleledger/SLPDB.git" 16 | }, 17 | "keywords": [ 18 | "SLP" 19 | ], 20 | "license": "MIT", 21 | "bugs": { 22 | "url": "https://github.com/simpleledger/SLPDB/issues" 23 | }, 24 | "homepage": "https://github.com/simpleledger/SLPDB#readme", 25 | "dependencies": { 26 | "@types/dotenv": "^6.1.0", 27 | "@types/ip": "^1.1.0", 28 | "@types/js-yaml": "^3.12.5", 29 | "@types/mongodb": "^3.5.27", 30 | "@types/node": "^10.17.35", 31 | "@types/p-limit": "2.0.0", 32 | "@types/p-queue": "3.1.0", 33 | "@types/zeromq": "^4.6.3", 34 | "bcash": "^1.1.1", 35 | "bignumber.js": "^9.0.0", 
36 | "bitbox-sdk": "8.2.1", 37 | "bitcoin-rpc-promise-retry": "^1.3.0", 38 | "bitcore-lib-cash": "8.22.2", 39 | "bufio": "^1.0.7", 40 | "cashaddrjs-slp": "^0.2.12", 41 | "dotenv": "^6.0.0", 42 | "fountainhead-core": "^0.0.12", 43 | "grpc-bchrpc-node": "^0.10.2", 44 | "iconv-lite": "^0.4.24", 45 | "ip": "^1.1.5", 46 | "js-yaml": "^3.14.0", 47 | "level": "^5.0.1", 48 | "migrate-mongo": "^5.0.1", 49 | "mingo": "^2.5.3", 50 | "mongodb": "^3.5.6", 51 | "node-jq": "1.6.0", 52 | "os-utils": "0.0.14", 53 | "p-limit": "2.0.0", 54 | "p-queue": "3.1.0", 55 | "slpjs": "^0.27.8", 56 | "synchronized-promise": "0.2.0", 57 | "traverse": "^0.6.6", 58 | "ts-node": "^7.0.1", 59 | "typescript": "^3.9.7", 60 | "zeromq": "^5.2.0" 61 | }, 62 | "devDependencies": { 63 | "@types/mocha": "^5.1.1", 64 | "eslint": "^5.6.1", 65 | "mocha": "^7.2.0", 66 | "mocha-steps": "^1.3.0", 67 | "source-map-support": "^0.5.19" 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /patches/1.bitcoind-rpc.patch: -------------------------------------------------------------------------------- 1 | diff --git a/node_modules/bitcoin-rpc-promise-retry/node_modules/bitcoind-rpc/lib/index.js b/node_modules/bitcoin-rpc-promise-retry/node_modules/bitcoind-rpc/lib/index.js 2 | index d28d541..bbf639a 100644 3 | --- a/node_modules/bitcoin-rpc-promise-retry/node_modules/bitcoind-rpc/lib/index.js 4 | +++ b/node_modules/bitcoin-rpc-promise-retry/node_modules/bitcoind-rpc/lib/index.js 5 | @@ -160,7 +160,8 @@ RpcClient.prototype.batch = function(batchCallback, resultCallback) { 6 | RpcClient.callspec = { 7 | abandonTransaction: 'str', 8 | addMultiSigAddress: '', 9 | - addNode: '', 10 | + addNode: 'str str', 11 | + disconnectNode: 'str', 12 | backupWallet: '', 13 | bumpFee: 'str', 14 | createMultiSig: '', 15 | -------------------------------------------------------------------------------- /prunestack.ts: 
-------------------------------------------------------------------------------- 1 | import { SlpTokenGraph } from "./slptokengraph"; 2 | import { CacheMap } from "./cache"; 3 | 4 | type TokenId = string; 5 | export type PruneStack = _PruningStack; 6 | class _PruningStack { 7 | public static Instance(tokenGraphs?: Map) { 8 | return this._instance || (this._instance = new _PruningStack(tokenGraphs)); 9 | } 10 | private static _instance: _PruningStack; 11 | private _stack = new CacheMap>(10); 12 | private _graphs?: Map; 13 | private constructor(tokenGraphs?: Map) { 14 | if (!this._graphs) { 15 | if (!tokenGraphs) { 16 | throw Error("Must init PruneStack with token graphs object."); 17 | } 18 | this._graphs = tokenGraphs; 19 | let i = 0; 20 | while (this._stack.length !== 10) { 21 | this._stack.set(i++, new Map()); 22 | } 23 | } 24 | } 25 | 26 | // This should be at start of block crawl(). 27 | public newBlock(blockIndex: number): IterableIterator|null { 28 | let nextBlock = Array.from(this._stack.keys())[0]; 29 | let pruneMap = this._stack.get(nextBlock); 30 | if (!pruneMap) { 31 | console.log(`[WARN] No pruneMap for ${nextBlock}.`); 32 | console.log(`[WARN] PruneStack Keys: ${Array.from(this._stack.keys())} before adding ${blockIndex}`); 33 | this._stack.set(blockIndex, new Map()); 34 | return null; 35 | } 36 | console.log(`[INFO] Prune stack at ${blockIndex}, about to pop ${nextBlock}.`); 37 | this._considerPruningMap(pruneMap, blockIndex); 38 | this._stack.set(blockIndex, new Map()); 39 | return pruneMap.keys(); 40 | } 41 | 42 | // This should be internal to the SlpTokenGraph. 
43 | public addGraphTxidToPruningStack(blockIndex: number, tokenId: string, txid: string) { 44 | if (!this._stack.has(blockIndex)) { 45 | throw Error("Prune stack implementation error, must call 'newBlock' first."); 46 | } 47 | let stackItem = this._stack.get(blockIndex); 48 | if (!stackItem!.has(tokenId)) { 49 | stackItem!.set(tokenId, { txids: []}); 50 | } 51 | let tokenTxids = stackItem!.get(tokenId)!; 52 | tokenTxids.txids.push(txid); 53 | } 54 | 55 | private _considerPruningMap(pruneMap: Map, pruneHeight: number) { 56 | for (let [tokenId, tokenTxids] of pruneMap) { 57 | let graph = this._graphs!.get(tokenId)!; 58 | graph.considerTxidsForPruning(tokenTxids.txids, pruneHeight); 59 | } 60 | } 61 | } 62 | 63 | // accessor to a singleton stack for pruning 64 | export const PruneStack = _PruningStack.Instance; 65 | -------------------------------------------------------------------------------- /regtest/.dockerignore: -------------------------------------------------------------------------------- 1 | .env 2 | node_modules 3 | .idea 4 | .DS_STORE 5 | log 6 | 7 | regtest 8 | test -------------------------------------------------------------------------------- /regtest/Dockerfile.nodejs: -------------------------------------------------------------------------------- 1 | # Start from a Debian image with the latest version of Go installed 2 | # and a workspace (GOPATH) configured at /go. 3 | FROM node:14.16.0-slim 4 | 5 | RUN apt-get update && apt-get install -y build-essential 6 | 7 | # Copy the local package files to the container's workspace. 8 | ADD . /usr/src/SLPDB 9 | 10 | # Switch to the correct working directory. 11 | WORKDIR /usr/src/SLPDB/regtest 12 | 13 | # Create the data volume. 
14 | VOLUME ["/data"] 15 | -------------------------------------------------------------------------------- /regtest/README.md: -------------------------------------------------------------------------------- 1 | # Regtest tests 2 | 3 | Tests are provided for SLPDB using a docker compose network that has two bitcoin node containers, one mongo db container, and one SLPDB container. Tests are run using mocha and cover the expected data written to MongoDB for various scenarios that would be expected in a live network. Continuous integration tests have been setup using Travis CI (see `../.travis.yml`). 4 | 5 | ## Run the tests 6 | 7 | `$ cd ./regtest && ./test.sh` -------------------------------------------------------------------------------- /regtest/_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | git apply ./patches/* 4 | 5 | export RPC1_HOST="bitcoin1" 6 | export RPC1_PORT="18443" 7 | export RPC2_HOST="bitcoin2" 8 | export RPC2_PORT="18443" 9 | export MONGO_HOST="mongo" 10 | export MONGO_PORT="27017" 11 | 12 | npm test -------------------------------------------------------------------------------- /regtest/bitcoind/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:stretch-slim 2 | 3 | RUN groupadd -r bitcoin && useradd -r -m -g bitcoin bitcoin 4 | 5 | RUN set -ex \ 6 | && apt-get update \ 7 | && apt-get install -qq --no-install-recommends ca-certificates dirmngr gosu gpg wget \ 8 | && rm -rf /var/lib/apt/lists/* 9 | 10 | ENV BITCOIN_VERSION 23.0.0 11 | ENV BITCOIN_URL https://github.com/bitcoin-cash-node/bitcoin-cash-node/releases/download/v23.0.0/bitcoin-cash-node-23.0.0-x86_64-linux-gnu.tar.gz 12 | ENV BITCOIN_SHA256 474d53ba3dc10cee20da4c1e8d77e31a6b3c54c805f72eab7d705c9211c879bd 13 | 14 | # install bitcoin binaries 15 | RUN set -ex \ 16 | && cd /tmp \ 17 | && wget -qO bitcoin.tar.gz "$BITCOIN_URL" \ 18 | && echo "$BITCOIN_SHA256 
bitcoin.tar.gz" | sha256sum -c - \ 19 | && tar -xzvf bitcoin.tar.gz -C /usr/local --strip-components=1 --exclude=*-qt \ 20 | && rm -rf /tmp/* 21 | 22 | # create data directory 23 | ENV BITCOIN_DATA /data 24 | RUN mkdir "$BITCOIN_DATA" \ 25 | && chown -R bitcoin:bitcoin "$BITCOIN_DATA" \ 26 | && ln -sfn "$BITCOIN_DATA" /home/bitcoin/.bitcoin \ 27 | && chown -h bitcoin:bitcoin /home/bitcoin/.bitcoin 28 | VOLUME /data 29 | 30 | COPY docker-entrypoint.sh /entrypoint.sh 31 | ENTRYPOINT ["/entrypoint.sh"] 32 | 33 | EXPOSE 8332 8333 18332 18333 34 | CMD ["bitcoind"] 35 | -------------------------------------------------------------------------------- /regtest/bitcoind/bitcoin.conf: -------------------------------------------------------------------------------- 1 | # Must set txindex=1 so Bitcoin keeps the full index 2 | txindex=1 3 | 4 | regtest=1 5 | rpcport=18443 # is 18443 for regtest 6 | # [rpc] 7 | # Accept command line and JSON-RPC commands. 8 | server=1 9 | rest=1 10 | # Default Username and Password for JSON-RPC connections 11 | # BitDB uses these values by default, but if you can change the settings 12 | # By setting the config.json file in BitDB folder 13 | rpcuser=bitcoin 14 | rpcpassword=password 15 | # If you want to allow remote JSON-RPC access 16 | rpcallowip=0.0.0.0/0 17 | # [wallet] 18 | disablewallet=0 19 | # [ZeroMQ] 20 | # ZeroMQ messages power the realtime BitDB crawler 21 | # so it's important to set the endpoint 22 | zmqpubrawtx=tcp://*:28332 23 | zmqpubrawblock=tcp://*:28332 24 | zmqpubhashtx=tcp://*:28332 25 | zmqpubhashblock=tcp://*:28332 26 | # BitDB makes heavy use of JSON-RPC so it's set to a higher number 27 | # But you can tweak this number as you want 28 | rpcworkqueue=5120 29 | -------------------------------------------------------------------------------- /regtest/bitcoind/docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -e 3 | 4 | if [[ "$1" == "bitcoin-cli" 
|| "$1" == "bitcoin-tx" || "$1" == "bitcoind" || "$1" == "test_bitcoin" ]]; then 5 | mkdir -p "$BITCOIN_DATA" 6 | 7 | if [[ ! -s "$BITCOIN_DATA/bitcoin.conf" ]]; then 8 | cat <<-EOF > "$BITCOIN_DATA/bitcoin.conf" 9 | printtoconsole=1 10 | rpcallowip=::/0 11 | rpcpassword=${BITCOIN_RPC_PASSWORD:-password} 12 | rpcuser=${BITCOIN_RPC_USER:-bitcoin} 13 | EOF 14 | chown bitcoin:bitcoin "$BITCOIN_DATA/bitcoin.conf" 15 | fi 16 | 17 | # ensure correct ownership and linking of data directory 18 | # we do not update group ownership here, in case users want to mount 19 | # a host directory and still retain access to it 20 | chown -R bitcoin "$BITCOIN_DATA" 21 | ln -sfn "$BITCOIN_DATA" /home/bitcoin/.bitcoin 22 | chown -h bitcoin:bitcoin /home/bitcoin/.bitcoin 23 | 24 | exec gosu bitcoin "$@" 25 | fi 26 | 27 | exec "$@" 28 | -------------------------------------------------------------------------------- /regtest/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.5" 2 | services: 3 | bitcoin1: # the node connected to SLPDB via RPC and zmq 4 | image: "bchn" 5 | restart: always 6 | build: 7 | context: "./bitcoind" 8 | command: "bitcoind" 9 | healthcheck: 10 | test: ["CMD", "/entrypoint.sh", "bitcoin-cli", "getblockchaininfo"] 11 | expose: 12 | - "18333" 13 | ports: 14 | - "18443:18443" 15 | - "28332:28332" 16 | volumes: 17 | - ./bitcoind/bitcoin.conf:/data/bitcoin.conf 18 | bitcoin2: # the node not connected to SLPDB 19 | image: "bchn" 20 | restart: always 21 | build: 22 | context: "./bitcoind" 23 | command: "bitcoind" 24 | healthcheck: 25 | test: ["CMD", "/entrypoint.sh", "bitcoin-cli", "getblockchaininfo"] 26 | expose: 27 | - "18333" 28 | ports: 29 | - "18444:18443" 30 | volumes: 31 | - ./bitcoind/bitcoin.conf:/data/bitcoin.conf 32 | depends_on: 33 | - bitcoin1 34 | mongo: 35 | image: mongo:4.4 36 | restart: always 37 | # entrypoint: [ "/usr/bin/mongod", "--bind_ip_all", "--replSet", "devrs" ] 38 | healthcheck: 
39 | test: echo 'db.runCommand("ping").ok' | mongo --quiet 1 40 | interval: 10s 41 | timeout: 10s 42 | retries: 5 43 | start_period: 40s 44 | ports: 45 | - "26017:27017" 46 | depends_on: 47 | - bitcoin1 48 | - bitcoin2 49 | slpdb: 50 | build: 51 | context: ".." 52 | dockerfile: "./regtest/slpdb/Dockerfile" 53 | image: slpdb 54 | restart: always 55 | depends_on: 56 | - mongo 57 | ports: 58 | - "27339:27339" 59 | volumes: 60 | - .:/usr/src/SLPDB/regtest 61 | - ..:/usr/src/SLPDB/test 62 | -------------------------------------------------------------------------------- /regtest/slpdb/.env.regtest: -------------------------------------------------------------------------------- 1 | rpc_protocol='http' 2 | rpc_user='bitcoin' 3 | rpc_pass='password' 4 | rpc_host='bitcoin1' 5 | rpc_port='18443' 6 | rpc_limit='150' 7 | db_name='slpdb' 8 | db_url='mongodb://mongo:26017' 9 | core_from='543375' 10 | core_from_testnet='0' 11 | core_slp_mempool_ignore_length='1000000' 12 | zmq_incoming_host='bitcoin1' 13 | zmq_incoming_port='28332' 14 | zmq_outgoing_host='0.0.0.0' 15 | zmq_outgoing_port='27339' 16 | zmq_outgoing_enable=1 17 | telemetry_host='status.slpdb.io' 18 | telemetry_advertised_host="James' MBP" 19 | telemetry_advertised_graph_search_host='' 20 | telemetry_advertised_slp_socket_host='' 21 | telemetry_secret='7da8a6ac0a1f03f24ae852d6769ac27e9c411ff3ba7d8ec1feae81f81924e0ac' -------------------------------------------------------------------------------- /regtest/slpdb/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:12-alpine 2 | 3 | RUN apk update && apk upgrade && \ 4 | apk add --no-cache bash git openssh 5 | 6 | RUN apk add --no-cache --virtual build-dependencies python --update py-pip \ 7 | build-base python-dev make automake autoconf libtool \ 8 | && pip install --upgrade pip 9 | 10 | RUN mkdir -p /usr/src/SLPDB 11 | 12 | WORKDIR /usr/src 13 | 14 | ADD . 
/usr/src/SLPDB 15 | WORKDIR /usr/src/SLPDB 16 | RUN npm i 17 | 18 | COPY ./regtest/slpdb/.env.regtest .env 19 | COPY ./regtest/slpdb/filters.regtest.yml filters.yml 20 | COPY ./regtest/slpdb/docker-entrypoint.sh ./entrypoint.sh 21 | 22 | ENTRYPOINT [ "./entrypoint.sh" ] 23 | CMD [ "run" ] 24 | -------------------------------------------------------------------------------- /regtest/slpdb/docker-entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "Compiling..." 4 | ./node_modules/typescript/bin/tsc 5 | echo "Compiling done." 6 | 7 | echo "Checking for DB migrations..." 8 | export db_url=mongodb://mongo:27017 9 | ./node_modules/migrate-mongo/bin/migrate-mongo.js up 10 | echo "Finished DB migrations." 11 | 12 | echo "node --max_old_space_size=8192 ./index.js $@" 13 | node --max_old_space_size=8192 ./index.js "$@" 14 | -------------------------------------------------------------------------------- /regtest/slpdb/filters.regtest.yml: -------------------------------------------------------------------------------- 1 | tokens: 2 | # - name: USDH 3 | # type: include-single 4 | # info: c4b0d62156b3fa5c8f3436079b5394f7edc1bef5dc1cd2f9d0c4d46f82cca479 5 | -------------------------------------------------------------------------------- /regtest/test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | echo "INFO: Cleaning up from previous runs..." 4 | docker-compose down 5 | 6 | echo "INFO: Creating regtest network from source" 7 | docker-compose up -d 8 | 9 | echo "INFO: Running mocha tests in docker" 10 | docker-compose exec slpdb ./regtest/_test.sh 11 | exit_code=$? 12 | 13 | if [ $exit_code -eq 0 ]; then 14 | echo "INFO: All regtest network tests pass (code: $exit_code)" 15 | else 16 | echo "ERROR: One or more regtest network tests failed (code: $exit_code)" 17 | fi 18 | 19 | echo "INFO: Cleaning up." 
20 | docker-compose down 21 | 22 | exit $exit_code 23 | -------------------------------------------------------------------------------- /rpc.ts: -------------------------------------------------------------------------------- 1 | import { Config } from "./config"; 2 | import { TxOutResult, BlockchainInfoResult, BlockHeaderResult, MempoolInfoResult } from "bitcoin-com-rest"; 3 | import { GrpcClient, BlockInfo, GetUnspentOutputResponse } from "grpc-bchrpc-node"; 4 | import { CacheMap } from "./cache"; 5 | 6 | const _rpcClient = require('bitcoin-rpc-promise-retry'); 7 | const connectionString = 'http://' + Config.rpc.user + ':' + Config.rpc.pass + '@' + Config.rpc.host + ':' + Config.rpc.port 8 | 9 | let grpc: GrpcClient; 10 | let rpc: any; 11 | let rpc_retry: any; 12 | 13 | export class RpcClient { 14 | static transactionCache = new CacheMap(500000); 15 | static useGrpc: boolean | undefined; 16 | constructor({ useGrpc }: { useGrpc?: boolean }) { 17 | if (useGrpc) { 18 | RpcClient.useGrpc = useGrpc; 19 | if (Boolean(Config.grpc.url) && Config.grpc.certPath) { 20 | grpc = new GrpcClient({ url: Config.grpc.url, rootCertPath: Config.grpc.certPath }); 21 | } else { 22 | grpc = new GrpcClient({ url: Config.grpc.url }); 23 | } 24 | } else { 25 | rpc = new _rpcClient(connectionString, { maxRetries: 0 }); 26 | rpc_retry = new _rpcClient(connectionString, { maxRetries: Config.rpc.rpcMaxRetries, timeoutMs: Config.rpc.rpcTimeoutMs, retryDelayMs: Config.rpc.rpcRetryDelayMs }); 27 | } 28 | } 29 | 30 | static loadTxnIntoCache(txid: string, txnBuf: Buffer) { 31 | RpcClient.transactionCache.set(txid, txnBuf); 32 | } 33 | 34 | static async getBlockCount(): Promise { 35 | if (this.useGrpc) { 36 | console.log("[INFO] gRPC: getBlockchainInfo"); 37 | return (await grpc.getBlockchainInfo()).getBestHeight(); 38 | } 39 | console.log("[INFO] JSON RPC: getBlockCount") 40 | return await rpc_retry.getBlockCount(); 41 | } 42 | 43 | static async getBlockchainInfo(): Promise { 44 | if 
(RpcClient.useGrpc) { 45 | console.log("[INFO] gRPC: getBlockchainInfo"); 46 | let info = await grpc.getBlockchainInfo(); 47 | return { 48 | chain: info.getBitcoinNet() ? 'test' : 'main', 49 | blocks: info.getBestHeight(), 50 | headers: 0, 51 | bestblockhash: Buffer.from(info.getBestBlockHash_asU8().reverse()).toString('hex'), 52 | difficulty: info.getDifficulty(), 53 | mediantime: info.getMedianTime(), 54 | verificationprogress: 0, 55 | chainwork: '', 56 | pruned: false, 57 | softforks: [], 58 | bip9_softforks: [] 59 | } 60 | } 61 | console.log("[INFO] JSON RPC: getBlockchainInfo") 62 | return await rpc_retry.getBlockchainInfo(); 63 | } 64 | 65 | static async getBlockHash(block_index: number, asBuffer=false): Promise { 66 | if (RpcClient.useGrpc) { 67 | console.log("[INFO] gRPC: getBlockInfo (for getBlockHash)"); 68 | let hash = Buffer.from((await grpc.getBlockInfo({ index: block_index })).getInfo()!.getHash_asU8().reverse()); 69 | if(asBuffer) { 70 | return hash; 71 | } 72 | return hash.toString('hex'); 73 | } 74 | console.log("[INFO] JSON RPC: getBlockHash", block_index); 75 | let hash = await rpc.getBlockHash(block_index); 76 | if (asBuffer) { 77 | return Buffer.from(hash, 'hex'); 78 | } 79 | return hash; 80 | } 81 | 82 | static async getRawBlock(hash: string): Promise { 83 | if (RpcClient.useGrpc) { 84 | console.log("[INFO] gRPC: getRawBlock"); 85 | return Buffer.from((await grpc.getRawBlock({ hash: hash, reversedHashOrder: true })).getBlock_asU8()).toString('hex') 86 | } 87 | return await rpc_retry.getBlock(hash, 0); 88 | } 89 | 90 | static async getBlockInfo({ hash, index }: { hash?: string, index?: number}): Promise { 91 | if (RpcClient.useGrpc) { 92 | console.log("[INFO] gRPC: getBlockInfo"); 93 | let blockinfo: BlockInfo; 94 | if (index) { 95 | blockinfo = (await grpc.getBlockInfo({ index: index })).getInfo()!; 96 | } else { 97 | blockinfo = (await grpc.getBlockInfo({ hash: hash, reversedHashOrder: true })).getInfo()!; 98 | } 99 | 100 | return { 101 | 
hash: Buffer.from(blockinfo.getHash_asU8().reverse()).toString('hex'), 102 | confirmations: blockinfo.getConfirmations(), 103 | height: blockinfo.getHeight(), 104 | version: blockinfo.getVersion(), 105 | versionHex: blockinfo.getVersion().toString(2), 106 | merkleroot: Buffer.from(blockinfo.getMerkleRoot_asU8().reverse()).toString('hex'), 107 | time: blockinfo.getTimestamp(), 108 | mediantime: blockinfo.getMedianTime(), 109 | nonce: blockinfo.getNonce(), 110 | difficulty: blockinfo.getDifficulty(), 111 | previousblockhash: Buffer.from(blockinfo.getPreviousBlock_asU8().reverse()).toString('hex'), 112 | nextblockhash: Buffer.from(blockinfo.getNextBlockHash_asU8().reverse()).toString('hex'), 113 | chainwork: '', 114 | bits: '' 115 | } 116 | } 117 | 118 | if (index) { 119 | console.log("[INFO] JSON RPC: getBlockInfo/getBlockHash", index); 120 | hash = await rpc.getBlockHash(index); 121 | } else if (!hash) { 122 | throw Error("No index or hash provided for block") 123 | } 124 | 125 | console.log("[INFO] JSON RPC: getBlockInfo/getBlockHeader", hash, true); 126 | return await rpc.getBlockHeader(hash); 127 | } 128 | 129 | static async getRawMemPool(): Promise { 130 | if (RpcClient.useGrpc) { 131 | console.log("[INFO] gRPC: getRawMemPool"); 132 | return (await grpc.getRawMempool({ fullTransactions: false })).getTransactionDataList().map(t => Buffer.from(t.getTransactionHash_asU8().reverse()).toString('hex')) 133 | } 134 | console.log("[INFO] JSON RPC: getRawMemPool") 135 | return await rpc_retry.getRawMemPool(); 136 | } 137 | 138 | static async getRawTransaction(hash: string, retryRpc=true): Promise { 139 | if (RpcClient.transactionCache.has(hash)) { 140 | console.log("[INFO] cache: getRawTransaction"); 141 | return RpcClient.transactionCache.get(hash)!.toString('hex'); 142 | } 143 | 144 | if (RpcClient.useGrpc) { 145 | console.log("[INFO] gRPC: getRawTransaction", hash); 146 | return Buffer.from((await grpc.getRawTransaction({ hash: hash, reversedHashOrder: true 
})).getTransaction_asU8()).toString('hex'); 147 | } 148 | 149 | console.log("[INFO] JSON RPC: getRawTransaction", hash); 150 | if (retryRpc) { 151 | return await rpc_retry.getRawTransaction(hash); 152 | } else { 153 | return await rpc.getRawTransaction(hash); 154 | } 155 | } 156 | 157 | static async getTransactionBlockHash(hash: string): Promise { 158 | if (RpcClient.useGrpc) { 159 | console.log("[INFO] gRPC: getTransaction", hash); 160 | let txn = await grpc.getTransaction({ hash: hash, reversedHashOrder: true }); 161 | return Buffer.from(txn.getTransaction()!.getBlockHash_asU8().reverse()).toString('hex'); 162 | } 163 | console.log("[INFO] JSON RPC: getRawTransaction", hash, 1); 164 | return (await rpc_retry.getRawTransaction(hash, 1)).blockhash; 165 | } 166 | 167 | static async getTxOut(hash: string, vout: number): Promise { 168 | if (RpcClient.useGrpc){ 169 | console.log("[INFO] gRPC: getTxOut", hash, vout); 170 | try { 171 | let utxo = (await grpc.getUnspentOutput({ hash: hash, vout: vout, reversedHashOrder: true, includeMempool: true })); 172 | return utxo; 173 | } catch(_) { 174 | return null 175 | } 176 | } 177 | console.log("[INFO] JSON RPC: getTxOut", hash, vout, true); 178 | return await rpc_retry.getTxOut(hash, vout, true); 179 | } 180 | 181 | static async getMempoolInfo(): Promise { 182 | if (RpcClient.useGrpc) { 183 | return {}; 184 | } 185 | console.log("[INFO] JSON RPC: getMempoolInfo"); 186 | return await rpc_retry.getMemPoolInfo(); 187 | } 188 | 189 | // DO NOT USE, THIS IS DEPRECIATED ON SOME NODES 190 | // async getInfo(): Promise { 191 | // console.log("[INFO] JSON RPC: getInfo") 192 | // return await rpc.getInfo(); 193 | // } 194 | } 195 | -------------------------------------------------------------------------------- /run-service.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd /root/SLPDB/ 4 | 5 | echo "Compiling..." 
6 | ./node_modules/typescript/bin/tsc 7 | echo "Compiling done." 8 | 9 | echo "Checking for DB migrations..." 10 | export db_url=mongodb://127.0.0.1:27017 11 | ./node_modules/migrate-mongo/bin/migrate-mongo.js up 12 | echo "Finished DB migrations." 13 | 14 | FLAG=./ctl/REPROCESS 15 | if [ -f "$FLAG" ]; then 16 | echo "Found REPROCESS file flag" 17 | echo "node --max_old_space_size=8192 ./index.js run --reprocess" 18 | node --max_old_space_size=8192 ./index.js run --reprocess 19 | else 20 | echo "Starting normally based on CMD" 21 | echo "node --max_old_space_size=8192 ./index.js $@" 22 | node --max_old_space_size=8192 ./index.js "$@" 23 | fi 24 | 25 | -------------------------------------------------------------------------------- /status.ts: -------------------------------------------------------------------------------- 1 | import { Db } from "./db"; 2 | import { RpcClient } from "./rpc"; 3 | import { ChainSyncCheckpoint, Info } from "./info"; 4 | import * as fs from 'fs'; 5 | import { Config } from "./config"; 6 | 7 | import * as https from 'https'; 8 | import { CacheSet } from "./cache"; 9 | var pjson = require('./package.json'); 10 | var os = require('os-utils'); 11 | 12 | enum context { 13 | "SLPDB" = "SLPDB" 14 | } 15 | 16 | export class SlpdbStatus { 17 | static db: Db; 18 | static startCmd: string; 19 | static version = pjson.version; 20 | static versionHash: string|null = null; 21 | static deplVersionHash: string|null = null; 22 | static context: context = context.SLPDB; 23 | static lastIncomingTxnZmq: { utc: string, unix: number}|null = null; 24 | static lastIncomingBlockZmq: { utc: string, unix: number}|null = null; 25 | static lastOutgoingTxnZmq: { utc: string, unix: number}|null = null; 26 | static lastOutgoingBlockZmq: { utc: string, unix: number}|null = null; 27 | static slpProcessedBlockHeight: number|null = null; 28 | static state: SlpdbState; 29 | static stateHistory = new CacheSet<{ utc: string, state: SlpdbState }>(10); 30 | static network: 
string = ''; 31 | static pastStackTraces: any[] = []; 32 | static doubleSpendHistory: any[] = []; 33 | static reorgHistory: any[] = []; 34 | static rpc: RpcClient; 35 | static getSlpMempoolSize = function() { return -1; } 36 | static getSlpTokensCount = function() { return -1; } 37 | static getSyncdCheckpoint: () => Promise = async function() { return { hash: '', height: -1 }; } 38 | 39 | constructor(db: Db, startCmd: string[]) { 40 | SlpdbStatus.db = db; 41 | SlpdbStatus.setState(SlpdbState.PRE_STARTUP); 42 | SlpdbStatus.versionHash = SlpdbStatus.getVersion(); 43 | SlpdbStatus.deplVersionHash = SlpdbStatus.getDeplVersion(); 44 | let last = (a: string[]) => { let i = a.length-1; return a[i]; } 45 | SlpdbStatus.startCmd = "".concat(...startCmd.map(s => last(s.split('/')).concat(' '))).trimEnd(); 46 | } 47 | 48 | static setState(state: SlpdbState) { 49 | SlpdbStatus.state = state; 50 | SlpdbStatus.stateHistory.push({ utc: (new Date()).toUTCString(), state }); 51 | } 52 | 53 | static updateTimeIncomingTxnZmq() { 54 | let date = new Date(); 55 | SlpdbStatus.lastIncomingTxnZmq = { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) } 56 | } 57 | 58 | static updateTimeIncomingBlockZmq() { 59 | let date = new Date(); 60 | SlpdbStatus.lastIncomingBlockZmq = { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) } 61 | } 62 | 63 | static updateTimeOutgoingBlockZmq() { 64 | let date = new Date(); 65 | SlpdbStatus.lastOutgoingBlockZmq = { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) } 66 | } 67 | 68 | static updateTimeOutgoingTxnZmq() { 69 | let date = new Date(); 70 | SlpdbStatus.lastOutgoingTxnZmq = { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) } 71 | } 72 | 73 | static async updateSlpProcessedBlockHeight(height: number) { 74 | SlpdbStatus.slpProcessedBlockHeight = height; 75 | await SlpdbStatus.saveStatus(); 76 | } 77 | 78 | static async changeStateToStartupBlockSync({ network, getSyncdCheckpoint, getSlpTokensCount 
}: { network: string, getSyncdCheckpoint: () => Promise, getSlpTokensCount: () => number }) { 79 | SlpdbStatus.network = network; 80 | SlpdbStatus.getSyncdCheckpoint = getSyncdCheckpoint; 81 | SlpdbStatus.getSlpTokensCount = getSlpTokensCount; 82 | SlpdbStatus.setState(SlpdbState.STARTUP_BLOCK_SYNC); 83 | await SlpdbStatus.saveStatus(); 84 | } 85 | 86 | static async changeStateToRunning({ getSlpMempoolSize }: { getSlpMempoolSize: () => number }) { 87 | SlpdbStatus.setState(SlpdbState.RUNNING); 88 | SlpdbStatus.getSlpMempoolSize = getSlpMempoolSize; 89 | await SlpdbStatus.saveStatus(); 90 | } 91 | 92 | static async changeStateToExitOnError(trace: string) { 93 | SlpdbStatus.setState(SlpdbState.EXITED_ON_ERROR); 94 | SlpdbStatus.pastStackTraces.unshift(trace); 95 | if(SlpdbStatus.pastStackTraces.length > 5) 96 | SlpdbStatus.pastStackTraces.pop(); 97 | await SlpdbStatus.saveStatus(); 98 | } 99 | 100 | static async saveStatus() { 101 | let dbo = await SlpdbStatus.toDbo(); 102 | await SlpdbStatus.db.statusUpdate(dbo); 103 | } 104 | 105 | static async logExitReason(errorMsg: string) { 106 | if (errorMsg === "SIGINT") { 107 | SlpdbStatus.setState(SlpdbState.EXITED_SIGINT); 108 | await SlpdbStatus.saveStatus(); 109 | } else if (errorMsg === "SIGTERM") { 110 | SlpdbStatus.setState(SlpdbState.EXITED_SIGTERM); 111 | await SlpdbStatus.saveStatus(); 112 | } else if (errorMsg === "SIGQUIT") { 113 | SlpdbStatus.setState(SlpdbState.EXITED_SIGQUIT); 114 | await SlpdbStatus.saveStatus(); 115 | } else { 116 | await SlpdbStatus.changeStateToExitOnError(errorMsg); 117 | } 118 | } 119 | 120 | private static async toDbo() { 121 | let checkpoint = await SlpdbStatus.getSyncdCheckpoint(); 122 | 123 | let mempoolInfo = null; 124 | try { 125 | mempoolInfo = await RpcClient.getMempoolInfo(); 126 | } catch (_) { } 127 | 128 | let stackTraces = SlpdbStatus.pastStackTraces.map(t => { 129 | if(typeof t === 'string') 130 | return t; 131 | else { 132 | try { 133 | return t.toString(); 134 | } 
catch(_) { } 135 | try { 136 | return JSON.stringify(t); 137 | } catch(_) { 138 | return "Unknown stack trace."; 139 | } 140 | } 141 | }) 142 | let date = new Date(); 143 | let status = { 144 | version: this.version, 145 | versionHash: this.versionHash, 146 | deplVersionHash: this.deplVersionHash, 147 | startCmd: this.startCmd, 148 | context: this.context, 149 | lastStatusUpdate: { utc: date.toUTCString(), unix: Math.floor(date.getTime()/1000) }, 150 | lastIncomingTxnZmq: this.lastIncomingTxnZmq, 151 | lastIncomingBlockZmq: this.lastIncomingBlockZmq, 152 | lastOutgoingTxnZmq: this.lastOutgoingTxnZmq, 153 | lastOutgoingBlockZmq: this.lastOutgoingBlockZmq, 154 | state: this.state, 155 | stateHistory: Array.from(this.stateHistory.toSet()), 156 | network: this.network, 157 | bchBlockHeight: checkpoint.height, 158 | bchBlockHash: checkpoint.hash, 159 | slpProcessedBlockHeight: this.slpProcessedBlockHeight, 160 | mempoolInfoBch: mempoolInfo, 161 | mempoolSizeSlp: this.getSlpMempoolSize(), 162 | tokensCount: this.getSlpTokensCount(), 163 | pastStackTraces: stackTraces, 164 | doubleSpends: this.doubleSpendHistory, 165 | reorgs: this.reorgHistory, 166 | mongoDbStats: await this.db.db.stats({ scale: 1048576 }), 167 | publicUrl: await Info.getTelemetryName(), 168 | telemetryHash: await Info.getTelemetrySecretHash(), 169 | system: { loadAvg1: os.loadavg(1), loadAvg5: os.loadavg(5), loadAvg15: os.loadavg(15), platform: os.platform(), cpuCount: os.cpuCount(), freeMem: os.freemem(), totalMem: os.totalmem(), uptime: os.sysUptime(), processUptime: os.processUptime() } 170 | }; 171 | await this.updateTelemetry(status); 172 | return status; 173 | } 174 | 175 | private static async updateTelemetry(status: StatusDbo) { 176 | if (Config.telemetry.enable) { 177 | try { 178 | let data = JSON.stringify({ status: status }); 179 | let options = { 180 | hostname: Config.telemetry.host, 181 | port: Config.telemetry.port, 182 | path: '/status', 183 | method: 'POST', 184 | headers: { 185 | 
'Content-Type': 'application/json', 186 | 'Content-Length': data.length, 187 | 'Authorization': await Info.getTelemetrySecret() 188 | } 189 | }; 190 | let req = https.request(options, res => { 191 | console.log(`[INFO] Telementry response code: ${res.statusCode}`); 192 | res.on('data', d => { 193 | console.log(`[INFO] Telemetry response from ${Config.telemetry.host}: ${d.toString('utf8')}`); 194 | try { JSON.parse(d).secretKey ? Info.setTelemetrySecret(JSON.parse(d).secretKey) : null; } catch (_) {} 195 | }); 196 | }); 197 | req.on('error', error => { 198 | let reason = error.message; 199 | if (Config.telemetry.host === '') { 200 | reason = "Env var 'telemetry_host' is not set"; 201 | } 202 | console.log("[ERROR] Telemetry update failed. Reason:", reason); 203 | }); 204 | console.log(`[INFO] Sending telemetry update to ${Config.telemetry.host} for ${await Info.getTelemetryName()}...`); 205 | req.write(data); 206 | req.end(); 207 | } catch (err) { 208 | console.log(`[ERROR] Could not updateTelemetry: ${err}`); 209 | } 210 | } 211 | } 212 | 213 | static async loadPreviousAttributes() { 214 | let dbo = await SlpdbStatus.db.statusFetch("SLPDB"); 215 | try { 216 | SlpdbStatus.pastStackTraces = dbo.pastStackTraces; 217 | let history = new CacheSet<{ utc: string, state: SlpdbState }>(10); 218 | dbo.stateHistory.forEach((state: { utc: string, state: SlpdbState }) => { history.push(state); }); 219 | Array.from(SlpdbStatus.stateHistory.toSet()).forEach((state: { utc: string, state: SlpdbState }) => { history.push(state); }); 220 | SlpdbStatus.stateHistory = history; 221 | } catch(_) {} 222 | } 223 | 224 | static getVersion() { 225 | try { 226 | const rev = fs.readFileSync('.git/HEAD').toString(); 227 | if (rev.indexOf(':') === -1) { 228 | return rev.trim(); 229 | } else { 230 | return fs.readFileSync('.git/' + rev.trim().substring(5)).toString().trim(); 231 | } 232 | } catch (_) { 233 | return null; 234 | } 235 | } 236 | 237 | static getDeplVersion() { 238 | try { 239 | 
const rev = fs.readFileSync('._git/HEAD').toString(); 240 | if (rev.indexOf(':') === -1) { 241 | return rev.trim(); 242 | } else { 243 | return fs.readFileSync('._git/' + rev.trim().substring(5)).toString().trim(); 244 | } 245 | } catch (_) { 246 | return null; 247 | } 248 | } 249 | } 250 | 251 | export enum SlpdbState { 252 | "PRE_STARTUP" = "PRE_STARTUP", // phase 1) checking connections with mongodb and bitcoin rpc 253 | "STARTUP_BLOCK_SYNC" = "STARTUP_BLOCK_SYNC", // phase 2) indexing blockchain data into confirmed collection (allows crawling tokens dag quickly) 254 | "STARTUP_TOKEN_PROCESSING" = "STARTUP_TOKEN_PROCESSING", // phase 3) load/update token graphs, hold a cache (allows fastest SLP validation) 255 | "RUNNING" = "RUNNING", // phase 4) startup completed, running normally 256 | "EXITED_ON_ERROR" = "EXITED_ON_ERROR", // process exited due to an error during normal operation 257 | "EXITED_SIGINT" = "EXITED_SIGINT", // process exited normally, clean shutdown or finished running a command 258 | "EXITED_SIGTERM" = "EXITED_SIGTERM", // process exited normally, clean shutdown or finished running a command 259 | "EXITED_SIGQUIT" = "EXITED_SIGQUIT" // process exited normally, clean shutdown or finished running a command 260 | } 261 | 262 | interface StatusDbo { 263 | version: string; 264 | versionHash: string | null; 265 | deplVersionHash: string | null; 266 | startCmd: string; 267 | context: context; 268 | lastStatusUpdate: { utc: string; unix: number; }; 269 | lastIncomingTxnZmq: { utc: string; unix: number; } | null; 270 | lastIncomingBlockZmq: { utc: string; unix: number; } | null; 271 | lastOutgoingTxnZmq: { utc: string; unix: number; } | null; 272 | lastOutgoingBlockZmq: { utc: string; unix: number; } | null; 273 | state: SlpdbState; 274 | stateHistory: { utc: string, state: SlpdbState }[]; 275 | network: string; 276 | bchBlockHeight: number; 277 | bchBlockHash: string | null; 278 | slpProcessedBlockHeight: number | null; 279 | mempoolInfoBch: {} | null; 
280 | mempoolSizeSlp: number; 281 | tokensCount: number; 282 | pastStackTraces: string[]; 283 | doubleSpends: any[]; 284 | reorgs: any[]; 285 | mongoDbStats: any; 286 | publicUrl: string; 287 | telemetryHash: string|null; 288 | system: any; 289 | } 290 | -------------------------------------------------------------------------------- /test/10-long-chain.spec.ts: -------------------------------------------------------------------------------- 1 | // import * as assert from "assert"; 2 | // import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs'; 3 | // import * as zmq from 'zeromq'; 4 | // import { BITBOX } from 'bitbox-sdk'; 5 | // import BigNumber from 'bignumber.js'; 6 | // import { step } from 'mocha-steps'; 7 | 8 | // import { Config } from "../config"; 9 | // import { Db } from '../db'; 10 | // import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | // import { CacheMap } from "../cache"; 12 | // import { TokenDBObject, TokenBatonStatus, GraphTxnDbo, UtxoDbo, AddressBalancesDbo } from "../interfaces"; 13 | 14 | // const bitbox = new BITBOX(); 15 | // const slp = new Slp(bitbox); 16 | // const txnHelpers = new TransactionHelpers(slp); 17 | // const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | // const TOKEN_DECIMALS = 9; 20 | // const TOKEN_GENESIS_QTY = 1000000; 21 | 22 | // const rawTxnCache = new CacheMap(100000); 23 | 24 | // // connect to bitcoin regtest network JSON-RPC 25 | // const rpcClient = require('bitcoin-rpc-promise-retry'); 26 | // const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // (optional) connect to a miner's rpc on 18444 that is not connected to SLPDB 27 | // const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { maxRetries: 0 }); 28 | 29 | // // setup a new local SLP validator instance 30 | // const validator = new LocalValidator(bitbox, async (txids: string[]) => { 31 | 
// let txn; 32 | // if (rawTxnCache.has(txids[0])) { 33 | // return [ rawTxnCache.get(txids[0])!.toString("hex") as string ]; 34 | // } 35 | // try { 36 | // txn = await rpcNode1_miner.getRawTransaction(txids[0]); 37 | // } catch(err) { 38 | // throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 39 | // } 40 | // return [ txn ]; 41 | // }, console); 42 | 43 | // // connect to SLPDB ZMQ notifications 44 | // let slpdbTxnNotifications: TNATxn[] = []; 45 | // let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 46 | // const sock: any = zmq.socket('sub'); 47 | // sock.connect('tcp://0.0.0.0:27339'); 48 | // sock.subscribe('mempool'); 49 | // sock.subscribe('block'); 50 | // sock.on('message', async function(topic: string, message: Buffer) { 51 | // if (topic.toString() === 'mempool') { 52 | // let obj = JSON.parse(message.toString('utf8')); 53 | // slpdbTxnNotifications.unshift(obj); 54 | // } else if (topic.toString() === 'block') { 55 | // let obj = JSON.parse(message.toString('utf8')); 56 | // slpdbBlockNotifications.unshift(obj); 57 | // } 58 | // }); 59 | 60 | 61 | // // connect to the regtest mongoDB 62 | // let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 63 | 64 | // // produced and shared between tests. 
65 | // let receiverRegtest: string; 66 | // let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 67 | // let txnInputs: SlpAddressUtxoResult[]; 68 | // let tokenId: string; 69 | // let txid1: string; 70 | // let txid2: string; 71 | 72 | // let lastBlockHash: string; 73 | // let lastBlockIndex: number; 74 | // let perInputAmount: BigNumber; 75 | // let actualInputsCreated: number; 76 | // let fiTxid: string; 77 | // let privKey: string; 78 | // let inputTxnCount: number; 79 | 80 | // describe("10-long-chain", () => { 81 | 82 | // step("Initial setup for all tests", async () => { 83 | 84 | // // generate block to clear the mempool (may be dirty from previous tests) 85 | // lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 86 | 87 | // // connect miner node to a full node that is connected to slpdb 88 | // try { 89 | // await rpcNode1_miner.addNode("bitcoin2", "onetry"); 90 | // } catch(err) { } 91 | 92 | // // make sure we have coins to use in tests 93 | // let balance = await rpcNode1_miner.getBalance(); 94 | // while (balance < 1) { 95 | // lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 96 | // balance = await rpcNode1_miner.getBalance(); 97 | // } 98 | 99 | // // put all the funds on the receiver's address 100 | // receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 101 | // await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 102 | // lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 103 | 104 | // let unspent = await rpcNode1_miner.listUnspent(0); 105 | // unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 106 | // if (unspent.length === 0) throw Error("No unspent outputs."); 107 | // unspent.map((txo: any) => txo.cashAddress = txo.address); 108 | // unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 109 | // await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 110 | 111 | // // validate 
and categorize unspent TXOs 112 | // let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 113 | // txnInputs = utxos.nonSlpUtxos; 114 | 115 | // // create a new token 116 | // receiverSlptest = Utils.toSlpAddress(receiverRegtest); 117 | // let genesisTxnHex = txnHelpers.simpleTokenGenesis( 118 | // "unit-test-4", "ut4", 119 | // new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 120 | // null, null, 121 | // TOKEN_DECIMALS, receiverSlptest, receiverSlptest, 122 | // receiverSlptest, txnInputs 123 | // ); 124 | // tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 125 | // lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 126 | // lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 127 | // }); 128 | 129 | // const TXN_COUNT = 5000; 130 | // const OUTPUT_SIZE = 3; 131 | // const MAX_UNCONF_CHAIN_SIZE = 25; 132 | // const FEE_EST = 5000; 133 | // step("LC-1: create a massively large set of SLP transctions via fan-out", async () => { 134 | // let txnCount = 0; 135 | // let txnDepth = 1; // where 0 depth was the Genesis txn 136 | // while (txnCount < TXN_COUNT) { 137 | // slpdbBlockNotifications = []; 138 | 139 | // let unspent = await rpcNode1_miner.listUnspent(0); 140 | // unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 141 | // if (unspent.length === 0) throw Error("No unspent outputs."); 142 | // unspent.map((txo: any) => txo.cashAddress = txo.address); 143 | // unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 144 | // await Promise.all(unspent.map(async (txo: any) => { 145 | // if(!privKey) { 146 | // privKey = await rpcNode1_miner.dumpPrivKey(txo.address); 147 | // } 148 | // txo.wif = privKey; 149 | // })); 150 | 151 | // let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 152 | // let tokenInputs = utxos.slpTokenUtxos[tokenId].sort((a, b) => { return b.slpUtxoJudgementAmount.comparedTo(a.slpUtxoJudgementAmount) }); 153 | // 
let nonTokenInputs = utxos.nonSlpUtxos.sort((a, b) => b.satoshis - a.satoshis); 154 | 155 | // // create each transaction for this depth 156 | // for(let i = 0; i < OUTPUT_SIZE**(txnDepth-1); i++) { 157 | 158 | // if(txnCount > 0 && txnCount % MAX_UNCONF_CHAIN_SIZE === 0) { 159 | // console.log("Generating block.") 160 | // await rpcNode1_miner.generate(1); // prevent 25 txn chain restriction 161 | // } 162 | 163 | // txnInputs = [ tokenInputs[i], nonTokenInputs[i] ]; 164 | 165 | // let perOutputAmount = tokenInputs[i].slpUtxoJudgementAmount.div(OUTPUT_SIZE).decimalPlaces(0, BigNumber.ROUND_FLOOR); 166 | 167 | // console.log(`Please wait, signing ${txnInputs.length} inputs in ${txnCount}-th transaction.`); 168 | // let txnHex = txnHelpers.simpleTokenSend(tokenId, 169 | // Array(OUTPUT_SIZE).fill(perOutputAmount), 170 | // txnInputs, 171 | // Array(OUTPUT_SIZE).fill(receiverSlptest), 172 | // receiverSlptest, 173 | // Array(OUTPUT_SIZE).fill({ satoshis: Math.floor((nonTokenInputs[i].satoshis-FEE_EST) / OUTPUT_SIZE), receiverAddress: receiverSlptest }) 174 | // ); 175 | 176 | // fiTxid = await rpcNode1_miner.sendRawTransaction(txnHex, true); 177 | // rawTxnCache.set(fiTxid, Buffer.from(txnHex, "hex")); 178 | 179 | // txnCount++; 180 | // } 181 | // txnDepth++; 182 | // } 183 | // }); 184 | // }); 185 | -------------------------------------------------------------------------------- /test/11-send-invalid-1.spec.ts: -------------------------------------------------------------------------------- 1 | 2 | // 1) verify burn spent as invalid same token is marked SPENT_NON_SLP 3 | 4 | // 2) verify valid token input spent in different tokenID gets marked as burned in original graph 5 | 6 | // 3) verify SPENT_SAME_TOKEN is not accidentally marked as SPENT_NON_SLP because of being spent in the same block bug 7 | -------------------------------------------------------------------------------- /test/5-block-reorg-with-genesis.spec.ts: 
--------------------------------------------------------------------------------
import * as assert from "assert";
import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs';
import * as zmq from 'zeromq';
import { BITBOX } from 'bitbox-sdk';
import BigNumber from 'bignumber.js';
import { step } from 'mocha-steps';

import { Config } from "../config";
import { Db } from '../db';
import { TNATxn, TNATxnSlpDetails } from "../tna";
import { CacheMap } from "../cache";
import { TokenDBObject, TokenBatonStatus, GraphTxnDbo } from "../interfaces";

const bitbox = new BITBOX();
const slp = new Slp(bitbox);
const txnHelpers = new TransactionHelpers(slp);
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));

const TOKEN_DECIMALS = 9;
const TOKEN_GENESIS_QTY = 1000000;

// cache of raw transaction hex keyed by txid, so the validator callback below
// can serve transactions this test broadcast without an extra RPC round-trip
const rawTxnCache = new CacheMap(10000);

// connect to bitcoin regtest network JSON-RPC
const rpcClient = require('bitcoin-rpc-promise-retry');
const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // node IS connected to SLPDB
const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { maxRetries: 0 });
const connectionStringNode2_miner = `http://bitcoin:password@${process.env.RPC2_HOST}:${process.env.RPC2_PORT}`; // node IS NOT connected to SLPDB
const rpcNode2_miner = new rpcClient(connectionStringNode2_miner, { maxRetries: 0 });

// setup a new local SLP validator instance
// (the callback resolves a txid to raw transaction hex, preferring the local
// cache and falling back to node 1's getRawTransaction RPC)
const validator = new LocalValidator(bitbox, async (txids: string[]) => {
    let txn;
    if (rawTxnCache.has(txids[0])) {
        return [ rawTxnCache.get(txids[0]) as string ];
    }
    try {
        txn = await rpcNode1_miner.getRawTransaction(txids[0]);
    } catch(err) {
        throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`)
    }
    return [ txn ];
}, console);

// connect to SLPDB ZMQ notifications
// (newest notification is unshifted to index 0; the test steps poll these
// arrays to detect when SLPDB has processed a txn/block)
let slpdbTxnNotifications: TNATxn[] = [];
let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = [];
const sock: any = zmq.socket('sub');
sock.connect('tcp://0.0.0.0:27339');
sock.subscribe('mempool');
sock.subscribe('block');
sock.on('message', async function(topic: string, message: Buffer) {
    if (topic.toString() === 'mempool') {
        let obj = JSON.parse(message.toString('utf8'));
        slpdbTxnNotifications.unshift(obj);
    } else if (topic.toString() === 'block') {
        let obj = JSON.parse(message.toString('utf8'));
        slpdbBlockNotifications.unshift(obj);
    }
});

// connect to the regtest mongoDB
let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db });

// produced and shared between tests.
66 | let receiverRegtest: string; 67 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 68 | let txnInputs: SlpAddressUtxoResult[]; 69 | 70 | let invalidatedBlockHash: string; 71 | let invalidatedBlockHeight: number; 72 | 73 | let tokenId: string; 74 | let txid1: string; 75 | let txid2: string; 76 | 77 | let lastBlockHash: string; 78 | let lastBlockIndex: number; 79 | let perInputAmount: BigNumber; 80 | let actualInputsCreated: number; 81 | let privKey: string; 82 | let inputTxnCount: number; 83 | 84 | let startingBlockCount: number; 85 | let intendedBlockCount: number; 86 | 87 | let originalBlockHashHex: string; 88 | 89 | describe("5-Reorg-Removes-Data", () => { 90 | 91 | step("BR-1: Initial setup for all tests", async () => { 92 | 93 | startingBlockCount = await rpcNode1_miner.getBlockCount(); 94 | intendedBlockCount = startingBlockCount; 95 | 96 | // generate a block to clear the mempool (may be dirty from previous tests) 97 | invalidatedBlockHash = (await rpcNode1_miner.generate(1))[0]; 98 | intendedBlockCount++; 99 | 100 | // connect miner node to a full node that is not connected to slpdb 101 | try { 102 | await rpcNode1_miner.addNode("bitcoin2", "onetry"); 103 | } catch(err) { } 104 | let peerInfo: any[] = await rpcNode1_miner.getPeerInfo(); 105 | while (peerInfo.length < 1) { 106 | await sleep(100); 107 | peerInfo = await rpcNode1_miner.getPeerInfo(); 108 | } 109 | assert.strictEqual(peerInfo.length, 1); 110 | 111 | // make sure we have coins to use in tests 112 | let balance = await rpcNode1_miner.getBalance(); 113 | while (balance < 1) { 114 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 115 | intendedBlockCount++; 116 | //console.log((await rpcNode1_miner.getBlock(lastBlockHash, true)).height); 117 | balance = await rpcNode1_miner.getBalance(); 118 | } 119 | 120 | // put all the funds on the receiver's address 121 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 122 | await 
rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 123 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 124 | intendedBlockCount++; 125 | 126 | let unspent = await rpcNode1_miner.listUnspent(0); 127 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 128 | if (unspent.length === 0) throw Error("No unspent outputs."); 129 | unspent.map((txo: any) => txo.cashAddress = txo.address); 130 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 131 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 132 | 133 | // validate and categorize unspent TXOs 134 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 135 | txnInputs = utxos.nonSlpUtxos; 136 | 137 | // create a new token 138 | receiverSlptest = Utils.toSlpAddress(receiverRegtest); 139 | let genesisTxnHex = txnHelpers.simpleTokenGenesis({ 140 | tokenName: "unit-test-5", 141 | tokenTicker: "ut5", 142 | tokenAmount: new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 143 | documentUri: null, 144 | documentHash: null, 145 | decimals: TOKEN_DECIMALS, 146 | tokenReceiverAddress: receiverSlptest, 147 | batonReceiverAddress: receiverSlptest, 148 | bchChangeReceiverAddress: receiverSlptest, 149 | inputUtxos: txnInputs 150 | }); 151 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 152 | 153 | while (slpdbTxnNotifications.filter(t => t.tx.h === tokenId).length === 0) { 154 | await sleep(100); 155 | } 156 | 157 | // give time for txn to propogate 158 | let mempool = await rpcNode2_miner.getRawMemPool(); 159 | while (mempool.length === 0) { 160 | await sleep(50); 161 | mempool = await rpcNode2_miner.getRawMemPool(); 162 | } 163 | 164 | // disconnect nodes 165 | peerInfo = await rpcNode1_miner.getPeerInfo(); 166 | await rpcNode1_miner.disconnectNode("bitcoin2"); 167 | while(peerInfo.length > 0) { 168 | await sleep(100); 169 | peerInfo = await 
rpcNode1_miner.getPeerInfo(); 170 | } 171 | assert.strictEqual(peerInfo.length === 0, true); 172 | }); 173 | 174 | step("BR-1: produces ZMQ output at block", async () => { 175 | // clear ZMQ cache 176 | slpdbTxnNotifications = []; 177 | slpdbBlockNotifications = []; 178 | 179 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 180 | let newBlockHash = (await rpcNode2_miner.generate(1))[0]; 181 | console.log(`[INFO] New block hash to reorg: ${newBlockHash}`); 182 | intendedBlockCount++; 183 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 184 | while (slpdbBlockNotifications.filter(b => b.hash === lastBlockHash).length === 0) { 185 | await sleep(50); 186 | } 187 | let notification = slpdbBlockNotifications.filter(b => b.hash === lastBlockHash)[0]; 188 | assert.strictEqual(notification.txns.length, 1); 189 | assert.strictEqual(notification.txns[0]!.txid, tokenId); 190 | assert.strictEqual(notification.txns[0]!.slp.detail!.tokenIdHex, tokenId); 191 | assert.strictEqual(notification.txns[0]!.slp.detail!.name, "unit-test-5"); 192 | assert.strictEqual(notification.txns[0]!.slp.detail!.symbol, "ut5"); 193 | // @ts-ignore 194 | assert.strictEqual(notification.txns[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 195 | 196 | // Check block hash with block zmq notification 197 | assert.strictEqual(typeof slpdbBlockNotifications[0]!.hash, "string"); 198 | assert.strictEqual(slpdbBlockNotifications[0]!.hash.length, 64); 199 | originalBlockHashHex = slpdbBlockNotifications[0]!.hash; 200 | assert.strictEqual(lastBlockHash, originalBlockHashHex); 201 | }); 202 | 203 | step("BR-1: Make sure the token exists in the tokens collection (after block)", async () => { 204 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 205 | while (!t || t!.tokenStats!.block_created === null || typeof t!.tokenDetails.timestamp !== "string") { // || t!.tokenStats!.qty_token_burned.toString() !== "0" || typeof t!.tokenDetails.timestamp 
!== "string") { 206 | console.log(t!.tokenDetails.timestamp); 207 | await sleep(50); 208 | t = await db.tokenFetch(tokenId); 209 | } 210 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 211 | assert.strictEqual(t!.tokenDetails.timestamp_unix! > 0, true); 212 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 213 | assert.strictEqual(t!.mintBatonUtxo, tokenId + ":2"); 214 | assert.strictEqual(t!.tokenStats!.block_created!, lastBlockIndex); 215 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.ALIVE); 216 | }); 217 | 218 | step("BR-1: Invalidate initial block and generate block to cause SLPDB reorg detection", async () => { 219 | await sleep(100); 220 | try { 221 | console.log(`Invalidating: ${lastBlockHash} for height ${intendedBlockCount}`); 222 | await rpcNode1_miner.invalidateBlock(lastBlockHash); 223 | } catch (_) { } 224 | 225 | // reconnect nodes 226 | try { 227 | await rpcNode1_miner.addNode("bitcoin2", "onetry"); 228 | } catch(err) { } 229 | let peerInfo: any[] = await rpcNode1_miner.getPeerInfo(); 230 | while (peerInfo.length < 1) { 231 | await sleep(100); 232 | peerInfo = await rpcNode1_miner.getPeerInfo(); 233 | } 234 | assert.strictEqual(peerInfo.length, 1); 235 | 236 | let blockCount = await rpcNode1_miner.getBlockCount(); 237 | while (blockCount !== intendedBlockCount) { 238 | await sleep(50); 239 | blockCount = await rpcNode1_miner.getBlockCount(); 240 | } 241 | assert.strictEqual(blockCount, intendedBlockCount); 242 | }); 243 | 244 | step("BR-1: Check updated graph txn block hash", async () => { 245 | let g = await db.graphTxnFetch(tokenId); 246 | let c = await db.confirmedFetch(tokenId); 247 | while (!g || g.graphTxn._blockHash?.toString("hex") === originalBlockHashHex || !c || c.blk?.h === originalBlockHashHex) { 248 | await sleep(50); 249 | //TODO: t = await db.tokenFetch(txid1); 250 | g = await db.graphTxnFetch(tokenId); 251 | c = await db.confirmedFetch(tokenId); 252 | } 253 | 254 | 
assert.notEqual(g.graphTxn._blockHash?.toString("hex"), originalBlockHashHex); 255 | assert.notEqual(c.blk?.h, originalBlockHashHex); 256 | 257 | // TODO: On a reorg with multiple block height difference does Token collection update? 258 | }); 259 | 260 | step("Clean up", async () => { 261 | // generate block to clear the mempool (may be dirty from previous tests) 262 | await rpcNode1_miner.generate(1); 263 | sock.disconnect('tcp://0.0.0.0:27339'); 264 | }); 265 | }); 266 | -------------------------------------------------------------------------------- /test/5a-block-reorg-with-genesis.spec.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult } from 'slpjs'; 3 | import * as zmq from 'zeromq'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import BigNumber from 'bignumber.js'; 6 | import { step } from 'mocha-steps'; 7 | 8 | import { Config } from "../config"; 9 | import { Db } from '../db'; 10 | import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | import { CacheMap } from "../cache"; 12 | import { TokenDBObject, TokenBatonStatus } from "../interfaces"; 13 | 14 | const bitbox = new BITBOX(); 15 | const slp = new Slp(bitbox); 16 | const txnHelpers = new TransactionHelpers(slp); 17 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | const TOKEN_DECIMALS = 9; 20 | const TOKEN_GENESIS_QTY = 1000000; 21 | 22 | const rawTxnCache = new CacheMap(10000); 23 | 24 | // connect to bitcoin regtest network JSON-RPC 25 | const rpcClient = require('bitcoin-rpc-promise-retry'); 26 | const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // node IS connected to SLPDB 27 | const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { maxRetries: 0 }); 28 | const connectionStringNode2_miner = 
`http://bitcoin:password@${process.env.RPC2_HOST}:${process.env.RPC2_PORT}`; // node IS NOT connected to SLPDB 29 | const rpcNode2_miner = new rpcClient(connectionStringNode2_miner, { maxRetries: 0 }); 30 | 31 | // setup a new local SLP validator instance 32 | const validator = new LocalValidator(bitbox, async (txids: string[]) => { 33 | let txn; 34 | if (rawTxnCache.has(txids[0])) { 35 | return [ rawTxnCache.get(txids[0]) as string ]; 36 | } 37 | try { 38 | txn = await rpcNode1_miner.getRawTransaction(txids[0]); 39 | } catch(err) { 40 | throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 41 | } 42 | return [ txn ]; 43 | }, console); 44 | 45 | // connect to SLPDB ZMQ notifications 46 | let slpdbTxnNotifications: TNATxn[] = []; 47 | let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 48 | const sock: any = zmq.socket('sub'); 49 | sock.connect('tcp://0.0.0.0:27339'); 50 | sock.subscribe('mempool'); 51 | sock.subscribe('block'); 52 | sock.on('message', async function(topic: string, message: Buffer) { 53 | if (topic.toString() === 'mempool') { 54 | let obj = JSON.parse(message.toString('utf8')); 55 | slpdbTxnNotifications.unshift(obj); 56 | } else if (topic.toString() === 'block') { 57 | let obj = JSON.parse(message.toString('utf8')); 58 | slpdbBlockNotifications.unshift(obj); 59 | } 60 | }); 61 | 62 | // connect to the regtest mongoDB 63 | let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 64 | 65 | // produced and shared between tests. 
66 | let receiverRegtest: string; 67 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 68 | let txnInputs: SlpAddressUtxoResult[]; 69 | 70 | let invalidatedBlockHash: string; 71 | let invalidatedBlockHeight: number; 72 | 73 | let tokenId: string; 74 | let txid1: string; 75 | let txid2: string; 76 | 77 | let lastBlockHash: string; 78 | let lastBlockIndex: number; 79 | let perInputAmount: BigNumber; 80 | let actualInputsCreated: number; 81 | let privKey: string; 82 | let inputTxnCount: number; 83 | 84 | let genesisTxnHex: string; 85 | 86 | let startingBlockCount: number; 87 | let intendedBlockCount: number; 88 | 89 | let originalBlockHashHex: string; 90 | 91 | describe("5a-Reorg-Removes-Data", () => { 92 | 93 | step("BR-2: Initial setup for all tests", async () => { 94 | 95 | startingBlockCount = await rpcNode1_miner.getBlockCount(); 96 | intendedBlockCount = startingBlockCount; 97 | 98 | // generate a block to clear the mempool (may be dirty from previous tests) 99 | await rpcNode1_miner.generate(1); 100 | intendedBlockCount++; 101 | 102 | // connect miner node to a full node that is not connected to slpdb 103 | try { 104 | await rpcNode1_miner.addNode("bitcoin2", "onetry"); 105 | } catch(err) { } 106 | let peerInfo: any[] = await rpcNode1_miner.getPeerInfo(); 107 | while (peerInfo.length < 1) { 108 | await sleep(100); 109 | peerInfo = await rpcNode1_miner.getPeerInfo(); 110 | } 111 | assert.strictEqual(peerInfo.length, 1); 112 | 113 | // make sure we have coins to use in tests 114 | let balance = await rpcNode1_miner.getBalance(); 115 | while (balance < 1) { 116 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 117 | intendedBlockCount++; 118 | //console.log((await rpcNode1_miner.getBlock(lastBlockHash, true)).height); 119 | balance = await rpcNode1_miner.getBalance(); 120 | } 121 | 122 | // put all the funds on the receiver's address 123 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 124 | 
await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 125 | 126 | // give time for txn to propogate 127 | let mempool = await rpcNode2_miner.getRawMemPool(); 128 | while (mempool.length === 0) { 129 | await sleep(50); 130 | mempool = await rpcNode2_miner.getRawMemPool(); 131 | } 132 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 133 | intendedBlockCount++; 134 | 135 | // check both nodes are on the same block 136 | let node1Hash = await rpcNode1_miner.getbestblockhash(); 137 | let node2Hash = await rpcNode2_miner.getbestblockhash(); 138 | while (node1Hash !== node2Hash) { 139 | await sleep(50); 140 | node1Hash = await rpcNode1_miner.getbestblockhash(); 141 | node2Hash = await rpcNode2_miner.getbestblockhash(); 142 | } 143 | assert.strictEqual(node1Hash, node2Hash); 144 | 145 | // disconnect nodes 146 | peerInfo = await rpcNode1_miner.getPeerInfo(); 147 | await rpcNode1_miner.disconnectNode("bitcoin2"); 148 | while(peerInfo.length > 0) { 149 | await sleep(100); 150 | peerInfo = await rpcNode1_miner.getPeerInfo(); 151 | } 152 | assert.strictEqual(peerInfo.length === 0, true); 153 | 154 | let unspent = await rpcNode1_miner.listUnspent(0); 155 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 156 | if (unspent.length === 0) throw Error("No unspent outputs."); 157 | unspent.map((txo: any) => txo.cashAddress = txo.address); 158 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 159 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 160 | 161 | // validate and categorize unspent TXOs 162 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 163 | txnInputs = utxos.nonSlpUtxos; 164 | 165 | // create a new token 166 | receiverSlptest = Utils.toSlpAddress(receiverRegtest); 167 | genesisTxnHex = txnHelpers.simpleTokenGenesis({ 168 | tokenName: "unit-test-5a", 169 | tokenTicker: "ut5a", 170 | tokenAmount: new 
BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 171 | documentUri: null, 172 | documentHash: null, 173 | decimals: TOKEN_DECIMALS, 174 | tokenReceiverAddress: receiverSlptest, 175 | batonReceiverAddress: receiverSlptest, 176 | bchChangeReceiverAddress: receiverSlptest, 177 | inputUtxos: txnInputs 178 | }); 179 | 180 | // broadcast to node1 181 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 182 | 183 | while (slpdbTxnNotifications.filter(t => t.tx.h === tokenId).length === 0) { 184 | await sleep(100); 185 | } 186 | }); 187 | 188 | step("BR-2: Produces ZMQ output at block", async () => { 189 | // clear ZMQ cache 190 | slpdbTxnNotifications = []; 191 | slpdbBlockNotifications = []; 192 | 193 | lastBlockHash = invalidatedBlockHash = (await rpcNode1_miner.generate(1))[0]; 194 | intendedBlockCount++; 195 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 196 | while (slpdbBlockNotifications.filter(b => b.hash === lastBlockHash).length === 0) { 197 | await sleep(50); 198 | } 199 | let notification = slpdbBlockNotifications.filter(b => b.hash === lastBlockHash)[0]; 200 | assert.strictEqual(notification.txns.length, 1); 201 | assert.strictEqual(notification.txns[0]!.txid, tokenId); 202 | assert.strictEqual(notification.txns[0]!.slp.detail!.tokenIdHex, tokenId); 203 | assert.strictEqual(notification.txns[0]!.slp.detail!.name, "unit-test-5a"); 204 | assert.strictEqual(notification.txns[0]!.slp.detail!.symbol, "ut5a"); 205 | // @ts-ignore 206 | assert.strictEqual(notification.txns[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 207 | 208 | // Check block hash with block zmq notification 209 | assert.strictEqual(typeof slpdbBlockNotifications[0]!.hash, "string"); 210 | assert.strictEqual(slpdbBlockNotifications[0]!.hash.length, 64); 211 | originalBlockHashHex = slpdbBlockNotifications[0]!.hash; 212 | assert.strictEqual(lastBlockHash, originalBlockHashHex); 213 | }); 214 | 215 | step("BR-2: Make 
sure the token exists in the tokens collection (after block)", async () => { 216 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 217 | while (!t || t!.tokenStats!.block_created === null || typeof t!.tokenDetails.timestamp !== "string") { // || t!.tokenStats!.qty_token_burned.toString() !== "0" || typeof t!.tokenDetails.timestamp !== "string") { 218 | console.log(t!.tokenDetails.timestamp); 219 | await sleep(50); 220 | t = await db.tokenFetch(tokenId); 221 | } 222 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 223 | assert.strictEqual(t!.tokenDetails.timestamp_unix! > 0, true); 224 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 225 | assert.strictEqual(t!.mintBatonUtxo, tokenId + ":2"); 226 | assert.strictEqual(t!.tokenStats!.block_created!, lastBlockIndex); 227 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.ALIVE); 228 | }); 229 | 230 | step("BR-2: Mine a longer chain on node 2 and broadcast txn into the mempool.", async () => { 231 | // let newBlockHash = (await rpcNode2_miner.generate(1))[0]; 232 | // console.log(`[INFO] New block hash to be reorg'd: ${newBlockHash}`); 233 | 234 | // broadcast to node2 235 | await rpcNode2_miner.generate(1); 236 | // intendedBlockCount++; we don't count this as one as it will replace block already counted w/ original genesis txn 237 | await rpcNode2_miner.generate(10); 238 | intendedBlockCount+=10; 239 | tokenId = await rpcNode2_miner.sendRawTransaction(genesisTxnHex, true); 240 | // genesis txn should be in the mempool on node 2, what about node 1? 
241 | 242 | // make sure token genesis is in node #2 mempool 243 | let mempool = await rpcNode2_miner.getRawMemPool(); 244 | while (mempool.length === 0) { 245 | await sleep(50); 246 | mempool = await rpcNode2_miner.getRawMemPool(); 247 | } 248 | 249 | // confirm node 1 mempool is 0 length 250 | mempool = await rpcNode1_miner.getRawMemPool(); 251 | while (mempool.length > 0) { 252 | await sleep(50); 253 | mempool = await rpcNode1_miner.getRawMemPool(); 254 | } 255 | 256 | // invalidate node 1 last block 257 | try { 258 | console.log(`Invalidating: ${lastBlockHash} for height ${intendedBlockCount}`); 259 | await rpcNode1_miner.invalidateBlock(invalidatedBlockHash); 260 | } catch (_) { } 261 | 262 | // reconnect the 2 nodes 263 | try { 264 | await rpcNode1_miner.addNode("bitcoin2", "onetry"); 265 | } catch(err) { } 266 | let peerInfo: any[] = await rpcNode1_miner.getPeerInfo(); 267 | while (peerInfo.length < 1) { 268 | await sleep(100); 269 | peerInfo = await rpcNode1_miner.getPeerInfo(); 270 | } 271 | assert.strictEqual(peerInfo.length, 1); 272 | 273 | // check both nodes are on the same block 274 | let node1Hash = await rpcNode1_miner.getbestblockhash(); 275 | let node2Hash = await rpcNode2_miner.getbestblockhash(); 276 | while (node1Hash !== node2Hash) { 277 | await sleep(50); 278 | node1Hash = await rpcNode1_miner.getbestblockhash(); 279 | node2Hash = await rpcNode2_miner.getbestblockhash(); 280 | } 281 | assert.strictEqual(node1Hash, node2Hash); 282 | // lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 283 | // intendedBlockCount++; 284 | 285 | // make sure token genesis is back in the node #1 mempool 286 | mempool = await rpcNode1_miner.getRawMemPool(); 287 | while (mempool.length === 0) { 288 | await sleep(50); 289 | mempool = await rpcNode1_miner.getRawMemPool(); 290 | } 291 | }); 292 | 293 | step("BR-2: Check updated graph txn block hash (tokenId should be removed everywhere until it is added in a block)", async () => { 294 | 295 | // NOTE: We aren't 
able to keep the tokenId txn in the already seen unconfirmed transaction after the reorg, 296 | // the transaction will get added to all collections after it is mined. This is not a major issue, as it 297 | // is only a brief period during reorg where unconfirmed collection might be missing txns 298 | let t = await db.tokenFetch(tokenId); 299 | let g = await db.graphTxnFetch(tokenId); 300 | let c = await db.confirmedFetch(tokenId); 301 | let u = await db.unconfirmedFetch(tokenId); 302 | while (t || g || c || u) { 303 | await sleep(50); 304 | t = await db.tokenFetch(tokenId); 305 | g = await db.graphTxnFetch(tokenId); 306 | c = await db.confirmedFetch(tokenId); 307 | u = await db.unconfirmedFetch(tokenId); 308 | } 309 | 310 | // now we'll mine the tokenId transaction into a block 311 | let blockhash = (await rpcNode1_miner.generate(1))[0]; 312 | 313 | t = await db.tokenFetch(tokenId); 314 | g = await db.graphTxnFetch(tokenId); 315 | c = await db.confirmedFetch(tokenId); 316 | //u = await db.unconfirmedFetch(tokenId); 317 | while (!t || !g || !c || !g.graphTxn._blockHash) { // || u) { 318 | await sleep(50); 319 | t = await db.tokenFetch(tokenId); 320 | g = await db.graphTxnFetch(tokenId); 321 | c = await db.confirmedFetch(tokenId); 322 | //u = await db.unconfirmedFetch(tokenId); 323 | } 324 | assert.strictEqual(t.tokenStats.approx_txns_since_genesis, 0); 325 | let height = await rpcNode1_miner.getBlockCount(); 326 | assert.strictEqual(t.tokenStats.block_created, height); 327 | assert.strictEqual(g.graphTxn._blockHash!.toString("hex"), blockhash); 328 | assert.notEqual(c, null); 329 | //assert.strictEqual(u, null); 330 | }); 331 | 332 | step("Clean up", async () => { 333 | // generate block to clear the mempool (may be dirty from previous tests) 334 | await rpcNode1_miner.generate(1); 335 | sock.disconnect('tcp://0.0.0.0:27339'); 336 | }); 337 | }); 338 | -------------------------------------------------------------------------------- 
/test/6-burn-with-invalid-txn-fast.spec.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs'; 3 | import * as zmq from 'zeromq'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import BigNumber from 'bignumber.js'; 6 | import { step } from 'mocha-steps'; 7 | 8 | import { Config } from "../config"; 9 | import { Db } from '../db'; 10 | import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | import { TokenBatonStatus } from "../interfaces"; 12 | import { TokenDBObject } from "../interfaces"; 13 | 14 | const bitbox = new BITBOX(); 15 | const slp = new Slp(bitbox); 16 | const txnHelpers = new TransactionHelpers(slp); 17 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | const TOKEN_DECIMALS = 1; 20 | const TOKEN_GENESIS_QTY = 100; 21 | const TOKEN_SEND_QTY = 1; 22 | 23 | // connect to bitcoin regtest network JSON-RPC 24 | const rpcClient = require('bitcoin-rpc-promise-retry'); 25 | const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // (optional) connect to a miner's rpc on 18444 that is not connected to SLPDB 26 | const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { maxRetries: 0 }); 27 | 28 | // setup a new local SLP validator instance 29 | const validator = new LocalValidator(bitbox, async (txids) => { 30 | let txn; 31 | try { 32 | txn = await rpcNode1_miner.getRawTransaction(txids[0]); 33 | } catch(err) { 34 | throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 35 | } 36 | return [ txn ]; 37 | }, console); 38 | 39 | // connect to SLPDB ZMQ notifications 40 | let slpdbTxnNotifications: TNATxn[] = []; 41 | let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 42 | const sock: any = 
zmq.socket('sub'); 43 | sock.connect('tcp://0.0.0.0:27339'); 44 | sock.subscribe('mempool'); 45 | sock.subscribe('block'); 46 | sock.on('message', async function(topic: string, message: Buffer) { 47 | if (topic.toString() === 'mempool') { 48 | let obj = JSON.parse(message.toString('utf8')); 49 | slpdbTxnNotifications.unshift(obj); 50 | } else if (topic.toString() === 'block') { 51 | let obj = JSON.parse(message.toString('utf8')); 52 | slpdbBlockNotifications.unshift(obj); 53 | } 54 | }); 55 | 56 | // connect to the regtest mongoDB 57 | let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 58 | 59 | // produced and shared between tests. 60 | let receiverRegtest: string; 61 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 62 | let txnInputs: SlpAddressUtxoResult[]; 63 | let tokenId: string; 64 | let sendTxid: string; 65 | let lastBlockHash: string; 66 | let lastBlockIndex: number; 67 | let genesisBlockIndex: number; 68 | 69 | describe("6-Burn-with-invalid-txn", () => { 70 | 71 | step("Initial setup for all tests", async () => { 72 | // generate block to clear the mempool (may be dirty from previous tests) 73 | await rpcNode1_miner.generate(1); 74 | 75 | // make sure we have coins to use in tests 76 | let balance = await rpcNode1_miner.getBalance(); 77 | while (balance < 1) { 78 | await rpcNode1_miner.generate(1); 79 | balance = await rpcNode1_miner.getBalance(); 80 | } 81 | 82 | // put all the funds on the receiver's address 83 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 84 | await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 85 | }); 86 | 87 | step("GENESIS: setup for the txn tests", async () => { 88 | let unspent = await rpcNode1_miner.listUnspent(0); 89 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 90 | if (unspent.length === 0) throw Error("No unspent outputs."); 91 | 
unspent.map((txo: any) => txo.cashAddress = txo.address); 92 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 93 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 94 | 95 | // validate and categorize unspent TXOs 96 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 97 | txnInputs = utxos.nonSlpUtxos; 98 | 99 | assert.strictEqual(txnInputs.length > 0, true); 100 | }); 101 | 102 | step("GENESIS: produces ZMQ output for the transaction", async () => { 103 | slpdbTxnNotifications = []; 104 | slpdbBlockNotifications = []; 105 | 106 | // create and broadcast SLP genesis transaction 107 | receiverSlptest = Utils.toSlpAddress(receiverRegtest); 108 | let genesisTxnHex = txnHelpers.simpleTokenGenesis({ 109 | tokenName: "unit-test-6", 110 | tokenTicker: "ut6", 111 | tokenAmount: new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 112 | documentUri: null, 113 | documentHash: null, 114 | decimals: TOKEN_DECIMALS, 115 | tokenReceiverAddress: receiverSlptest, 116 | batonReceiverAddress: receiverSlptest, 117 | bchChangeReceiverAddress: receiverSlptest, 118 | inputUtxos: txnInputs 119 | }); 120 | 121 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 122 | 123 | // give slpdb time to process 124 | while(slpdbTxnNotifications.length === 0) { 125 | await sleep(50); 126 | } 127 | 128 | // check that SLPDB made proper outgoing ZMQ messages for 129 | assert.strictEqual(slpdbTxnNotifications.length, 1); 130 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 131 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 132 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 133 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 134 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 135 | 
assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.GENESIS); 136 | // @ts-ignore 137 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 138 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 139 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 140 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 141 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 142 | }); 143 | 144 | step("GENESIS: stores in confirmed collection (after block)", async () => { 145 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 146 | let txn = await db.confirmedFetch(tokenId); 147 | while(!txn) { 148 | await sleep(50); 149 | txn = await db.confirmedFetch(tokenId); 150 | } 151 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 152 | genesisBlockIndex = lastBlockIndex; 153 | assert.strictEqual(txn!.slp!.valid, true); 154 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 155 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 156 | // @ts-ignore 157 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_GENESIS_QTY.toFixed()); 158 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, txn!.tx.h); 159 | }); 160 | 161 | step("SEND: setup for the txn tests", async () => { 162 | // get current address UTXOs 163 | let unspent = await rpcNode1_miner.listUnspent(0); 164 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 165 | if (unspent.length === 0) throw Error("No unspent outputs."); 166 | unspent.map((txo: any) => txo.cashAddress = txo.address); 167 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 168 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 169 | 170 | // process raw UTXOs 171 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 
172 | 173 | // select the inputs for transaction 174 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 175 | 176 | assert.strictEqual(txnInputs.length > 1, true); 177 | }); 178 | 179 | step("SEND: produces ZMQ output for the transaction", async () => { 180 | // clear ZMQ cache 181 | slpdbTxnNotifications = []; 182 | slpdbBlockNotifications = []; 183 | 184 | // create a SEND Transaction 185 | let sendTxnHex = txnHelpers.simpleTokenSend({ 186 | tokenId, 187 | sendAmounts: new BigNumber(TOKEN_SEND_QTY).times(10**TOKEN_DECIMALS), 188 | inputUtxos: txnInputs, 189 | tokenReceiverAddresses: receiverSlptest, 190 | changeReceiverAddress: receiverSlptest 191 | }); 192 | 193 | sendTxid = await rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 194 | 195 | while(slpdbTxnNotifications.length === 0) { 196 | await sleep(50); 197 | } 198 | 199 | // check that SLPDB made proper outgoing ZMQ messages for 200 | assert.strictEqual(slpdbTxnNotifications.length, 1); 201 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 202 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 203 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 204 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 205 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 206 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.SEND); 207 | // @ts-ignore 208 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, (new BigNumber(TOKEN_SEND_QTY)).toFixed()); 209 | let change = (new BigNumber(TOKEN_GENESIS_QTY)).minus(TOKEN_SEND_QTY).toFixed(); 210 | // @ts-ignore 211 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![1].amount!, change); 212 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 213 | assert.strictEqual(typeof 
slpdbTxnNotifications[0]!.in, "object"); 214 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 215 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 216 | }); 217 | 218 | step("SEND: stores in confirmed collection (after block)", async () => { 219 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 220 | let txn = await db.confirmedFetch(sendTxid); 221 | while (!txn) { 222 | await sleep(50); 223 | txn = await db.confirmedFetch(sendTxid); 224 | } 225 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 226 | assert.strictEqual(txn!.slp!.valid, true); 227 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 228 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 229 | // @ts-ignore 230 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_SEND_QTY.toFixed()); 231 | // @ts-ignore 232 | assert.strictEqual(txn!.slp!.detail!.outputs![1].amount!.toString(), (TOKEN_GENESIS_QTY-TOKEN_SEND_QTY).toFixed()); 233 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, tokenId); 234 | assert.strictEqual(txn!.tx.h, sendTxid); 235 | }); 236 | 237 | step("BURN: make an invalid SLP transaction that burns all SLP coins", async () => { 238 | let balance = await rpcNode1_miner.getBalance(); 239 | await rpcNode1_miner.sendToAddress(receiverRegtest, balance, "", "", true); 240 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 241 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 242 | }); 243 | 244 | step("BURN: check that tokens collection records correct circulating supply", async () => { 245 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 246 | while (!t || t!.tokenStats!.block_created === null || t!.mintBatonUtxo !== "") { 247 | await sleep(50); 248 | t = await db.tokenFetch(tokenId); 249 | } 250 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 251 | assert.strictEqual(t!.tokenDetails.timestamp_unix! 
> 0, true); 252 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 253 | assert.strictEqual(t!.mintBatonUtxo, ""); 254 | assert.strictEqual(t!.tokenStats!.block_created!, genesisBlockIndex); 255 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.DEAD_BURNED); 256 | }); 257 | 258 | step("Cleanup after tests", async () => { 259 | // generate block to clear the mempool (may be dirty from previous tests) 260 | await rpcNode1_miner.generate(1); 261 | sock.disconnect('tcp://0.0.0.0:27339'); 262 | }); 263 | }); 264 | -------------------------------------------------------------------------------- /test/7-burn-with-invalid-txn-slow.spec.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs'; 3 | import * as zmq from 'zeromq'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import BigNumber from 'bignumber.js'; 6 | import { step } from 'mocha-steps'; 7 | 8 | import { Config } from "../config"; 9 | import { Db } from '../db'; 10 | import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | import { TokenBatonStatus } from "../interfaces"; 12 | import { TokenDBObject } from "../interfaces"; 13 | 14 | const bitbox = new BITBOX(); 15 | const slp = new Slp(bitbox); 16 | const txnHelpers = new TransactionHelpers(slp); 17 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | const TOKEN_DECIMALS = 1; 20 | const TOKEN_GENESIS_QTY = 100; 21 | const TOKEN_SEND_QTY = 1; 22 | 23 | // connect to bitcoin regtest network JSON-RPC 24 | const rpcClient = require('bitcoin-rpc-promise-retry'); 25 | const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // (optional) connect to a miner's rpc on 18444 that is not connected to SLPDB 26 | const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { 
maxRetries: 0 }); 27 | 28 | // setup a new local SLP validator instance 29 | const validator = new LocalValidator(bitbox, async (txids) => { 30 | let txn; 31 | try { 32 | txn = await rpcNode1_miner.getRawTransaction(txids[0]); 33 | } catch(err) { 34 | throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 35 | } 36 | return [ txn ]; 37 | }, console); 38 | 39 | // connect to SLPDB ZMQ notifications 40 | let slpdbTxnNotifications: TNATxn[] = []; 41 | let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 42 | const sock: any = zmq.socket('sub'); 43 | sock.connect('tcp://0.0.0.0:27339'); 44 | sock.subscribe('mempool'); 45 | sock.subscribe('block'); 46 | sock.on('message', async function(topic: string, message: Buffer) { 47 | if (topic.toString() === 'mempool') { 48 | let obj = JSON.parse(message.toString('utf8')); 49 | slpdbTxnNotifications.unshift(obj); 50 | } else if (topic.toString() === 'block') { 51 | let obj = JSON.parse(message.toString('utf8')); 52 | slpdbBlockNotifications.unshift(obj); 53 | } 54 | }); 55 | 56 | // connect to the regtest mongoDB 57 | let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 58 | 59 | // produced and shared between tests. 
60 | let receiverRegtest: string; 61 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 62 | let txnInputs: SlpAddressUtxoResult[]; 63 | let tokenId: string; 64 | let sendTxid: string; 65 | let lastBlockHash: string; 66 | let lastBlockIndex: number; 67 | let genesisBlockIndex: number; 68 | 69 | describe("7-Burn-with-invalid-txn-slow", () => { 70 | 71 | step("Initial setup for all tests", async () => { 72 | // generate block to clear the mempool (may be dirty from previous tests) 73 | await rpcNode1_miner.generate(1); 74 | 75 | // make sure we have coins to use in tests 76 | let balance = await rpcNode1_miner.getBalance(); 77 | while (balance < 1) { 78 | await rpcNode1_miner.generate(1); 79 | balance = await rpcNode1_miner.getBalance(); 80 | } 81 | 82 | // put all the funds on the receiver's address 83 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 84 | await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 85 | }); 86 | 87 | step("GENESIS: setup for the txn tests", async () => { 88 | let unspent = await rpcNode1_miner.listUnspent(0); 89 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 90 | if (unspent.length === 0) throw Error("No unspent outputs."); 91 | unspent.map((txo: any) => txo.cashAddress = txo.address); 92 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 93 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 94 | 95 | // validate and categorize unspent TXOs 96 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 97 | txnInputs = utxos.nonSlpUtxos; 98 | 99 | assert.strictEqual(txnInputs.length > 0, true); 100 | }); 101 | 102 | step("GENESIS: produces ZMQ output for the transaction", async () => { 103 | slpdbTxnNotifications = []; 104 | slpdbBlockNotifications = []; 105 | 106 | // create and broadcast SLP genesis transaction 107 | receiverSlptest = 
Utils.toSlpAddress(receiverRegtest); 108 | let genesisTxnHex = txnHelpers.simpleTokenGenesis({ 109 | tokenName: "unit-test-6", 110 | tokenTicker: "ut6", 111 | tokenAmount: new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 112 | documentUri: null, 113 | documentHash: null, 114 | decimals: TOKEN_DECIMALS, 115 | tokenReceiverAddress: receiverSlptest, 116 | batonReceiverAddress: receiverSlptest, 117 | bchChangeReceiverAddress: receiverSlptest, 118 | inputUtxos: txnInputs 119 | }); 120 | 121 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 122 | 123 | // give slpdb time to process 124 | while(slpdbTxnNotifications.length === 0) { 125 | await sleep(50); 126 | } 127 | 128 | // check that SLPDB made proper outgoing ZMQ messages for 129 | assert.strictEqual(slpdbTxnNotifications.length, 1); 130 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 131 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 132 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 133 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 134 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 135 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.GENESIS); 136 | // @ts-ignore 137 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 138 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 139 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 140 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 141 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 142 | }); 143 | 144 | step("GENESIS: stores in confirmed collection (after block)", async () => { 145 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 146 | let txn = await 
db.confirmedFetch(tokenId); 147 | while(!txn) { 148 | await sleep(50); 149 | txn = await db.confirmedFetch(tokenId); 150 | } 151 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 152 | genesisBlockIndex = lastBlockIndex; 153 | assert.strictEqual(txn!.slp!.valid, true); 154 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 155 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 156 | // @ts-ignore 157 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_GENESIS_QTY.toFixed()); 158 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, txn!.tx.h); 159 | }); 160 | 161 | step("SEND: setup for the txn tests", async () => { 162 | // get current address UTXOs 163 | let unspent = await rpcNode1_miner.listUnspent(0); 164 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 165 | if (unspent.length === 0) throw Error("No unspent outputs."); 166 | unspent.map((txo: any) => txo.cashAddress = txo.address); 167 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 168 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 169 | 170 | // process raw UTXOs 171 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 172 | 173 | // select the inputs for transaction 174 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 175 | 176 | assert.strictEqual(txnInputs.length > 1, true); 177 | }); 178 | 179 | step("SEND: produces ZMQ output for the transaction", async () => { 180 | // clear ZMQ cache 181 | slpdbTxnNotifications = []; 182 | slpdbBlockNotifications = []; 183 | 184 | // create a SEND Transaction 185 | let sendTxnHex = txnHelpers.simpleTokenSend({ 186 | tokenId, 187 | sendAmounts: new BigNumber(TOKEN_SEND_QTY).times(10**TOKEN_DECIMALS), 188 | inputUtxos: txnInputs, 189 | tokenReceiverAddresses: receiverSlptest, 190 | changeReceiverAddress: receiverSlptest 191 | }); 192 | 193 | sendTxid = await 
rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 194 | 195 | while(slpdbTxnNotifications.length === 0) { 196 | await sleep(50); 197 | } 198 | 199 | // check that SLPDB made proper outgoing ZMQ messages for 200 | assert.strictEqual(slpdbTxnNotifications.length, 1); 201 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 202 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 203 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 204 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 205 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 206 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.SEND); 207 | // @ts-ignore 208 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, (new BigNumber(TOKEN_SEND_QTY)).toFixed()); 209 | let change = (new BigNumber(TOKEN_GENESIS_QTY)).minus(TOKEN_SEND_QTY).toFixed(); 210 | // @ts-ignore 211 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![1].amount!, change); 212 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 213 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 214 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 215 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 216 | }); 217 | 218 | step("SEND: stores in confirmed collection (after block)", async () => { 219 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 220 | let txn = await db.confirmedFetch(sendTxid); 221 | while(!txn) { 222 | await sleep(50); 223 | txn = await db.confirmedFetch(sendTxid); 224 | } 225 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 226 | assert.strictEqual(txn!.slp!.valid, true); 227 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 228 | 
assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 229 | // @ts-ignore 230 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_SEND_QTY.toFixed()); 231 | // @ts-ignore 232 | assert.strictEqual(txn!.slp!.detail!.outputs![1].amount!.toString(), (TOKEN_GENESIS_QTY-TOKEN_SEND_QTY).toFixed()); 233 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, tokenId); 234 | assert.strictEqual(txn!.tx.h, sendTxid); 235 | }); 236 | 237 | step("BURN: make an invalid SLP transaction that burns all SLP coins", async () => { 238 | let balance = await rpcNode1_miner.getBalance(); 239 | await sleep(500); 240 | await rpcNode1_miner.sendToAddress(receiverRegtest, balance, "", "", true); 241 | await sleep(500); 242 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 243 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 244 | }); 245 | 246 | step("BURN: check that tokens collection records correct circulating supply", async () => { 247 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 248 | while(!t || 249 | t!.tokenStats!.block_created === null || 250 | //t!.tokenStats!.qty_token_circulating_supply.toString() !== "0" || 251 | t!.mintBatonUtxo !== "" 252 | ) { 253 | await sleep(50); 254 | t = await db.tokenFetch(tokenId); 255 | } 256 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 257 | assert.strictEqual(t!.tokenDetails.timestamp_unix! 
> 0, true); 258 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 259 | assert.strictEqual(t!.mintBatonUtxo, ""); 260 | assert.strictEqual(t!.tokenStats!.block_created!, genesisBlockIndex); 261 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.DEAD_BURNED); 262 | }); 263 | 264 | step("Cleanup after tests", async () => { 265 | // generate block to clear the mempool (may be dirty from previous tests) 266 | await rpcNode1_miner.generate(1); 267 | sock.disconnect('tcp://0.0.0.0:27339'); 268 | }); 269 | }); 270 | -------------------------------------------------------------------------------- /test/8-burn-with-valid-txn-fast.spec.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs'; 3 | import * as zmq from 'zeromq'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import BigNumber from 'bignumber.js'; 6 | import { step } from 'mocha-steps'; 7 | 8 | import { Config } from "../config"; 9 | import { Db } from '../db'; 10 | import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | import { TokenBatonStatus } from "../interfaces"; 12 | import { TokenDBObject } from "../interfaces"; 13 | 14 | const bitbox = new BITBOX(); 15 | const slp = new Slp(bitbox); 16 | const txnHelpers = new TransactionHelpers(slp); 17 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | const TOKEN_DECIMALS = 1; 20 | const TOKEN_GENESIS_QTY = 100; 21 | const TOKEN_SEND_QTY = 1; 22 | const TOKEN_BURN_QTY = 5; 23 | 24 | // connect to bitcoin regtest network JSON-RPC 25 | const rpcClient = require('bitcoin-rpc-promise-retry'); 26 | const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // (optional) connect to a miner's rpc on 18444 that is not connected to SLPDB 27 | const rpcNode1_miner = new 
rpcClient(connectionStringNode1_miner, { maxRetries: 0 }); 28 | 29 | // setup a new local SLP validator instance 30 | const validator = new LocalValidator(bitbox, async (txids) => { 31 | let txn; 32 | try { 33 | txn = await rpcNode1_miner.getRawTransaction(txids[0]); 34 | } catch(err) { 35 | throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 36 | } 37 | return [ txn ]; 38 | }, console); 39 | 40 | // connect to SLPDB ZMQ notifications 41 | let slpdbTxnNotifications: TNATxn[] = []; 42 | let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 43 | const sock: any = zmq.socket('sub'); 44 | sock.connect('tcp://0.0.0.0:27339'); 45 | sock.subscribe('mempool'); 46 | sock.subscribe('block'); 47 | sock.on('message', async function(topic: string, message: Buffer) { 48 | if (topic.toString() === 'mempool') { 49 | let obj = JSON.parse(message.toString('utf8')); 50 | slpdbTxnNotifications.unshift(obj); 51 | } else if (topic.toString() === 'block') { 52 | let obj = JSON.parse(message.toString('utf8')); 53 | slpdbBlockNotifications.unshift(obj); 54 | } 55 | }); 56 | 57 | // connect to the regtest mongoDB 58 | let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 59 | 60 | // produced and shared between tests. 
61 | let receiverRegtest: string; 62 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 63 | let txnInputs: SlpAddressUtxoResult[]; 64 | let tokenId: string; 65 | let sendTxid: string; 66 | let lastBlockHash: string; 67 | let lastBlockIndex: number; 68 | let genesisBlockIndex: number; 69 | 70 | describe("8-Burn-with-valid-txn-fast", () => { 71 | 72 | step("Initial setup for all tests", async () => { 73 | // generate block to clear the mempool (may be dirty from previous tests) 74 | await rpcNode1_miner.generate(1); 75 | 76 | // make sure we have coins to use in tests 77 | let balance = await rpcNode1_miner.getBalance(); 78 | while (balance < 1) { 79 | await rpcNode1_miner.generate(1); 80 | balance = await rpcNode1_miner.getBalance(); 81 | } 82 | 83 | // put all the funds on the receiver's address 84 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 85 | await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 86 | }); 87 | 88 | step("GENESIS: setup for the txn tests", async () => { 89 | let unspent = await rpcNode1_miner.listUnspent(0); 90 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 91 | if (unspent.length === 0) throw Error("No unspent outputs."); 92 | unspent.map((txo: any) => txo.cashAddress = txo.address); 93 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 94 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 95 | 96 | // validate and categorize unspent TXOs 97 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 98 | txnInputs = utxos.nonSlpUtxos; 99 | 100 | assert.strictEqual(txnInputs.length > 0, true); 101 | }); 102 | 103 | step("GENESIS: produces ZMQ output for the transaction", async () => { 104 | slpdbTxnNotifications = []; 105 | slpdbBlockNotifications = []; 106 | 107 | // create and broadcast SLP genesis transaction 108 | receiverSlptest = 
Utils.toSlpAddress(receiverRegtest); 109 | let genesisTxnHex = txnHelpers.simpleTokenGenesis({ 110 | tokenName: "unit-test-6", 111 | tokenTicker: "ut6", 112 | tokenAmount: new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 113 | documentUri: null, 114 | documentHash: null, 115 | decimals: TOKEN_DECIMALS, 116 | tokenReceiverAddress: receiverSlptest, 117 | batonReceiverAddress: receiverSlptest, 118 | bchChangeReceiverAddress: receiverSlptest, 119 | inputUtxos: txnInputs 120 | }); 121 | 122 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 123 | 124 | // give slpdb time to process 125 | while(slpdbTxnNotifications.length === 0) { 126 | await sleep(50); 127 | } 128 | 129 | // check that SLPDB made proper outgoing ZMQ messages for 130 | assert.strictEqual(slpdbTxnNotifications.length, 1); 131 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 132 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 133 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 134 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 135 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 136 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.GENESIS); 137 | // @ts-ignore 138 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 139 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 140 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 141 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 142 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 143 | }); 144 | 145 | step("GENESIS: stores in confirmed collection (after block)", async () => { 146 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 147 | let txn = await 
db.confirmedFetch(tokenId); 148 | while(!txn) { 149 | await sleep(50); 150 | txn = await db.confirmedFetch(tokenId); 151 | } 152 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 153 | genesisBlockIndex = lastBlockIndex; 154 | assert.strictEqual(txn!.slp!.valid, true); 155 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 156 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 157 | // @ts-ignore 158 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_GENESIS_QTY.toFixed()); 159 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, txn!.tx.h); 160 | }); 161 | 162 | step("SEND: setup for the txn tests", async () => { 163 | // get current address UTXOs 164 | let unspent = await rpcNode1_miner.listUnspent(0); 165 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 166 | if (unspent.length === 0) throw Error("No unspent outputs."); 167 | unspent.map((txo: any) => txo.cashAddress = txo.address); 168 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 169 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 170 | 171 | // process raw UTXOs 172 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 173 | 174 | // select the inputs for transaction 175 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 176 | 177 | assert.strictEqual(txnInputs.length > 1, true); 178 | }); 179 | 180 | step("SEND: produces ZMQ output for the transaction", async () => { 181 | // clear ZMQ cache 182 | slpdbTxnNotifications = []; 183 | slpdbBlockNotifications = []; 184 | 185 | // create a SEND Transaction 186 | let sendTxnHex = txnHelpers.simpleTokenSend({ 187 | tokenId, 188 | sendAmounts: new BigNumber(TOKEN_SEND_QTY).times(10**TOKEN_DECIMALS), 189 | inputUtxos: txnInputs, 190 | tokenReceiverAddresses: receiverSlptest, 191 | changeReceiverAddress: receiverSlptest 192 | }); 193 | 194 | sendTxid = await 
rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 195 | 196 | while(slpdbTxnNotifications.length === 0) { 197 | await sleep(50); 198 | } 199 | 200 | // check that SLPDB made proper outgoing ZMQ messages for 201 | assert.strictEqual(slpdbTxnNotifications.length, 1); 202 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 203 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 204 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 205 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 206 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 207 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.SEND); 208 | // @ts-ignore 209 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, (new BigNumber(TOKEN_SEND_QTY)).toFixed()); 210 | let change = (new BigNumber(TOKEN_GENESIS_QTY)).minus(TOKEN_SEND_QTY).toFixed(); 211 | // @ts-ignore 212 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![1].amount!, change); 213 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 214 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 215 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 216 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 217 | }); 218 | 219 | step("SEND: stores in confirmed collection (after block)", async () => { 220 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 221 | let txn = await db.confirmedFetch(sendTxid); 222 | while(!txn) { 223 | await sleep(50); 224 | txn = await db.confirmedFetch(sendTxid); 225 | } 226 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 227 | assert.strictEqual(txn!.slp!.valid, true); 228 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 229 | 
assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 230 | // @ts-ignore 231 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_SEND_QTY.toFixed()); 232 | // @ts-ignore 233 | assert.strictEqual(txn!.slp!.detail!.outputs![1].amount!.toString(), (TOKEN_GENESIS_QTY-TOKEN_SEND_QTY).toFixed()); 234 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, tokenId); 235 | assert.strictEqual(txn!.tx.h, sendTxid); 236 | }); 237 | 238 | step("BURN: make an invalid SLP transaction that burns all SLP coins", async () => { 239 | 240 | // get current address UTXOs 241 | let unspent = await rpcNode1_miner.listUnspent(0); 242 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 243 | if (unspent.length === 0) throw Error("No unspent outputs."); 244 | unspent.map((txo: any) => txo.cashAddress = txo.address); 245 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 246 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 247 | 248 | // process raw UTXOs 249 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 250 | 251 | // select the inputs for transaction 252 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 253 | 254 | assert.strictEqual(txnInputs.length > 1, true); 255 | 256 | // create a SEND Transaction 257 | let sendTxnHex = txnHelpers.simpleTokenBurn({ 258 | tokenId, 259 | burnAmount: new BigNumber(TOKEN_BURN_QTY).times(10**TOKEN_DECIMALS), 260 | inputUtxos: txnInputs, 261 | changeReceiverAddress: receiverSlptest 262 | }); 263 | sendTxid = await rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 264 | 265 | //lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 266 | //lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 267 | }); 268 | 269 | step("BURN: check that tokens collection records correct circulating supply", async () => { 270 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 
271 | while(!t || 272 | t!.tokenStats!.block_created === null //|| 273 | //t!.tokenStats!.qty_token_circulating_supply.toString() !== (TOKEN_GENESIS_QTY-TOKEN_BURN_QTY).toFixed() 274 | ) { 275 | await sleep(50); 276 | t = await db.tokenFetch(tokenId); 277 | } 278 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 279 | assert.strictEqual(t!.tokenDetails.timestamp_unix! > 0, true); 280 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 281 | assert.strictEqual(t!.mintBatonUtxo, tokenId + ":2"); 282 | assert.strictEqual(t!.tokenStats!.block_created!, genesisBlockIndex); 283 | // assert.strictEqual(t!.tokenStats!.block_last_active_mint, null); 284 | // assert.strictEqual(t!.tokenStats!.block_last_active_send, lastBlockIndex); 285 | // assert.strictEqual(t!.tokenStats!.qty_token_burned.toString() === TOKEN_BURN_QTY.toFixed(), true); 286 | // assert.strictEqual(t!.tokenStats!.qty_token_circulating_supply.toString(), (TOKEN_GENESIS_QTY-TOKEN_BURN_QTY).toFixed()); 287 | // assert.strictEqual(t!.tokenStats!.qty_token_minted.toString(), TOKEN_GENESIS_QTY.toFixed()); 288 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.ALIVE); 289 | }); 290 | 291 | step("Cleanup after tests", async () => { 292 | // generate block to clear the mempool (may be dirty from previous tests) 293 | await rpcNode1_miner.generate(1); 294 | sock.disconnect('tcp://0.0.0.0:27339'); 295 | }); 296 | }); 297 | -------------------------------------------------------------------------------- /test/9-burn-with-valid-txn-slow.spec.ts: -------------------------------------------------------------------------------- 1 | import * as assert from "assert"; 2 | import { Slp, LocalValidator, TransactionHelpers, Utils, SlpAddressUtxoResult, SlpTransactionType } from 'slpjs'; 3 | import * as zmq from 'zeromq'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import BigNumber from 'bignumber.js'; 6 | import { step } from 'mocha-steps'; 7 | 8 | import { Config } from "../config"; 9 | import { 
Db } from '../db'; 10 | import { TNATxn, TNATxnSlpDetails } from "../tna"; 11 | import { TokenBatonStatus } from "../interfaces"; 12 | import { TokenDBObject } from "../interfaces"; 13 | 14 | const bitbox = new BITBOX(); 15 | const slp = new Slp(bitbox); 16 | const txnHelpers = new TransactionHelpers(slp); 17 | const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms)); 18 | 19 | const TOKEN_DECIMALS = 1; 20 | const TOKEN_GENESIS_QTY = 100; 21 | const TOKEN_SEND_QTY = 1; 22 | const TOKEN_BURN_QTY = 5; 23 | 24 | // connect to bitcoin regtest network JSON-RPC 25 | const rpcClient = require('bitcoin-rpc-promise-retry'); 26 | const connectionStringNode1_miner = `http://bitcoin:password@${process.env.RPC1_HOST}:${process.env.RPC1_PORT}`; // (optional) connect to a miner's rpc on 18444 that is not connected to SLPDB 27 | const rpcNode1_miner = new rpcClient(connectionStringNode1_miner, { maxRetries: 0 }); 28 | 29 | // setup a new local SLP validator instance 30 | const validator = new LocalValidator(bitbox, async (txids) => { 31 | let txn; 32 | try { 33 | txn = await rpcNode1_miner.getRawTransaction(txids[0]); 34 | } catch(err) { 35 | throw Error(`[ERROR] Could not get transaction ${txids[0]} in local validator: ${err}`) 36 | } 37 | return [ txn ]; 38 | }, console); 39 | 40 | // connect to SLPDB ZMQ notifications 41 | let slpdbTxnNotifications: TNATxn[] = []; 42 | let slpdbBlockNotifications: { txns: { slp: TNATxnSlpDetails, txid: string }[], hash: string }[] = []; 43 | const sock: any = zmq.socket('sub'); 44 | sock.connect('tcp://0.0.0.0:27339'); 45 | sock.subscribe('mempool'); 46 | sock.subscribe('block'); 47 | sock.on('message', async function(topic: string, message: Buffer) { 48 | if (topic.toString() === 'mempool') { 49 | let obj = JSON.parse(message.toString('utf8')); 50 | slpdbTxnNotifications.unshift(obj); 51 | } else if (topic.toString() === 'block') { 52 | let obj = JSON.parse(message.toString('utf8')); 53 | 
slpdbBlockNotifications.unshift(obj); 54 | } 55 | }); 56 | 57 | // connect to the regtest mongoDB 58 | let db = new Db({ dbUrl: `mongodb://${process.env.MONGO_HOST}:${process.env.MONGO_PORT}`, dbName: "slpdb_test", config: Config.db }); 59 | 60 | // produced and shared between tests. 61 | let receiverRegtest: string; 62 | let receiverSlptest: string; // this is same address as receiverRegtest, converted to slptest format 63 | let txnInputs: SlpAddressUtxoResult[]; 64 | let tokenId: string; 65 | let sendTxid: string; 66 | let lastBlockHash: string; 67 | let lastBlockIndex: number; 68 | let genesisBlockIndex: number; 69 | 70 | describe("9-Burn-with-valid-txn-slow", () => { 71 | 72 | step("Initial setup for all tests", async () => { 73 | // generate block to clear the mempool (may be dirty from previous tests) 74 | await rpcNode1_miner.generate(1); 75 | 76 | // (optional) connect miner node to a full node that is connected to slpdb 77 | // try { 78 | // await rpcNode1_miner.addNode("bitcoin2", "onetry"); 79 | // } catch(err) { } 80 | 81 | // make sure we have coins to use in tests 82 | let balance = await rpcNode1_miner.getBalance(); 83 | while (balance < 1) { 84 | await rpcNode1_miner.generate(1); 85 | balance = await rpcNode1_miner.getBalance(); 86 | } 87 | 88 | // put all the funds on the receiver's address 89 | receiverRegtest = await rpcNode1_miner.getNewAddress("0"); 90 | await rpcNode1_miner.sendToAddress(receiverRegtest, 1, "", "", true); 91 | }); 92 | 93 | step("GENESIS: setup for the txn tests", async () => { 94 | let unspent = await rpcNode1_miner.listUnspent(0); 95 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 96 | if (unspent.length === 0) throw Error("No unspent outputs."); 97 | unspent.map((txo: any) => txo.cashAddress = txo.address); 98 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 99 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 100 | 101 | // 
validate and categorize unspent TXOs 102 | let utxos = await slp.processUtxosForSlpAbstract([unspent[0]], validator); 103 | txnInputs = utxos.nonSlpUtxos; 104 | 105 | assert.strictEqual(txnInputs.length > 0, true); 106 | }); 107 | 108 | step("GENESIS: produces ZMQ output for the transaction", async () => { 109 | slpdbTxnNotifications = []; 110 | slpdbBlockNotifications = []; 111 | 112 | // create and broadcast SLP genesis transaction 113 | receiverSlptest = Utils.toSlpAddress(receiverRegtest); 114 | let genesisTxnHex = txnHelpers.simpleTokenGenesis({ 115 | tokenName: "unit-test-6", 116 | tokenTicker: "ut6", 117 | tokenAmount: new BigNumber(TOKEN_GENESIS_QTY).times(10**TOKEN_DECIMALS), 118 | documentUri: null, 119 | documentHash: null, 120 | decimals: TOKEN_DECIMALS, 121 | tokenReceiverAddress: receiverSlptest, 122 | batonReceiverAddress: receiverSlptest, 123 | bchChangeReceiverAddress: receiverSlptest, 124 | inputUtxos: txnInputs 125 | }); 126 | 127 | tokenId = await rpcNode1_miner.sendRawTransaction(genesisTxnHex, true); 128 | 129 | // give slpdb time to process 130 | while(slpdbTxnNotifications.length === 0) { 131 | await sleep(50); 132 | } 133 | 134 | // check that SLPDB made proper outgoing ZMQ messages for 135 | assert.strictEqual(slpdbTxnNotifications.length, 1); 136 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 137 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 138 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 139 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 140 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 141 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.GENESIS); 142 | // @ts-ignore 143 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, TOKEN_GENESIS_QTY.toFixed()); 144 | 
assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 145 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 146 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 147 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 148 | }); 149 | 150 | step("GENESIS: stores in confirmed collection (after block)", async () => { 151 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 152 | let txn = await db.confirmedFetch(tokenId); 153 | while(!txn) { 154 | await sleep(50); 155 | txn = await db.confirmedFetch(tokenId); 156 | } 157 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 158 | genesisBlockIndex = lastBlockIndex; 159 | assert.strictEqual(txn!.slp!.valid, true); 160 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 161 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 162 | // @ts-ignore 163 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_GENESIS_QTY.toFixed()); 164 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, txn!.tx.h); 165 | }); 166 | 167 | step("SEND: setup for the txn tests", async () => { 168 | // get current address UTXOs 169 | let unspent = await rpcNode1_miner.listUnspent(0); 170 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 171 | if (unspent.length === 0) throw Error("No unspent outputs."); 172 | unspent.map((txo: any) => txo.cashAddress = txo.address); 173 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 174 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 175 | 176 | // process raw UTXOs 177 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 178 | 179 | // select the inputs for transaction 180 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 181 | 182 | assert.strictEqual(txnInputs.length > 1, true); 183 | }); 184 | 185 | step("SEND: produces ZMQ output for 
the transaction", async () => { 186 | // clear ZMQ cache 187 | slpdbTxnNotifications = []; 188 | slpdbBlockNotifications = []; 189 | 190 | // create a SEND Transaction 191 | let sendTxnHex = txnHelpers.simpleTokenSend({ 192 | tokenId, 193 | sendAmounts: new BigNumber(TOKEN_SEND_QTY).times(10**TOKEN_DECIMALS), 194 | inputUtxos: txnInputs, 195 | tokenReceiverAddresses: receiverSlptest, 196 | changeReceiverAddress: receiverSlptest 197 | }); 198 | 199 | sendTxid = await rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 200 | 201 | while(slpdbTxnNotifications.length === 0) { 202 | await sleep(50); 203 | } 204 | 205 | // check that SLPDB made proper outgoing ZMQ messages for 206 | assert.strictEqual(slpdbTxnNotifications.length, 1); 207 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.valid, true); 208 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.name, "unit-test-6"); 209 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.symbol, "ut6"); 210 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.tokenIdHex, tokenId); 211 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].address, receiverSlptest); 212 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.transactionType, SlpTransactionType.SEND); 213 | // @ts-ignore 214 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![0].amount!, (new BigNumber(TOKEN_SEND_QTY)).toFixed()); 215 | let change = (new BigNumber(TOKEN_GENESIS_QTY)).minus(TOKEN_SEND_QTY).toFixed(); 216 | // @ts-ignore 217 | assert.strictEqual(slpdbTxnNotifications[0]!.slp!.detail!.outputs![1].amount!, change); 218 | assert.strictEqual(slpdbTxnNotifications[0]!.blk === undefined, true); 219 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.in, "object"); 220 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.out, "object"); 221 | assert.strictEqual(typeof slpdbTxnNotifications[0]!.tx, "object"); 222 | }); 223 | 224 | step("SEND: stores in confirmed collection (after block)", 
async () => { 225 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 226 | let txn = await db.confirmedFetch(sendTxid); 227 | while(!txn) { 228 | await sleep(50); 229 | txn = await db.confirmedFetch(sendTxid); 230 | } 231 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 232 | assert.strictEqual(txn!.slp!.valid, true); 233 | assert.strictEqual(txn!.slp!.detail!.name, "unit-test-6"); 234 | assert.strictEqual(txn!.slp!.detail!.symbol, "ut6"); 235 | // @ts-ignore 236 | assert.strictEqual(txn!.slp!.detail!.outputs![0].amount!.toString(), TOKEN_SEND_QTY.toFixed()); 237 | // @ts-ignore 238 | assert.strictEqual(txn!.slp!.detail!.outputs![1].amount!.toString(), (TOKEN_GENESIS_QTY-TOKEN_SEND_QTY).toFixed()); 239 | assert.strictEqual(txn!.slp!.detail!.tokenIdHex, tokenId); 240 | assert.strictEqual(txn!.tx.h, sendTxid); 241 | }); 242 | 243 | step("BURN: make an invalid SLP transaction that burns all SLP coins", async () => { 244 | 245 | // get current address UTXOs 246 | let unspent = await rpcNode1_miner.listUnspent(0); 247 | unspent = unspent.filter((txo: any) => txo.address === receiverRegtest); 248 | if (unspent.length === 0) throw Error("No unspent outputs."); 249 | unspent.map((txo: any) => txo.cashAddress = txo.address); 250 | unspent.map((txo: any) => txo.satoshis = txo.amount*10**8); 251 | await Promise.all(unspent.map(async (txo: any) => txo.wif = await rpcNode1_miner.dumpPrivKey(txo.address))); 252 | 253 | // process raw UTXOs 254 | let utxos = await slp.processUtxosForSlpAbstract(unspent, validator); 255 | 256 | // select the inputs for transaction 257 | txnInputs = [ ...utxos.nonSlpUtxos, ...utxos.slpTokenUtxos[tokenId] ]; 258 | 259 | assert.strictEqual(txnInputs.length > 1, true); 260 | 261 | await sleep(500); 262 | 263 | // create a SEND Transaction 264 | let sendTxnHex = txnHelpers.simpleTokenBurn({ 265 | tokenId, 266 | burnAmount: new BigNumber(TOKEN_BURN_QTY).times(10**TOKEN_DECIMALS), 267 | inputUtxos: txnInputs, 268 | 
changeReceiverAddress: receiverSlptest 269 | }); 270 | sendTxid = await rpcNode1_miner.sendRawTransaction(sendTxnHex, true); 271 | 272 | await sleep(500); 273 | lastBlockHash = (await rpcNode1_miner.generate(1))[0]; 274 | lastBlockIndex = (await rpcNode1_miner.getBlock(lastBlockHash, true)).height; 275 | }); 276 | 277 | step("BURN: check that tokens collection records correct circulating supply", async () => { 278 | let t: TokenDBObject | null = await db.tokenFetch(tokenId); 279 | while (!t || t!.tokenStats!.block_created === null) { 280 | await sleep(50); 281 | t = await db.tokenFetch(tokenId); 282 | } 283 | assert.strictEqual(typeof t!.tokenDetails.timestamp, "string"); 284 | assert.strictEqual(t!.tokenDetails.timestamp_unix! > 0, true); 285 | assert.strictEqual(t!.tokenDetails.tokenIdHex, tokenId); 286 | assert.strictEqual(t!.mintBatonUtxo, tokenId + ":2"); 287 | assert.strictEqual(t!.tokenStats!.block_created!, genesisBlockIndex); 288 | assert.strictEqual(t!.mintBatonStatus, TokenBatonStatus.ALIVE); 289 | }); 290 | 291 | step("Cleanup after tests", async () => { 292 | // generate block to clear the mempool (may be dirty from previous tests) 293 | await rpcNode1_miner.generate(1); 294 | sock.disconnect('tcp://0.0.0.0:27339'); 295 | }); 296 | }); 297 | -------------------------------------------------------------------------------- /tna.ts: -------------------------------------------------------------------------------- 1 | require('dotenv').config() 2 | import { SlpTransactionDetailsTnaDbo } from './slpgraphmanager'; 3 | import { Utils } from 'slpjs'; 4 | import { BITBOX } from 'bitbox-sdk'; 5 | import * as Bitcore from 'bitcore-lib-cash'; 6 | 7 | const bitbox = new BITBOX(); 8 | let bitcore = require('bitcore-lib-cash'); 9 | 10 | export class TNA { 11 | fromTx(gene: Bitcore.Transaction, options?: any): TNATxn { 12 | let net = options.network === 'testnet' ? 
bitcore.Networks.testnet : bitcore.Networks.livenet; 13 | let t = gene.toObject() 14 | let inputs: Xput[] = []; 15 | let outputs: Xput[] = []; 16 | if (gene.inputs) { 17 | gene.inputs.forEach(function(input, input_index) { 18 | if (input.script) { 19 | let xput: Xput = { i: input_index }; 20 | input.script.chunks.forEach(function(c, chunk_index) { 21 | if (c.buf) { 22 | const key_prefix = (c.buf.length >= 512) ? 'l' : ''; 23 | xput[key_prefix + "b" + chunk_index] = c.buf.toString('base64'); 24 | if (options && options.h && options.h > 0) { 25 | xput[key_prefix + "h" + chunk_index] = c.buf.toString('hex'); 26 | } 27 | } else { 28 | if (typeof c.opcodenum !== 'undefined') { 29 | xput["b" + chunk_index] = { 30 | op: c.opcodenum 31 | } 32 | } else { 33 | const key_prefix = (c.len >= 512) ? 'l' : ''; // NOTE: c.length changed to c.len 34 | xput[key_prefix + "b" + chunk_index] = c; 35 | } 36 | } 37 | }) 38 | xput.str = input.script.toASM(); 39 | let sender: Sender = { 40 | h: input.prevTxId.toString('hex'), 41 | i: input.outputIndex, 42 | s: input._scriptBuffer, 43 | } 44 | 45 | try { 46 | if (input.script.toAddress(net).toString(bitcore.Address.CashAddrFormat) !== "false") { 47 | // let bitcore-lib-cash encode the address type 48 | sender.a = Utils.toSlpAddress(input.script.toAddress(net).toString(bitcore.Address.CashAddrFormat)); 49 | } else { 50 | // encode as p2sh address type 51 | const scriptSigHexArray = input.script.toASM().split(' ') 52 | const redeemScriptHex = scriptSigHexArray[scriptSigHexArray.length-1] 53 | const redeemScriptHash160 = bitbox.Crypto.hash160(Buffer.from(redeemScriptHex, 'hex')) 54 | sender.a = Utils.slpAddressFromHash160(redeemScriptHash160, options.network, "p2sh") 55 | } 56 | } catch (err) { 57 | throw Error(`txid: ${gene.hash}, input: ${input.prevTxId.toString('hex')}:${input.outputIndex}, address: ${input.script.toAddress(net).toString(bitcore.Address.CashAddrFormat)}, script ${input._scriptBuffer.toString("hex")}, err: ${err}`) 58 | } 59 
| 60 | xput.e = sender; 61 | inputs.push(xput) 62 | } 63 | }) 64 | } 65 | if (gene.outputs) { 66 | gene.outputs.forEach(function(output, output_index) { 67 | if (output.script) { 68 | let xput: Xput = { i: output_index } 69 | output.script.chunks.forEach((c: Bitcore.Chunk, chunk_index: number) => { 70 | if (c.buf) { 71 | const key_prefix = (c.buf.length >= 512) ? 'l' : ''; 72 | 73 | xput[key_prefix + "b" + chunk_index] = c.buf.toString('base64') 74 | xput[key_prefix + "s" + chunk_index] = c.buf.toString('utf8') 75 | if (options && options.h && options.h > 0) { 76 | xput[key_prefix + "h" + chunk_index] = c.buf.toString('hex') 77 | } 78 | } else { 79 | if (typeof c.opcodenum !== 'undefined') { 80 | xput["b" + chunk_index] = { 81 | op: c.opcodenum 82 | } 83 | } else { 84 | const key_prefix = (c.len >= 512) ? 'l' : ''; // changed c.length to c.len 85 | xput[key_prefix + "b" + chunk_index] = c; 86 | } 87 | } 88 | }) 89 | xput.str = output.script.toASM() 90 | let receiver: Receiver = { 91 | v: output.satoshis, 92 | i: output_index, 93 | s: output._scriptBuffer 94 | } 95 | let address; 96 | try { address = Utils.toSlpAddress(output.script.toAddress(net).toString(bitcore.Address.CashAddrFormat));} catch(_) { } 97 | if (address && address.length > 0) { 98 | receiver.a = address; 99 | } 100 | xput.e = receiver; 101 | outputs.push(xput) 102 | } 103 | }) 104 | } 105 | return { tx: { h: t.hash, raw: gene.toBuffer() }, in: inputs, out: outputs }; 106 | } 107 | } 108 | 109 | export interface TNATxn { 110 | tx: { h: string, raw: Buffer }; 111 | in: Xput[]; 112 | out: Xput[]; 113 | blk?: { h: string; i: number; t: number; }; 114 | slp?: TNATxnSlpDetails; 115 | } 116 | 117 | export interface TNATxnSlpDetails { 118 | valid: boolean, 119 | detail: SlpTransactionDetailsTnaDbo|null, 120 | invalidReason: string|null, 121 | schema_version: number 122 | } 123 | 124 | export interface Xput { 125 | [key:string]: any; 126 | i: number; 127 | str?: string; 128 | e?: Sender|Receiver 129 | } 130 
// Spending side of an input: previous outpoint plus its scriptSig.
export interface Sender {
    h: string;  // previous txid (hex)
    i: number;  // previous output index
    a?: string; // slp-format address, when derivable
    s: Buffer; // scriptSig
}

// Receiving side of an output: value plus its scriptPubkey.
export interface Receiver {
    v: number;  // value in satoshis
    i: number;  // output index
    a?: string; // slp-format address, when derivable
    s: Buffer; // scriptPubkey
}
-------------------------------------------------------------------------------- /tsconfig.json: --------------------------------------------------------------------------------
{
  "compilerOptions": {
    /* Basic Options */
    "target": "es6",                          /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
    "module": "commonjs",                     /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
    "lib": ["es2015", "es2017", "esnext"],    /* Specify library files to be included in the compilation. */
    // "allowJs": true,                       /* Allow javascript files to be compiled. */
    // "checkJs": true,                       /* Report errors in .js files. */
    // "jsx": "preserve",                     /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
    // "declaration": true,                   /* Generates corresponding '.d.ts' file. */
    // "declarationMap": true,                /* Generates a sourcemap for each corresponding '.d.ts' file. */
    "sourceMap": true,                        /* Generates corresponding '.map' file. */
    // "outFile": "./",                       /* Concatenate and emit output to single file. */
    // "outDir": "./",                        /* Redirect output structure to the directory. */
    // "rootDir": "./",                       /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
    // "composite": true,                     /* Enable project compilation */
    // "removeComments": true,                /* Do not emit comments to output. */
    // "noEmit": true,                        /* Do not emit outputs. */
    // "importHelpers": true,                 /* Import emit helpers from 'tslib'.
*/ 20 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 21 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 22 | 23 | /* Strict Type-Checking Options */ 24 | "strict": true, /* Enable all strict type-checking options. */ 25 | //"noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ 26 | // "strictNullChecks": true, /* Enable strict null checks. */ 27 | // "strictFunctionTypes": true, /* Enable strict checking of function types. */ 28 | // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ 29 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 30 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 31 | 32 | /* Additional Checks */ 33 | // "noUnusedLocals": true, /* Report errors on unused locals. */ 34 | // "noUnusedParameters": true, /* Report errors on unused parameters. */ 35 | // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ 36 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 37 | 38 | /* Module Resolution Options */ 39 | "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 40 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 41 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 42 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 43 | // "typeRoots": [], /* List of folders to include type definitions from. 
*/ 44 | // "types": [], /* Type declaration files to be included in compilation. */ 45 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 46 | // "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ 47 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 48 | 49 | /* Source Map Options */ 50 | // "sourceRoot": "./", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 51 | // "mapRoot": "./", /* Specify the location where debugger should locate map files instead of generated locations. */ 52 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 53 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ 54 | 55 | /* Experimental Options */ 56 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 57 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. 
*/ 58 | }, 59 | "include": [ 60 | "**/*.ts" 61 | ], 62 | "exclude": [ 63 | "node_modules", 64 | "test/**/*.ts", 65 | "examples" 66 | ], 67 | "files": [ 68 | "index.ts" 69 | ] 70 | } -------------------------------------------------------------------------------- /utxos.ts: -------------------------------------------------------------------------------- 1 | type Txo = string; 2 | type TokenId = Buffer; 3 | 4 | class GlobalUtxoSet extends Map { 5 | public static Instance() { 6 | return this._instance || (this._instance = new GlobalUtxoSet()); 7 | } 8 | private static _instance: GlobalUtxoSet; 9 | 10 | public set(key: string, value: Buffer): this { 11 | if (this.size % 100000 === 0) { 12 | console.log(`UTXO size: ${this.size}`); 13 | } 14 | return super.set(key, value); 15 | } 16 | private constructor() { super(); } 17 | } 18 | 19 | // accessor to a singleton utxo set 20 | export const slpUtxos = GlobalUtxoSet.Instance; 21 | --------------------------------------------------------------------------------