├── .envrc ├── tx-list-restore.json ├── scripts ├── db_backup_script.ts ├── api_tester.ts ├── update_config.ts ├── get_tx_receipt.ts ├── archiver_data_sync_check.ts ├── update_network_account.ts ├── verify_account_hash.ts ├── create_shut_down_cycle.ts ├── repair_missing_cycle.ts └── ngt_shutdown_repair.ts ├── src ├── NoOp.ts ├── types │ ├── tickets.ts │ ├── security.ts │ ├── enum │ │ └── AJVSchemaEnum.ts │ ├── errors.ts │ └── ajv │ │ ├── Helpers.ts │ │ ├── OriginalTxData.ts │ │ ├── Accounts.ts │ │ └── Receipts.ts ├── utils │ ├── ordering.ts │ ├── serialization.ts │ └── serialization │ │ └── SchemaHelpers.ts ├── dbstore │ ├── types.ts │ ├── sqlite3storage.ts │ ├── processedTxs.ts │ ├── index.ts │ └── cycles.ts ├── routes │ └── healthCheck.ts ├── test │ ├── api │ │ ├── cycles.ts │ │ ├── nodes.ts │ │ └── archivedCycles.ts │ ├── index.ts │ └── dataSync │ │ └── mulitpleArchivers.ts ├── txDigester │ ├── index.ts │ ├── api.ts │ ├── txDigests.ts │ └── txDigestFunctions.ts ├── saveConsoleOutput.ts ├── cache │ └── cycleRecordsCache.ts ├── schemas │ └── ticketSchema.ts ├── DebugMode.ts ├── Crypto.ts ├── txDigester.ts ├── ServiceQueue.ts ├── txDigestAPIserver.ts ├── worker-process │ └── index.ts ├── sync-v2 │ └── verify.ts ├── LostArchivers.ts ├── services │ └── ticketVerification.ts ├── Logger.ts ├── profiler │ ├── StringifyReduce.ts │ └── nestedCounters.ts ├── P2P.ts ├── shardeum │ ├── calculateAccountHash.ts │ ├── verifyGlobalTxReceipt.ts │ └── verifyAppReceiptData.ts ├── Data │ ├── GossipData.ts │ └── CycleParser.ts └── archivedCycle │ └── Gossip.ts ├── .secrets-EXAMPLE ├── .dockerignore ├── docs ├── main-data-flow.png └── planning.md ├── prettier.config.js ├── tslint.json ├── .gitignore ├── test ├── tsconfig.json └── unit │ └── src │ └── routes │ └── tickets.test.ts ├── tsconfig.json ├── tsconfig.test.json ├── CODEOWNERS ├── debugsecrets.patch ├── jest.config.js ├── Dockerfile ├── README.md ├── debug_mode.patch ├── .eslintrc.json ├── archiver-log.json ├── LICENSE ├── 
flake.lock ├── flake.nix ├── .github └── workflows │ └── ci.yml ├── CODE_OF_CONDUCT.md ├── static └── tickets.json ├── archiver-config.json └── package.json /.envrc: -------------------------------------------------------------------------------- 1 | use flake 2 | -------------------------------------------------------------------------------- /tx-list-restore.json: -------------------------------------------------------------------------------- 1 | [] -------------------------------------------------------------------------------- /scripts/db_backup_script.ts: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/NoOp.ts: -------------------------------------------------------------------------------- 1 | // delete me once we fix another file 2 | -------------------------------------------------------------------------------- /src/types/tickets.ts: -------------------------------------------------------------------------------- 1 | export type TicketData = { 2 | address: string; 3 | } -------------------------------------------------------------------------------- /.secrets-EXAMPLE: -------------------------------------------------------------------------------- 1 | ARCHIVER_SECRET_KEY= 2 | ARCHIVER_PUBLIC_KEY= 3 | ARCHIVER_HASH_KEY= -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | build 3 | archiver-logs 4 | logs 5 | db 6 | statistics.tsv 7 | .env 8 | -------------------------------------------------------------------------------- /src/utils/ordering.ts: -------------------------------------------------------------------------------- 1 | export enum Ordering { 2 | Less = -1, 3 | Equal = 0, 4 | Greater = 1, 5 | } 6 | -------------------------------------------------------------------------------- 
/docs/main-data-flow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/carnage-dima/archive-server/HEAD/docs/main-data-flow.png -------------------------------------------------------------------------------- /src/types/security.ts: -------------------------------------------------------------------------------- 1 | export enum DevSecurityLevel { 2 | NONE = 0, 3 | LOW = 1, 4 | MEDIUM = 2, 5 | HIGH = 3 6 | } -------------------------------------------------------------------------------- /prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | singleQuote: true, 3 | trailingComma: 'es5', 4 | semi: false, 5 | printWidth: 110, 6 | } 7 | -------------------------------------------------------------------------------- /tslint.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "gts/tslint.json", 3 | "linterOptions": { 4 | "exclude": [ 5 | "**/*.json" 6 | ] 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | .vscode/ 3 | *.sqlite 4 | build/ 5 | logs/ 6 | db/ 7 | archiver-db 8 | archiver-db.idx.db 9 | archiver-logs 10 | data-logs 11 | .direnv/ 12 | .secrets -------------------------------------------------------------------------------- /src/types/enum/AJVSchemaEnum.ts: -------------------------------------------------------------------------------- 1 | export enum AJVSchemaEnum { 2 | Receipt = 'Receipt', 3 | AccountsCopy = 'AccountsCopy', 4 | ArchiverReceipt = 'ArchiverReceipt', 5 | OriginalTxData = 'OriginalTxData' 6 | } 7 | -------------------------------------------------------------------------------- /test/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | 
"extends": "../tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": "../", 5 | "types": ["node", "jest"], 6 | "noEmit": true 7 | }, 8 | "include": [ 9 | "../src/**/*", 10 | "./**/*" 11 | ] 12 | } -------------------------------------------------------------------------------- /src/dbstore/types.ts: -------------------------------------------------------------------------------- 1 | import { P2P, StateManager } from '@shardeum-foundation/lib-types' 2 | export interface Cycle { 3 | counter: P2P.CycleCreatorTypes.CycleData['counter'] 4 | cycleRecord: P2P.CycleCreatorTypes.CycleData 5 | cycleMarker: StateManager.StateMetaDataTypes.CycleMarker 6 | } 7 | 8 | export type DbCycle = Cycle & { 9 | cycleRecord: string 10 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./node_modules/gts/tsconfig-google.json", 3 | "compilerOptions": { 4 | "target": "ES2021", 5 | "rootDir": "src", 6 | "outDir": "build", 7 | "resolveJsonModule": true, 8 | "skipLibCheck": true, 9 | "strict": false, 10 | "moduleResolution": "node" 11 | }, 12 | "exclude": ["build", "scripts", "test"] 13 | } 14 | -------------------------------------------------------------------------------- /tsconfig.test.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "compilerOptions": { 4 | "rootDir": ".", 5 | "outDir": "./test-build", 6 | "types": ["jest", "node"], 7 | "baseUrl": "src", 8 | "noEmit": true 9 | }, 10 | "include": [ 11 | "src/**/*", 12 | "test/**/*" 13 | ], 14 | "exclude": ["node_modules"] 15 | } -------------------------------------------------------------------------------- /src/routes/healthCheck.ts: -------------------------------------------------------------------------------- 1 | import { FastifyPluginCallback } from 'fastify' 2 | 3 | export const healthCheckRouter: 
FastifyPluginCallback = function (fastify, opts, done) { 4 | fastify.get('/is-alive', (req, res) => { 5 | return res.status(200).send('OK') 6 | }) 7 | 8 | fastify.get('/is-healthy', (req, res) => { 9 | // TODO: Add actual health check logic 10 | return res.status(200).send('OK') 11 | }) 12 | 13 | done() 14 | } -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # CODEOWNERS file 2 | # To add additional teams to any approval, include them on the same line separated by spaces 3 | # It is best practice to assign a team as a code owner and not an invidual. 4 | # Please submit requests for new teams to Systems and Automation 5 | 6 | # Global approval (all files) 7 | # * @shardeum/team-name 8 | 9 | # Directory-level approval 10 | /.github/ @shardeum/systems-and-automation 11 | 12 | # Specific file rules 13 | # README.md @shardeum/team-name 14 | -------------------------------------------------------------------------------- /debugsecrets.patch: -------------------------------------------------------------------------------- 1 | diff --git a/.secrets b/.secrets 2 | new file mode 100644 3 | index 0000000..1cafed7 4 | --- /dev/null 5 | +++ b/.secrets 6 | @@ -0,0 +1,3 @@ 7 | +ARCHIVER_SECRET_KEY=3be00019f23847529bd63e41124864983175063bb524bd54ea3c155f2fa12969758b1c119412298802cd28dbfa394cdfeecc4074492d60844cc192d632d84de3 8 | +ARCHIVER_PUBLIC_KEY=758b1c119412298802cd28dbfa394cdfeecc4074492d60844cc192d632d84de3 9 | +ARCHIVER_HASH_KEY=69fa4195670576c0160d660c3be36556ff8d504725be8a59b5a96509e0c994bc 10 | \ No newline at end of file 11 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | roots: [ 5 | "/test/unit" 6 | ], 7 | testMatch: [ 8 | 
"**/__tests__/**/*.+(ts|tsx|js)", 9 | "**/?(*.)+(spec|test).+(ts|tsx|js)" 10 | ], 11 | transform: { 12 | "^.+\\.(ts|tsx)$": ["ts-jest", { 13 | tsconfig: "test/tsconfig.json" 14 | }] 15 | }, 16 | moduleDirectories: ["node_modules", "src"], 17 | globals: { 18 | 'ts-jest': { 19 | isolatedModules: true 20 | } 21 | }, 22 | timers: 'fake' 23 | } -------------------------------------------------------------------------------- /src/test/api/cycles.ts: -------------------------------------------------------------------------------- 1 | import { getJson } from '../../P2P' 2 | 3 | export async function queryCycles( 4 | ip: string, 5 | port: string, 6 | count: number, 7 | start: number, 8 | end: number 9 | ): Promise { 10 | let res: unknown = await getJson(`http://${ip}:${port}/cycleinfo/${count}`) 11 | console.log(res) 12 | 13 | res = await getJson(`http://${ip}:${port}/cycleinfo?start=${start}&end=${end}`) 14 | console.log(res) 15 | 16 | // const cycleInfo = res['cycleInfo'] 17 | 18 | // for (let i = 0; i < cycleInfo.length; i++) { 19 | // console.log(cycleInfo[i]) 20 | // } 21 | } 22 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Node.js LTS 10.x.x from Docker Hub 2 | FROM node:18.19.1 3 | 4 | # Create app directory 5 | WORKDIR /usr/src/app 6 | 7 | # Install app dependencies 8 | # A wildcard is used to ensure both package.json AND package-lock.json are copied 9 | # where available (npm@5+) 10 | COPY package*.json ./ 11 | 12 | # Bundle app source 13 | COPY . . 
14 | 15 | # Install node_modules 16 | RUN npm install 17 | 18 | # Expose ports for app to bind to 19 | # Note: ports can be exposed at runtime too with --expose or -p : 20 | # EXPOSE 4000 21 | 22 | # Define run command 23 | CMD [ "node", "build/server.js" ] 24 | -------------------------------------------------------------------------------- /src/test/api/nodes.ts: -------------------------------------------------------------------------------- 1 | import { getJson } from '../../P2P' 2 | 3 | export async function queryNodes(ip: string, port: string, start: number, end: number): Promise { 4 | let result: unknown = await getJson(`http://${ip}:${port}/nodelist`) 5 | console.log(result) 6 | 7 | result = await getJson(`http://${ip}:${port}/full-nodelist`) 8 | console.log(result) 9 | 10 | result = await getJson(`http://${ip}:${port}/nodeids`) 11 | console.log(result) 12 | 13 | result = await getJson(`http://${ip}:${port}/lost?start=${start}&end=${end}`) 14 | console.log(result) 15 | 16 | result = await getJson(`http://${ip}:${port}/nodeinfo`) 17 | console.log(result) 18 | } 19 | -------------------------------------------------------------------------------- /src/types/errors.ts: -------------------------------------------------------------------------------- 1 | export interface ApiError { 2 | statusCode: number; 3 | response: { 4 | error: string; 5 | code: string; 6 | details?: unknown; 7 | }; 8 | } 9 | 10 | export const ErrorCodes = { 11 | TICKETS_FILE_NOT_ACCESSIBLE: 'TICKETS_FILE_NOT_ACCESSIBLE', 12 | INVALID_TICKETS_FORMAT: 'INVALID_TICKETS_FORMAT', 13 | INVALID_TICKETS_DATA: 'INVALID_TICKETS_DATA', 14 | INVALID_TICKET_SIGNATURES: 'INVALID_TICKET_SIGNATURES', 15 | TICKET_NOT_FOUND: 'TICKET_NOT_FOUND', 16 | INTERNAL_SERVER_ERROR: 'INTERNAL_SERVER_ERROR', 17 | INVALID_TICKET_TYPE: 'INVALID_TICKET_TYPE' 18 | } as const; 19 | 20 | export type ErrorCode = typeof ErrorCodes[keyof typeof ErrorCodes]; 
-------------------------------------------------------------------------------- /src/utils/serialization.ts: -------------------------------------------------------------------------------- 1 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 2 | import { config } from '../Config' 3 | 4 | export function SerializeToJsonString(obj: object): string { 5 | try { 6 | if (config.useSerialization) return StringUtils.safeStringify(obj, { bufferEncoding: 'base64' }) 7 | else return StringUtils.safeStringify(obj) 8 | } catch (e) { 9 | console.log('Error serializing object', e) 10 | console.log(obj) 11 | throw e 12 | } 13 | } 14 | 15 | export function DeSerializeFromJsonString(jsonString: string): T { 16 | try { 17 | return StringUtils.safeJsonParse(jsonString) 18 | } catch (e) { 19 | console.log('Error deserializing object', e) 20 | console.log(jsonString) 21 | throw e 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # The Shardus Archiver 2 | 3 | This is a node that runs as part of the shardus network, with the function of remembering the cycle chain and state data, depending on the app. Archiver nodes store the complete state and history of the network. 4 | 5 | ## Releasing 6 | 7 | To release, just run `npm run release` 8 | 9 | ## Health Check 10 | 11 | GET `/is-alive` this endpoint returns 200 if the server is running. 12 | GET `/is-healthy` currently the same as `/is-alive` but will be expanded. 13 | 14 | ## Contributing 15 | 16 | Contributions are very welcome! Everyone interacting in our codebases, issue trackers, and any other form of communication, including chat rooms and mailing lists, is expected to follow our [code of conduct](./CODE_OF_CONDUCT.md) so we can all enjoy the effort we put into this project. 
17 | -------------------------------------------------------------------------------- /debug_mode.patch: -------------------------------------------------------------------------------- 1 | diff --git a/archiver-config.json b/archiver-config.json 2 | index 7fafd0a..da32776 100644 3 | --- a/archiver-config.json 4 | +++ b/archiver-config.json 5 | @@ -56,6 +56,6 @@ 6 | "publicKey": "aec5d2b663869d9c22ba99d8de76f3bff0f54fa5e39d2899ec1f3f4543422ec7" 7 | } 8 | ], 9 | - "ARCHIVER_MODE": "release", 10 | + "ARCHIVER_MODE": "debug", 11 | "DevPublicKey": "" 12 | } 13 | \ No newline at end of file 14 | diff --git a/src/Config.ts b/src/Config.ts 15 | index 49bb21a..69cda2a 100644 16 | --- a/src/Config.ts 17 | +++ b/src/Config.ts 18 | @@ -127,7 +127,7 @@ let config: Config = { 19 | save: true, 20 | interval: 1, 21 | }, 22 | - ARCHIVER_MODE: 'release', // 'debug'/'release' 23 | + ARCHIVER_MODE: 'debug', // 'debug'/'release' 24 | DevPublicKey: '', 25 | dataLogWrite: true, 26 | dataLogWriter: { 27 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "parser": "@typescript-eslint/parser", 4 | "env": { 5 | "node": true 6 | }, 7 | "plugins": ["@typescript-eslint", "security", "xss"], 8 | "extends": [ 9 | "eslint:recommended", 10 | "plugin:@typescript-eslint/eslint-recommended", 11 | "plugin:@typescript-eslint/recommended", 12 | "plugin:security/recommended", 13 | "plugin:no-unsanitized/DOM", 14 | "prettier" 15 | ], 16 | "ignorePatterns": [ 17 | 18 | ], 19 | "rules": { 20 | "no-empty": [ 21 | 1, 22 | { 23 | "allowEmptyCatch": true 24 | } 25 | ], 26 | "@typescript-eslint/camelcase": "off", 27 | "@typescript-eslint/member-delimiter-style": "off", 28 | "@typescript-eslint/no-non-null-assertion": "off", 29 | "@typescript-eslint/explicit-function-return-type": "error" 30 | } 31 | } 32 | 
-------------------------------------------------------------------------------- /src/txDigester/index.ts: -------------------------------------------------------------------------------- 1 | import { Config } from '../Config' 2 | import { Database } from 'sqlite3' 3 | import { createDB, runCreate, close } from '../dbstore/sqlite3storage' 4 | import { createDirectories } from '../Utils' 5 | 6 | export let digesterDatabase: Database 7 | 8 | export const initializeDB = async (config: Config): Promise => { 9 | createDirectories(config.ARCHIVER_DB) 10 | digesterDatabase = await createDB(`${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.txDigestDB}`, 'TxDigestDB') 11 | await runCreate( 12 | digesterDatabase, 13 | 'CREATE TABLE if not exists `txDigests` (`cycleStart` NUMBER NOT NULL UNIQUE, `cycleEnd` NUMBER NOT NULL UNIQUE, `txCount` NUMBER NOT NULL, `hash` TEXT NOT NULL, PRIMARY KEY (`cycleEnd`))' 14 | ) 15 | } 16 | 17 | export const closeDatabase = async (): Promise => { 18 | await close(digesterDatabase, 'TxDigesterDB') 19 | } 20 | -------------------------------------------------------------------------------- /src/saveConsoleOutput.ts: -------------------------------------------------------------------------------- 1 | import { Console } from 'console' 2 | import { PassThrough } from 'stream' 3 | import { join } from 'path' 4 | import { RollingFileStream } from 'streamroller' 5 | 6 | export function startSaving(baseDir: string): void { 7 | // Create a file to save combined stdout and stderr output 8 | const outFileName = `out.log` 9 | const stream = new RollingFileStream(join(baseDir, outFileName), 10000000, 10) 10 | 11 | // Create passthroughs that write to stdout, stderr, and the output file 12 | const outPass = new PassThrough() 13 | outPass.pipe(process.stdout) 14 | outPass.pipe(stream) 15 | 16 | const errPass = new PassThrough() 17 | errPass.pipe(process.stderr) 18 | errPass.pipe(stream) 19 | 20 | // Monkey patch the global console with a new one that uses our 
passthroughs 21 | console = new Console({ stdout: outPass, stderr: errPass }) // eslint-disable-line no-global-assign 22 | } 23 | -------------------------------------------------------------------------------- /src/test/index.ts: -------------------------------------------------------------------------------- 1 | // import { queryNodes } from './api/nodes' 2 | // import { queryArchivedCycles } from './api/archivedCycles' 3 | // import { queryCycles } from './api/cycles' 4 | import * as MulitpleArchivers from './dataSync/mulitpleArchivers' 5 | 6 | const ARCHIVER_HOST = '127.0.0.1' 7 | // const ARCHIVER_PORT = '4000' 8 | 9 | // const numberOfConsensors = 10 10 | const numberOfArchivers = 1 11 | 12 | // queryArchivedCycles(ARCHIVER_HOST, ARCHIVER_PORT, 5, 10, 20) 13 | 14 | // queryCycles(ARCHIVER_HOST, ARCHIVER_PORT, 10, 10, 20) 15 | 16 | // queryNodes(ARCHIVER_HOST, ARCHIVER_PORT, 5, 10) 17 | 18 | const runTest = async (): Promise => { 19 | if (numberOfArchivers > 1) { 20 | await MulitpleArchivers.checkDataSyncBetweenArchivers(ARCHIVER_HOST, numberOfArchivers) 21 | await MulitpleArchivers.checkCyclesDataBetweenArchivers(ARCHIVER_HOST, numberOfArchivers) 22 | await MulitpleArchivers.checkReceiptsDataBetweenArchivers(ARCHIVER_HOST, numberOfArchivers) 23 | } 24 | } 25 | 26 | runTest() 27 | -------------------------------------------------------------------------------- /src/utils/serialization/SchemaHelpers.ts: -------------------------------------------------------------------------------- 1 | import * as Ajv from 'ajv' 2 | 3 | const ajv = new Ajv() 4 | 5 | const schemaMap: Map = new Map() 6 | const verifyFunctions: Map = new Map() 7 | 8 | export function addSchema(name: string, schema: object): void { 9 | if (schemaMap.has(name)) { 10 | throw new Error(`error already registered ${name}`) 11 | } 12 | schemaMap.set(name, schema) 13 | } 14 | 15 | export function initializeSerialization(): void { 16 | // Register each schema exactly once in AJV 17 | for (const [name, 
schema] of schemaMap.entries()) { 18 | ajv.addSchema(schema, name) 19 | } 20 | } 21 | 22 | export function getVerifyFunction(name: string): Ajv.ValidateFunction { 23 | const existingFn = verifyFunctions.get(name) 24 | if (existingFn) { 25 | return existingFn 26 | } 27 | const schema = schemaMap.get(name) 28 | if (!schema) { 29 | throw new Error(`error missing schema ${name}`) 30 | } 31 | const verifyFn = ajv.compile(schema) 32 | verifyFunctions.set(name, verifyFn) 33 | return verifyFn 34 | } 35 | -------------------------------------------------------------------------------- /archiver-log.json: -------------------------------------------------------------------------------- 1 | { 2 | "saveConsoleOutput": true, 3 | "dir": "archiver-logs", 4 | "files": { "main": "", "fatal": "", "net": "" }, 5 | "options": { 6 | "appenders": { 7 | "out": { "type": "console", "maxLogSize": 10000000, "backups": 50 }, 8 | "main": { 9 | "type": "file", 10 | "maxLogSize": 10000000, 11 | "backups": 50 12 | }, 13 | "fatal": { 14 | "type": "file", 15 | "maxLogSize": 10000000, 16 | "backups": 50 17 | }, 18 | "errorFile": { 19 | "type": "file", 20 | "maxLogSize": 10000000, 21 | "backups": 50 22 | }, 23 | "errors": { 24 | "type": "logLevelFilter", 25 | "level": "ERROR", 26 | "appender": "errorFile" 27 | } 28 | }, 29 | "categories": { 30 | "default": { "appenders": ["out"], "level": "trace" }, 31 | "main": { "appenders": ["main", "errors"], "level": "trace" }, 32 | "fatal": { "appenders": ["fatal"], "level": "fatal" } 33 | } 34 | } 35 | } 36 | 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Shardeum 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without 
limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/types/ajv/Helpers.ts: -------------------------------------------------------------------------------- 1 | import { Utils } from '@shardeum-foundation/lib-types' 2 | import { ErrorObject } from 'ajv' 3 | import { getVerifyFunction } from '../../utils/serialization/SchemaHelpers' 4 | import { initReceipts } from './Receipts' 5 | import { initAccounts } from './Accounts' 6 | import { initOriginalTxData } from './OriginalTxData' 7 | 8 | export function initAjvSchemas(): void { 9 | initAccounts() 10 | initReceipts() 11 | initOriginalTxData() 12 | 13 | } 14 | 15 | export function verifyPayload(name: string, payload: T): string[] | null { 16 | const verifyFn = getVerifyFunction(name) 17 | const isValid = verifyFn(payload) 18 | if (!isValid) { 19 | return parseAjvErrors(verifyFn.errors) 20 | } else { 21 | return null 22 | } 23 | } 24 | 25 | function parseAjvErrors(errors: Array | null): string[] | null { 26 | if (!errors) return null 27 | 28 | return errors.map((error) => { 29 | let errorMsg = `${error.message}` 30 | if 
(error.params && Object.keys(error.params).length > 0) { 31 | errorMsg += `: ${Utils.safeStringify(error.params)}` 32 | } 33 | return errorMsg 34 | }) 35 | } 36 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "nixpkgs": { 4 | "locked": { 5 | "lastModified": 1678298120, 6 | "narHash": "sha256-iaV5xqgn29xy765Js3EoZePQyZIlLZA3pTYtTnKkejg=", 7 | "owner": "NixOS", 8 | "repo": "nixpkgs", 9 | "rev": "1e383aada51b416c6c27d4884d2e258df201bc11", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "NixOS", 14 | "ref": "nixpkgs-unstable", 15 | "repo": "nixpkgs", 16 | "type": "github" 17 | } 18 | }, 19 | "root": { 20 | "inputs": { 21 | "nixpkgs": "nixpkgs", 22 | "utils": "utils" 23 | } 24 | }, 25 | "utils": { 26 | "locked": { 27 | "lastModified": 1676283394, 28 | "narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=", 29 | "owner": "numtide", 30 | "repo": "flake-utils", 31 | "rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073", 32 | "type": "github" 33 | }, 34 | "original": { 35 | "owner": "numtide", 36 | "repo": "flake-utils", 37 | "type": "github" 38 | } 39 | } 40 | }, 41 | "root": "root", 42 | "version": 7 43 | } 44 | -------------------------------------------------------------------------------- /src/txDigester/api.ts: -------------------------------------------------------------------------------- 1 | import { FastifyInstance, FastifyRequest } from 'fastify' 2 | import { Server, IncomingMessage, ServerResponse } from 'http' 3 | import { getTxDigestsForACycleRange } from './txDigestFunctions' 4 | 5 | type GetTxDigestsRequest = FastifyRequest<{ 6 | Querystring: { 7 | cycleStart: number 8 | cycleEnd: number 9 | } 10 | }> 11 | 12 | /* To-Do: Add LRU cache for the tx-digests */ 13 | export function registerRoutes(server: FastifyInstance): void { 14 | server.get('/api/tx-digests', async (_request: 
GetTxDigestsRequest, reply) => { 15 | const cycleStart = Number(_request.query?.cycleStart) 16 | const cycleEnd = Number(_request.query?.cycleEnd) 17 | 18 | if ( 19 | isNaN(cycleStart) || 20 | isNaN(cycleEnd) || 21 | cycleEnd <= cycleStart || 22 | cycleStart < 0 || 23 | cycleEnd < 0 || 24 | cycleEnd - cycleStart > 10000 25 | ) { 26 | reply.status(400).send({ 27 | error: 'Invalid query parameters. They must be positive numbers with cycleEnd > cycleStart', 28 | }) 29 | return 30 | } 31 | 32 | console.log(`Fetching tx digests for cycles: ${cycleStart} to ${cycleEnd}`) 33 | const txDigests = await getTxDigestsForACycleRange(cycleStart, cycleEnd) 34 | console.log('Fetched Tx digests', txDigests) 35 | reply.send(txDigests) 36 | }) 37 | } 38 | -------------------------------------------------------------------------------- /src/types/ajv/OriginalTxData.ts: -------------------------------------------------------------------------------- 1 | 2 | import { addSchema } from '../../utils/serialization/SchemaHelpers'; 3 | import { AJVSchemaEnum } from '../enum/AJVSchemaEnum'; 4 | 5 | // Define the schema for OriginalTxData 6 | const schemaOriginalTxData = { 7 | type: 'object', 8 | properties: { 9 | txId: { type: 'string' }, // txId must be a string 10 | timestamp: { type: 'integer', minimum: 0 }, // timestamp must be an integer 11 | cycle: { type: 'integer', minimum: 0 }, // cycle must be an integer 12 | originalTxData: { type: 'object' }, // originalTxData must be an object 13 | // Uncomment if sign is required: 14 | // sign: { type: 'string' } // Sign (if used) must be a string 15 | }, 16 | required: ['txId', 'timestamp', 'cycle', 'originalTxData'], // Required fields 17 | additionalProperties: false, // Disallow other fields 18 | }; 19 | 20 | 21 | // Function to initialize schemas 22 | export function initOriginalTxData(): void { 23 | addSchemaDependencies(); 24 | addSchemas(); 25 | } 26 | 27 | // Function to add schema dependencies (if any external schemas are needed) 28 | 
function addSchemaDependencies(): void { 29 | // No external dependencies for now 30 | } 31 | 32 | // Function to register schemas 33 | function addSchemas(): void { 34 | addSchema(AJVSchemaEnum.OriginalTxData, schemaOriginalTxData); 35 | 36 | } 37 | -------------------------------------------------------------------------------- /docs/planning.md: -------------------------------------------------------------------------------- 1 | # Archive Server Planning 2 | 3 | ## What does it need to do? 4 | 5 | ### milestone-1 6 | 7 | - Seed a new Shardus app network 8 | 9 | - Initialize archiver node 10 | 11 | - Config param for admin keypair 12 | 13 | - Creates new keypair if not provided 14 | 15 | - Config param for another archiver node's info 16 | 17 | - If provided, trys to get nodelist from it 18 | 19 | - Otherwise, becomes first archiver node 20 | 21 | - HTTP server 22 | 23 | - GET `/nodeinfo` endpoint that returns this archiver node's info 24 | 25 | - GET `/exit` endpoint for debugging 26 | 27 | - Maintain a consensus node list accessible over HTTP 28 | 29 | - `/nodelist` endpoint that returns a list of consensus nodes in the network 30 | 31 | - If `consensus` param provided, requester is a consensus node 32 | 33 | ### milestone-2 34 | 35 | - Join an existing Shardus app network 36 | 37 | - Send archiver join request 38 | 39 | - HTTP client 40 | 41 | - Consensus nodes must handle it appropriately 42 | 43 | ### milestone-3 44 | 45 | - Get fresh cycle data 46 | 47 | - Send/handle internal `shardus-net` requests 48 | 49 | - `shardus-net` package 50 | 51 | * Update the consensus node list when nodes leave/join the network 52 | 53 | - Get fresh cycle data from consensus nodes 54 | 55 | ### milestone-4 56 | 57 | - Save cycle data to disk 58 | 59 | - Store/query cycle data 60 | 61 | - Relational DB 62 | -------------------------------------------------------------------------------- /scripts/api_tester.ts: 
-------------------------------------------------------------------------------- 1 | import * as crypto from '@shardeum-foundation/lib-crypto-utils' 2 | import fetch from 'node-fetch' 3 | import { join } from 'path' 4 | import { config, overrideDefaultConfig } from '../src/Config' 5 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 6 | 7 | const configFile = join(process.cwd(), 'archiver-config.json') 8 | overrideDefaultConfig(configFile) 9 | 10 | crypto.init(config.ARCHIVER_HASH_KEY) 11 | 12 | const devAccount = { 13 | publicKey: config.ARCHIVER_PUBLIC_KEY, 14 | secretKey: config.ARCHIVER_SECRET_KEY, 15 | } 16 | const ARCHIVER_URL = `http://127.0.0.1:4000` 17 | 18 | const data: any = { 19 | count: 1, 20 | sender: devAccount.publicKey, 21 | } 22 | crypto.signObj(data, devAccount.secretKey, devAccount.publicKey) 23 | // console.log(data) 24 | 25 | // Update endpoints name ... totalData / cycleinfo / receipt / account / transaction 26 | fetch(`${ARCHIVER_URL}/totalData`, { 27 | // fetch(`${ARCHIVER_URL}/cycleinfo`, { 28 | // fetch(`${ARCHIVER_URL}/receipt`, { 29 | // fetch(`${ARCHIVER_URL}/account`, { 30 | method: 'post', 31 | body: StringUtils.safeStringify(data), 32 | headers: { 'Content-Type': 'application/json' }, 33 | timeout: 2000, 34 | }) 35 | .then(async (res) => { 36 | if (res.ok) console.log(await res.json()) 37 | // if (res.ok) console.dir(await res.json(), { depth: null }) 38 | else console.log(res.status) 39 | }) 40 | .catch((err) => { 41 | console.log(err) 42 | }) 43 | -------------------------------------------------------------------------------- /scripts/update_config.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios' 2 | import { join } from 'path' 3 | import { Utils } from '@shardeum-foundation/lib-types' 4 | import * as crypto from '@shardeum-foundation/lib-crypto-utils' 5 | import { config, overrideDefaultConfig } from '../src/Config' 6 | 7 | const configFile = 
join(process.cwd(), 'archiver-config.json') 8 | overrideDefaultConfig(configFile) 9 | 10 | crypto.init(config.ARCHIVER_HASH_KEY) 11 | 12 | const DEV_KEYS = { 13 | pk: config.ARCHIVER_PUBLIC_KEY, 14 | sk: config.ARCHIVER_SECRET_KEY, 15 | } 16 | 17 | function sign(obj: T, sk: string, pk: string): T & any { 18 | const objCopy = JSON.parse(crypto.stringify(obj)) 19 | crypto.signObj(objCopy, sk, pk) 20 | return objCopy 21 | } 22 | 23 | function createSignature(data: any, pk: string, sk: string): any { 24 | return sign({ ...data }, sk, pk) 25 | } 26 | 27 | const UPDATE_CONFIG = { 28 | /* Add Config properties that need to be updated here */ 29 | VERBOSE: true, 30 | RATE_LIMIT: 200, 31 | } 32 | 33 | const INPUT = Utils.safeStringify(createSignature(UPDATE_CONFIG, DEV_KEYS.pk, DEV_KEYS.sk)) 34 | 35 | axios 36 | .patch('http://127.0.0.1:4000/set-config', INPUT, { 37 | headers: { 38 | 'Content-Type': 'application/json', 39 | }, 40 | }) 41 | .then((response) => { 42 | console.log(response.data) 43 | }) 44 | .catch((error) => { 45 | if (error.response) { 46 | console.error(error.response) 47 | } else { 48 | console.error(error.message) 49 | } 50 | }) 51 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "Shardus archive server"; 3 | 4 | inputs = { 5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 6 | utils.url = "github:numtide/flake-utils"; 7 | }; 8 | 9 | outputs = { 10 | self, 11 | nixpkgs, 12 | utils, 13 | }: let 14 | appName = "shardus-archive-server"; 15 | out = 16 | utils.lib.eachDefaultSystem 17 | (system: let 18 | pkgs = import nixpkgs { 19 | inherit system; 20 | }; 21 | buildNodeJs = pkgs.callPackage "${nixpkgs}/pkgs/development/web/nodejs/nodejs.nix" {python = pkgs.python3;}; 22 | custom-nodejs = buildNodeJs { 23 | enableNpm = true; 24 | version = "18.19.1"; 25 | sha256 = 
"0wp2xyz5yqcvb6949xaqpan73rfhdc3cdfsvx7vzvzc9in64yh78"; 26 | }; 27 | 28 | nativeBuildInputs = with pkgs; [ 29 | pkg-config 30 | custom-nodejs 31 | ]; 32 | buildInputs = with pkgs; []; 33 | in { 34 | # `nix develop` or direnv 35 | devShell = pkgs.mkShell { 36 | packages = 37 | nativeBuildInputs 38 | ++ buildInputs 39 | ++ (with pkgs; [ 40 | nodePackages.typescript-language-server 41 | nodePackages.vscode-langservers-extracted 42 | nodePackages.prettier 43 | ]); 44 | }; 45 | }); 46 | in 47 | out 48 | // { 49 | overlay = final: prev: { 50 | ${appName} = self.defaultPackage.${prev.system}; 51 | }; 52 | }; 53 | } 54 | -------------------------------------------------------------------------------- /src/types/ajv/Accounts.ts: -------------------------------------------------------------------------------- 1 | import { addSchema } from '../../utils/serialization/SchemaHelpers'; 2 | import { AJVSchemaEnum } from '../enum/AJVSchemaEnum'; 3 | // Define the schema for AccountsCopy 4 | const schemaAccountsCopy = { 5 | type: 'object', 6 | properties: { 7 | accountId: { type: 'string' }, 8 | data: { type: 'object', additionalProperties: true }, // Allows nested objects with dynamic keys 9 | timestamp: { type: 'integer', minimum:0 }, 10 | hash: { type: 'string' }, 11 | cycleNumber: { type: 'integer', nullable: true }, // Optional field 12 | isGlobal: { type: 'boolean' } 13 | }, 14 | required: ['accountId', 'data', 'timestamp', 'hash', 'isGlobal'] // cycleNumber is optional 15 | }; 16 | 17 | // Define the schema for DbAccountCopy 18 | const schemaDbAccountCopy = { 19 | type: 'object', 20 | properties: { 21 | ...schemaAccountsCopy.properties, 22 | data: { type: 'string' } // Overriding the `data` field to be a string in DbAccountCopy 23 | }, 24 | required: ['accountId', 'data', 'timestamp', 'hash', 'isGlobal'] // Required fields remain the same 25 | }; 26 | 27 | // Function to initialize schemas 28 | export function initAccounts(): void { 29 | addSchemaDependencies(); 30 | 
addSchemas(); 31 | } 32 | 33 | // Function to add schema dependencies 34 | function addSchemaDependencies(): void { 35 | // No external dependencies 36 | } 37 | 38 | // Function to register schemas 39 | function addSchemas(): void { 40 | addSchema( AJVSchemaEnum.AccountsCopy, schemaAccountsCopy); 41 | } 42 | -------------------------------------------------------------------------------- /src/cache/cycleRecordsCache.ts: -------------------------------------------------------------------------------- 1 | import { P2P } from '@shardeum-foundation/lib-types' 2 | import { config } from '../Config' 3 | import { queryLatestCycleRecords } from '../dbstore/cycles' 4 | import * as Crypto from '../Crypto' 5 | import { ArchiverCycleResponse } from '../Data/Cycles' 6 | 7 | let cachedCycleRecords: P2P.CycleCreatorTypes.CycleData[] = [] 8 | const signedCacheCycleRecords: Map = new Map() 9 | let lastCacheUpdateFromDBRunning = false 10 | 11 | async function updateCacheFromDB(): Promise { 12 | if (lastCacheUpdateFromDBRunning) { 13 | return 14 | } 15 | 16 | lastCacheUpdateFromDBRunning = true 17 | 18 | try { 19 | cachedCycleRecords = await queryLatestCycleRecords(config.REQUEST_LIMIT.MAX_CYCLES_PER_REQUEST) 20 | } catch (error) { 21 | console.log('Error updating latest cache: ', error) 22 | } finally { 23 | lastCacheUpdateFromDBRunning = false 24 | } 25 | } 26 | 27 | export async function addCyclesToCache(cycles: P2P.CycleCreatorTypes.CycleData[]): Promise { 28 | if (cachedCycleRecords.length === 0) { 29 | await updateCacheFromDB() 30 | } 31 | 32 | for (const cycle of cycles) { 33 | cachedCycleRecords.unshift(cycle) 34 | } 35 | cycles.sort((a, b) => a.counter - b.counter) 36 | 37 | if (cachedCycleRecords.length > config.REQUEST_LIMIT.MAX_CYCLES_PER_REQUEST) { 38 | cachedCycleRecords.splice(config.REQUEST_LIMIT.MAX_CYCLES_PER_REQUEST) 39 | } 40 | signedCacheCycleRecords.clear() 41 | } 42 | 43 | export async function getLatestCycleRecordsFromCache(count: number): Promise { 44 | if 
(cachedCycleRecords.length === 0) { 45 | await updateCacheFromDB() 46 | } 47 | if (signedCacheCycleRecords.has(count)) return signedCacheCycleRecords.get(count) 48 | 49 | const cycleInfo = cachedCycleRecords.slice(0, count) 50 | const signedCycleRecords = Crypto.sign({ cycleInfo }) 51 | signedCacheCycleRecords.set(count, signedCycleRecords) 52 | return signedCycleRecords 53 | } 54 | -------------------------------------------------------------------------------- /src/schemas/ticketSchema.ts: -------------------------------------------------------------------------------- 1 | import { TicketData } from '../types/tickets' 2 | 3 | type Sign = { 4 | owner: string; 5 | sig: string; 6 | } 7 | 8 | type Ticket = { 9 | data: TicketData[]; 10 | sign: Sign[]; 11 | type: string; 12 | } 13 | 14 | export const ticketSchema = { 15 | type: 'array', 16 | items: { 17 | type: 'object', 18 | required: ['data', 'sign', 'type'], 19 | properties: { 20 | data: { 21 | type: 'array', 22 | items: { 23 | type: 'object', 24 | required: ['address'], 25 | properties: { 26 | address: { 27 | type: 'string', 28 | pattern: '^0x[a-fA-F0-9]{40}$' // Ethereum address format 29 | } 30 | }, 31 | additionalProperties: false 32 | }, 33 | minItems: 1 34 | }, 35 | sign: { 36 | type: 'array', 37 | items: { 38 | type: 'object', 39 | required: ['owner', 'sig'], 40 | properties: { 41 | owner: { 42 | type: 'string', 43 | pattern: '^0x[a-fA-F0-9]{40}$' // Ethereum address format 44 | }, 45 | sig: { 46 | type: 'string', 47 | pattern: '^0x[a-fA-F0-9]{130}$' // Ethereum signature format (65 bytes) 48 | } 49 | }, 50 | additionalProperties: false 51 | }, 52 | minItems: 1 53 | }, 54 | type: { 55 | type: 'string', 56 | enum: ['silver'] // Only silver tickets for now 57 | } 58 | }, 59 | additionalProperties: false 60 | } 61 | } as const 62 | 63 | export type { Sign, Ticket } -------------------------------------------------------------------------------- /src/DebugMode.ts: 
-------------------------------------------------------------------------------- 1 | import { config } from './Config' 2 | import * as Crypto from './Crypto' 3 | 4 | const MAX_COUNTER_BUFFER_MILLISECONDS = 10000 5 | let lastCounter = 0 6 | 7 | export function isDebugMode(): boolean { 8 | return !!(config && config.ARCHIVER_MODE && config.ARCHIVER_MODE === 'debug') 9 | } 10 | 11 | function getDevPublicKey(): string { 12 | if (config && config.DevPublicKey) { 13 | return config.DevPublicKey 14 | } 15 | return '' 16 | } 17 | 18 | export const isDebugMiddleware = (_req, res): void => { 19 | const isDebug = isDebugMode() 20 | if (!isDebug) { 21 | try { 22 | //auth my by checking a signature 23 | if (_req.query.sig != null && _req.query.sig_counter != null) { 24 | const ownerPk = getDevPublicKey() 25 | const requestSig = _req.query.sig 26 | //check if counter is valid 27 | const sigObj = { 28 | route: _req.routerPath, 29 | count: _req.query.sig_counter, 30 | sign: { owner: ownerPk, sig: requestSig }, 31 | } 32 | const currentCounter = parseInt(sigObj.count) 33 | //reguire a larger counter than before. This prevents replay attacks 34 | const currentTime = new Date().getTime() 35 | if (currentCounter > lastCounter && currentCounter <= currentTime + MAX_COUNTER_BUFFER_MILLISECONDS) { 36 | const verified = Crypto.verify(sigObj) 37 | if (!verified) { 38 | throw new Error('FORBIDDEN. signature authentication is failed.') 39 | } 40 | } else { 41 | console.log( 42 | `isDebugMiddleware: currentCounter=${currentCounter}, lastCounter=${lastCounter}, currentTime=${currentTime}` 43 | ) 44 | throw new Error('FORBIDDEN. signature counter is failed.') 45 | } 46 | lastCounter = currentCounter //update counter so we can't use it again 47 | return 48 | } 49 | throw new Error('FORBIDDEN. Endpoint is only available in debug mode.') 50 | } catch (error) { 51 | // console.log(error) 52 | // throw new Error('FORBIDDEN. 
Endpoint is only available in debug mode.') 53 | res.code(401).send(error) 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /scripts/get_tx_receipt.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from '@shardeum-foundation/lib-crypto-utils' 2 | import { join } from 'path' 3 | import { config, overrideDefaultConfig } from '../src/Config' 4 | import { postJson, getJson } from '../src/P2P' 5 | import { isEqual } from 'lodash' 6 | 7 | const configFile = join(process.cwd(), 'archiver-config.json') 8 | overrideDefaultConfig(configFile) 9 | 10 | crypto.init(config.ARCHIVER_HASH_KEY) 11 | 12 | const devAccount = { 13 | publicKey: config.ARCHIVER_PUBLIC_KEY, 14 | secretKey: config.ARCHIVER_SECRET_KEY, 15 | } 16 | const ARCHIVER_URL = `http://127.0.0.1:4000` 17 | 18 | const txId = '' 19 | const timestamp = 0 20 | const full_receipt = false 21 | 22 | const runProgram = async (): Promise => { 23 | const res: any = await getJson(`${ARCHIVER_URL}/full-nodelist?activeOnly=true`) 24 | if (!res || !res.nodeList) throw new Error('No active nodes found') 25 | const nodeList = res.nodeList 26 | const promises: any[] = [] 27 | for (const node of nodeList) { 28 | const data: any = { 29 | txId, 30 | timestamp, 31 | full_receipt, 32 | } 33 | crypto.signObj(data, devAccount.secretKey, devAccount.publicKey) 34 | promises.push(postJson(`http://${node.ip}:${node.port}/get-tx-receipt`, data)) 35 | } 36 | Promise.allSettled(promises) 37 | .then((responses) => { 38 | const result = {} 39 | responses.forEach((response, index) => { 40 | if (response.status === 'fulfilled') { 41 | const { value } = response 42 | 43 | let found = false 44 | for (const key in result) { 45 | if (isEqual(result[key].data, value)) { 46 | result[key].count++ 47 | result[key].nodes.push(nodeList[index]) 48 | found = true 49 | break 50 | } 51 | } 52 | if (!found) { 53 | result[Object.keys(result).length + 
1] = { 54 | count: 1, 55 | data: value, 56 | nodes: [nodeList[index]], 57 | } 58 | } 59 | } 60 | }) 61 | console.dir(result, { depth: null }) 62 | }) 63 | .catch((error) => { 64 | // Handle any errors that occurred 65 | console.error(error) 66 | }) 67 | } 68 | 69 | runProgram() 70 | -------------------------------------------------------------------------------- /src/Crypto.ts: -------------------------------------------------------------------------------- 1 | import * as core from '@shardeum-foundation/lib-crypto-utils' 2 | import { SignedObject, TaggedObject, publicKey, curvePublicKey, sharedKey } from '@shardeum-foundation/lib-crypto-utils' 3 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 4 | import * as State from './State' 5 | 6 | // Crypto initialization fns 7 | 8 | export function setCryptoHashKey(hashkey: string): void { 9 | core.init(hashkey) 10 | core.setCustomStringifier(StringUtils.safeStringify, 'shardus_safeStringify') 11 | } 12 | 13 | export const hash = core.hash 14 | export const hashObj = core.hashObj 15 | 16 | // Asymmetric Encyption Sign/Verify API 17 | export type SignedMessage = SignedObject 18 | 19 | export function sign(obj: T): T & SignedObject { 20 | const objCopy = StringUtils.safeJsonParse(core.stringify(obj)) 21 | core.signObj(objCopy, State.getSecretKey(), State.getNodeInfo().publicKey) 22 | return objCopy 23 | } 24 | 25 | export function verify(obj: SignedObject): boolean { 26 | return core.verifyObj(obj) 27 | } 28 | 29 | // HMAC Tag/Authenticate API 30 | 31 | export interface TaggedMessage extends TaggedObject { 32 | publicKey: publicKey 33 | } 34 | 35 | const curvePublicKeys: Map = new Map() 36 | const sharedKeys: Map = new Map() 37 | 38 | export function getOrCreateCurvePk(pk: publicKey): curvePublicKey { 39 | let curvePk = curvePublicKeys.get(pk) 40 | if (!curvePk) { 41 | curvePk = core.convertPkToCurve(pk) 42 | curvePublicKeys.set(pk, curvePk) 43 | } 44 | return curvePk 45 | } 46 | 47 | export function 
getOrCreateSharedKey(pk: publicKey): sharedKey { 48 | let sharedK = sharedKeys.get(pk) 49 | if (!sharedK) { 50 | const ourCurveSk = State.getCurveSk() 51 | const theirCurvePk = getOrCreateCurvePk(pk) 52 | sharedK = core.generateSharedKey(ourCurveSk, theirCurvePk) as unknown as string 53 | } 54 | return sharedK 55 | } 56 | 57 | export function tag(obj: T, recipientPk: publicKey): T & TaggedMessage { 58 | const sharedKey = getOrCreateSharedKey(recipientPk) 59 | const objCopy = StringUtils.safeJsonParse(core.stringify(obj)) 60 | objCopy.publicKey = State.getNodeInfo().publicKey 61 | core.tagObj(objCopy, sharedKey) 62 | return objCopy 63 | } 64 | 65 | export function authenticate(msg: TaggedMessage): boolean { 66 | const sharedKey = getOrCreateSharedKey(msg.publicKey) 67 | return core.authenticateObj(msg, sharedKey) 68 | } 69 | 70 | export { core } 71 | -------------------------------------------------------------------------------- /scripts/archiver_data_sync_check.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from '@shardeum-foundation/lib-crypto-utils' 2 | import { writeFileSync } from 'fs' 3 | import { join } from 'path' 4 | import { postJson } from '../src/P2P' 5 | import { config, overrideDefaultConfig } from '../src/Config' 6 | import { ArchiverNodeInfo } from '../src/State' 7 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 8 | 9 | const configFile = join(process.cwd(), 'archiver-config.json') 10 | overrideDefaultConfig(configFile) 11 | 12 | crypto.init(config.ARCHIVER_HASH_KEY) 13 | 14 | export type ArchiverNode = Omit 15 | 16 | const archivers: ArchiverNode[] = [ 17 | { 18 | ip: '127.0.0.1', 19 | port: 4000, 20 | }, 21 | { 22 | ip: '127.0.0.1', 23 | port: 4001, 24 | }, 25 | ] 26 | 27 | const devAccount = { 28 | publicKey: '', 29 | secretKey: '', 30 | } 31 | 32 | const startCycle = 0 33 | const endCycle = 0 34 | 35 | // const URL = 'originalTx' 36 | const URL = 'receipt' 37 | 
38 | const runProgram = async (): Promise => { 39 | for (const archiver of archivers) { 40 | const archiverInfo = archiver.ip + ':' + archiver.port 41 | const responses = {} 42 | for (let i = startCycle; i < endCycle; ) { 43 | const nextEnd = i + config.REQUEST_LIMIT.MAX_BETWEEN_CYCLES_PER_REQUEST 44 | console.log(i, nextEnd) 45 | 46 | const data: any = { 47 | startCycle: i, 48 | endCycle: nextEnd, 49 | type: 'tally', 50 | sender: devAccount.publicKey, 51 | } 52 | crypto.signObj(data, devAccount.secretKey, devAccount.publicKey) 53 | const response: any = await postJson(`http://${archiverInfo}/${URL}`, data, 100) 54 | if (!response || (!response.receipts && !response.originalTxs)) { 55 | console.error(`archiver ${archiverInfo} failed to respond for cycles ${i} to ${nextEnd}`) 56 | console.log(response) 57 | i = nextEnd + 1 58 | continue 59 | } 60 | // console.log(response) 61 | if (responses[archiverInfo]) { 62 | const result = response.receipts ? response.receipts : response.originalTxs 63 | responses[archiverInfo] = [...responses[archiverInfo], ...result] 64 | } else { 65 | responses[archiverInfo] = response.receipts ? 
response.receipts : response.originalTxs 66 | } 67 | i = nextEnd + 1 68 | } 69 | // console.dir(responses, { depth: null }) 70 | // save to file 71 | writeFileSync( 72 | `archiver_${archiverInfo}_${startCycle}_${endCycle}_${URL}.json`, 73 | StringUtils.safeStringify(responses) 74 | ) 75 | } 76 | } 77 | 78 | runProgram() 79 | -------------------------------------------------------------------------------- /scripts/update_network_account.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from 'fs' 2 | import { resolve } from 'path' 3 | import { join } from 'path' 4 | import { overrideDefaultConfig, config } from '../src/Config' 5 | import * as Crypto from '../src/Crypto' 6 | import * as dbstore from '../src/dbstore' 7 | import * as AccountDB from '../src/dbstore/accounts' 8 | import { startSaving } from '../src/saveConsoleOutput' 9 | import * as Logger from '../src/Logger' 10 | import { accountSpecificHash } from '../src/shardeum/calculateAccountHash' 11 | import { addSigListeners } from '../src/State' 12 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 13 | 14 | const activeVersion = '1.9.0' 15 | const latestVersion = '1.9.0' 16 | const minVersion = '1.9.0' 17 | // const archiver = { "activeVersion": "3.4.12", "latestVersion": "3.4.12", "minVersion": "3.4.12" } 18 | 19 | const runProgram = async (): Promise => { 20 | // Override default config params from config file, env vars, and cli args 21 | const file = join(process.cwd(), 'archiver-config.json') 22 | overrideDefaultConfig(file) 23 | // Set crypto hash keys from config 24 | const hashKey = config.ARCHIVER_HASH_KEY 25 | Crypto.setCryptoHashKey(hashKey) 26 | let logsConfig 27 | try { 28 | logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) 29 | } catch (err) { 30 | console.log('Failed to parse archiver log file:', err) 31 | } 32 | const logDir = 
`${config.ARCHIVER_LOGS}/${config.ARCHIVER_IP}_${config.ARCHIVER_PORT}` 33 | const baseDir = '.' 34 | logsConfig.dir = logDir 35 | Logger.initLogger(baseDir, logsConfig) 36 | if (logsConfig.saveConsoleOutput) { 37 | startSaving(join(baseDir, logsConfig.dir)) 38 | } 39 | await dbstore.initializeDB(config) 40 | addSigListeners() 41 | 42 | const networkAccountId = config.globalNetworkAccount 43 | const networkAccount = (await AccountDB.queryAccountByAccountId(networkAccountId)) as AccountDB.AccountsCopy 44 | console.log('Network account before', networkAccount) 45 | 46 | networkAccount.data.current = { 47 | ...networkAccount.data.current, 48 | activeVersion, 49 | latestVersion, 50 | minVersion, 51 | // archiver, 52 | } 53 | // If there is a validator config in the listOfChanges that need to be overridden at the network restart, we can add it here. eg: 54 | // networkAccount.data.listOfChanges.push({ change: { p2p: { minNodes: 150 } }, cycle: 55037 }) 55 | 56 | const calculatedAccountHash = accountSpecificHash(networkAccount.data) 57 | 58 | networkAccount.hash = calculatedAccountHash 59 | networkAccount.data.hash = calculatedAccountHash 60 | await AccountDB.insertAccount(networkAccount) 61 | console.log('Network account after', networkAccount) 62 | await dbstore.closeDatabase() 63 | } 64 | runProgram() 65 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Node CI Workflow 2 | # 3 | # The parameters are defaulted at the org level but can be overridden on the repository. 4 | # See the github-automation repo for more documentation 5 | # 6 | on: 7 | push: 8 | branches: 9 | - dev 10 | - main 11 | pull_request: 12 | branches: 13 | - dev 14 | - main 15 | issue_comment: 16 | inputs: 17 | workflowBranch: 18 | description: 'Branch of the reusable workflow. Defaults to main, select dev for testing only.' 
19 | required: true 20 | default: 'main' 21 | type: choice 22 | options: 23 | - dev 24 | - main 25 | workflow_dispatch: 26 | inputs: 27 | workflowBranch: 28 | description: 'Branch of the reusable workflow. Defaults to main, select dev for testing only.' 29 | required: true 30 | default: 'main' 31 | type: choice 32 | options: 33 | - dev 34 | - main 35 | jobs: 36 | echo-inputs: 37 | name: Repo Workflow Debugging 38 | runs-on: ubuntu-latest 39 | steps: 40 | - name: Check Repo Vars 41 | run: | 42 | echo "*** Start - Check inputs in repo workflow ***" 43 | echo "Node Version: ${{ vars.NODE_VERSION }}" 44 | echo "Lint Required: ${{ vars.IS_LINT_REQUIRED }}" 45 | echo "Format Check Required: ${{ vars.IS_FORMAT_CHECK_REQUIRED }}" 46 | echo "Apply Patches Required: ${{ vars.IS_APPLY_PATCHES_REQUIRED }}" 47 | echo "Unit Tests Required: ${{ vars.IS_UNIT_TESTS_REQUIRED }}" 48 | echo "*** End - Check inputs in repo workflow ***" 49 | ci-test-only: 50 | if: ${{ github.event.inputs.workflowBranch == 'dev' }} 51 | uses: shardeum/github-automation/.github/workflows/reusable-node-ci.yml@dev 52 | permissions: 53 | issues: write 54 | pull-requests: write 55 | contents: write 56 | with: 57 | node-version: ${{ vars.NODE_VERSION }} 58 | lint-required: ${{ vars.IS_LINT_REQUIRED == 'true' }} 59 | format-check-required: ${{ vars.IS_FORMAT_CHECK_REQUIRED == 'true' }} 60 | apply-patches-required: ${{ vars.IS_APPLY_PATCHES_REQUIRED == 'true' }} 61 | unit-tests-required: ${{ vars.IS_UNIT_TESTS_REQUIRED == 'true' }} 62 | secrets: inherit 63 | 64 | ci: 65 | if: ${{ github.event.inputs.workflowBranch == 'main' || !github.event.inputs.workflowBranch }} 66 | uses: shardeum/github-automation/.github/workflows/reusable-node-ci.yml@main 67 | permissions: 68 | issues: write 69 | pull-requests: write 70 | contents: write 71 | with: 72 | node-version: ${{ vars.NODE_VERSION }} 73 | lint-required: ${{ vars.IS_LINT_REQUIRED == 'true' }} 74 | format-check-required: ${{ vars.IS_FORMAT_CHECK_REQUIRED == 'true' 
}} 75 | apply-patches-required: ${{ vars.IS_APPLY_PATCHES_REQUIRED == 'true' }} 76 | unit-tests-required: ${{ vars.IS_UNIT_TESTS_REQUIRED == 'true' }} 77 | secrets: inherit 78 | -------------------------------------------------------------------------------- /src/txDigester/txDigests.ts: -------------------------------------------------------------------------------- 1 | import * as db from '../dbstore/sqlite3storage' 2 | import { digesterDatabase } from '.' 3 | import { config } from '../Config' 4 | 5 | /** 6 | * TransactionDigest is for storing transaction digests, which is the hash of prevHash 7 | * and list of transactions in a timestamp range 8 | */ 9 | export interface TransactionDigest { 10 | cycleStart: number 11 | cycleEnd: number 12 | txCount: number 13 | hash: string 14 | } 15 | 16 | export async function insertTransactionDigest(txDigest: TransactionDigest): Promise { 17 | try { 18 | const fields = Object.keys(txDigest).join(', ') 19 | const placeholders = Object.keys(txDigest).fill('?').join(', ') 20 | const values = db.extractValues(txDigest) 21 | const sql = 22 | 'INSERT INTO txDigests (' + 23 | fields + 24 | ') VALUES (' + 25 | placeholders + 26 | ') ON CONFLICT (cycleEnd) DO UPDATE SET ' + 27 | 'cycleStart = excluded.cycleStart, ' + 28 | 'txCount = excluded.txCount, ' + 29 | 'hash = excluded.hash' 30 | 31 | await db.run(digesterDatabase, sql, values) 32 | if (config.VERBOSE) { 33 | console.log( 34 | `Successfully inserted txDigest for cycle records from ${txDigest.cycleStart} to ${txDigest.cycleEnd}` 35 | ) 36 | } 37 | } catch (e) { 38 | console.error(e) 39 | throw new Error( 40 | `Unable to insert txDigest for cycle records from ${txDigest.cycleStart} to ${txDigest.cycleEnd}` 41 | ) 42 | } 43 | } 44 | 45 | export async function getLastProcessedTxDigest(): Promise { 46 | try { 47 | const sql = `SELECT * FROM txDigests ORDER BY cycleEnd DESC LIMIT 1` 48 | const lastProcessedDigest = (await db.get(digesterDatabase, sql)) as TransactionDigest 49 | if 
(config.VERBOSE) { 50 | console.log('LastProcessed Tx Digest', lastProcessedDigest) 51 | } 52 | return lastProcessedDigest 53 | } catch (e) { 54 | console.error(e) 55 | return null 56 | } 57 | } 58 | 59 | export async function queryByEndCycle(endCycle: number): Promise { 60 | try { 61 | const sql = `SELECT * FROM txDigests WHERE cycleEnd=? LIMIT 1` 62 | const txDigest = (await db.get(digesterDatabase, sql, [endCycle])) as TransactionDigest 63 | if (config.VERBOSE) { 64 | console.log('Tx Digest by endCycle', txDigest) 65 | } 66 | return txDigest 67 | } catch (e) { 68 | console.error(e) 69 | return null 70 | } 71 | } 72 | 73 | export async function queryByCycleRange(startCycle: number, endCycle: number): Promise { 74 | try { 75 | const sql = `SELECT * FROM txDigests WHERE cycleStart >= ? AND cycleEnd <= ? ORDER BY cycleEnd` 76 | const txDigests = (await db.all(digesterDatabase, sql, [startCycle, endCycle])) as TransactionDigest[] 77 | if (config.VERBOSE) { 78 | console.log('Tx Digest by cycle range', txDigests) 79 | } 80 | return txDigests || [] 81 | } catch (e) { 82 | console.error(e) 83 | return [] 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /src/txDigester.ts: -------------------------------------------------------------------------------- 1 | import { join } from 'path' 2 | import * as cron from 'node-cron' 3 | import * as dbstore from './dbstore' 4 | import * as txDigesterDB from './txDigester/index' 5 | import * as txDigestFunctions from './txDigester/txDigestFunctions' 6 | import { overrideDefaultConfig, config } from './Config' 7 | import * as CycleDB from './dbstore/cycles' 8 | import * as Crypto from './Crypto' 9 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 10 | import { readFileSync } from 'fs' 11 | import { resolve } from 'path' 12 | import * as Logger from './Logger' 13 | import { startSaving } from './saveConsoleOutput' 14 | import axios from 'axios' 15 | 16 | const 
configFile = join(process.cwd(), 'archiver-config.json') 17 | 18 | const start = async (): Promise => { 19 | overrideDefaultConfig(configFile) 20 | 21 | const hashKey = config.ARCHIVER_HASH_KEY 22 | Crypto.setCryptoHashKey(hashKey) 23 | let logsConfig 24 | try { 25 | logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) 26 | } catch (err) { 27 | console.log('Failed to parse archiver log file:', err) 28 | } 29 | const logDir = `${config.ARCHIVER_LOGS}/txDigester` 30 | const baseDir = '.' 31 | logsConfig.dir = logDir 32 | Logger.initLogger(baseDir, logsConfig) 33 | if (logsConfig.saveConsoleOutput) { 34 | startSaving(join(baseDir, logsConfig.dir)) 35 | } 36 | 37 | await dbstore.initializeDB(config) 38 | 39 | await txDigesterDB.initializeDB(config) 40 | 41 | const ARCHIVER_STATUS_CHECK_URL = `http://${config.ARCHIVER_IP}:${config.ARCHIVER_PORT}/status` 42 | 43 | cron.schedule(config.txDigest.txCronSchedule, async () => { 44 | console.log('Running cron task....') 45 | console.log('Checking archiver status....') 46 | const archiverStatusResp = await axios.get(ARCHIVER_STATUS_CHECK_URL) 47 | const isArchiverActive: boolean = archiverStatusResp.data.status.isActive 48 | console.log('isArchiverActive: ', isArchiverActive) 49 | 50 | if (isArchiverActive) { 51 | const lastProcessedTxDigest = await txDigestFunctions.getLastProcessedTxDigest() 52 | console.log('lastProcessedTxDigest by txDigester: ', lastProcessedTxDigest) 53 | const lastCheckedCycle = lastProcessedTxDigest ? lastProcessedTxDigest.cycleEnd : -1 54 | console.log('lastCheckedCycle by txDigester: ', lastCheckedCycle) 55 | 56 | const latestCycleRecords = await CycleDB.queryLatestCycleRecords(1) 57 | const latestCycleCounter = latestCycleRecords.length > 0 ? 
latestCycleRecords[0].counter : -1 58 | console.log('latestCycleCounter reported by Archiver: ', latestCycleCounter) 59 | 60 | const latestSyncedCycleCounter = latestCycleCounter - config.txDigest.syncDelay 61 | if (latestSyncedCycleCounter - lastCheckedCycle >= config.txDigest.cycleDiff) { 62 | await txDigestFunctions.processAndInsertTxDigests(lastCheckedCycle + 1, latestSyncedCycleCounter) 63 | } 64 | } else { 65 | console.log('Archiver is not active. Skipping txDigest processing....') 66 | } 67 | }) 68 | } 69 | 70 | start() 71 | -------------------------------------------------------------------------------- /src/ServiceQueue.ts: -------------------------------------------------------------------------------- 1 | import { P2P } from '@shardeum-foundation/lib-types'; 2 | import * as Logger from './Logger' 3 | import { stringifyReduce } from "./profiler/StringifyReduce"; 4 | import * as crypto from './Crypto' 5 | import { config } from './Config' 6 | import { readFileSync } from 'fs' 7 | import * as path from 'path' 8 | 9 | const txListPath = path.join(__dirname, '..', 'tx-list-restore.json'); 10 | const rawData = readFileSync(txListPath, 'utf8'); 11 | const ngtJson = JSON.parse(rawData) 12 | 13 | let txList: P2P.ServiceQueueTypes.NetworkTxEntry[] = config.restoreNGTsFromSnapshot 14 | ? 
(ngtJson as P2P.ServiceQueueTypes.NetworkTxEntry[]) 15 | : [] 16 | 17 | export function addTxs(addTxs: P2P.ServiceQueueTypes.AddNetworkTx[]): boolean { 18 | try { 19 | for (const addTx of addTxs) { 20 | Logger.mainLogger.info(`Adding network tx of type ${addTx.type} and payload ${stringifyReduce(addTx.txData)}`) 21 | const { sign, ...txDataWithoutSign } = addTx.txData 22 | sortedInsert(txList, { 23 | hash: addTx.hash, 24 | tx: { 25 | hash: addTx.hash, 26 | txData: txDataWithoutSign, 27 | type: addTx.type, 28 | cycle: addTx.cycle, 29 | priority: addTx.priority, 30 | ...(addTx.subQueueKey && { subQueueKey: addTx.subQueueKey }), 31 | }, 32 | }) 33 | } 34 | return true 35 | } catch (e) { 36 | Logger.mainLogger.error(`ServiceQueue:addTxs: Error adding txs: ${e}`) 37 | return false 38 | } 39 | } 40 | 41 | export function removeTxs(removeTxs: P2P.ServiceQueueTypes.RemoveNetworkTx[]): boolean { 42 | try { 43 | for (const removeTx of removeTxs) { 44 | const index = txList.findIndex((entry) => entry.hash === removeTx.txHash) 45 | if (index === -1) { 46 | Logger.mainLogger.error(`TxHash ${removeTx.txHash} does not exist in txList`) 47 | } else { 48 | txList.splice(index, 1) 49 | } 50 | } 51 | return true 52 | } catch (e) { 53 | Logger.mainLogger.error(`ServiceQueue:removeTxs: Error removing txs: ${e}`) 54 | return false 55 | } 56 | } 57 | 58 | export function setTxList(_txList: P2P.ServiceQueueTypes.NetworkTxEntry[]): void { 59 | txList = _txList 60 | } 61 | 62 | export function getTxList(): P2P.ServiceQueueTypes.NetworkTxEntry[] { 63 | return txList 64 | } 65 | 66 | export function getNetworkTxsListHash(): string { 67 | return crypto.hashObj(txList) 68 | } 69 | 70 | function sortedInsert( 71 | list: P2P.ServiceQueueTypes.NetworkTxEntry[], 72 | entry: P2P.ServiceQueueTypes.NetworkTxEntry 73 | ): void { 74 | const index = list.findIndex( 75 | (item) => 76 | item.tx.cycle > entry.tx.cycle || 77 | (item.tx.cycle === entry.tx.cycle && item.tx.priority < entry.tx.priority) || // 
Compare by priority if cycle is the same 78 | (item.tx.cycle === entry.tx.cycle && item.tx.priority === entry.tx.priority && item.hash > entry.hash) // Compare by hash if both cycle and priority are the same 79 | ) 80 | if (index === -1) { 81 | list.push(entry) 82 | } else { 83 | list.splice(index, 0, entry) 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /scripts/verify_account_hash.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from 'fs' 2 | import { resolve } from 'path' 3 | import { join } from 'path' 4 | import { overrideDefaultConfig, config } from '../src/Config' 5 | import * as Crypto from '../src/Crypto' 6 | import * as dbstore from '../src/dbstore' 7 | import * as AccountDB from '../src/dbstore/accounts' 8 | import { startSaving } from '../src/saveConsoleOutput' 9 | import * as Logger from '../src/Logger' 10 | import { AccountType, accountSpecificHash } from '../src/shardeum/calculateAccountHash' 11 | import { addSigListeners } from '../src/State' 12 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 13 | 14 | const updateHash = false 15 | const runProgram = async (): Promise => { 16 | // Override default config params from config file, env vars, and cli args 17 | const file = join(process.cwd(), 'archiver-config.json') 18 | overrideDefaultConfig(file) 19 | // Set crypto hash keys from config 20 | const hashKey = config.ARCHIVER_HASH_KEY 21 | Crypto.setCryptoHashKey(hashKey) 22 | let logsConfig 23 | try { 24 | logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) 25 | } catch (err) { 26 | console.log('Failed to parse archiver log file:', err) 27 | } 28 | const logDir = `${config.ARCHIVER_LOGS}/${config.ARCHIVER_IP}_${config.ARCHIVER_PORT}` 29 | const baseDir = '.' 
30 | logsConfig.dir = logDir 31 | Logger.initLogger(baseDir, logsConfig) 32 | if (logsConfig.saveConsoleOutput) { 33 | startSaving(join(baseDir, logsConfig.dir)) 34 | } 35 | await dbstore.initializeDB(config) 36 | addSigListeners() 37 | 38 | const totalAccounts = await AccountDB.queryAccountCount() 39 | console.log(totalAccounts) 40 | const limit = 10000 41 | let validHashAccounts = 0 42 | for (let i = 0; i < totalAccounts; i += limit) { 43 | console.log('From', i, 'To', i + limit) 44 | const accounts = await AccountDB.queryAccounts(i, limit) 45 | for (const account of accounts) { 46 | const accountHash1 = account.hash 47 | const accountHash2 = account.data.hash 48 | if (accountHash1 !== accountHash2) { 49 | console.log(account.accountId, 'accountHash', accountHash1, 'accountHash2', accountHash2) 50 | } 51 | const calculatedAccountHash = accountSpecificHash(account.data) 52 | 53 | if (accountHash1 !== calculatedAccountHash) { 54 | console.log( 55 | account.accountId, 56 | 'accountHash1', 57 | accountHash1, 58 | 'calculatedAccountHash', 59 | calculatedAccountHash 60 | ) 61 | if (updateHash) { 62 | account.hash = calculatedAccountHash 63 | account.data.hash = calculatedAccountHash 64 | await AccountDB.insertAccount(account) 65 | } 66 | } else { 67 | // console.log(accountHash1, accountHash2, calculatedAccountHash) 68 | validHashAccounts++ 69 | } 70 | } 71 | // if (i > 20000) break 72 | } 73 | console.log('totalAccounts', totalAccounts, 'validHashAccounts', validHashAccounts) 74 | await dbstore.closeDatabase() 75 | } 76 | runProgram() 77 | -------------------------------------------------------------------------------- /src/txDigestAPIserver.ts: -------------------------------------------------------------------------------- 1 | import { join } from 'path' 2 | import * as dbstore from './dbstore' 3 | import * as txDigesterDB from './txDigester/index' 4 | import { overrideDefaultConfig, config } from './Config' 5 | import * as Crypto from './Crypto' 6 | import { Utils 
as StringUtils } from '@shardeum-foundation/lib-types' 7 | import { readFileSync } from 'fs' 8 | import { resolve } from 'path' 9 | import * as Logger from './Logger' 10 | import { startSaving } from './saveConsoleOutput' 11 | import fastify, { FastifyInstance } from 'fastify' 12 | import fastifyCors from '@fastify/cors' 13 | import fastifyRateLimit from '@fastify/rate-limit' 14 | import { Server, IncomingMessage, ServerResponse } from 'http' 15 | import { registerRoutes } from './txDigester/api' 16 | 17 | const configFile = join(process.cwd(), 'archiver-config.json') 18 | 19 | const start = async (): Promise => { 20 | overrideDefaultConfig(configFile) 21 | 22 | const hashKey = config.ARCHIVER_HASH_KEY 23 | Crypto.setCryptoHashKey(hashKey) 24 | let logsConfig 25 | try { 26 | logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) 27 | } catch (err) { 28 | console.log('Failed to parse archiver log file:', err) 29 | } 30 | const logDir = `${config.ARCHIVER_LOGS}/txDigesterAPI` 31 | const baseDir = '.' 32 | logsConfig.dir = logDir 33 | Logger.initLogger(baseDir, logsConfig) 34 | if (logsConfig.saveConsoleOutput) { 35 | startSaving(join(baseDir, logsConfig.dir)) 36 | } 37 | 38 | await dbstore.initializeDB(config) 39 | 40 | await txDigesterDB.initializeDB(config) 41 | 42 | console.log('Starting txDigest API server....') 43 | 44 | const server: FastifyInstance = fastify({ 45 | logger: false, 46 | }) 47 | 48 | await server.register(fastifyCors) 49 | await server.register(fastifyRateLimit, { 50 | global: true, 51 | max: config.RATE_LIMIT, 52 | timeWindow: 10, 53 | allowList: ['127.0.0.1', '0.0.0.0'], // Excludes local IPs from rate limits 54 | }) 55 | 56 | server.addContentTypeParser('application/json', { parseAs: 'string' }, (req, body, done) => { 57 | try { 58 | const jsonString = typeof body === 'string' ? 
body : body.toString('utf8') 59 | done(null, StringUtils.safeJsonParse(jsonString)) 60 | } catch (err) { 61 | err.statusCode = 400 62 | done(err, undefined) 63 | } 64 | }) 65 | 66 | server.setReplySerializer((payload) => { 67 | return StringUtils.safeStringify(payload) 68 | }) 69 | 70 | // Register API routes 71 | registerRoutes(server as FastifyInstance) 72 | 73 | // Start server and bind to port on all interfaces 74 | server.listen( 75 | { 76 | port: config.txDigest.apiServerPort, 77 | host: '0.0.0.0', 78 | }, 79 | (err) => { 80 | Logger.mainLogger.debug('TXDigestAPI Listening', config.txDigest.apiServerPort) 81 | if (err) { 82 | server.log.error(err) 83 | process.exit(1) 84 | } 85 | Logger.mainLogger.debug('txDigestAPI server has started.') 86 | } 87 | ) 88 | } 89 | 90 | start() 91 | -------------------------------------------------------------------------------- /src/worker-process/index.ts: -------------------------------------------------------------------------------- 1 | import { verifyArchiverReceipt, ReceiptVerificationResult } from '../Data/Collector' 2 | import { ChildMessageInterface } from '../primary-process' 3 | import { config } from '../Config' 4 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 5 | import { ArchiverReceipt } from '../dbstore/receipts' 6 | 7 | export const initWorkerProcess = async (): Promise => { 8 | console.log(`Worker ${process.pid} started`) 9 | let lastActivity = Date.now() 10 | 11 | // Worker processes 12 | process.on('message', async ({ type, data }: ChildMessageInterface) => { 13 | switch (type) { 14 | case 'receipt-verification': { 15 | if (!data.stringifiedReceipt) { 16 | console.error(`Worker ${process.pid} received invalid receipt for verification`, data) 17 | return 18 | } 19 | if (isNaN(data.requiredSignatures)) { 20 | console.error(`Worker ${process.pid} received invalid requiredSignatures for verification`, data) 21 | return 22 | } 23 | const receipt = 
StringUtils.safeJsonParse(data.stringifiedReceipt) as ArchiverReceipt 24 | // console.log(`Worker ${process.pid} verifying receipt`); 25 | let verificationResult: ReceiptVerificationResult = { 26 | success: false, 27 | failedReasons: [], 28 | nestedCounterMessages: [], 29 | } 30 | try { 31 | // console.log(`Worker process ${process.pid} is verifying receipt`, receipt.tx.txId, receipt.tx.timestamp) 32 | verificationResult = await verifyArchiverReceipt(receipt, data.requiredSignatures) 33 | } catch (error) { 34 | console.error(`Error in Worker ${process.pid} while verifying receipt`, error) 35 | verificationResult.failedReasons.push('Error in Worker while verifying receipt') 36 | verificationResult.nestedCounterMessages.push('Error in Worker while verifying receipt') 37 | } 38 | process.send({ 39 | type: 'receipt-verification', 40 | data: { 41 | txId: receipt.tx.txId, 42 | timestamp: receipt.tx.timestamp, 43 | verificationResult, 44 | }, 45 | }) 46 | break 47 | } 48 | default: 49 | console.log(`Worker ${process.pid} received unknown message type: ${type}`) 50 | console.log(data) 51 | break 52 | } 53 | lastActivity = Date.now() 54 | }) 55 | process.send({ type: 'child_ready' }) 56 | setInterval(() => { 57 | // console.log( 58 | // `lastActivityCheckTimeout: ${config.lastActivityCheckTimeout}, lastActivityCheckInterval: ${config.lastActivityCheckInterval}` 59 | // ) 60 | if (Date.now() - lastActivity > config.lastActivityCheckTimeout) { 61 | console.log(`Worker ${process.pid} is idle for more than 1 minute`) 62 | process.send({ type: 'child_close' }) 63 | } 64 | }, config.lastActivityCheckInterval) 65 | } 66 | 67 | process.on('uncaughtException', (error) => { 68 | console.error(`Uncaught Exception in Child Process: ${process.pid}`, error) 69 | }) 70 | 71 | process.on('unhandledRejection', (reason, promise) => { 72 | console.error('Unhandled Rejection in Child Process:', promise, 'reason:', reason); 73 | }); 
-------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. 6 | 7 | ## Our Standards 8 | 9 | Examples of behavior that contribute to creating a positive environment include: 10 | 11 | - Using welcoming and inclusive language 12 | - Being respectful of differing viewpoints and experiences 13 | - Gracefully accepting constructive criticism 14 | - Focusing on what is best for the community 15 | - Showing empathy towards other community members 16 | 17 | Examples of unacceptable behavior by participants include: 18 | 19 | - The use of sexualized language or imagery and unwelcome sexual attention or advances 20 | - Trolling, insulting/derogatory comments, and personal or political attacks 21 | - Public or private harassment 22 | - Publishing others' private information, such as a physical or electronic address, without explicit permission 23 | - Other conduct which could reasonably be considered inappropriate in a professional setting 24 | 25 | ## Our Responsibilities 26 | 27 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 
28 | 29 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behavior that they deem inappropriate, threatening, offensive, or harmful. 30 | 31 | ## Scope 32 | 33 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be 34 | further defined and clarified by project maintainers. 35 | 36 | ## Enforcement 37 | 38 | Instances of abuse, harassment, or otherwise unacceptable behavior may be reported by contacting the project team. All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 39 | 40 | Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. 
41 | 42 | ## Attribution 43 | 44 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] 45 | 46 | [homepage]: http://contributor-covenant.org 47 | [version]: http://contributor-covenant.org/version/1/4/ 48 | -------------------------------------------------------------------------------- /src/txDigester/txDigestFunctions.ts: -------------------------------------------------------------------------------- 1 | import { config } from '../Config' 2 | import * as processedTxs from '../dbstore/processedTxs' 3 | import * as txDigest from './txDigests' 4 | import * as Crypto from '../Crypto' 5 | 6 | let lastProcessedTxDigest: txDigest.TransactionDigest = null 7 | 8 | export interface txDigestObj { 9 | prevHash: string 10 | txIdsHash: string 11 | } 12 | 13 | export const getTxIds = async (startCycle: number, endCycle: number): Promise => { 14 | const sortedTxIds = await processedTxs.querySortedTxsBetweenCycleRange(startCycle, endCycle) 15 | return sortedTxIds 16 | } 17 | 18 | export const getHash = async (cycle: number): Promise => { 19 | if (cycle == -1) { 20 | return '0x0' 21 | } 22 | const txDigestHash = await txDigest.queryByEndCycle(cycle) 23 | if (!txDigestHash) { 24 | throw new Error(`Failed to fetch txDigestHash for cycle ${cycle}`) 25 | } 26 | 27 | return txDigestHash.hash 28 | } 29 | 30 | export const updateLastProcessedTxDigest = async (): Promise => { 31 | lastProcessedTxDigest = await txDigest.getLastProcessedTxDigest() 32 | } 33 | 34 | export const getLastProcessedTxDigest = async (): Promise => { 35 | if (!lastProcessedTxDigest) { 36 | await updateLastProcessedTxDigest() 37 | } 38 | return lastProcessedTxDigest 39 | } 40 | 41 | export const processAndInsertTxDigests = async ( 42 | lastCheckedCycle: number, 43 | latestCycleCounter: number 44 | ): Promise => { 45 | console.log('Processing and inserting txDigests from cycle: ', lastCheckedCycle, ' to ', 
latestCycleCounter) 46 | const batchSize = config.txDigest.cycleDiff 47 | let currentCycle = lastCheckedCycle 48 | let endCycle = currentCycle + batchSize - 1 49 | 50 | while (endCycle <= latestCycleCounter) { 51 | console.log(`Processing txDigests from cycle ${currentCycle} to ${endCycle}`) 52 | 53 | // Fetch txDigests in the current batch 54 | const txIds = await getTxIds(currentCycle, endCycle) 55 | if (txIds == null) { 56 | console.error(`Failed to fetch txIds for cycle ${currentCycle} to ${endCycle}`) 57 | return 58 | } 59 | 60 | if(config.VERBOSE) { 61 | console.log(`TxIds from ${currentCycle} to ${endCycle} of length ${txIds.length}: `, txIds) 62 | } 63 | 64 | const prevHash = await getHash(currentCycle - 1) 65 | console.log(`prevHash for cycle ${currentCycle}: `, prevHash) 66 | 67 | const txObj: txDigestObj = { 68 | prevHash: prevHash, 69 | txIdsHash: Crypto.hashObj(txIds), 70 | } 71 | 72 | const txRangeHash = Crypto.hashObj(txObj) 73 | 74 | const txDigestObj: txDigest.TransactionDigest = { 75 | cycleStart: currentCycle, 76 | cycleEnd: endCycle, 77 | txCount: txIds.length, 78 | hash: txRangeHash, 79 | } 80 | 81 | try { 82 | txDigest.insertTransactionDigest(txDigestObj) 83 | } catch (e) { 84 | console.error('Failed to insert txDigestObj: ', txDigestObj) 85 | console.error(e) 86 | return 87 | } 88 | 89 | currentCycle = endCycle + 1 90 | endCycle = currentCycle + batchSize - 1 91 | lastProcessedTxDigest = txDigestObj 92 | } 93 | 94 | console.log('Updated lastProcessedTxDigest: ', lastProcessedTxDigest) 95 | console.log('Finished processing txDigests.') 96 | } 97 | 98 | export const getTxDigestsForACycleRange = async ( 99 | cycleStart: number, 100 | cycleEnd: number 101 | ): Promise => { 102 | const txDigests: txDigest.TransactionDigest[] = await txDigest.queryByCycleRange(cycleStart, cycleEnd) 103 | return txDigests 104 | } 105 | -------------------------------------------------------------------------------- /src/sync-v2/verify.ts: 
-------------------------------------------------------------------------------- 1 | /** 2 | * `verify` submodule. Defines functions used to verify objects against 3 | * given hashes. 4 | * 5 | * This module is functionally identical to the one in shardus-global-server/p2p/SyncV2/verify.ts. 6 | */ 7 | 8 | import { hashObj, Signature } from '@shardeum-foundation/lib-crypto-utils' 9 | import { P2P, hexstring } from '@shardeum-foundation/lib-types' 10 | import { err, ok, Result } from 'neverthrow' 11 | import { computeCycleMarker } from '../Data/Cycles' 12 | 13 | type HashableObject = (object | string) & { sign?: Signature } 14 | 15 | /** 16 | * Verifies if the hash of a given object matches the expected hash. 17 | * 18 | * This function hashes a 'HashableObject' and compares it to an expected hash value. If the hashes match, it returns a successful result with value true. If the hashes don't match, it returns an error with a detailed message describing the mismatch. 19 | * 20 | * @param object - The object to be hashed and verified. 21 | * @param expectedHash - The expected hash string to compare with the hash of the object. 22 | * @param [objectName='some object'] - An optional name for the object, used in the error message in case of a hash mismatch. 23 | * 24 | * @returns Returns a Result object. On successful hash verification, returns 'ok' with value true. On mismatch, returns 'err' with an Error object detailing the mismatch. 25 | */ 26 | function verify( 27 | object: HashableObject, 28 | expectedHash: hexstring, 29 | objectName = 'some object' 30 | ): Result { 31 | const newHash = hashObj(object) 32 | return newHash === expectedHash 33 | ? ok(true) 34 | : err(new Error(`hash mismatch for ${objectName}: expected ${expectedHash}, got ${newHash}`)) 35 | } 36 | 37 | /** Verifies that the hash of the validator list matches the expected hash. 
*/ 38 | export function verifyValidatorList( 39 | validatorList: P2P.NodeListTypes.Node[], 40 | expectedHash: hexstring 41 | ): Result { 42 | return verify(validatorList, expectedHash, 'validator list') 43 | } 44 | 45 | /** Verifies that the hash of the standby list matches the expected hash. */ 46 | export function verifyStandbyList( 47 | standbyList: P2P.JoinTypes.JoinRequest[], 48 | expectedHash: hexstring 49 | ): Result { 50 | return verify(standbyList, expectedHash, 'standby list') 51 | } 52 | 53 | /** Verifies that the hash of the archiver list matches the expected hash. */ 54 | export function verifyArchiverList( 55 | archiverList: P2P.ArchiversTypes.JoinedArchiver[], 56 | expectedHash: hexstring 57 | ): Result { 58 | return verify(archiverList, expectedHash, 'archiver list') 59 | } 60 | 61 | /** Verifies that the hash of the tx list matches the expected hash. */ 62 | export function verifyTxList( 63 | txList: P2P.ServiceQueueTypes.NetworkTxEntry[], 64 | expectedHash: string 65 | ): Result { 66 | const actualHash = hashObj(txList) 67 | 68 | // verify that the hash of the CycleRecord matches the expected hash 69 | if (actualHash !== expectedHash) 70 | return err(new Error(`hash mismatch for txList: expected ${expectedHash}, got ${actualHash}`)) 71 | 72 | return ok(true) 73 | } 74 | 75 | /** Verifies that the hash of the cycle record matches the expected hash. 
*/ 76 | export function verifyCycleRecord( 77 | cycleRecord: P2P.CycleCreatorTypes.CycleRecord, 78 | expectedHash: hexstring 79 | ): Result { 80 | const actualHash = computeCycleMarker(cycleRecord) 81 | 82 | // verify that the hash of the CycleRecord matches the expected hash 83 | if (actualHash !== expectedHash) 84 | return err(new Error(`hash mismatch for cycle: expected ${expectedHash}, got ${actualHash}`)) 85 | 86 | return ok(true) 87 | } 88 | -------------------------------------------------------------------------------- /scripts/create_shut_down_cycle.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from 'fs' 2 | import * as path from 'path' 3 | import { join } from 'path' 4 | import { overrideDefaultConfig, config } from '../src/Config' 5 | import * as Crypto from '../src/Crypto' 6 | import * as dbstore from '../src/dbstore' 7 | import * as CycleDB from '../src/dbstore/cycles' 8 | import { startSaving } from '../src/saveConsoleOutput' 9 | import * as Logger from '../src/Logger' 10 | import { P2P } from '@shardeum-foundation/lib-types' 11 | import { addSigListeners } from '../src/State' 12 | import { computeCycleMarker } from '../src/Data/Cycles' 13 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 14 | 15 | const archiversAtShutdown = [ 16 | { 17 | ip: '127.0.0.1', 18 | port: 4000, 19 | publicKey: '758b1c119412298802cd28dbfa394cdfeecc4074492d60844cc192d632d84de3', 20 | }, 21 | { 22 | ip: '127.0.0.1', 23 | port: 4001, 24 | publicKey: 'e8a5c26b9e2c3c31eb7c7d73eaed9484374c16d983ce95f3ab18a62521964a94', 25 | }, 26 | { 27 | ip: '127.0.0.1', 28 | port: 4002, 29 | publicKey: '9426b64e675cad739d69526bf7e27f3f304a8a03dca508a9180f01e9269ce447', 30 | }, 31 | ] 32 | 33 | const runProgram = async (): Promise => { 34 | // Override default config params from config file, env vars, and cli args 35 | const file = join(process.cwd(), 'archiver-config.json') 36 | overrideDefaultConfig(file) 
37 | // Set crypto hash keys from config 38 | const hashKey = config.ARCHIVER_HASH_KEY 39 | if (!hashKey) { 40 | throw new Error('ARCHIVER_HASH_KEY is required') 41 | } 42 | Crypto.setCryptoHashKey(hashKey) 43 | let logsConfig 44 | try { 45 | logsConfig = StringUtils.safeJsonParse(readFileSync(path.resolve(__dirname, '../archiver-log.json'), 'utf8')) 46 | } catch (err) { 47 | console.log('Failed to parse archiver log file:', err) 48 | } 49 | const logDir = `${config.ARCHIVER_LOGS}/${config.ARCHIVER_IP}_${config.ARCHIVER_PORT}` 50 | const baseDir = '.' 51 | logsConfig.dir = logDir 52 | Logger.initLogger(baseDir, logsConfig) 53 | if (logsConfig.saveConsoleOutput) { 54 | startSaving(join(baseDir, logsConfig.dir)) 55 | } 56 | await dbstore.initializeDB(config) 57 | addSigListeners() 58 | 59 | const txListPath = path.join(__dirname, '..', 'tx-list-restore.json') 60 | const rawData = readFileSync(txListPath, 'utf8') 61 | const ngtJson = JSON.parse(rawData) 62 | 63 | const txListHash = Crypto.hashObj(ngtJson) 64 | 65 | let latestCycle = await CycleDB.queryLatestCycleRecords(1) 66 | let latestCycleRecord = latestCycle[0] 67 | console.log('latestCycleRecord before', latestCycleRecord) 68 | const newCycleRecord = { 69 | ...latestCycleRecord, 70 | counter: latestCycleRecord.counter + 1, 71 | mode: 'shutdown' as P2P.ModesTypes.Record['mode'], 72 | removed: ['all'], 73 | archiversAtShutdown: archiversAtShutdown.map((archiver) => { 74 | return { ...archiver, curvePk: Crypto.getOrCreateCurvePk(archiver.publicKey) } 75 | }), 76 | previous: latestCycleRecord.marker, 77 | lostArchivers: [], 78 | refutedArchivers: [], 79 | removedArchivers: [], 80 | standbyAdd: [], 81 | standbyRemove: [], 82 | txlisthash: txListHash, 83 | txadd: [], 84 | txremove: [], 85 | } 86 | delete newCycleRecord.marker 87 | const marker = computeCycleMarker(newCycleRecord) 88 | newCycleRecord.marker = marker 89 | // console.log('newCycleRecord', newCycleRecord) 90 | await CycleDB.insertCycle({ 91 | counter: 
newCycleRecord.counter, 92 | cycleMarker: newCycleRecord.marker, 93 | cycleRecord: newCycleRecord, 94 | }) 95 | latestCycle = await CycleDB.queryLatestCycleRecords(1) 96 | latestCycleRecord = latestCycle[0] 97 | console.log('latestCycleRecord after', latestCycleRecord) 98 | await dbstore.closeDatabase() 99 | } 100 | runProgram() 101 | -------------------------------------------------------------------------------- /src/dbstore/sqlite3storage.ts: -------------------------------------------------------------------------------- 1 | import { SerializeToJsonString } from '../utils/serialization' 2 | import { Database } from 'sqlite3' 3 | 4 | export const createDB = async (dbPath: string, dbName: string): Promise => { 5 | console.log('dbName', dbName, 'dbPath', dbPath) 6 | const db = new Database(dbPath, (err) => { 7 | if (err) { 8 | console.log('Error opening database:', err) 9 | throw err 10 | } 11 | }) 12 | await run(db, 'PRAGMA journal_mode=WAL') 13 | db.on('profile', (sql, time) => { 14 | if (time > 500 && time < 1000) { 15 | console.log('SLOW QUERY', process.pid, sql, time) 16 | } else if (time > 1000) { 17 | console.log('VERY SLOW QUERY', process.pid, sql, time) 18 | } 19 | }) 20 | console.log(`Database ${dbName} Initialized!`) 21 | return db 22 | } 23 | 24 | /** 25 | * Close Database Connections Gracefully 26 | */ 27 | export async function close(db: Database, dbName: string): Promise { 28 | try { 29 | console.log(`Terminating ${dbName} Database/Indexer Connections...`) 30 | await new Promise((resolve, reject) => { 31 | db.close((err) => { 32 | if (err) { 33 | console.error(`Error closing ${dbName} 0Database Connection.`) 34 | reject(err) 35 | } else { 36 | console.log(`${dbName} Database connection closed.`) 37 | resolve() 38 | } 39 | }) 40 | }) 41 | } catch (err) { 42 | console.error(`Error thrown in ${dbName} db close() function: `) 43 | console.error(err) 44 | } 45 | } 46 | 47 | export async function runCreate(db: Database, createStatement: string): Promise { 
48 | await run(db, createStatement) 49 | } 50 | 51 | export async function run(db: Database, sql: string, params = [] || {}): Promise { 52 | return new Promise((resolve, reject) => { 53 | db.run(sql, params, function (err) { 54 | if (err) { 55 | console.log('Error running sql ' + sql) 56 | console.log(err) 57 | reject(err) 58 | } else { 59 | resolve({ id: this.lastID }) 60 | } 61 | }) 62 | }) 63 | } 64 | 65 | export async function get(db: Database, sql: string, params = []): Promise { 66 | return new Promise((resolve, reject) => { 67 | db.get(sql, params, (err, result) => { 68 | if (err) { 69 | console.log('Error running sql: ' + sql) 70 | console.log(err) 71 | reject(err) 72 | } else { 73 | resolve(result) 74 | } 75 | }) 76 | }) 77 | } 78 | 79 | export async function all(db: Database, sql: string, params = []): Promise { 80 | return new Promise((resolve, reject) => { 81 | db.all(sql, params, (err, rows) => { 82 | if (err) { 83 | console.log('Error running sql: ' + sql) 84 | console.log(err) 85 | reject(err) 86 | } else { 87 | resolve(rows) 88 | } 89 | }) 90 | }) 91 | } 92 | 93 | export function extractValues(object: object): unknown[] { 94 | try { 95 | const inputs = [] 96 | for (const column of Object.keys(object)) { 97 | let value = object[column] // eslint-disable-line security/detect-object-injection 98 | if (typeof value === 'object') value = SerializeToJsonString(value) 99 | inputs.push(value) 100 | } 101 | return inputs 102 | } catch (e) { 103 | console.log(e) 104 | return null 105 | } 106 | } 107 | 108 | export function extractValuesFromArray(arr: object[]): unknown[] { 109 | try { 110 | const inputs = [] 111 | for (const object of arr) { 112 | for (const column of Object.keys(object)) { 113 | let value = object[column] // eslint-disable-line security/detect-object-injection 114 | if (typeof value === 'object') value = SerializeToJsonString(value) 115 | inputs.push(value) 116 | } 117 | } 118 | return inputs 119 | } catch (e) { 120 | console.log(e) 121 | 
return null 122 | } 123 | } 124 | -------------------------------------------------------------------------------- /static/tickets.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "data": [ 4 | { 5 | "address": "0x002D3a2BfE09E3E29b6d38d58CaaD16EEe4C9BC5" 6 | }, 7 | { 8 | "address": "0x0a0844DA5e01E391d12999ca859Da8a897D5979A" 9 | }, 10 | { 11 | "address": "0x390878B18DeBe2A9f0d5c0252a109c84243D3beb" 12 | }, 13 | { 14 | "address": "0x32B6f2C027D4c9D99Ca07d047D17987390a5EB39" 15 | }, 16 | { 17 | "address": "0x80aF8E195B56aCC3b4ec8e2C99EC38957258635a" 18 | }, 19 | { 20 | "address": "0x7Efbb31431ac7C405E8eEba99531fF1254fCA3B6" 21 | }, 22 | { 23 | "address": "0xCc74bf387F6C102b5a7F828796C57A6D2D19Cb00" 24 | }, 25 | { 26 | "address": "0x4ed5C053BF2dA5F694b322EA93dce949F3276B85" 27 | }, 28 | { 29 | "address": "0xd31aBC7497aD8bC9fe8555C9eDe45DFd7FB3Bf6F" 30 | }, 31 | { 32 | "address": "0xe7e4cc292b424C6D50d16F1Bb5BAB2032c486980" 33 | }, 34 | { 35 | "address": "0xD815DA50966c19261B34Ffa3bE50A30A67D97456" 36 | }, 37 | { 38 | "address": "0xE856B2365641eba73Bc430AAC1E8F930dA513D9D" 39 | }, 40 | { 41 | "address": "0x8282F755e784414697421D4b59232E5d194e2262" 42 | }, 43 | { 44 | "address": "0x353Ad64Df4fAe5EffF717A1c41BE6dEBee543129" 45 | }, 46 | { 47 | "address": "0x9Ce1C3c114538c625aA2488b97fEb3723fdBB07B" 48 | }, 49 | { 50 | "address": "0x6A83e4e4eB0A2c8f562db6BB64b02a9A6237B314" 51 | }, 52 | { 53 | "address": "0x92E375E0c76CaE76D9DfBab17EE7B3B4EE407715" 54 | }, 55 | { 56 | "address": "0xBD79B430CA932e2D89bb77ACaE7367a07471c2eA" 57 | }, 58 | { 59 | "address": "0xEbe173a837Bc30BFEF6E13C9988a4771a4D83275" 60 | }, 61 | { 62 | "address": "0xfF2b584A947182c55BBc039BEAB78BC201D3AdDe" 63 | }, 64 | { 65 | "address": "0xCeA068d8DCB4B4020D30a9950C00cF8408611F67" 66 | }, 67 | { 68 | "address": "0x52F8d3DaA7b5FF25ca2bF7417E059aFe0bD5fB0E" 69 | }, 70 | { 71 | "address": "0x0341996A92193d8B7d80C4774fA2eff889e4b427" 72 | }, 73 | { 74 | 
"address": "0xF82BDA6Ef512e4219C6DCEea896E50e8180a5bff" 75 | }, 76 | { 77 | "address": "0xA04A1B214a2537139fE59488820D4dA06516933f" 78 | }, 79 | { 80 | "address": "0x550817e7B91244BBeFE2AD621ccD555A16B00405" 81 | }, 82 | { 83 | "address": "0x84C55a4bFfff1ADadb9C46e2B60979F519dAf874" 84 | }, 85 | { 86 | "address": "0x4563303BCE96D3f8d9C7fB94b36dfFC9d831871d" 87 | }, 88 | { 89 | "address": "0xdA058F9c7Ce86C1D21DD5DBDeBad5ab5c785520a" 90 | }, 91 | { 92 | "address": "0x891DF765C855E9848A18Ed18984B9f57cb3a4d47" 93 | }, 94 | { 95 | "address": "0x7Fb9b1C5E20bd250870F87659E46bED410221f17" 96 | }, 97 | { 98 | "address": "0x1e5e12568b7103E8B22cd680A6fa6256DD66ED76" 99 | }, 100 | { 101 | "address": "0xa58169308e7153B5Ce4ca5cA515cC4d0cBE7770B" 102 | } 103 | ], 104 | "sign": [ 105 | { 106 | "owner": "0x891DF765C855E9848A18Ed18984B9f57cb3a4d47", 107 | "sig": "0x9701d7ec08583a80981c6e4e2aee0bef44236bb08f35d42511561ee992ed7bae0c049fa781aa49e089da5ec4a1c61d5fc8393a5b39a34e35ac9ec47642e5de191c" 108 | }, 109 | { 110 | "owner": "0x1e5e12568b7103E8B22cd680A6fa6256DD66ED76", 111 | "sig": "0x36e4f0cd2180d134c2e8ccd6afdf73966b2a7c4fd58e62d75f3d9963704c59b664ad2b014151ce468c904eec4c92f0d4ecb917bf4ab79e51655dd21a80f676a31c" 112 | }, 113 | { 114 | "owner": "0xa58169308e7153B5Ce4ca5cA515cC4d0cBE7770B", 115 | "sig": "0xf3c928a262e250caff903352692c2baf636fe832e67330b55c9c0428db76d0854bebb2604a0fdc18c34b1eb6237fa8c0220a6e5d26e4abb36f3bdee7158bfe361b" 116 | } 117 | ], 118 | "type": "silver" 119 | } 120 | ] -------------------------------------------------------------------------------- /archiver-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "ARCHIVER_IP": "0.0.0.0", 3 | "ARCHIVER_PORT": 4000, 4 | "ARCHIVER_HASH_KEY": "", 5 | "ARCHIVER_PUBLIC_KEY": "", 6 | "ARCHIVER_SECRET_KEY": "", 7 | "archivers": [ 8 | { 9 | "ip": "127.0.0.1", 10 | "port": 4000, 11 | "publicKey": "758b1c119412298802cd28dbfa394cdfeecc4074492d60844cc192d632d84de3" 12 | }, 
13 | { 14 | "ip": "127.0.0.1", 15 | "port": 4001, 16 | "publicKey": "e8a5c26b9e2c3c31eb7c7d73eaed9484374c16d983ce95f3ab18a62521964a94" 17 | }, 18 | { 19 | "ip": "127.0.0.1", 20 | "port": 4002, 21 | "publicKey": "9426b64e675cad739d69526bf7e27f3f304a8a03dca508a9180f01e9269ce447" 22 | }, 23 | { 24 | "ip": "127.0.0.1", 25 | "port": 4003, 26 | "publicKey": "ac9708eeb0ec408b42f6e2fdce85f96dbbb9f90a6c8e984fc7ea225baae7882b" 27 | }, 28 | { 29 | "ip": "127.0.0.1", 30 | "port": 4004, 31 | "publicKey": "7a95c68fa1a852e25e4f33e1dc5b1b8b142c4b52209ec3535ac059b4b4db3b4c" 32 | }, 33 | { 34 | "ip": "127.0.0.1", 35 | "port": 4005, 36 | "publicKey": "fd24ef72d1e3ea49165df43e6f3b2737d5480ae4b7309cc11143af4ab35d28b2" 37 | }, 38 | { 39 | "ip": "127.0.0.1", 40 | "port": 4006, 41 | "publicKey": "70c97b6a4cfaa7e75148e847dadc55314b1ce6c48eebe0044d6e80224728c93c" 42 | }, 43 | { 44 | "ip": "127.0.0.1", 45 | "port": 4007, 46 | "publicKey": "0423e72b38c57dcb51e0825c3038ad15faae31e455ab160513923ba3d1047626" 47 | }, 48 | { 49 | "ip": "127.0.0.1", 50 | "port": 4008, 51 | "publicKey": "41f24126b5326bb72300338b1dac97ef4dd2fab3e20e16606a8393a8d14eff54" 52 | }, 53 | { 54 | "ip": "127.0.0.1", 55 | "port": 4009, 56 | "publicKey": "aec5d2b663869d9c22ba99d8de76f3bff0f54fa5e39d2899ec1f3f4543422ec7" 57 | } 58 | ], 59 | "ARCHIVER_MODE": "release", 60 | "DevPublicKey": "", 61 | "tickets": { 62 | "allowedTicketSigners": { 63 | "0x002D3a2BfE09E3E29b6d38d58CaaD16EEe4C9BC5": 5, 64 | "0x0a0844DA5e01E391d12999ca859Da8a897D5979A": 5, 65 | "0x390878B18DeBe2A9f0d5c0252a109c84243D3beb": 5, 66 | "0x32B6f2C027D4c9D99Ca07d047D17987390a5EB39": 5, 67 | "0x80aF8E195B56aCC3b4ec8e2C99EC38957258635a": 5, 68 | "0x7Efbb31431ac7C405E8eEba99531fF1254fCA3B6": 5, 69 | "0xCc74bf387F6C102b5a7F828796C57A6D2D19Cb00": 5, 70 | "0x4ed5C053BF2dA5F694b322EA93dce949F3276B85": 5, 71 | "0xd31aBC7497aD8bC9fe8555C9eDe45DFd7FB3Bf6F": 5, 72 | "0xe7e4cc292b424C6D50d16F1Bb5BAB2032c486980": 5, 73 | "0xD815DA50966c19261B34Ffa3bE50A30A67D97456": 5, 74 | 
"0xE856B2365641eba73Bc430AAC1E8F930dA513D9D": 5, 75 | "0x8282F755e784414697421D4b59232E5d194e2262": 5, 76 | "0x353Ad64Df4fAe5EffF717A1c41BE6dEBee543129": 5, 77 | "0x9Ce1C3c114538c625aA2488b97fEb3723fdBB07B": 5, 78 | "0x6A83e4e4eB0A2c8f562db6BB64b02a9A6237B314": 5, 79 | "0x92E375E0c76CaE76D9DfBab17EE7B3B4EE407715": 5, 80 | "0xBD79B430CA932e2D89bb77ACaE7367a07471c2eA": 5, 81 | "0xEbe173a837Bc30BFEF6E13C9988a4771a4D83275": 5, 82 | "0xfF2b584A947182c55BBc039BEAB78BC201D3AdDe": 5, 83 | "0xCeA068d8DCB4B4020D30a9950C00cF8408611F67": 5, 84 | "0x52F8d3DaA7b5FF25ca2bF7417E059aFe0bD5fB0E": 5, 85 | "0x0341996A92193d8B7d80C4774fA2eff889e4b427": 5, 86 | "0xF82BDA6Ef512e4219C6DCEea896E50e8180a5bff": 5, 87 | "0xA04A1B214a2537139fE59488820D4dA06516933f": 5, 88 | "0x550817e7B91244BBeFE2AD621ccD555A16B00405": 5, 89 | "0x84C55a4bFfff1ADadb9C46e2B60979F519dAf874": 5, 90 | "0x4563303BCE96D3f8d9C7fB94b36dfFC9d831871d": 5, 91 | "0xdA058F9c7Ce86C1D21DD5DBDeBad5ab5c785520a": 5, 92 | "0x891DF765C855E9848A18Ed18984B9f57cb3a4d47": 5, 93 | "0x7Fb9b1C5E20bd250870F87659E46bED410221f17": 5, 94 | "0x1e5e12568b7103E8B22cd680A6fa6256DD66ED76": 5, 95 | "0xa58169308e7153B5Ce4ca5cA515cC4d0cBE7770B": 5 96 | }, 97 | "minSigRequired": 1, 98 | "requiredSecurityLevel": 5 99 | } 100 | } -------------------------------------------------------------------------------- /scripts/repair_missing_cycle.ts: -------------------------------------------------------------------------------- 1 | import { readFileSync } from 'fs' 2 | import { resolve, join } from 'path' 3 | import { overrideDefaultConfig, config } from '../src/Config' 4 | import * as Crypto from '../src/Crypto' 5 | import * as db from '../src/dbstore/sqlite3storage' 6 | import * as dbstore from '../src/dbstore' 7 | import * as CycleDB from '../src/dbstore/cycles' 8 | import { startSaving } from '../src/saveConsoleOutput' 9 | import * as Logger from '../src/Logger' 10 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 11 | 12 | const 
patchCycleData = false 13 | 14 | const start = async (): Promise => { 15 | // Override default config params from config file, env vars, and cli args 16 | const file = join(process.cwd(), 'archiver-config.json') 17 | overrideDefaultConfig(file) 18 | // Set crypto hash keys from config 19 | const hashKey = config.ARCHIVER_HASH_KEY 20 | Crypto.setCryptoHashKey(hashKey) 21 | let logsConfig 22 | try { 23 | logsConfig = StringUtils.safeJsonParse(readFileSync(resolve(__dirname, '../archiver-log.json'), 'utf8')) 24 | } catch (err) { 25 | console.log('Failed to parse archiver log file:', err) 26 | } 27 | const logDir = `${config.ARCHIVER_LOGS}/${config.ARCHIVER_IP}_${config.ARCHIVER_PORT}` 28 | const baseDir = '.' 29 | logsConfig.dir = logDir 30 | Logger.initLogger(baseDir, logsConfig) 31 | if (logsConfig.saveConsoleOutput) { 32 | startSaving(join(baseDir, logsConfig.dir)) 33 | } 34 | await dbstore.initializeDB(config) 35 | 36 | const lastStoredCycleCount = await CycleDB.queryCyleCount() 37 | const lastStoredCycle = (await CycleDB.queryLatestCycleRecords(1))[0] 38 | console.log('lastStoredCycleCount', lastStoredCycleCount, 'lastStoredCycleCounter', lastStoredCycle.counter) 39 | 40 | if (lastStoredCycleCount > 0 && lastStoredCycle.counter !== lastStoredCycleCount - 1) { 41 | console.error('Stored cycle count does not match the last cycle counter') 42 | } 43 | await checkCycleData(0, lastStoredCycle.counter) 44 | console.log('Cycle data check complete.') 45 | } 46 | 47 | /** 48 | * Generate an array of numbers within a specified range. 
49 | */ 50 | function generateNumberArray(startNumber: number, endNumber: number): number[] { 51 | const numberOfItems = endNumber - startNumber + 1 52 | const items = Array.from({ length: numberOfItems }, (_, i) => startNumber + i) 53 | return items 54 | } 55 | 56 | async function checkCycleData(startCycleNumber = 0, latestCycleNumber: number): Promise { 57 | try { 58 | // Divide blocks into batches (e.g., batches of 1000 cycles each) 59 | const batchSize = 1000 60 | const cycleBatches: number[][] = [] 61 | let end = startCycleNumber + batchSize 62 | for (let start = startCycleNumber; start <= latestCycleNumber; ) { 63 | if (end > latestCycleNumber) end = latestCycleNumber 64 | cycleBatches.push(generateNumberArray(start, end)) 65 | start = end + 1 66 | end += batchSize 67 | } 68 | 69 | // Query cycle in batches in parallel using Promise.allSettled 70 | const promises = cycleBatches.map(async (cycleNumberBatch: number[]) => { 71 | const sql = 72 | 'SELECT counter FROM cycles WHERE counter IN (' + cycleNumberBatch + ') ORDER BY counter ASC' 73 | return db.all(sql) 74 | }) 75 | 76 | const results = await Promise.allSettled(promises) 77 | 78 | // Process results 79 | results.forEach((result, index) => { 80 | if (result.status === 'fulfilled') { 81 | const cycles = cycleBatches[index] 82 | const existingCycles = result.value.map((row: any) => (row ? 
row.counter : 0)) 83 | if (existingCycles.length !== cycles.length) console.log(existingCycles) 84 | const missingCycles = cycles.filter((cycle) => !existingCycles.includes(cycle)) 85 | if (missingCycles.length > 0) console.log('Missing cycles:', missingCycles) 86 | } else { 87 | console.error('Error checking cycles existence:', result.reason) 88 | } 89 | }) 90 | } catch (error) { 91 | console.error('Error checking cycle data:', error) 92 | } 93 | } 94 | 95 | start() 96 | -------------------------------------------------------------------------------- /src/LostArchivers.ts: -------------------------------------------------------------------------------- 1 | import * as CycleDB from './dbstore/cycles' 2 | import * as Cycles from './Data/Cycles' 3 | import * as Logger from './Logger' 4 | import * as NodeList from './NodeList' 5 | import { ArchiverRefutesLostMsg, Record } from '@shardeum-foundation/lib-types/build/src/p2p/LostArchiverTypes' 6 | import { config } from './Config' 7 | import { calcIncomingTimes } from './Data/Data' 8 | import { postJson } from './P2P' 9 | import { sign } from './Crypto' 10 | import { SignedObject } from '@shardeum-foundation/lib-types/build/src/p2p/P2PTypes' 11 | 12 | let shouldSendRefutes = false 13 | 14 | /** 15 | * Checks for the existence of our own public key in either the 16 | * 'refutedArchivers', 'lostArchivers', or 'removedArchivers' fields of the 17 | * supplied record. 18 | * If found in 'refutedArchivers', we'll stop sending refutes. 19 | * If found in 'lostArchivers', we'll schedule a refute in the next cycle's Q1. 20 | * If found in 'removedArchivers', we'll shut down. 
21 | */ 22 | export function handleLostArchivers(record: R): void { 23 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 24 | const debug = (message: any, ...args: any[]): void => Logger.mainLogger.debug(message, ...args) 25 | debug('>> handleLostArchivers()') 26 | debug(' config.ARCHIVER_PUBLIC_KEY: ' + config.ARCHIVER_PUBLIC_KEY) 27 | // debug(' record: ' + StringUtils.safeStringify(record, null, 2)) 28 | 29 | if (record && record.refutedArchivers && record.lostArchivers && record.removedArchivers) { 30 | if (record.refutedArchivers.some((publicKey) => publicKey === config.ARCHIVER_PUBLIC_KEY)) { 31 | // if self is in 'refutedArchivers' field, stop sending refutes 32 | debug('archiver was found in `refutedArchivers` and will stop sending refutes') 33 | shouldSendRefutes = false 34 | } else if (record.lostArchivers.some((publicKey) => publicKey === config.ARCHIVER_PUBLIC_KEY)) { 35 | // if self is in 'lostArchivers' field, schedule a refute in the next cycle's Q1 36 | debug("archiver was found in `lostArchivers` and will send a refute in the next cycle's Q1") 37 | shouldSendRefutes = true 38 | scheduleRefute() 39 | } else if (record.removedArchivers.some((publicKey) => publicKey === config.ARCHIVER_PUBLIC_KEY)) { 40 | // if self is in 'removedArchivers' field, shut down 41 | debug('archiver was found in `removedArchivers`, shutting down') 42 | die() 43 | } 44 | } 45 | debug('<< handleLostArchivers()') 46 | } 47 | 48 | /** 49 | * Schedules to send a refute during the next cycle's Q1. 50 | */ 51 | async function scheduleRefute(): Promise { 52 | if (!shouldSendRefutes) { 53 | console.log('skipping refute scheduling') 54 | return 55 | } 56 | 57 | console.log('scheduling refute') 58 | 59 | const latestCycleInfo = await CycleDB.queryLatestCycleRecords(1) 60 | const latestCycle = latestCycleInfo[0] 61 | const { quarterDuration, startQ1 } = calcIncomingTimes(latestCycle) 62 | 63 | // ms until q1. 
add 500ms to make sure we're in q1 64 | const delay = startQ1 + 4 * quarterDuration - Date.now() + 500 65 | console.log(delay) 66 | setTimeout(sendRefute, delay) 67 | } 68 | 69 | /** 70 | * Sends a refute to 5 random active nodes. 71 | */ 72 | async function sendRefute(): Promise { 73 | if (!shouldSendRefutes) { 74 | console.log('skipping refute sending') 75 | return 76 | } 77 | 78 | console.log('sending refute') 79 | 80 | const refuteMsg: SignedObject = sign({ 81 | archiver: config.ARCHIVER_PUBLIC_KEY, 82 | cycle: Cycles.getCurrentCycleMarker(), 83 | }) 84 | 85 | const nodes = NodeList.getRandomActiveNodes(5) 86 | 87 | for (const node of nodes) { 88 | try { 89 | await postJson(`http://${node.ip}:${node.port}/lost-archiver-refute`, refuteMsg) 90 | } catch (e) { 91 | Logger.mainLogger.warn(`Failed to send refute to ${node.ip}:${node.port}:`, e) 92 | scheduleRefute() 93 | } 94 | } 95 | } 96 | 97 | /** 98 | * Shuts down the archiver with exit code 2. 99 | */ 100 | function die(): void { 101 | Logger.mainLogger.debug( 102 | 'Archiver was found in `removedArchivers` and will exit now without sending a leave request' 103 | ) 104 | process.exit(2) 105 | } 106 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@shardeum-foundation/archiver", 3 | "version": "3.5.7-prerelease.3", 4 | "engines": { 5 | "node": "18.19.1" 6 | }, 7 | "description": "", 8 | "main": "./build/server.js", 9 | "types": "./build/server.d.ts", 10 | "files": [ 11 | "build/**/*", 12 | "archiver-log.json", 13 | "tx-list-restore.json", 14 | "static/tickets.json" 15 | ], 16 | "bin": { 17 | "archive-server": "./build/server.js" 18 | }, 19 | "scripts": { 20 | "start": "npm run prepare && node build/server.js", 21 | "txDigestCronServer": "npm run prepare && node build/txDigester.js", 22 | "txDigestApiServer": "npm run prepare && node build/txDigestAPIserver.js", 23 | 
"check": "gts check", 24 | "clean": "npm-run-all clean:*", 25 | "clean:typescript": "gts clean", 26 | "lint": "eslint \"./src/**/*.ts\"", 27 | "test": "tsc -p tsconfig.test.json && jest", 28 | "test:watch": "jest --watch", 29 | "format-check": "prettier --check './src/**/*.ts'", 30 | "format-fix": "prettier --write './src/**/*.ts'", 31 | "clean:artifacts": "shx rm -rf archiver-logs/ archiver-db/ data-logs/", 32 | "compile": "tsc -p .", 33 | "fix": "gts fix", 34 | "prepare": "npm run compile", 35 | "pretest": "npm run compile", 36 | "update-docker": "docker build -t registry.gitlab.com/shardus/archive/archive-server:dev3 . && docker push registry.gitlab.com/shardus/archive/archive-server:dev3", 37 | "update-docker-dev": "docker build -t registry.gitlab.com/shardus/archive/archive-server:dev . && docker push registry.gitlab.com/shardus/archive/archive-server:dev", 38 | "update-schemas": "typescript-json-schema tsconfig.json NewData -o src/Data/schemas/NewData.json & typescript-json-schema tsconfig.json DataResponse -o src/Data/schemas/DataResponse.json", 39 | "build": "tsc && npm run copy-static", 40 | "copy-static": "cp -r static dist/", 41 | "release:prerelease": "npm run prepare && npm version prerelease --preid=prerelease && git push --follow-tags && npm publish --tag prerelease", 42 | "release:patch": "npm run prepare && npm version patch && git push --follow-tags && npm publish", 43 | "release:minor": "npm run prepare && npm version minor && git push --follow-tags && npm publish", 44 | "release:major": "npm run prepare && npm version major && git push --follow-tags && npm publish" 45 | }, 46 | "repository": { 47 | "type": "git", 48 | "url": "https://github.com/shardeum/archive-server.git" 49 | }, 50 | "publishConfig": { 51 | "access": "public" 52 | }, 53 | "author": "", 54 | "license": "ISC", 55 | "bugs": { 56 | "url": "https://gitlab.com/shardus/archive/archive-server/issues" 57 | }, 58 | "homepage": "https://gitlab.com/shardus/archive/archive-server#readme", 
59 | "devDependencies": { 60 | "@types/node": "18.19.1", 61 | "@types/jest": "29.5.14", 62 | "@types/node-cron": "3.0.7", 63 | "@types/node-fetch": "2.6.11", 64 | "@types/socket.io": "2.1.13", 65 | "@types/socket.io-client": "1.4.36", 66 | "@typescript-eslint/eslint-plugin": "5.62.0", 67 | "@typescript-eslint/typescript-estree": "5.62.0", 68 | "eslint": "8.57.0", 69 | "eslint-config-prettier": "8.10.0", 70 | "eslint-config-standard": "14.1.1", 71 | "eslint-plugin-import": "2.30.0", 72 | "eslint-plugin-no-unsanitized": "4.1.0", 73 | "eslint-plugin-node": "10.0.0", 74 | "eslint-plugin-prettier": "3.4.1", 75 | "eslint-plugin-promise": "4.3.1", 76 | "eslint-plugin-security": "1.7.1", 77 | "eslint-plugin-standard": "4.1.0", 78 | "eslint-plugin-xss": "0.1.12", 79 | "gts": "3.1.1", 80 | "jest": "29.7.0", 81 | "np": "8.0.4", 82 | "npm-run-all": "4.1.5", 83 | "shx": "0.3.4", 84 | "ts-jest": "29.2.5", 85 | "typescript": "4.9.5", 86 | "typescript-json-schema": "0.51.0" 87 | }, 88 | "dependencies": { 89 | "@shardeum-foundation/lib-archiver-discovery": "1.1.1-prerelease.0", 90 | "@shardeum-foundation/lib-crypto-utils": "4.1.6-prerelease.0", 91 | "@shardeum-foundation/lib-types": "1.2.22-prerelease.1", 92 | "@fastify/cors": "8.5.0", 93 | "@fastify/rate-limit": "7.6.0", 94 | "deepmerge": "4.3.1", 95 | "ethers": "6.13.4", 96 | "fastify": "4.12.0", 97 | "log4js": "6.9.1", 98 | "log4js-extend": "0.2.1", 99 | "minimist": "1.2.8", 100 | "neverthrow": "6.2.2", 101 | "node-cron": "3.0.2", 102 | "node-fetch": "2.7.0", 103 | "rfdc": "1.4.1", 104 | "socket.io-client": "2.5.0", 105 | "sqlite3": "5.1.7", 106 | "streamroller": "3.1.5", 107 | "tydb": "0.1.5" 108 | }, 109 | "overrides": { 110 | "axios": "1.6.1" 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /src/services/ticketVerification.ts: -------------------------------------------------------------------------------- 1 | import { ethers } from 'ethers' 2 | import { Utils } from 
/** Describes one failed verification: the ticket type, a human-readable
 * message, and how many valid signatures were actually found. */
export interface VerificationError {
  type: string;
  message: string;
  validSignatures: number;
}

/** Configuration for ticket verification: the allow-listed signer pubkeys
 * (each mapped to its security level), the minimum signature count, and the
 * security level every counted signer must meet. */
export interface VerificationConfig {
  allowedTicketSigners: { [pubkey: string]: DevSecurityLevel };
  minSigRequired: number;
  requiredSecurityLevel: DevSecurityLevel;
}

// Compiled once at module load; reused for every verifyTickets() call
const ajv = new Ajv({ allErrors: true })
const validateTicketSchema = ajv.compile(ticketSchema)

// Throws on malformed config; called at the top of verifyTickets().
function validateVerificationConfig(config: VerificationConfig): void {
  if (!config.allowedTicketSigners || typeof config.allowedTicketSigners !== 'object') {
    throw new Error('Invalid allowedTicketSigners configuration');
  }
  if (typeof config.minSigRequired !== 'number' || config.minSigRequired < 1) {
    throw new Error('minSigRequired must be a positive number');
  }
  if (typeof config.requiredSecurityLevel !== 'number') {
    throw new Error('Invalid requiredSecurityLevel');
  }
}
/**
 * Verifies a multi-signed payload against an allow-list of signer pubkeys.
 * The payload is hashed (keccak256 over its safeStringify form) and each
 * signature is checked with ethers.verifyMessage against the hash string —
 * i.e. signers are assumed to have personal-signed the hex hash string
 * (NOTE(review): confirm this matches the signing tool's convention).
 * Duplicate owners are only counted once; iteration stops early once
 * minSigRequired valid signatures have been found.
 */
export function verifyMultiSigs(
  rawPayload: object,
  sigs: Sign[],
  allowedPubkeys: { [pubkey: string]: DevSecurityLevel },
  minSigRequired: number,
  requiredSecurityLevel: DevSecurityLevel
): { isValid: boolean; validCount: number } {
  if (!rawPayload || !sigs || !allowedPubkeys || !Array.isArray(sigs)) {
    return { isValid: false, validCount: 0 }
  }
  if (sigs.length < minSigRequired) return { isValid: false, validCount: 0 }

  // no reason to allow more signatures than allowedPubkeys exist
  // this also prevent loop exhaustion
  if (sigs.length > Object.keys(allowedPubkeys).length) return { isValid: false, validCount: 0 }

  let validSigs = 0
  const payload_hash = ethers.keccak256(ethers.toUtf8Bytes(Utils.safeStringify(rawPayload)))
  const seen = new Set()

  for (let i = 0; i < sigs.length; i++) {
    /* eslint-disable security/detect-object-injection */
    // The sig owner has not been seen before
    // The sig owner is listed on the server
    // The sig owner has enough security clearance
    // The signature is valid
    if (
      !seen.has(sigs[i].owner) &&
      allowedPubkeys[sigs[i].owner] &&
      allowedPubkeys[sigs[i].owner] >= requiredSecurityLevel &&
      ethers.verifyMessage(payload_hash, sigs[i].sig).toLowerCase() === sigs[i].owner.toLowerCase()
    ) {
      validSigs++
      seen.add(sigs[i].owner)
    }
    /* eslint-enable security/detect-object-injection */

    if (validSigs >= minSigRequired) break
  }

  return {
    isValid: validSigs >= minSigRequired,
    validCount: validSigs
  }
}


/**
 * Validates a batch of tickets: first the JSON schema as a whole, then the
 * multi-signature of each ticket's { data, type } payload against the
 * configured signer allow-list. Returns isValid=true only when every ticket
 * passes; otherwise each failure is described in `errors`.
 */
export function verifyTickets(
  tickets: Ticket[],
  config: VerificationConfig
): { isValid: boolean; errors: VerificationError[] } {
  validateVerificationConfig(config);

  if (!validateTicketSchema(tickets)) {
    return {
      isValid: false,
      errors: [{
        type: 'schema',
        message: `Schema validation failed: ${ajv.errorsText(validateTicketSchema.errors)}`,
        validSignatures: 0
      }]
    };
  }

  const errors: VerificationError[] = [];

  for (const ticket of tickets) {
    // Only data and type are covered by the signatures, not the sign array itself
    const { data, sign, type } = ticket;
    const messageObj = { data, type };

    const verificationResult = verifyMultiSigs(
      messageObj,
      sign,
      config.allowedTicketSigners,
      config.minSigRequired,
      config.requiredSecurityLevel
    );

    if (!verificationResult.isValid) {
      errors.push({
        type,
        message: `Invalid signatures for ticket type ${type}. Found ${verificationResult.validCount} valid signatures, required ${config.minSigRequired} with security level ${DevSecurityLevel[config.requiredSecurityLevel]}`,
        validSignatures: verificationResult.validCount
      });
    }
  }

  return {
    isValid: errors.length === 0,
    errors
  };
}
`${transaction.tx.txData.nodeId}-${transaction.tx.txData.start}` 52 | nodeRewardMap.set(rewardKey, transaction) 53 | console.log( 54 | `Mapped nodeReward - Node ID: ${transaction.tx.txData.nodeId}, Start Cycle: ${transaction.tx.txData.start}` 55 | ) 56 | } 57 | } 58 | 59 | console.log('Identifying unmatched nodeInitRewards without matching nodeRewards within ±5 cycle range...') 60 | for (const [key, transaction] of nodeInitRewardMap) { 61 | const { nodeId } = transaction.tx.txData // Access nodeId from txData 62 | const cycle = transaction.tx.cycle 63 | const rewardKey = `${nodeId}-${cycle}` 64 | 65 | // Check if any nodeReward exists within ±5 cycles of the nodeInitReward's cycle 66 | const hasMatchingReward = Array.from(nodeRewardMap.values()).some((rewardTx) => { 67 | const rewardCycle = rewardTx.tx.txData.start 68 | return ( 69 | rewardTx.tx.txData.nodeId === nodeId && 70 | rewardCycle !== undefined && 71 | Math.abs(rewardCycle - cycle) <= 5 72 | ) 73 | }) 74 | 75 | if (!hasMatchingReward) { 76 | const { publicKey, startTime } = transaction.tx.txData 77 | const newHash = generateHash(nodeId + startTime + endCycle) 78 | 79 | const newNodeReward: NewNodeReward = { 80 | hash: newHash, 81 | tx: { 82 | cycle: endCycle, 83 | hash: newHash, 84 | priority: transaction.tx.priority, 85 | subQueueKey: transaction.tx.subQueueKey, 86 | txData: { 87 | nodeId, 88 | publicKey, 89 | start: cycle, 90 | end: endCycle, 91 | endTime: endTime, 92 | }, 93 | type: 'nodeReward', 94 | }, 95 | } 96 | unmatchedRewards.push(newNodeReward) 97 | console.log( 98 | `Created new nodeReward - Node ID: ${nodeId}, Start Cycle: ${cycle}, End Cycle: ${endCycle}` 99 | ) 100 | } 101 | } 102 | 103 | 104 | console.log(`Total unmatched nodeInitRewards found: ${unmatchedRewards.length}`) 105 | return unmatchedRewards 106 | } 107 | 108 | function generateHash(input: string): string { 109 | return crypto.createHash('sha256').update(input).digest('hex') 110 | } 111 | 112 | async function main() { 113 | const 
filePath = path.join(__dirname, '..', 'tx-list-restore.json') 114 | const endCycle = Number(process.argv[2]) 115 | const endTime = Number(process.argv[3]) 116 | 117 | if (!endCycle || !endTime) { 118 | console.error('Please provide endCycle and endTime as arguments.') 119 | process.exit(1) 120 | } 121 | 122 | console.log(`Reading data from ${filePath}...`) 123 | const rawData = fs.readFileSync(filePath, 'utf-8') 124 | const transactions: TransactionEntry[] = JSON.parse(rawData) 125 | console.log(`Successfully read ${transactions.length} transactions.`) 126 | 127 | const newRewards = findUnmatchedNodeInitRewards(transactions, endCycle, endTime) 128 | 129 | const updatedTransactions = [...transactions, ...newRewards] 130 | console.log(`Appending ${newRewards.length} new nodeReward entries to the data.`) 131 | 132 | fs.writeFileSync(filePath, JSON.stringify(updatedTransactions, null, 2), 'utf-8') 133 | console.log(`Successfully added ${newRewards.length} new nodeReward entries and saved to ${filePath}.`) 134 | } 135 | 136 | main() 137 | -------------------------------------------------------------------------------- /src/Logger.ts: -------------------------------------------------------------------------------- 1 | import * as log4js from 'log4js' 2 | import { existsSync, mkdirSync } from 'fs' 3 | const log4jsExtend = require('log4js-extend') // eslint-disable-line @typescript-eslint/no-var-requires 4 | 5 | interface Logger { 6 | baseDir: string 7 | config: LogsConfiguration 8 | logDir: string 9 | log4Conf: any // eslint-disable-line @typescript-eslint/no-explicit-any 10 | } 11 | 12 | export interface LogsConfiguration { 13 | saveConsoleOutput?: boolean 14 | dir?: string 15 | files?: { 16 | main?: string 17 | fatal?: string 18 | net?: string 19 | app?: string 20 | } 21 | options?: { 22 | appenders?: { 23 | out?: { 24 | type?: string 25 | } 26 | main?: { 27 | type?: string 28 | maxLogSize?: number 29 | backups?: number 30 | } 31 | fatal?: { 32 | type?: string 33 | 
// Wraps log4js configuration/lifecycle for the archiver. Construction
// immediately wires up appenders and creates log directories (_setupLogs).
class Logger {
  constructor(baseDir: string, config: LogsConfiguration) {
    this.baseDir = baseDir
    this.config = config
    this.logDir = ''
    this.log4Conf = null
    this._setupLogs()
  }

  // Checks if the configuration has the required components
  _checkValidConfig(): void {
    const config = this.config
    if (!config.dir) throw Error('Fatal Error: Log directory not defined.')
    if (!config.files || typeof config.files !== 'object')
      throw Error('Fatal Error: Valid log file locations not provided.')
  }

  // Add filenames to each appender of type 'file'
  // e.g. appender key 'main' writes to '<logDir>/main.log'
  _addFileNamesToAppenders(): void {
    const conf = this.log4Conf
    for (const key in conf.appenders) {
      const appender = conf.appenders[key] // eslint-disable-line security/detect-object-injection
      if (appender.type !== 'file') continue
      appender.filename = `${this.logDir}/${key}.log`
    }
  }

  // Applies the assembled log4js configuration
  _configureLogs(): log4js.Log4js {
    return log4js.configure(this.log4Conf)
  }

  // Get the specified logger
  getLogger(logger: string): log4js.Logger {
    return log4js.getLogger(logger)
  }

  // Setup the logs with the provided configuration using the base directory provided for relative paths
  _setupLogs(): void {
    const baseDir = this.baseDir
    const config = this.config

    if (!baseDir) throw Error('Fatal Error: Base directory not defined.')
    if (!config) throw Error('Fatal Error: No configuration provided.')
    this._checkValidConfig()

    // Makes specified directory if it doesn't exist
    // config.dir is expected to be '<parent>/<instance>'; create the parent first
    if (config.dir) {
      const allArchiversLogDir = `${baseDir}/${config.dir.split('/')[0]}`
      // NOTE(review): this logs before _configureLogs() runs, so it goes to
      // log4js's default (stdout) configuration — confirm this is intended.
      this.getLogger('main').info('allArchiversLogDir', allArchiversLogDir)
      if (!existsSync(allArchiversLogDir)) mkdirSync(allArchiversLogDir) // eslint-disable-line security/detect-non-literal-fs-filename
    }

    this.logDir = `${baseDir}/${config.dir}`
    if (!existsSync(this.logDir)) mkdirSync(this.logDir) // eslint-disable-line security/detect-non-literal-fs-filename
    // Read the log config from log config file
    this.log4Conf = config.options
    log4jsExtend(log4js)
    this._addFileNamesToAppenders()
    this._configureLogs()
    this.getLogger('main').info('Logger initialized.')
  }

  // Tells this module that the server is shutting down, returns a Promise that resolves when all logs have been written to file, sockets are closed, etc.
  shutdown(): Promise {
    return new Promise((resolve) => {
      log4js.shutdown(() => {
        resolve('done')
      })
    })
  }
}

// Module-level logger handles, populated by initLogger(); undefined until then
export let mainLogger: log4js.Logger
export let fatalLogger: log4js.Logger
export let errorLogger: log4js.Logger

// Constructs the Logger (which configures log4js as a side effect) and wires
// up the module-level logger handles used throughout the codebase.
export function initLogger(baseDir: string, logsConfig: LogsConfiguration): void {
  const logger = new Logger(baseDir, logsConfig)
  mainLogger = logger.getLogger('main')
  fatalLogger = logger.getLogger('fatal')
  errorLogger = logger.getLogger('errorFile')
}

export default Logger
/**
 * Deterministic JSON-like stringifier used for profiling/log output.
 * Differences from JSON.stringify: object keys are sorted (stable output
 * regardless of insertion order), long hash strings are abbreviated via
 * makeShortHash, and Maps are serialized as a tagged entries container.
 *
 * NOTE(review): despite the `: string` return type, this returns `null` for a
 * null value / non-finite number and `undefined` for a top-level
 * function/undefined — callers appear to rely on this; confirm before
 * tightening the signature.
 *
 * @param val value to serialize
 * @param isArrayProp true when `val` is an array element: functions/undefined
 *   then encode as `null` instead of being dropped (mirrors JSON.stringify)
 */
export const stringifyReduce = (val: unknown, isArrayProp?: boolean): string => {
  let i: number
  let max: number
  let str: string
  let keys: string | string[]
  let key: string | number
  let propVal: string
  let toStr: string
  // Fast paths for the two boolean literals.
  if (val === true) {
    return 'true'
  }
  if (val === false) {
    return 'false'
  }
  switch (typeof val) {
    case 'object':
      if (val === null) {
        return null
      }
      // not used, don't compile for object
      // else if (val.toJSON && typeof val.toJSON === 'function') {
      //   return stringifyReduce(val.toJSON(), isArrayProp)
      // }
      else if (val instanceof Map) {
        // Maps are not JSON-serializable; wrap the entries in a tagged
        // container so a matching reviver could rebuild the Map later.
        const mapContainer = {
          dataType: 'stringifyReduce_map_2_array',
          value: Array.from(val.entries()), // or with spread: value: [...originalObject]
        }
        return stringifyReduce(mapContainer)
      } else {
        toStr = objToString.call(val)
        if (toStr === '[object Array]') {
          // Serialize elements with isArrayProp=true; the last element is
          // appended outside the loop to avoid a trailing comma.
          str = '['
          max = (val as []).length - 1
          for (i = 0; i < max; i++) {
            // eslint-disable-next-line security/detect-object-injection
            str += stringifyReduce(val[i], true) + ','
          }
          if (max > -1) {
            // eslint-disable-next-line security/detect-object-injection
            str += stringifyReduce(val[i], true)
          }
          return str + ']'
        } else if (toStr === '[object Object]') {
          // only object is left
          // Sorted keys make the encoding canonical; undefined-valued
          // properties are omitted, matching JSON.stringify.
          keys = objKeys(val).sort()
          max = keys.length
          str = ''
          i = 0
          while (i < max) {
            // eslint-disable-next-line security/detect-object-injection
            key = keys[i]
            // eslint-disable-next-line security/detect-object-injection
            propVal = stringifyReduce(val[key], false)
            if (propVal !== undefined) {
              if (str) {
                str += ','
              }
              str += StringUtils.safeStringify(key) + ':' + propVal
            }
            i++
          }
          return '{' + str + '}'
        } else {
          // Other object kinds (Date, RegExp, ...) fall back to the library stringifier.
          return StringUtils.safeStringify(val)
        }
      }
    case 'function':
    case 'undefined':
      // Mirror JSON.stringify: null inside arrays, dropped as object properties.
      return isArrayProp ? null : undefined
    case 'string': {
      // Abbreviate hash-like strings before quoting.
      const reduced = makeShortHash(val)
      return StringUtils.safeStringify(reduced)
    }
    default: {
      // Numeric fallback: finite values as decimal text, otherwise null.
      const n = Number(val)
      return isFinite(n) ? n.toString() : null
    }
  }
}
n.toString() : null 109 | } 110 | } 111 | } 112 | 113 | //TODO not used 114 | // export const replacer = (key: any, value: any) => { 115 | // const originalObject = value // this[key] 116 | // if (originalObject instanceof Map) { 117 | // return { 118 | // dataType: 'stringifyReduce_map_2_array', 119 | // value: Array.from(originalObject.entries()), // or with spread: value: [...originalObject] 120 | // } 121 | // } else { 122 | // return value 123 | // } 124 | // } 125 | 126 | //TODO not used 127 | // export const reviver = (key: any, value: any) => { 128 | // if (typeof value === 'object' && value !== null) { 129 | // if (value.dataType === 'stringifyReduce_map_2_array') { 130 | // return new Map(value.value) 131 | // } 132 | // } 133 | // return value 134 | // } 135 | 136 | //TODO not used 137 | // export const reviverExpander = (key: string, value: any) => { 138 | // if (typeof value === 'object' && value !== null) { 139 | // if (value.dataType === 'stringifyReduce_map_2_array') { 140 | // return new Map(value.value) 141 | // } 142 | // } 143 | // if (typeof value === 'string' && value.length === 10 && value[4] === 'x') { 144 | // const res = value.slice(0, 4) + '0'.repeat(55) + value.slice(5, 5 + 5) 145 | // return res 146 | // } 147 | // return value 148 | // } 149 | -------------------------------------------------------------------------------- /src/P2P.ts: -------------------------------------------------------------------------------- 1 | import * as State from './State' 2 | import * as Crypto from './Crypto' 3 | import * as Data from './Data/Data' 4 | import * as NodeList from './NodeList' 5 | import 'node-fetch' 6 | import fetch from 'node-fetch' 7 | import { P2P as P2PTypes } from '@shardeum-foundation/lib-types' 8 | import { RequestInit, Response } from 'node-fetch' 9 | import { SignedObject } from '@shardeum-foundation/lib-crypto-utils' 10 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 11 | // eslint-disable-next-line 
@typescript-eslint/no-var-requires 12 | const { version } = require('../package.json') 13 | 14 | export enum RequestTypes { 15 | JOIN = 'JOIN', 16 | ACTIVE = 'ACTIVE', 17 | LEAVE = 'LEAVE', 18 | } 19 | export interface ArchiverJoinRequest { 20 | nodeInfo: State.ArchiverNodeInfo 21 | appData: unknown 22 | requestType: RequestTypes.JOIN 23 | requestTimestamp: number // in ms 24 | cycleRecord?: P2PTypes.CycleCreatorTypes.CycleRecord 25 | } 26 | export interface ArchiverActiveRequest { 27 | nodeInfo: State.ArchiverNodeInfo 28 | requestType: RequestTypes.ACTIVE 29 | } 30 | export interface ArchiverLeaveRequest { 31 | nodeInfo: State.ArchiverNodeInfo 32 | requestType: RequestTypes.LEAVE 33 | requestTimestamp: number // in ms 34 | } 35 | export interface FirstNodeInfo { 36 | nodeInfo: { 37 | externalIp: string 38 | externalPort: number 39 | publicKey: string 40 | } 41 | } 42 | export interface FirstNodeResponse { 43 | nodeList: NodeList.ConsensusNodeInfo[] 44 | joinRequest?: ArchiverJoinRequest & Crypto.SignedMessage 45 | dataRequestCycle?: 46 | | (Data.DataRequest & Crypto.TaggedMessage) 47 | | number 48 | dataRequestStateMetaData?: Data.DataRequest & Crypto.TaggedMessage 49 | } 50 | 51 | export function createArchiverJoinRequest(): ArchiverJoinRequest & SignedObject { 52 | const joinRequest: ArchiverJoinRequest = { 53 | nodeInfo: State.getNodeInfo(), 54 | appData: { version }, 55 | requestType: RequestTypes.JOIN, 56 | requestTimestamp: Date.now(), 57 | } 58 | return Crypto.sign(joinRequest) 59 | } 60 | 61 | export function createArchiverActiveRequest(): ArchiverActiveRequest & SignedObject { 62 | const activeRequest: ArchiverActiveRequest = { 63 | nodeInfo: State.getNodeInfo(), 64 | requestType: RequestTypes.ACTIVE, 65 | } 66 | return Crypto.sign(activeRequest) 67 | } 68 | 69 | export function createArchiverLeaveRequest(): ArchiverLeaveRequest & SignedObject { 70 | const leaveRequest: ArchiverLeaveRequest = { 71 | nodeInfo: State.getNodeInfo(), 72 | requestType: 
RequestTypes.LEAVE, 73 | requestTimestamp: Date.now(), 74 | } 75 | return Crypto.sign(leaveRequest) 76 | } 77 | 78 | export async function postJson( 79 | url: string, 80 | body: object, 81 | timeoutInSecond = 5 82 | ): Promise<(object & { success?: boolean }) | null> { 83 | try { 84 | const res = await fetch(url, { 85 | method: 'post', 86 | body: StringUtils.safeStringify(body), 87 | headers: { 'Content-Type': 'application/json' }, 88 | timeout: timeoutInSecond * 1000, 89 | }) 90 | if (res.ok) { 91 | const text = await res.text() 92 | try { 93 | return StringUtils.safeJsonParse(text) 94 | } catch (parseError) { 95 | console.warn(`getJson failed: invalid JSON response url: ${url} parseError: ${parseError}`) 96 | return null 97 | } 98 | } else { 99 | console.warn('postJson failed: got bad response') 100 | console.warn(res.headers) 101 | console.warn(res.statusText) 102 | console.warn(await res.text()) 103 | return null 104 | } 105 | } catch (err) { 106 | console.warn('postJson failed: could not reach host') 107 | console.warn(err) 108 | return null 109 | } 110 | } 111 | 112 | export async function getJson(url: string, timeoutInSecond = 5): Promise { 113 | try { 114 | const res = await get(url, timeoutInSecond, { 115 | headers: { 'Content-Type': 'application/json' }, 116 | }) 117 | if (res.ok) { 118 | const text = await res.text() 119 | try { 120 | return StringUtils.safeJsonParse(text) 121 | } catch (parseError) { 122 | console.warn(`getJson failed: invalid JSON response url: ${url} parseError: ${parseError}`) 123 | return null 124 | } 125 | } else { 126 | console.warn('getJson failed: got bad response') 127 | console.warn(url) 128 | console.warn(res.headers) 129 | console.warn(res.statusText) 130 | console.warn(await res.text()) 131 | return null 132 | } 133 | } catch (err) { 134 | console.warn('getJson failed: could not reach host') 135 | console.warn(err) 136 | return null 137 | } 138 | } 139 | 140 | export async function get(url: string, timeoutInSecond = 20, 
opts?: RequestInit): Promise { 141 | return fetch(url, { 142 | method: 'get', 143 | timeout: timeoutInSecond * 1000, 144 | ...opts, 145 | }) 146 | } 147 | -------------------------------------------------------------------------------- /src/dbstore/processedTxs.ts: -------------------------------------------------------------------------------- 1 | import * as db from './sqlite3storage' 2 | import { processedTxDatabase } from './' 3 | import * as Logger from '../Logger' 4 | import { config } from '../Config' 5 | 6 | // const superjson = require('superjson') 7 | /** 8 | * ProcessedTransaction stores transactions which have a receipt 9 | */ 10 | export interface ProcessedTransaction { 11 | txId: string 12 | cycle: number 13 | txTimestamp: number 14 | applyTimestamp: number 15 | } 16 | 17 | export async function insertProcessedTx(processedTx: ProcessedTransaction): Promise { 18 | 19 | try { 20 | 21 | // Define the table columns based on schema 22 | const columns = ['txId', 'cycle', 'txTimestamp', 'applyTimestamp']; 23 | 24 | // Construct the SQL query with placeholders 25 | const placeholders = `(${columns.map(() => '?').join(', ')})`; 26 | const sql = ` 27 | INSERT INTO processedTxs (${columns.join(', ')}) VALUES ${placeholders} 28 | ON CONFLICT (txId) DO UPDATE SET 29 | cycle = excluded.cycle, 30 | txTimestamp = excluded.txTimestamp, 31 | applyTimestamp = excluded.applyTimestamp 32 | `; 33 | 34 | // Map the `processedTx` object to match the columns 35 | const values = columns.map((column) => processedTx[column]); 36 | 37 | // Execute the query directly (single-row insert/update) 38 | await db.run(processedTxDatabase, sql, values); 39 | 40 | if (config.VERBOSE) { 41 | Logger.mainLogger.debug('Successfully inserted ProcessedTransaction', processedTx.txId); 42 | } 43 | } catch (err) { 44 | Logger.mainLogger.error(err); 45 | Logger.mainLogger.error( 46 | 'Unable to insert ProcessedTransaction or it is already stored in the database', 47 | processedTx.txId 48 | ); 49 | } 
50 | } 51 | 52 | 53 | 54 | export async function bulkInsertProcessedTxs(processedTxs: ProcessedTransaction[]): Promise { 55 | 56 | try { 57 | 58 | // Define the table columns based on schema 59 | const columns = ['txId', 'cycle', 'txTimestamp', 'applyTimestamp']; 60 | 61 | // Construct the SQL query for bulk insertion 62 | const placeholders = processedTxs.map(() => `(${columns.map(() => '?').join(', ')})`).join(', '); 63 | const sql = ` 64 | INSERT INTO processedTxs (${columns.join(', ')}) VALUES ${placeholders} 65 | ON CONFLICT (txId) DO UPDATE SET 66 | cycle = excluded.cycle, 67 | txTimestamp = excluded.txTimestamp, 68 | applyTimestamp = excluded.applyTimestamp 69 | `; 70 | 71 | // Flatten the `processedTxs` array into a single list of values 72 | const values = processedTxs.flatMap((tx) => 73 | columns.map((column) => tx[column]) 74 | ); 75 | 76 | // Execute the single query 77 | await db.run(processedTxDatabase, sql, values); 78 | 79 | if (config.VERBOSE) { 80 | Logger.mainLogger.debug('Successfully inserted ProcessedTransactions', processedTxs.length); 81 | } 82 | } catch (err) { 83 | Logger.mainLogger.error(err); 84 | Logger.mainLogger.error('Unable to bulk insert ProcessedTransactions', processedTxs.length); 85 | } 86 | } 87 | 88 | 89 | 90 | export async function queryProcessedTxByTxId(txId: string): Promise { 91 | try { 92 | const sql = `SELECT * FROM processedTxs WHERE txId=?` 93 | const processedTx = (await db.get(processedTxDatabase, sql, [txId])) as ProcessedTransaction 94 | if (config.VERBOSE) { 95 | Logger.mainLogger.debug('ProcessedTransaction txId', processedTx) 96 | } 97 | return processedTx 98 | } catch (e) { 99 | Logger.mainLogger.error(e) 100 | return null 101 | } 102 | } 103 | 104 | export async function queryProcessedTxsByCycleNumber(cycleNumber: number): Promise { 105 | try { 106 | const sql = `SELECT * FROM processedTxs WHERE cycle=?` 107 | const processedTxs = (await db.all(processedTxDatabase, sql, [cycleNumber])) as 
ProcessedTransaction[] 108 | if (config.VERBOSE) { 109 | Logger.mainLogger.debug(`ProcessedTransactions for cycle: ${cycleNumber} ${processedTxs.length}`) 110 | } 111 | return processedTxs 112 | } catch (e) { 113 | Logger.mainLogger.error(e) 114 | return null 115 | } 116 | } 117 | 118 | export async function querySortedTxsBetweenCycleRange( 119 | startCycle: number, 120 | endCycle: number 121 | ): Promise { 122 | try { 123 | const sql = `SELECT txId FROM processedTxs WHERE cycle BETWEEN ? AND ?` 124 | const txIdsArray = (await db.all(processedTxDatabase, sql, [startCycle, endCycle])) as { txId: string }[] 125 | if (config.VERBOSE) { 126 | Logger.mainLogger.debug(`txIds between ${startCycle} and ${endCycle} are ${txIdsArray ? txIdsArray.length : 0}`) 127 | } 128 | 129 | if (!txIdsArray) { 130 | return [] 131 | } 132 | 133 | const txIds = txIdsArray.map((tx) => tx.txId) 134 | txIds.sort() 135 | return txIds 136 | } catch (e) { 137 | Logger.mainLogger.error('error in querySortedTxsBetweenCycleRange: ', e) 138 | return null 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/shardeum/calculateAccountHash.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from '../Crypto' 2 | import { ArchiverReceipt, SignedReceipt } from '../dbstore/receipts' 3 | import { verifyGlobalTxAccountChange } from './verifyGlobalTxReceipt' 4 | 5 | // account types in Shardeum 6 | export enum AccountType { 7 | Account = 0, // EOA or CA 8 | ContractStorage = 1, // Contract storage key value pair 9 | ContractCode = 2, // Contract code bytes 10 | Receipt = 3, //This holds logs for a TX 11 | Debug = 4, 12 | NetworkAccount = 5, 13 | NodeAccount = 6, 14 | NodeRewardReceipt = 7, 15 | DevAccount = 8, 16 | NodeAccount2 = 9, 17 | StakeReceipt = 10, 18 | UnstakeReceipt = 11, 19 | InternalTxReceipt = 12, 20 | SecureAccount = 13, 21 | } 22 | 23 | export const accountSpecificHash = (account: any): 
/**
 * Recomputes the canonical hash for a Shardeum account record, keyed by its
 * accountType, and writes it back onto the record.
 *
 * WARNING: mutates `account` — deletes any existing `hash` field before
 * hashing, then assigns the freshly computed hash to `account.hash`.
 *
 * NOTE(review): if `account.accountType` matches none of the handled types,
 * `hash` is never assigned and this returns `undefined` (setting
 * `account.hash = undefined`) — callers comparing against an expected hash
 * will then fail verification; confirm this fall-through is intended.
 *
 * @param account account record (shape varies by accountType)
 * @returns the computed hash string
 */
export const accountSpecificHash = (account: any): string => {
  let hash: string
  // Remove the stored hash so it does not contaminate the recomputation.
  delete account.hash
  if (
    account.accountType === AccountType.NetworkAccount ||
    account.accountType === AccountType.NodeAccount ||
    account.accountType === AccountType.NodeAccount2 ||
    account.accountType === AccountType.NodeRewardReceipt ||
    account.accountType === AccountType.StakeReceipt ||
    account.accountType === AccountType.UnstakeReceipt ||
    account.accountType === AccountType.InternalTxReceipt ||
    account.accountType === AccountType.DevAccount ||
    account.accountType === AccountType.SecureAccount
  ) {
    // These types hash the whole record (sans the deleted hash field).
    account.hash = crypto.hashObj(account)
    return account.hash
  }
  if (account.accountType === AccountType.Account) {
    // EOA/CA: hash the EVM account info plus timestamp; include operator info
    // only when present, so its absence and presence hash differently.
    const { account: EVMAccountInfo, operatorAccountInfo, timestamp } = account
    const accountData = operatorAccountInfo
      ? { EVMAccountInfo, operatorAccountInfo, timestamp }
      : { EVMAccountInfo, timestamp }
    hash = crypto.hashObj(accountData)
  } else if (account.accountType === AccountType.Debug) {
    hash = crypto.hashObj(account)
  } else if (account.accountType === AccountType.ContractStorage) {
    // Storage slot: hash the key/value pair only.
    hash = crypto.hashObj({ key: account.key, value: account.value })
  } else if (account.accountType === AccountType.ContractCode) {
    hash = crypto.hashObj({ key: account.codeHash, value: account.codeByte })
  } else if (account.accountType === AccountType.Receipt) {
    hash = crypto.hashObj({ key: account.txId, value: account.receipt })
  }

  // hash = hash + '0'.repeat(64 - hash.length)
  account.hash = hash
  return hash
}
/**
 * Verifies that the per-account after-state hashes recorded in a receipt's
 * signed proposal match hashes recomputed from the receipt's afterStates data.
 * Global-modification receipts are delegated to verifyGlobalTxAccountChange.
 *
 * @param receipt receipt to validate
 * @param failedReasons out-param: human-readable failure details appended here
 * @param nestedCounterMessages out-param: short counter labels appended here
 * @returns true when the receipt's account hashes all verify
 */
export const verifyAccountHash = (
  receipt: ArchiverReceipt,
  failedReasons = [],
  nestedCounterMessages = []
): boolean => {
  try {
    if (receipt.globalModification) {
      // Global txs carry a different receipt shape; verified separately.
      const result = verifyGlobalTxAccountChange(receipt, failedReasons, nestedCounterMessages)
      if (!result) return false
      return true
    }
    const signedReceipt = receipt.signedReceipt as SignedReceipt
    const { accountIDs, afterStateHashes, beforeStateHashes } = signedReceipt.proposal
    // The proposal carries three parallel arrays; they must line up index-for-index.
    if (accountIDs.length !== afterStateHashes.length) {
      failedReasons.push(
        `Modified account count specified in the receipt and the actual updated account count does not match! ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}`
      )
      nestedCounterMessages.push(
        `Modified account count specified in the receipt and the actual updated account count does not match!`
      )
      return false
    }
    if (beforeStateHashes.length !== afterStateHashes.length) {
      failedReasons.push(
        `Account state hash before and after count does not match! ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}`
      )
      nestedCounterMessages.push(`Account state hash before and after count does not match!`)
      return false
    }
    for (const [index, accountId] of accountIDs.entries()) {
      const accountData = receipt.afterStates.find((acc) => acc.accountId === accountId)
      if (accountData === undefined) {
        failedReasons.push(
          `Account not found in the receipt's afterStates | Acc-ID: ${accountId}, txId: ${receipt.tx.txId}, Cycle: ${receipt.cycle}, timestamp: ${receipt.tx.timestamp}`
        )
        nestedCounterMessages.push(`Account not found in the receipt`)
        return false
      }
      // NOTE: accountSpecificHash mutates accountData.data (rewrites its hash field).
      const calculatedAccountHash = accountSpecificHash(accountData.data)
      // eslint-disable-next-line security/detect-object-injection
      const expectedAccountHash = afterStateHashes[index]
      if (calculatedAccountHash !== expectedAccountHash) {
        failedReasons.push(
          `Account hash does not match | Acc-ID: ${accountId}, txId: ${receipt.tx.txId}, Cycle: ${receipt.cycle}, timestamp: ${receipt.tx.timestamp}`
        )
        nestedCounterMessages.push(`Account hash does not match`)
        return false
      }
    }
    return true
  } catch (e) {
    console.error(`Error in verifyAccountHash`, e)
    failedReasons.push(`Error in verifyAccountHash ${e}`)
    nestedCounterMessages.push('Error in verifyAccountHash')
    return false
  }
}
/**
 * Recomputes the module-level `adjacentArchivers` (this archiver's immediate
 * left/right neighbours in the publicKey-sorted active-archiver ring) and
 * `remainingArchivers` (all other known archivers). Both arrays are rebuilt
 * in place so existing references stay valid.
 */
export const getAdjacentLeftAndRightArchivers = (): void => {
  if (State.activeArchivers.length <= 1) {
    // Alone in the network: no neighbours to gossip to.
    adjacentArchivers = []
    return
  }
  // Treat the archivers list as a circular list and get one left and one right archivers of the current archiver
  const currentArchiverIndex = State.activeArchiversByPublicKeySorted.findIndex(
    (archiver) => archiver.publicKey === State.getNodeInfo().publicKey
  )
  let leftArchiver: State.ArchiverNodeInfo | null = null
  let rightArchiver: State.ArchiverNodeInfo | null = null
  if (State.activeArchiversByPublicKeySorted.length === 2) {
    // With exactly two archivers there is only one neighbour; pick one side so
    // it is not counted twice.
    if (currentArchiverIndex === 0) rightArchiver = State.activeArchiversByPublicKeySorted[1]
    else leftArchiver = State.activeArchiversByPublicKeySorted[0]
  } else {
    let leftArchiverIndex = currentArchiverIndex - 1
    let rightArchiverIndex = currentArchiverIndex + 1
    // Wrap around the ring at both ends.
    if (leftArchiverIndex < 0) leftArchiverIndex = State.activeArchiversByPublicKeySorted.length - 1
    if (rightArchiverIndex > State.activeArchiversByPublicKeySorted.length - 1) rightArchiverIndex = 0
    /* eslint-disable security/detect-object-injection */
    leftArchiver = State.activeArchiversByPublicKeySorted[leftArchiverIndex]
    rightArchiver = State.activeArchiversByPublicKeySorted[rightArchiverIndex]
    /* eslint-enable security/detect-object-injection */
  }
  // Rebuild both arrays in place (length = 0) rather than reassigning.
  adjacentArchivers.length = 0
  if (leftArchiver) adjacentArchivers.push(leftArchiver)
  if (rightArchiver) adjacentArchivers.push(rightArchiver)
  remainingArchivers.length = 0
  for (const archiver of State.otherArchivers) {
    if (!adjacentArchivers.some((a) => a.publicKey === archiver.publicKey)) {
      remainingArchivers.push(archiver)
    }
  }
}
/**
 * Signs and gossips `data` to this archiver's adjacent archivers, plus (when
 * config.gossipToMoreArchivers is set) a random sample of the remaining
 * archivers. Sends are fire-and-forget POSTs; individual failures are logged
 * and counted but never thrown to the caller.
 *
 * @param dataType kind of payload being gossiped (receipt / original tx / cycle)
 * @param data the payload items to gossip
 */
export async function sendDataToAdjacentArchivers(
  dataType: DataType,
  data: GossipData['data']
): Promise<void> {
  // Gossip can be disabled via config, and is pointless with no peers.
  if (config.stopGossipTxData) return
  if (State.otherArchivers.length === 0) return
  const gossipPayload = {
    dataType,
    data,
  } as GossipData
  const signedDataToSend = Crypto.sign(gossipPayload)
  try {
    const promises = []
    const archiversToSend = [...adjacentArchivers]
    if (config.gossipToMoreArchivers && remainingArchivers.length > 0) {
      // Widen the gossip fan-out with a random sample of non-adjacent archivers.
      const randomArchivers = Utils.getRandomItemFromArr(
        remainingArchivers,
        0,
        config.randomGossipArchiversCount
      )
      if (randomArchivers.length > 0) archiversToSend.push(...randomArchivers)
    }
    if (config.VERBOSE)
      Logger.mainLogger.debug(
        `Sending ${dataType} data to the archivers: ${archiversToSend.map((n) => `${n.ip}:${n.port}`)}`
      )
    for (const archiver of archiversToSend) {
      const url = `http://${archiver.ip}:${archiver.port}/gossip-data`
      try {
        const GOSSIP_DATA_TIMEOUT_SECOND = 10 // 10 seconds
        const promise = postJson(url, signedDataToSend, GOSSIP_DATA_TIMEOUT_SECOND)
        // Attach a catch handler immediately so a rejection before
        // allSettled below never surfaces as an unhandled rejection.
        promise.catch((err) => {
          Logger.mainLogger.error(`Unable to send archiver ${archiver.ip}: ${archiver.port}`, err)
        })
        promises.push(promise)
      } catch (e) {
        Logger.mainLogger.error(`Gossip Error to archiver ${archiver.ip}: ${archiver.port}`, e)
      }
    }
    try {
      // Wait for every send to settle and record a success/failure counter per
      // target (postJson resolves null on failure).
      await Promise.allSettled(promises).then((results) => {
        results.forEach((result) => {
          if (nestedCountersInstance) {
            if (result.status === 'fulfilled') {
              if (result.value !== null) nestedCountersInstance.countEvent('gossip-data', 'success')
              else nestedCountersInstance.countEvent('gossip-data', 'failure')
            } else nestedCountersInstance.countEvent('gossip-data', 'failure')
          }
        })
      })
    } catch (err) {
      Logger.mainLogger.error('Gossip Error: ' + err)
      if (nestedCountersInstance) nestedCountersInstance.countEvent('gossip-data', 'error 1', err)
    }
  } catch (ex) {
    Logger.mainLogger.debug(ex)
    Logger.mainLogger.debug('Fail to gossip')
    if (nestedCountersInstance) nestedCountersInstance.countEvent('gossip-data', 'error 2', ex)
  }
}
https://github.com/shardeum/shardeum/blob/89db23e1d4ffb86b4353b8f37fb360ea3cd93c5b/src/shardeum/shardeumTypes.ts#L242 6 | export interface SetGlobalTxValue { 7 | isInternalTx: boolean 8 | internalTXType: InternalTXType 9 | timestamp: number 10 | from: string 11 | change: { 12 | cycle: number 13 | change: object 14 | } 15 | } 16 | 17 | // Refer to https://github.com/shardeum/shardeum/blob/89db23e1d4ffb86b4353b8f37fb360ea3cd93c5b/src/shardeum/shardeumTypes.ts#L87-L88 18 | export enum InternalTXType { 19 | SetGlobalCodeBytes = 0, //Deprecated 20 | InitNetwork = 1, 21 | NodeReward = 2, //Deprecated 22 | ChangeConfig = 3, 23 | ApplyChangeConfig = 4, 24 | SetCertTime = 5, 25 | Stake = 6, 26 | Unstake = 7, 27 | InitRewardTimes = 8, 28 | ClaimReward = 9, 29 | ChangeNetworkParam = 10, 30 | ApplyNetworkParam = 11, 31 | Penalty = 12, 32 | } 33 | 34 | export const verifyGlobalTxAccountChange = ( 35 | receipt: ArchiverReceipt, 36 | failedReasons = [], 37 | nestedCounterMessages = [] 38 | ): boolean => { 39 | try { 40 | const signedReceipt = receipt.signedReceipt as P2P.GlobalAccountsTypes.GlobalTxReceipt 41 | const internalTx = signedReceipt.tx.value as SetGlobalTxValue 42 | 43 | if (internalTx.internalTXType === InternalTXType.InitNetwork) { 44 | // Refer to https://github.com/shardeum/shardeum/blob/89db23e1d4ffb86b4353b8f37fb360ea3cd93c5b/src/index.ts#L2334 45 | // no need to do anything, as it is network account creation 46 | return true 47 | } else if ( 48 | internalTx.internalTXType === InternalTXType.ApplyChangeConfig || 49 | internalTx.internalTXType === InternalTXType.ApplyNetworkParam 50 | ) { 51 | if (signedReceipt.tx.addressHash !== '') { 52 | for (const account of receipt.beforeStates) { 53 | if (account.accountId !== signedReceipt.tx.address) { 54 | failedReasons.push( 55 | `Unexpected account found in before accounts ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 56 | ) 57 | nestedCounterMessages.push(`Unexpected account found in before 
accounts`) 58 | return false 59 | } 60 | const expectedAccountHash = signedReceipt.tx.addressHash 61 | const calculatedAccountHash = accountSpecificHash(account.data) 62 | if (expectedAccountHash !== calculatedAccountHash) { 63 | failedReasons.push( 64 | `Account hash before does not match in globalModification tx - ${account.accountId} , ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 65 | ) 66 | nestedCounterMessages.push(`Account hash before does not match in globalModification tx`) 67 | return false 68 | } 69 | } 70 | } 71 | for (const account of receipt.afterStates) { 72 | if (account.accountId !== signedReceipt.tx.address) { 73 | failedReasons.push( 74 | `Unexpected account found in accounts ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 75 | ) 76 | nestedCounterMessages.push(`Unexpected account found in accounts`) 77 | return false 78 | } 79 | const networkAccountBefore = receipt.beforeStates.find( 80 | (bAccount) => bAccount?.accountId === account.accountId 81 | ) 82 | const networkAccountAfter = receipt.afterStates.find( 83 | (fAccount) => fAccount?.accountId === signedReceipt.tx.address 84 | ) 85 | if (!networkAccountBefore || !networkAccountAfter) { 86 | failedReasons.push( 87 | `No network account found in accounts ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 88 | ) 89 | nestedCounterMessages.push(`No network account found in accounts`) 90 | return false 91 | } 92 | networkAccountBefore.data.listOfChanges?.push(internalTx.change) 93 | networkAccountBefore.data.timestamp = signedReceipt.tx.when 94 | const expectedAccountHash = networkAccountAfter.hash 95 | console.dir(networkAccountBefore, { depth: null }) 96 | const calculatedAccountHash = accountSpecificHash(networkAccountBefore.data) 97 | if (expectedAccountHash !== calculatedAccountHash) { 98 | failedReasons.push( 99 | `Account hash does not match in globalModification tx - ${networkAccountAfter.accountId} , ${receipt.tx.txId} , ${receipt.cycle} 
, ${receipt.tx.timestamp}` 100 | ) 101 | nestedCounterMessages.push(`Account hash does not match in globalModification tx`) 102 | return false 103 | } 104 | } 105 | return true 106 | } else { 107 | failedReasons.push( 108 | `Unexpected internal transaction type in the globalModification tx ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 109 | ) 110 | nestedCounterMessages.push(`Unexpected internal transaction type in the globalModification tx`) 111 | return false 112 | } 113 | } catch (error) { 114 | console.error(`verifyGlobalTxAccountChange error`, error) 115 | failedReasons.push( 116 | `Error while verifying global account change ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}, ${error}` 117 | ) 118 | nestedCounterMessages.push(`Error while verifying global account change`) 119 | return false 120 | } 121 | } 122 | -------------------------------------------------------------------------------- /src/dbstore/index.ts: -------------------------------------------------------------------------------- 1 | import { Database } from 'sqlite3' 2 | import { Config } from '../Config' 3 | import { createDB, runCreate, close } from './sqlite3storage' 4 | import { createDirectories } from '../Utils' 5 | 6 | export let cycleDatabase: Database 7 | export let accountDatabase: Database 8 | export let transactionDatabase: Database 9 | export let receiptDatabase: Database 10 | export let originalTxDataDatabase: Database 11 | export let processedTxDatabase: Database 12 | 13 | export const initializeDB = async (config: Config): Promise => { 14 | createDirectories(config.ARCHIVER_DB) 15 | accountDatabase = await createDB(`${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.accountDB}`, 'Account') 16 | cycleDatabase = await createDB(`${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.cycleDB}`, 'Cycle') 17 | transactionDatabase = await createDB( 18 | `${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.transactionDB}`, 19 | 'Transaction' 20 | ) 21 | receiptDatabase = await 
createDB(`${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.receiptDB}`, 'Receipt') 22 | originalTxDataDatabase = await createDB( 23 | `${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.originalTxDataDB}`, 24 | 'OriginalTxData' 25 | ) 26 | processedTxDatabase = await createDB( 27 | `${config.ARCHIVER_DB}/${config.ARCHIVER_DATA.processedTxDB}`, 28 | 'ProcessedTransaction' 29 | ) 30 | await runCreate( 31 | transactionDatabase, 32 | 'CREATE TABLE if not exists `transactions` (`txId` TEXT NOT NULL UNIQUE PRIMARY KEY, `appReceiptId` TEXT, `timestamp` BIGINT NOT NULL, `cycleNumber` NUMBER NOT NULL, `data` JSON NOT NULL, `originalTxData` JSON NOT NULL)' 33 | ) 34 | await runCreate( 35 | transactionDatabase, 36 | 'CREATE INDEX if not exists `transactions_timestamp` ON `transactions` (`timestamp` ASC)' 37 | ) 38 | await runCreate( 39 | transactionDatabase, 40 | 'CREATE INDEX if not exists `transactions_cycleNumber_timestamp` ON `transactions` (`cycleNumber` ASC, `timestamp` ASC)' 41 | ) 42 | await runCreate( 43 | transactionDatabase, 44 | 'CREATE INDEX if not exists `transactions_appReceiptId_idx` ON `transactions` (`appReceiptId`)' 45 | ) 46 | await runCreate( 47 | cycleDatabase, 48 | 'CREATE TABLE if not exists `cycles` (`cycleMarker` TEXT NOT NULL UNIQUE PRIMARY KEY, `counter` NUMBER NOT NULL, `cycleRecord` JSON NOT NULL)' 49 | ) 50 | await runCreate(cycleDatabase, 'CREATE INDEX if not exists `cycles_idx` ON `cycles` (`counter` ASC)') 51 | await runCreate( 52 | accountDatabase, 53 | 'CREATE TABLE if not exists `accounts` (`accountId` TEXT NOT NULL UNIQUE PRIMARY KEY, `data` JSON NOT NULL, `timestamp` BIGINT NOT NULL, `hash` TEXT NOT NULL, `cycleNumber` NUMBER NOT NULL, `isGlobal` BOOLEAN NOT NULL)' 54 | ) 55 | await runCreate( 56 | accountDatabase, 57 | 'CREATE INDEX if not exists `accounts_cycleNumber` ON `accounts` (`cycleNumber` ASC)' 58 | ) 59 | await runCreate( 60 | accountDatabase, 61 | 'CREATE INDEX if not exists `accounts_timestamp` ON `accounts` (`timestamp` ASC)' 62 | ) 
63 | await runCreate( 64 | accountDatabase, 65 | 'CREATE INDEX if not exists `accounts_cycleNumber_timestamp` ON `accounts` (`cycleNumber` ASC, `timestamp` ASC)' 66 | ) 67 | await runCreate( 68 | receiptDatabase, 69 | 'CREATE TABLE if not exists `receipts` (`receiptId` TEXT NOT NULL UNIQUE PRIMARY KEY, `tx` JSON NOT NULL, `cycle` NUMBER NOT NULL, `applyTimestamp` BIGINT NOT NULL, `timestamp` BIGINT NOT NULL, `signedReceipt` JSON NOT NULL, `afterStates` JSON, `beforeStates` JSON, `appReceiptData` JSON, `executionShardKey` TEXT NOT NULL, `globalModification` BOOLEAN NOT NULL)' 70 | ) 71 | await runCreate(receiptDatabase, 'CREATE INDEX if not exists `receipts_cycle` ON `receipts` (`cycle` ASC)') 72 | await runCreate( 73 | receiptDatabase, 74 | 'CREATE INDEX if not exists `receipts_timestamp` ON `receipts` (`timestamp` ASC)' 75 | ) 76 | await runCreate(receiptDatabase, 'CREATE INDEX if not exists `receipts_cycle` ON `receipts` (`cycle` ASC)') 77 | await runCreate( 78 | receiptDatabase, 79 | 'CREATE INDEX if not exists `receipts_cycle_timestamp` ON `receipts` (`cycle` ASC, `timestamp` ASC)' 80 | ) 81 | await runCreate( 82 | originalTxDataDatabase, 83 | 'CREATE TABLE if not exists `originalTxsData` (`txId` TEXT NOT NULL, `timestamp` BIGINT NOT NULL, `cycle` NUMBER NOT NULL, `originalTxData` JSON NOT NULL, PRIMARY KEY (`txId`, `timestamp`))' 84 | ) 85 | await runCreate( 86 | originalTxDataDatabase, 87 | 'CREATE INDEX if not exists `originalTxsData_cycle` ON `originalTxsData` (`cycle` ASC)' 88 | ) 89 | await runCreate( 90 | originalTxDataDatabase, 91 | 'CREATE INDEX if not exists `originalTxsData_timestamp` ON `originalTxsData` (`timestamp` ASC)' 92 | ) 93 | await runCreate( 94 | originalTxDataDatabase, 95 | 'CREATE INDEX if not exists `originalTxsData_cycle_timestamp` ON `originalTxsData` (`cycle` ASC, `timestamp` ASC)' 96 | ) 97 | await runCreate( 98 | originalTxDataDatabase, 99 | 'CREATE INDEX if not exists `originalTxsData_txId` ON `originalTxsData` (`txId`)' 100 | ) 
101 | 102 | // Transaction digester service tables 103 | await runCreate( 104 | processedTxDatabase, 105 | 'CREATE TABLE if not exists `processedTxs` (`txId` VARCHAR(128) NOT NULL, `cycle` BIGINT NOT NULL, `txTimestamp` BIGINT NOT NULL, `applyTimestamp` BIGINT NOT NULL, PRIMARY KEY (`txId`))' 106 | ) 107 | await runCreate( 108 | processedTxDatabase, 109 | 'CREATE INDEX if not exists `processedTxs_cycle_idx` ON `processedTxs` (`cycle`)' 110 | ) 111 | } 112 | 113 | export const closeDatabase = async (): Promise => { 114 | const promises = [] 115 | promises.push(close(accountDatabase, 'Account')) 116 | promises.push(close(transactionDatabase, 'Transaction')) 117 | promises.push(close(cycleDatabase, 'Cycle')) 118 | promises.push(close(receiptDatabase, 'Receipt')) 119 | promises.push(close(originalTxDataDatabase, 'OriginalTxData')) 120 | promises.push(close(processedTxDatabase, 'ProcessedTransaction')) 121 | await Promise.all(promises) 122 | } 123 | -------------------------------------------------------------------------------- /src/profiler/nestedCounters.ts: -------------------------------------------------------------------------------- 1 | import * as fastify from 'fastify' 2 | import { stringifyReduce } from './StringifyReduce' 3 | import * as core from '@shardeum-foundation/lib-crypto-utils' 4 | import { isDebugMiddleware } from '../DebugMode' 5 | 6 | type CounterMap = Map 7 | interface CounterNode { 8 | count: number 9 | subCounters: CounterMap 10 | } 11 | 12 | interface EntriesCounter { 13 | key: string 14 | count: number 15 | subArray: EntriesCounter[] 16 | } 17 | 18 | export let nestedCountersInstance: NestedCounters 19 | 20 | export function setNestedCountersInstance(instance: NestedCounters): void { 21 | nestedCountersInstance = instance 22 | } 23 | 24 | class NestedCounters { 25 | eventCounters: Map 26 | rareEventCounters: Map 27 | infLoopDebug: boolean 28 | server: fastify.FastifyInstance 29 | 30 | constructor(server: fastify.FastifyInstance) { 31 | // 
this.sectionTimes = {} 32 | this.eventCounters = new Map() 33 | this.rareEventCounters = new Map() 34 | this.infLoopDebug = false 35 | this.server = server 36 | } 37 | 38 | registerEndpoints(): void { 39 | this.server.get( 40 | '/counts', 41 | { 42 | preHandler: async (_request, reply) => { 43 | isDebugMiddleware(_request, reply) 44 | }, 45 | }, 46 | (req, res) => { 47 | let outputStr = '' 48 | const arrayReport = this.arrayitizeAndSort(this.eventCounters) 49 | outputStr += `${Date.now()}\n` 50 | outputStr = this.printArrayReport(arrayReport, outputStr, 0) 51 | res.send(outputStr) 52 | } 53 | ) 54 | this.server.get( 55 | '/counts-reset', 56 | { 57 | preHandler: async (_request, reply) => { 58 | isDebugMiddleware(_request, reply) 59 | }, 60 | }, 61 | (req, res) => { 62 | this.eventCounters = new Map() 63 | res.send(`counts reset ${Date.now()}`) 64 | } 65 | ) 66 | 67 | this.server.get( 68 | '/debug-inf-loop', 69 | { 70 | preHandler: async (_request, reply) => { 71 | isDebugMiddleware(_request, reply) 72 | }, 73 | }, 74 | (req, res) => { 75 | res.send('starting inf loop, goodbye') 76 | this.infLoopDebug = true 77 | while (this.infLoopDebug) { 78 | const s = 'asdf' 79 | const s2 = stringifyReduce({ test: [s, s, s, s, s, s, s] }) 80 | const s3 = stringifyReduce({ test: [s2, s2, s2, s2, s2, s2, s2] }) 81 | core.hash(s3) 82 | } 83 | } 84 | ) 85 | 86 | this.server.get( 87 | '/debug-inf-loop-off', 88 | { 89 | preHandler: async (_request, reply) => { 90 | isDebugMiddleware(_request, reply) 91 | }, 92 | }, 93 | (req, res) => { 94 | this.infLoopDebug = false 95 | res.send('stopping inf loop, who knows if this is possible') 96 | } 97 | ) 98 | } 99 | 100 | countEvent(category1: string, category2: string, count = 1): void { 101 | let counterMap: CounterMap = this.eventCounters 102 | 103 | let nextNode: CounterNode 104 | if (counterMap.has(category1) === false) { 105 | nextNode = { count: 0, subCounters: new Map() } 106 | counterMap.set(category1, nextNode) 107 | } else { 108 | 
nextNode = counterMap.get(category1) 109 | } 110 | nextNode.count += count 111 | counterMap = nextNode.subCounters 112 | 113 | //unrolled loop to avoid memory alloc 114 | category1 = category2 115 | if (counterMap.has(category1) === false) { 116 | nextNode = { count: 0, subCounters: new Map() } 117 | counterMap.set(category1, nextNode) 118 | } else { 119 | nextNode = counterMap.get(category1) 120 | } 121 | nextNode.count += count 122 | counterMap = nextNode.subCounters 123 | } 124 | 125 | countRareEvent(category1: string, category2: string, count = 1): void { 126 | // trigger normal event counter 127 | this.countEvent(category1, category2, count) 128 | 129 | // start counting rare event 130 | let counterMap: CounterMap = this.rareEventCounters 131 | 132 | let nextNode: CounterNode = { count: 0, subCounters: new Map() } 133 | if (!counterMap.has(category1)) { 134 | nextNode = { count: 0, subCounters: new Map() } 135 | counterMap.set(category1, nextNode) 136 | } else { 137 | nextNode = counterMap.get(category1) 138 | } 139 | nextNode.count += count 140 | counterMap = nextNode.subCounters 141 | 142 | //unrolled loop to avoid memory alloc 143 | category1 = category2 144 | if (counterMap.has(category1) === false) { 145 | nextNode = { count: 0, subCounters: new Map() } 146 | counterMap.set(category1, nextNode) 147 | } else { 148 | nextNode = counterMap.get(category1) 149 | } 150 | nextNode.count += count 151 | counterMap = nextNode.subCounters 152 | } 153 | 154 | arrayitizeAndSort(counterMap: CounterMap): EntriesCounter[] { 155 | const array = [] 156 | for (const key of counterMap.keys()) { 157 | const valueObj = counterMap.get(key) 158 | 159 | const newValueObj: EntriesCounter = { key, count: valueObj.count, subArray: null } 160 | // newValueObj.key = key 161 | array.push(newValueObj) 162 | 163 | let subArray = [] 164 | if (valueObj.subCounters != null) { 165 | subArray = this.arrayitizeAndSort(valueObj.subCounters) 166 | } 167 | 168 | // if (valueObj.count != null && 
valueObj.logLen != null) { 169 | // valueObj.avgLen = valueObj.logLen / valueObj.count 170 | // } 171 | 172 | newValueObj.subArray = subArray 173 | // delete valueObj['subCounters'] 174 | } 175 | 176 | array.sort((a, b) => b.count - a.count) 177 | return array 178 | } 179 | 180 | printArrayReport(arrayReport: EntriesCounter[], outputStr: string, indent = 0): string { 181 | const indentText = '___'.repeat(indent) 182 | for (const item of arrayReport) { 183 | const { key, count, subArray } = item 184 | const countStr = `${count}` 185 | outputStr += `${countStr.padStart(10)} ${indentText} ${key}\n` 186 | if (subArray != null && subArray.length > 0) { 187 | outputStr = this.printArrayReport(subArray, outputStr, indent + 1) 188 | } 189 | } 190 | return outputStr 191 | } 192 | } 193 | 194 | export default NestedCounters 195 | -------------------------------------------------------------------------------- /src/dbstore/cycles.ts: -------------------------------------------------------------------------------- 1 | import * as db from './sqlite3storage' 2 | import { cycleDatabase } from '.' 3 | import { P2P } from '@shardeum-foundation/lib-types' 4 | import * as Logger from '../Logger' 5 | import { config } from '../Config' 6 | import { DeSerializeFromJsonString, SerializeToJsonString } from '../utils/serialization' 7 | import { Cycle, DbCycle } from './types' 8 | 9 | 10 | export async function insertCycle(cycle: Cycle): Promise { 11 | 12 | try { 13 | // Define the table columns based on schema 14 | const columns = ['cycleMarker', 'counter', 'cycleRecord']; 15 | 16 | // Construct the SQL query with placeholders 17 | const placeholders = `(${columns.map(() => '?').join(', ')})`; 18 | const sql = `INSERT OR REPLACE INTO cycles (${columns.join(', ')}) VALUES ${placeholders}`; 19 | 20 | // Map the `cycle` object to match the columns 21 | const values = columns.map((column) => 22 | typeof cycle[column] === 'object' 23 | ? 
SerializeToJsonString(cycle[column]) // Serialize objects to JSON 24 | : cycle[column] 25 | ); 26 | 27 | // Execute the query directly (single-row insert) 28 | await db.run(cycleDatabase, sql, values); 29 | 30 | if (config.VERBOSE) { 31 | Logger.mainLogger.debug( 32 | 'Successfully inserted Cycle', 33 | cycle.counter, 34 | cycle.cycleMarker 35 | ); 36 | } 37 | } catch (err) { 38 | Logger.mainLogger.error(err); 39 | Logger.mainLogger.error( 40 | 'Unable to insert cycle or it is already stored in the database', 41 | cycle.counter, 42 | cycle.cycleMarker 43 | ); 44 | } 45 | } 46 | 47 | export async function bulkInsertCycles(cycles: Cycle[]): Promise { 48 | 49 | try { 50 | // Define the table columns based on schema 51 | const columns = ['cycleMarker', 'counter', 'cycleRecord']; 52 | 53 | // Construct the SQL query for bulk insertion with all placeholders 54 | const placeholders = cycles.map(() => `(${columns.map(() => '?').join(', ')})`).join(', '); 55 | const sql = `INSERT OR REPLACE INTO cycles (${columns.join(', ')}) VALUES ${placeholders}`; 56 | 57 | // Flatten the `cycles` array into a single list of values 58 | const values = cycles.flatMap((cycle) => 59 | columns.map((column) => 60 | typeof cycle[column] === 'object' 61 | ? 
SerializeToJsonString(cycle[column]) // Serialize objects to JSON 62 | : cycle[column] 63 | ) 64 | ); 65 | 66 | // Execute the single query for all cycles 67 | await db.run(cycleDatabase, sql, values); 68 | 69 | if (config.VERBOSE) { 70 | Logger.mainLogger.debug('Successfully inserted Cycles', cycles.length); 71 | } 72 | } catch (err) { 73 | Logger.mainLogger.error(err); 74 | Logger.mainLogger.error('Unable to bulk insert Cycles', cycles.length); 75 | } 76 | } 77 | 78 | export async function updateCycle(marker: string, cycle: Cycle): Promise { 79 | try { 80 | const sql = `UPDATE cycles SET counter = $counter, cycleRecord = $cycleRecord WHERE cycleMarker = $marker ` 81 | await db.run(cycleDatabase, sql, { 82 | $counter: cycle.counter, 83 | $cycleRecord: cycle.cycleRecord && SerializeToJsonString(cycle.cycleRecord), 84 | $marker: marker, 85 | }) 86 | if (config.VERBOSE) { 87 | Logger.mainLogger.debug('Updated cycle for counter', cycle.cycleRecord.counter, cycle.cycleMarker) 88 | } 89 | } catch (e) { 90 | Logger.mainLogger.error(e) 91 | Logger.mainLogger.error('Unable to update Cycle', cycle.cycleMarker) 92 | } 93 | } 94 | 95 | export async function queryCycleByMarker(marker: string): Promise { 96 | try { 97 | const sql = `SELECT * FROM cycles WHERE cycleMarker=? 
LIMIT 1` 98 | const dbCycle = (await db.get(cycleDatabase, sql, [marker])) as DbCycle 99 | let cycle: Cycle 100 | if (dbCycle) { 101 | cycle = { 102 | counter: dbCycle.counter, 103 | cycleRecord: DeSerializeFromJsonString(dbCycle.cycleRecord), 104 | cycleMarker: dbCycle.cycleMarker, 105 | } 106 | } 107 | if (config.VERBOSE) { 108 | Logger.mainLogger.debug('cycle marker', cycle) 109 | } 110 | return cycle 111 | } catch (e) { 112 | Logger.mainLogger.error(e) 113 | return null 114 | } 115 | } 116 | 117 | export async function queryLatestCycleRecords(count: number): Promise { 118 | if (!Number.isInteger(count)) { 119 | Logger.mainLogger.error('queryLatestCycleRecords - Invalid count value') 120 | return [] 121 | } 122 | try { 123 | const sql = `SELECT * FROM cycles ORDER BY counter DESC LIMIT ${count ? count : 100}` 124 | const dbCycles = (await db.all(cycleDatabase, sql)) as DbCycle[] 125 | const cycleRecords: P2P.CycleCreatorTypes.CycleData[] = [] 126 | if (dbCycles.length > 0) { 127 | for (const cycle of dbCycles) { 128 | if (cycle.cycleRecord) cycleRecords.push(DeSerializeFromJsonString(cycle.cycleRecord)) 129 | } 130 | } 131 | if (config.VERBOSE) { 132 | Logger.mainLogger.debug('cycle latest', cycleRecords) 133 | } 134 | return cycleRecords 135 | } catch (e) { 136 | Logger.mainLogger.error(e) 137 | return [] 138 | } 139 | } 140 | 141 | export async function queryCycleRecordsBetween( 142 | start: number, 143 | end: number 144 | ): Promise { 145 | try { 146 | const sql = `SELECT * FROM cycles WHERE counter BETWEEN ? AND ? 
ORDER BY counter ASC` 147 | const dbCycles = (await db.all(cycleDatabase, sql, [start, end])) as DbCycle[] 148 | const cycleRecords: P2P.CycleCreatorTypes.CycleData[] = [] 149 | if (dbCycles.length > 0) { 150 | for (const cycle of dbCycles) { 151 | if (cycle.cycleRecord) cycleRecords.push(DeSerializeFromJsonString(cycle.cycleRecord)) 152 | } 153 | } 154 | if (config.VERBOSE) { 155 | Logger.mainLogger.debug('cycle between', cycleRecords) 156 | } 157 | return cycleRecords 158 | } catch (e) { 159 | Logger.mainLogger.error(e) 160 | return [] 161 | } 162 | } 163 | 164 | export async function queryCyleCount(): Promise { 165 | let cycles 166 | try { 167 | const sql = `SELECT COUNT(*) FROM cycles` 168 | cycles = await db.get(cycleDatabase, sql, []) 169 | } catch (e) { 170 | Logger.mainLogger.error(e) 171 | } 172 | if (config.VERBOSE) { 173 | Logger.mainLogger.debug('Cycle count', cycles) 174 | } 175 | if (cycles) cycles = cycles['COUNT(*)'] 176 | else cycles = 0 177 | return cycles 178 | } 179 | -------------------------------------------------------------------------------- /src/test/dataSync/mulitpleArchivers.ts: -------------------------------------------------------------------------------- 1 | import { getJson } from '../../P2P' 2 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 3 | 4 | interface ReceiptResponse { 5 | receipts: Array<{ cycle: number; receipts: unknown[] }> 6 | } 7 | 8 | interface TotalData { 9 | totalCycles: number 10 | totalAccounts: number 11 | totalTransactions: number 12 | totalReceipts: number 13 | } 14 | 15 | interface CycleInfoResponse { 16 | cycleInfo: unknown 17 | } 18 | 19 | export async function checkDataSyncBetweenArchivers(ip, numberOfArchivers): Promise { 20 | const dataInfos = { 21 | archiverInfo: [], 22 | totalCycles: [], 23 | totalAccounts: [], 24 | totalTransactions: [], 25 | totalReceipts: [], 26 | } 27 | 28 | for (let i = 0; i < numberOfArchivers; i++) { 29 | const archiverURL = ip + ':' + (4000 + i) 30 | 
const res = (await getJson(`http://${archiverURL}/totalData`)) as TotalData 31 | if (typeof res === 'object' && res !== null && 'cycleInfo' in res) { 32 | dataInfos.archiverInfo.push(archiverURL) 33 | dataInfos.totalCycles.push(res.totalCycles) 34 | dataInfos.totalAccounts.push(res.totalAccounts) 35 | dataInfos.totalTransactions.push(res.totalTransactions) 36 | dataInfos.totalReceipts.push(res.totalReceipts) 37 | } else console.log(`Fail to fetch totalData from archiver ${archiverURL}`) 38 | } 39 | 40 | if (dataInfos.archiverInfo.length > 1) { 41 | const expectedTotalCycles = dataInfos.totalCycles[0] 42 | const totalCyclesIsMatched = dataInfos.totalCycles.every((cycle) => cycle === expectedTotalCycles) 43 | if (totalCyclesIsMatched) console.log('TotalCycles is matched!') 44 | 45 | const expectedTotalAccounts = dataInfos.totalAccounts[0] 46 | const totalAccountsIsMatched = dataInfos.totalAccounts.every((cycle) => cycle === expectedTotalAccounts) 47 | if (totalAccountsIsMatched) console.log('TotalAccounts is matched!') 48 | 49 | const expectedTotalTransactions = dataInfos.totalTransactions[0] 50 | const totalTransactionsIsMatched = dataInfos.totalTransactions.every( 51 | (cycle) => cycle === expectedTotalTransactions 52 | ) 53 | if (totalTransactionsIsMatched) console.log('TotalTransactions is matched!') 54 | 55 | const expectedTotalReceipts = dataInfos.totalReceipts[0] 56 | const totalReceiptsIsMatched = dataInfos.totalReceipts.every((cycle) => cycle === expectedTotalReceipts) 57 | if (totalReceiptsIsMatched) console.log('TotalReceipts is matched!') 58 | } 59 | } 60 | 61 | export async function checkCyclesDataBetweenArchivers(ip, numberOfArchivers): Promise { 62 | const dataInfos = {} 63 | 64 | for (let i = 0; i < numberOfArchivers; i++) { 65 | const archiverURL = ip + ':' + (4000 + i) 66 | const res = (await getJson(`http://${archiverURL}/cycleinfo/100`)) as CycleInfoResponse 67 | if (res) { 68 | // eslint-disable-next-line security/detect-object-injection 69 | 
dataInfos[archiverURL] = res.cycleInfo 70 | } else console.log(`Fail to fetch cycle data from archiver ${archiverURL}`) 71 | } 72 | 73 | if (Object.keys(dataInfos).length > 0) { 74 | const archiverInfos = Object.keys(dataInfos) 75 | const expectedCycles = dataInfos[archiverInfos[0]] 76 | let allCyclesAreMatched = true 77 | for (let i = 0; i < expectedCycles.length; i++) { 78 | // eslint-disable-next-line security/detect-object-injection 79 | const cycleInfo = expectedCycles[i] 80 | const cycleInfoToMatch = StringUtils.safeStringify(cycleInfo) 81 | for (let j = 1; j < archiverInfos.length; j++) { 82 | // console.log(cycleInfo.counter, dataInfos[archiverInfos[j]][i].counter) 83 | // eslint-disable-next-line security/detect-object-injection 84 | if (cycleInfoToMatch !== StringUtils.safeStringify(dataInfos[archiverInfos[j]][i])) { 85 | allCyclesAreMatched = false 86 | console.log(`Cycle ${cycleInfo.counter} is not matched between archivers!`) 87 | } 88 | } 89 | } 90 | if (allCyclesAreMatched) console.log('All the latest 100 cycles are match!') 91 | } 92 | } 93 | 94 | export async function checkReceiptsDataBetweenArchivers(ip, numberOfArchivers): Promise { 95 | const randomPort = getRndInteger(0, numberOfArchivers - 1) 96 | const randomArchiver = ip + ':' + (4000 + randomPort) 97 | const response = (await getJson(`http://${randomArchiver}/receipt/1`)) as ReceiptResponse 98 | const latestReceiptInfo = response.receipts[0] 99 | const endCycle = latestReceiptInfo.cycle 100 | const startCycle = latestReceiptInfo.cycle - 10 101 | 102 | const dataInfos = { 103 | archiverInfo: [], 104 | cycles: {}, 105 | } 106 | 107 | for (let i = startCycle; i <= endCycle; i++) { 108 | // eslint-disable-next-line security/detect-object-injection 109 | dataInfos.cycles[i] = [] 110 | } 111 | 112 | for (let i = 0; i < numberOfArchivers; i++) { 113 | const archiverURL = ip + ':' + (4000 + i) 114 | const res = (await getJson( 115 | 
`http://${archiverURL}/receipt?startCycle=${startCycle}&endCycle=${endCycle}&type=tally` 116 | )) as ReceiptResponse 117 | if (res && res.receipts) { 118 | dataInfos.archiverInfo.push(archiverURL) 119 | for (const receiptInfo of res.receipts) { 120 | dataInfos.cycles[receiptInfo.cycle].push(receiptInfo.receipts) 121 | } 122 | // Place 0 for cycles that are not in the query 123 | for (let j = startCycle; j <= endCycle; j++) { 124 | let found = false 125 | for (const receiptInfo of res.receipts) { 126 | if (j === receiptInfo.cycle) found = true 127 | } 128 | // eslint-disable-next-line security/detect-object-injection 129 | if (!found) dataInfos.cycles[j].push(0) 130 | } 131 | } else console.log(`Fail to fetch receipt data between cycles from archiver ${archiverURL}`) 132 | } 133 | 134 | if (dataInfos.archiverInfo.length > 1) { 135 | let allReceiptsAreMatched = true 136 | for (let i = startCycle; i <= endCycle; i++) { 137 | // eslint-disable-next-line security/detect-object-injection 138 | const expectedReceipts = dataInfos.cycles[i][0] 139 | // eslint-disable-next-line security/detect-object-injection 140 | const receiptsIsMatched = dataInfos.cycles[i].every((receipts) => receipts === expectedReceipts) 141 | if (!receiptsIsMatched) { 142 | allReceiptsAreMatched = false 143 | console.log(`Receipts Count do not match in cycle ${i}`) 144 | } 145 | } 146 | if (allReceiptsAreMatched) 147 | console.log(`All receipts count of cycles between ${startCycle} - ${endCycle} are matched!`) 148 | } 149 | } 150 | 151 | function getRndInteger(min, max): number { 152 | return Math.floor(Math.random() * (max - min + 1)) + min 153 | } 154 | -------------------------------------------------------------------------------- /src/types/ajv/Receipts.ts: -------------------------------------------------------------------------------- 1 | import { addSchema } from '../../utils/serialization/SchemaHelpers'; 2 | import { AJVSchemaEnum } from '../enum/AJVSchemaEnum'; 3 | // import { 
schemaAccountsCopy } from './Accounts'; // Import the schema from Accounts.ts 4 | 5 | // Define the regex for IPv4 validation (if needed in nested objects) 6 | const ipv4Regex = /^(?:(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)\.){3}(?:25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)$/; 7 | 8 | // Define schemas for nested components 9 | const schemaProposal = { 10 | type: 'object', 11 | properties: { 12 | applied: { type: 'boolean' }, 13 | cant_preApply: { type: 'boolean' }, 14 | accountIDs: { type: 'array', items: { type: 'string' } }, 15 | beforeStateHashes: { type: 'array', items: { type: 'string' } }, 16 | afterStateHashes: { type: 'array', items: { type: 'string' } }, 17 | appReceiptDataHash: { type: 'string' }, 18 | txid: { type: 'string' } 19 | }, 20 | required: ['applied', 'cant_preApply', 'accountIDs', 'beforeStateHashes', 'afterStateHashes', 'appReceiptDataHash', 'txid'], 21 | additionalProperties: false 22 | }; 23 | 24 | const schemaSignature = { 25 | type: 'object', 26 | properties: { 27 | owner: { type: 'string' }, 28 | sig: { type: 'string' } 29 | }, 30 | required: ['owner', 'sig'], 31 | additionalProperties: false 32 | }; 33 | 34 | const schemaSignedReceipt = { 35 | type: 'object', 36 | properties: { 37 | proposal: schemaProposal, 38 | proposalHash: { type: 'string' }, 39 | signaturePack: { 40 | type: 'array', 41 | items: schemaSignature 42 | }, 43 | voteOffsets: { 44 | type: 'array', 45 | items: { type: 'integer' } 46 | }, 47 | sign: { type: 'object', ...schemaSignature }, 48 | txGroupCycle: { type: 'integer', minimum: 0 } 49 | }, 50 | required: ['proposal', 'proposalHash', 'signaturePack', 'voteOffsets'], 51 | additionalProperties: false 52 | }; 53 | 54 | const schemaGlobalTxReceipt = { 55 | type: 'object', 56 | properties: { 57 | signs: { 58 | type: 'array', 59 | items: schemaSignature 60 | }, 61 | tx: { 62 | type: 'object', 63 | properties: { 64 | address: { type: 'string' }, 65 | addressHash: { type: 'string' }, 66 | value: {}, 67 | when: { type: 'integer' }, 68 | 
source: { type: 'string' } 69 | }, 70 | required: ['address', 'addressHash', 'value', 'when', 'source'], 71 | additionalProperties: false 72 | }, 73 | txGroupCycle: { type: 'integer', minimum: 0 } 74 | }, 75 | required: ['signs', 'tx'], 76 | additionalProperties: false // Excludes `consensusGroup` by default 77 | }; 78 | 79 | 80 | const schemaAppReceiptData = { 81 | type: 'object', 82 | properties: { 83 | accountId: { type: 'string' }, 84 | data: { type: 'object', additionalProperties: true } 85 | }, 86 | required: ['data'], 87 | additionalProperties: true 88 | }; 89 | 90 | const schemaTx = { 91 | type: 'object', 92 | properties: { 93 | originalTxData: { type: 'object', additionalProperties: true }, 94 | txId: { type: 'string' }, 95 | timestamp: { type: 'integer', minimum: 0 } 96 | }, 97 | required: ['originalTxData', 'txId', 'timestamp'], 98 | additionalProperties: false 99 | }; 100 | 101 | // Define the main ArchiverReceipt schema 102 | const schemaArchiverReceipt = { 103 | type: 'object', 104 | properties: { 105 | tx: schemaTx, 106 | cycle: { type: 'integer', minimum: 0 }, 107 | signedReceipt: { oneOf: [schemaSignedReceipt, schemaGlobalTxReceipt] }, 108 | afterStates: { type: 'array', items: { $ref: AJVSchemaEnum.AccountsCopy } }, // Using imported schema 109 | beforeStates: { type: 'array', items: { $ref: AJVSchemaEnum.AccountsCopy } }, // Using imported schema 110 | appReceiptData: schemaAppReceiptData, 111 | executionShardKey: { type: 'string' }, 112 | globalModification: { type: 'boolean' } 113 | }, 114 | required: ['tx', 'cycle', 'signedReceipt', 'appReceiptData', 'executionShardKey', 'globalModification'], 115 | additionalProperties: false 116 | }; 117 | 118 | 119 | const schemaAppliedVote = { 120 | type: 'object', 121 | properties: { 122 | txid: { type: 'string' }, 123 | transaction_result: { type: 'boolean' }, 124 | account_id: { 125 | type: 'array', 126 | items: { type: 'string' } 127 | }, 128 | account_state_hash_after: { 129 | type: 'array', 130 | 
items: { type: 'string' } 131 | }, 132 | account_state_hash_before: { 133 | type: 'array', 134 | items: { type: 'string' } 135 | }, 136 | cant_apply: { type: 'boolean' }, 137 | node_id: { type: 'string' }, 138 | sign: schemaSignature, // Reference to schemaSignature 139 | app_data_hash: { type: 'string' } 140 | }, 141 | required: [ 142 | 'txid', 143 | 'transaction_result', 144 | 'account_id', 145 | 'account_state_hash_after', 146 | 'account_state_hash_before', 147 | 'cant_apply', 148 | 'node_id', 149 | 'sign', 150 | 'app_data_hash' 151 | ], 152 | additionalProperties: false 153 | }; 154 | 155 | const schemaConfirmOrChallengeMessage = { 156 | type: 'object', 157 | properties: { 158 | message: { type: 'string' }, 159 | nodeId: { type: 'string' }, 160 | appliedVote: schemaAppliedVote , 161 | sign: schemaSignature 162 | }, 163 | required: ['message', 'nodeId', 'appliedVote', 'sign'], // All properties are required 164 | additionalProperties: false 165 | }; 166 | 167 | 168 | // Define the main Receipt schema 169 | const schemaReceipt = { 170 | type: 'object', 171 | properties: { 172 | receiptId: { type: 'string' }, 173 | timestamp: { type: 'integer' }, 174 | applyTimestamp: { type: 'integer' }, 175 | ...schemaArchiverReceipt.properties 176 | }, 177 | required: ['receiptId', 'timestamp', 'applyTimestamp', ...schemaArchiverReceipt.required 178 | ], 179 | additionalProperties: false 180 | }; 181 | 182 | // Function to initialize schemas 183 | export function initReceipts(): void { 184 | addSchemaDependencies(); 185 | addSchemas(); 186 | } 187 | 188 | // Function to add schema dependencies 189 | function addSchemaDependencies(): void { 190 | // No external dependencies 191 | } 192 | 193 | // Function to register schemas 194 | function addSchemas(): void { 195 | // addSchema('ReceiptTx', schemaTx); 196 | addSchema(AJVSchemaEnum.ArchiverReceipt, schemaArchiverReceipt); 197 | addSchema(AJVSchemaEnum.Receipt, schemaReceipt); 198 | } 199 | 
-------------------------------------------------------------------------------- /src/archivedCycle/Gossip.ts: -------------------------------------------------------------------------------- 1 | import * as Crypto from '../Crypto' 2 | import * as StateMetaData from './StateMetaData' 3 | import * as State from '../State' 4 | import * as P2P from '../P2P' 5 | import { config } from '../Config' 6 | import * as Logger from '../Logger' 7 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 8 | 9 | interface HashItem { 10 | counter: number 11 | partitionHashes: { [key: string]: unknown } 12 | networkHash: string 13 | receiptMapHashes: { [key: string]: unknown } 14 | networkReceiptHash: string 15 | } 16 | 17 | interface Gossip extends StateMetadata { 18 | counter: number 19 | } 20 | 21 | interface GossipCounterItem { 22 | count: number 23 | gossip: Gossip 24 | } 25 | 26 | interface StateMetadata { 27 | stateHashes: HashItem[] 28 | receiptHashes: HashItem[] 29 | summaryHashes: HashItem[] 30 | } 31 | 32 | interface HashCollectorItem { 33 | counter: number 34 | stateHashes?: HashItem 35 | receiptHashes?: HashItem 36 | summaryHashes?: HashItem 37 | } 38 | 39 | const gossipCollector = new Map() 40 | 41 | export async function sendGossip(type: string, payload: Record): Promise { 42 | let archivers: State.ArchiverNodeInfo[] = [...State.activeArchivers] 43 | 44 | if (archivers.length === 0) return 45 | const gossipPayload = { 46 | type, 47 | data: payload, 48 | sender: config.ARCHIVER_PUBLIC_KEY, 49 | } 50 | 51 | archivers = archivers.sort( 52 | (a: State.ArchiverNodeInfo, b: State.ArchiverNodeInfo) => Number(a.publicKey) - Number(b.publicKey) 53 | ) 54 | 55 | // TODO: check if need to select random archivers instead of sending to all other archivers 56 | const recipients: State.ArchiverNodeInfo[] = archivers.filter( 57 | (a) => a.publicKey !== config.ARCHIVER_PUBLIC_KEY 58 | ) 59 | 60 | if (recipients.length === 0) { 61 | Logger.mainLogger.debug('There is no 
other archivers to send our gossip') 62 | return 63 | } 64 | 65 | try { 66 | Logger.mainLogger.debug( 67 | `GossipingIn ${type} request to these nodes: ${StringUtils.safeStringify( 68 | recipients.map((node) => node.ip + ':' + node.port + `/gossip-${type}`) 69 | )}` 70 | ) 71 | await tell(recipients, `gossip-${type}`, gossipPayload) 72 | } catch (ex) { 73 | Logger.mainLogger.debug(ex) 74 | Logger.mainLogger.debug('Fail to gossip') 75 | } 76 | } 77 | 78 | async function tell( 79 | nodes: State.ArchiverNodeInfo[], 80 | route: string, 81 | message: { [key: string]: unknown } 82 | ): Promise { 83 | let InternalTellCounter = 0 84 | const promises = [] 85 | for (const node of nodes) { 86 | InternalTellCounter++ 87 | if (config.VERBOSE) { 88 | Logger.mainLogger.debug(`InternalTellCounter: ${InternalTellCounter}`) 89 | } 90 | const url = `http://${node.ip}:${node.port}/${route}` 91 | try { 92 | const promise = P2P.postJson(url, message) 93 | promise.catch((err) => { 94 | Logger.mainLogger.error(`Unable to tell node ${node.ip}: ${node.port}`, err) 95 | }) 96 | promises.push(promise) 97 | } catch (e) { 98 | Logger.mainLogger.error('Error', e) 99 | } 100 | } 101 | try { 102 | await Promise.all(promises) 103 | } catch (err) { 104 | Logger.mainLogger.error('Network: ' + err) 105 | } 106 | } 107 | 108 | export function convertStateMetadataToHashArray(STATE_METATDATA: StateMetadata): HashCollectorItem[] { 109 | const hashCollector: Record = {} 110 | STATE_METATDATA.stateHashes.forEach((h: HashItem) => { 111 | if (!hashCollector[h.counter]) { 112 | hashCollector[h.counter] = { 113 | counter: h.counter, 114 | } 115 | } 116 | hashCollector[h.counter]['stateHashes'] = h 117 | }) 118 | STATE_METATDATA.receiptHashes.forEach((h: HashItem) => { 119 | if (!hashCollector[h.counter]) { 120 | hashCollector[h.counter] = { 121 | counter: h.counter, 122 | } 123 | } 124 | hashCollector[h.counter]['receiptHashes'] = h 125 | }) 126 | STATE_METATDATA.summaryHashes.forEach((h: HashItem) => { 127 | 
if (!hashCollector[h.counter]) { 128 | hashCollector[h.counter] = { 129 | counter: h.counter, 130 | } 131 | } 132 | hashCollector[h.counter]['summaryHashes'] = h 133 | }) 134 | return Object.values(hashCollector) 135 | } 136 | 137 | export function addHashesGossip(sender: string, gossip: Gossip): void { 138 | const counter = gossip.counter 139 | if (gossipCollector.has(counter)) { 140 | const existingGossips = gossipCollector.get(counter) 141 | // eslint-disable-next-line security/detect-object-injection 142 | existingGossips[sender] = gossip 143 | } else { 144 | const obj: Record = {} 145 | // eslint-disable-next-line security/detect-object-injection 146 | obj[sender] = gossip 147 | gossipCollector.set(counter, obj) 148 | } 149 | const totalGossip = gossipCollector.get(counter) 150 | if (totalGossip && Object.keys(totalGossip).length > 0.5 * State.activeArchivers.length) { 151 | setTimeout(() => { 152 | processGossip(counter) 153 | gossipCollector.delete(counter) 154 | }, 500) 155 | } 156 | } 157 | 158 | function processGossip(counter: number): void { 159 | Logger.mainLogger.debug('Processing gossips for counter', counter, gossipCollector.get(counter)) 160 | const gossips = gossipCollector.get(counter) 161 | if (!gossips) { 162 | return 163 | } 164 | const ourHashes = StateMetaData.StateMetaDataMap.get(counter) 165 | const gossipCounter: Record = {} 166 | for (const sender in gossips) { 167 | /* eslint-disable security/detect-object-injection */ 168 | const hashedGossip = Crypto.hashObj(gossips[sender]) 169 | if (!gossipCounter[hashedGossip]) { 170 | gossipCounter[hashedGossip] = { 171 | count: 1, 172 | gossip: gossips[sender], 173 | } 174 | // To count our StateMetaData also 175 | if (hashedGossip === Crypto.hashObj(ourHashes)) { 176 | gossipCounter[hashedGossip].count += 1 177 | } 178 | } else { 179 | gossipCounter[hashedGossip].count += 1 180 | /* eslint-enable security/detect-object-injection */ 181 | } 182 | } 183 | const gossipWithHighestCount: Gossip[] = [] 
184 | let highestCount = 0 185 | let hashWithHighestCounter: string 186 | for (const key in gossipCounter) { 187 | // eslint-disable-next-line security/detect-object-injection 188 | if (gossipCounter[key].count > highestCount) { 189 | // eslint-disable-next-line security/detect-object-injection 190 | gossipWithHighestCount.push(gossipCounter[key].gossip) 191 | hashWithHighestCounter = key 192 | // eslint-disable-next-line security/detect-object-injection 193 | highestCount = gossipCounter[key].count 194 | } 195 | } 196 | 197 | if (!ourHashes) { 198 | Logger.mainLogger.error(`Unable to find our stored statemetadata hashes for counter ${counter}`) 199 | return 200 | } 201 | if (hashWithHighestCounter && hashWithHighestCounter !== Crypto.hashObj(ourHashes)) { 202 | if (gossipWithHighestCount.length === 0) { 203 | return 204 | } 205 | Logger.mainLogger.error('our hash is different from other archivers hashes. Storing the correct hashes') 206 | Logger.mainLogger.debug('gossipWithHighestCount', gossipWithHighestCount[0].summaryHashes) 207 | StateMetaData.processStateMetaData({ gossipWithHighestCount }) 208 | StateMetaData.replaceDataSender(StateMetaData.currentDataSender) 209 | } 210 | } 211 | -------------------------------------------------------------------------------- /test/unit/src/routes/tickets.test.ts: -------------------------------------------------------------------------------- 1 | // Mock modules before importing routes 2 | jest.mock('fs', () => ({ 3 | readFileSync: jest.fn().mockReturnValue(JSON.stringify([])) 4 | })); 5 | 6 | jest.mock('../../../../src/Logger', () => ({ 7 | mainLogger: { 8 | debug: jest.fn(), 9 | error: jest.fn(), 10 | info: jest.fn() 11 | } 12 | })); 13 | 14 | jest.mock('../../../../src/Config', () => ({ 15 | config: { 16 | tickets: { 17 | allowedTicketSigners: { 18 | "0x891DF765C855E9848A18Ed18984B9f57cb3a4d47": 3 // HIGH = 3 19 | }, 20 | minSigRequired: 1, 21 | requiredSecurityLevel: 3 22 | } 23 | } 24 | })); 25 | 26 | // Import after 
mocks 27 | import { FastifyInstance, FastifyReply, FastifyRequest } from 'fastify' 28 | import { readFileSync } from 'fs' 29 | import { ticketsRouter, ticketCache } from '../../../../src/routes/tickets' 30 | import * as path from 'path'; 31 | 32 | // Updated mock data to match schema 33 | const mockValidTickets = [{ 34 | data: [{ address: "0x37a9FCf5628B1C198A01C9eDaB0BF5C4d453E928" }], 35 | sign: [{ 36 | owner: "0x891DF765C855E9848A18Ed18984B9f57cb3a4d47", 37 | sig: "0x5f1aad2caa2cca1f725715ed050b1928527f0c4eb815fb282fad113ca866a63568d9c003b5310e16de67103521bf284fda10728b4fffc66055c55fde5934438d1b" 38 | }], 39 | type: "silver" 40 | }]; 41 | 42 | const TICKETS_PATH = path.resolve(process.cwd(), 'static', 'tickets.json'); 43 | /* createMockReply: builds a stubbed FastifyReply whose code() returns an object exposing the shared send mock, so each test can assert on both the status code and the payload sent after it. */ 44 | function createMockReply() { 45 | const mockSend = jest.fn(); 46 | const mockCode = jest.fn().mockImplementation((code) => ({ 47 | send: mockSend 48 | })); 49 | 50 | return { 51 | reply: { 52 | send: mockSend, 53 | code: mockCode, 54 | header: jest.fn().mockReturnThis(), 55 | status: jest.fn().mockReturnThis(), 56 | type: jest.fn().mockReturnThis(), 57 | } as unknown as FastifyReply, 58 | send: mockSend, 59 | code: mockCode 60 | }; 61 | } 62 | 63 | describe('Ticket Routes', () => { 64 | let mockFastify: FastifyInstance; 65 | let routes: { [key: string]: Function } = {}; /* NOTE(review): routes is scoped to the describe block and never cleared in beforeEach — handlers from a previous test persist until re-registered; benign today, but worth resetting explicitly. */ 66 | 67 | beforeEach(() => { 68 | jest.resetModules(); /* NOTE(review): resetModules() has no effect on the statically imported tickets module above — the same module instance is reused across tests. */ 69 | jest.clearAllMocks(); 70 | 71 | // Reset the cache 72 | (ticketCache as any) = null; /* NOTE(review): assigning to an imported binding is illegal in native ESM; this only "works" through ts-jest/CJS interop and may not actually clear the route module's internal cache — prefer an exported reset helper on the tickets module. TODO confirm. */ 73 | 74 | // Create mock Fastify instance 75 | mockFastify = { 76 | get: jest.fn((path: string, handler: Function) => { 77 | routes[path] = handler; 78 | }) 79 | } as unknown as FastifyInstance; 80 | 81 | // Initialize the router with valid tickets first 82 | (readFileSync as jest.Mock).mockReturnValue(JSON.stringify(mockValidTickets)); 83 | 84 | ticketsRouter(mockFastify, {}, (err) => { 85 | if (err) throw err; 86 | }); 87 | }); 88 | 89 | describe('GET /', () => { 90 | it('should handle file not found error', async () => { 91 |
/* Error-path tests: each clears the module cache and forces readFileSync to fail in a specific way, then asserts the mapped HTTP status and error body. */ const { reply, send, code } = createMockReply(); 92 | 93 | (ticketCache as any) = null; 94 | 95 | (readFileSync as jest.Mock).mockImplementationOnce(() => { 96 | const error: NodeJS.ErrnoException = new Error('ENOENT: no such file'); 97 | error.code = 'ENOENT'; 98 | throw error; 99 | }); 100 | 101 | await routes['/']( 102 | {} as FastifyRequest, 103 | reply 104 | ); 105 | 106 | expect(code).toHaveBeenCalledWith(500); 107 | expect(send).toHaveBeenCalledWith({ 108 | code: 'TICKETS_FILE_NOT_ACCESSIBLE', 109 | error: `Unable to access tickets configuration: ${TICKETS_PATH}` 110 | }); 111 | }); 112 | 113 | it('should handle invalid JSON format', async () => { 114 | const { reply, send, code } = createMockReply(); 115 | 116 | (ticketCache as any) = null; 117 | 118 | (readFileSync as jest.Mock).mockReturnValueOnce('invalid json'); 119 | 120 | await routes['/']( 121 | {} as FastifyRequest, 122 | reply 123 | ); 124 | 125 | expect(code).toHaveBeenCalledWith(400); 126 | expect(send).toHaveBeenCalledWith({ 127 | code: 'INVALID_TICKETS_DATA', 128 | error: 'Invalid tickets configuration data' 129 | }); 130 | }); 131 | 132 | it('should handle non-array tickets data', async () => { 133 | const { reply, send, code } = createMockReply(); 134 | 135 | (ticketCache as any) = null; 136 | 137 | (readFileSync as jest.Mock).mockReturnValueOnce(JSON.stringify({ not: "an array" })); 138 | 139 | await routes['/']( 140 | {} as FastifyRequest, 141 | reply 142 | ); 143 | 144 | expect(code).toHaveBeenCalledWith(400); 145 | expect(send).toHaveBeenCalledWith({ 146 | code: 'INVALID_TICKETS_FORMAT', 147 | error: 'Invalid tickets configuration format' 148 | }); 149 | }); 150 | }); 151 | 152 | describe('GET /:type', () => { 153 | it('should use cached tickets for type lookup', async () => { 154 | const { reply, send } = createMockReply(); 155 | 156 | await routes['/:type']( 157 | { params: { type: 'silver' } } as unknown as FastifyRequest, 158 | reply 159 | ); 160 | 161 |
expect(readFileSync).toHaveBeenCalledTimes(1); 162 | expect(send).toHaveBeenCalledWith(mockValidTickets[0]); 163 | }); 164 | }); 165 | /* Cache TTL tests: a lastRead 70s in the past must force a re-read of tickets.json; a freshly populated cache must not. NOTE(review): the TTL (presumably 60s) is only implied by the 70s fixture — confirm against the route module. */ 166 | describe('Cache invalidation', () => { 167 | it('should reload tickets after TTL expires', async () => { 168 | const { reply, send } = createMockReply(); 169 | 170 | // First call to populate cache 171 | await routes['/']( 172 | {} as FastifyRequest, 173 | reply 174 | ); 175 | 176 | // Reset mock 177 | (readFileSync as jest.Mock).mockClear(); 178 | 179 | // Force cache invalidation 180 | (ticketCache as any) = { 181 | tickets: mockValidTickets, 182 | lastRead: Date.now() - (70 * 1000) // 70 seconds ago 183 | }; 184 | 185 | // Second call should reload due to expired cache 186 | await routes['/']( 187 | {} as FastifyRequest, 188 | reply 189 | ); 190 | 191 | expect(readFileSync).toHaveBeenCalledTimes(1); 192 | }); 193 | 194 | it('should use cache within TTL', async () => { 195 | const { reply, send } = createMockReply(); 196 | 197 | // First call to populate cache 198 | await routes['/']( 199 | {} as FastifyRequest, 200 | reply 201 | ); 202 | 203 | // Reset mock 204 | (readFileSync as jest.Mock).mockClear(); 205 | 206 | // Second call within TTL should use cache 207 | await routes['/']( 208 | {} as FastifyRequest, 209 | reply 210 | ); 211 | 212 | expect(readFileSync).not.toHaveBeenCalled(); 213 | }); 214 | }); 215 | }); -------------------------------------------------------------------------------- /src/test/api/archivedCycles.ts: -------------------------------------------------------------------------------- 1 | import * as Crypto from '../../Crypto' 2 | import { config } from '../../Config' 3 | import { calculateNetworkHash } from '../../archivedCycle/StateMetaData' 4 | import { getJson } from '../../P2P' 5 | 6 | Crypto.setCryptoHashKey(config.ARCHIVER_HASH_KEY) 7 | /* Data: shape of a partition-map entry; length arrives as a string here. */ 8 | interface Data { 9 | length: string 10 | } 11 | 12 | export async function queryArchivedCycles( 13 | ip: string, 14 | port: string, 15 | count: number, 16 |
start: number, 17 | end: number 18 | ): Promise { /* NOTE(review): the return type's generic argument (likely Promise<void>) appears to have been stripped in this dump — restore from the original source. */ 19 | let res: unknown = await getJson(`http://${ip}:${port}/full-archive/${count}`) 20 | console.log(res) 21 | validateArchivedCycle(res) 22 | 23 | res = await getJson(`http://${ip}:${port}/full-archive?start=${start}&end=${end}`) 24 | console.log(res) 25 | validateArchivedCycle(res) 26 | } 27 | /* Checks every archivedCycle entry in the response against expectedValue's shape and logs any missing fields. */ 28 | const validateArchivedCycle = (res): void => { 29 | const archivedCycles = res['archivedCycles'] 30 | 31 | for (let i = 0; i < archivedCycles.length; i++) { 32 | // console.log( 33 | // archivedCycles[i].receipt 34 | // ? archivedCycles[i].receipt.partitionTxs[0] 35 | // : 'no receipt' 36 | // ) 37 | // eslint-disable-next-line security/detect-object-injection 38 | const result = valueCheck(archivedCycles[i]) 39 | if (result.length > 0) { 40 | console.log( 41 | // eslint-disable-next-line security/detect-object-injection 42 | `Archived Cyle ${archivedCycles[i].cycleRecord.counter} doesn't have these values --> ${result}` 43 | ) 44 | } /* NOTE(review): "Cyle" typo in the log message above (runtime string — intentionally left unchanged). */ 45 | } 46 | } 47 | /* Reference snapshot captured from a real cycle (counter 80); used only for key-presence/typeof comparison in valueCheck below. */ 48 | const expectedValue = { 49 | cycleMarker: 'ff4a09332190573b706add1a8e5cd513adf85acfea7aa39cc94383ce2e3620a8', 50 | cycleRecord: { 51 | activated: [], 52 | activatedPublicKeys: [], 53 | active: 5, 54 | apoptosized: [], 55 | counter: 80, 56 | desired: 2000, 57 | duration: 16, 58 | expired: 5, 59 | joined: [], 60 | joinedArchivers: [], 61 | joinedConsensors: [], 62 | leavingArchivers: [], 63 | lost: [], 64 | marker: 'ff4a09332190573b706add1a8e5cd513adf85acfea7aa39cc94383ce2e3620a8', 65 | networkDataHash: [[Object]], 66 | networkId: '411b3e07ac4e2f1faadb081e9eea762c943699c4691bd011588398f533ae1ed1', 67 | networkReceiptHash: [[Object]], 68 | networkStateHash: '47de6951e9907a2e187ecd077c82ea7e568745b48a3616b1d7f7c48a363f683c', 69 | networkSummaryHash: [[Object]], 70 | previous: 'f4944a99bdf7b8ce9b7244d6a4e6eeed92ba778156478927590d46d8f41e8243', 71 | refreshedArchivers: [[Object]], 72 | refreshedConsensors: [[Object], [Object]], 73 | refuted: [], 74 | removed: [], 75 |
returned: [], 76 | safetyMode: false, 77 | safetyNum: 0, 78 | start: 1650372600, 79 | syncing: 0, 80 | }, 81 | data: { 82 | networkHash: '4bed936c3b08bd7b23e03b1fdefa32e7f01bdfa1fdd288617ed70feebf4feda2', 83 | parentCycle: 'ff4a09332190573b706add1a8e5cd513adf85acfea7aa39cc94383ce2e3620a8', 84 | partitionHashes: { 85 | '0': 'aef4a946d7268af2f12604d9b6e8e9fd4c5d528cc6b37e1c60ec4225b100ce34', 86 | '1': '9e3cf6e8cc73349cdd1875dbaa43e2be2d468bbc90a99e748f3af62625962332', 87 | '2': '0ee512ffed3cd68cc4695a9ed332e57be8bd4cc4565d61393fa4e2f19286af3d', 88 | '3': '9ee75e6b7d6eb50fe6cbab806a3dcd3b9bfddf54b51ad0afa95dd2d9a6b96c79', 89 | '4': '18d83747e4d9bbc8acfdebcaca8d1892461d574fe95f10d8b492b7ef94afd9c6', 90 | '-1': '11800a39aecdafc33f365c772529ee9b823415232acd252c660fc8256d41e21c', 91 | }, 92 | }, 93 | receipt: { 94 | networkHash: '97ab6b4502137e30788d97659d232e8c1077b394323402fbdb4512c76be6e0c7', 95 | parentCycle: 'ff4a09332190573b706add1a8e5cd513adf85acfea7aa39cc94383ce2e3620a8', 96 | partitionHashes: { 97 | '0': 'de8723bf28eb9ae6d92f7b26b12b4d77edc61362c6735b942c45eb89a1271ed9', 98 | '1': 'afaff32d4929857416fe17a088be9db717738dfba0103280a579b6a4dd716ba7', 99 | '2': 'f97b663ea6485283cbf5b9fe4f7061f4a4388237db4e204a4879dd4568b04a7c', 100 | '3': '439fd0a12c08861408aafbd2db15cfc546bb51fd80737bf4d86f2f587fd617ea', 101 | '4': 'bc073ecbefad4cc13200566f5753c0154da044a58c2d84a5d25f130a3d7987e8', 102 | '-1': '573808deeba726aaec51c2fe4df03130516189de016b9c53448261a5b5c88b1d', 103 | }, 104 | partitionMaps: { '0': {}, '1': {}, '2': {}, '3': {}, '4': {} }, 105 | partitionTxs: { '0': {}, '1': {}, '2': {}, '3': {}, '4': {} }, 106 | }, 107 | summary: { 108 | networkHash: '78c7d4bfca718a92b57a31832c1c8460f43dee960b5f4cf4bbdae3bcce2deb6d', 109 | parentCycle: 'ff4a09332190573b706add1a8e5cd513adf85acfea7aa39cc94383ce2e3620a8', 110 | partitionBlobs: {}, 111 | partitionHashes: {}, 112 | }, 113 | } 114 | /* valueCheck: returns the list of top-level keys missing from obj and, as a side effect, logs network/partition hash mismatches for the data/receipt/summary sections. */ 115 | const valueCheck = (obj): string[] => { 116 | const valueNotFound = [] 117 | for
(const i of Object.keys(expectedValue)) { 118 | // console.log(i, typeof obj[i], typeof expectedValue[i]) 119 | //eslint-disable-next-line security/detect-object-injection 120 | if (!Object.prototype.hasOwnProperty.call(obj, i) && typeof obj[i] !== typeof expectedValue[i]) { 121 | valueNotFound.push(i) 122 | } /* NOTE(review): with && only absent keys are flagged (typeof undefined differs from the expected typeof); a present key of the wrong type is never reported — likely intended ||. Confirm. */ 123 | 124 | if (i === 'data' || i === 'receipt' || i === 'summary') { 125 | //eslint-disable-next-line security/detect-object-injection 126 | if (obj[i]) { 127 | //eslint-disable-next-line security/detect-object-injection 128 | const networkHash = obj[i].networkHash 129 | // console.log(obj[i].networkHash, obj[i].partitionHashes) 130 | //eslint-disable-next-line security/detect-object-injection 131 | const calculatedHash = calculateNetworkHash(obj[i].partitionHashes) 132 | if (networkHash !== calculatedHash) { 133 | console.log( 134 | `The specified networkHash and calculatedHash of ${i} are different in Archived Cyle ${obj.cycleRecord.counter}` 135 | ) 136 | } 137 | if (i === 'receipt') { 138 | //eslint-disable-next-line security/detect-object-injection 139 | for (const partitionId in Object.values(obj[i].partitionMaps)) { /* NOTE(review): for..in over Object.values() yields array indices '0','1',… — a key such as '-1' is reached only by positional accident; probably meant to iterate the object's own keys. Confirm. */ 140 | // console.log( 141 | // obj[i].partitionMaps[partitionId], 142 | // obj[i].partitionTxs[partitionId] 143 | // ) 144 | let txCount = 0 145 | //eslint-disable-next-line security/detect-object-injection 146 | for (const data of Object.values(obj[i].partitionMaps[partitionId])) { 147 | const a: Data = data as Data 148 | txCount += parseInt(a.length) /* NOTE(review): parseInt without an explicit radix; length is a string per the Data interface. */ 149 | } 150 | const partitionBlock = { 151 | cycle: obj.cycleRecord.counter, 152 | partition: parseInt(partitionId), 153 | //eslint-disable-next-line security/detect-object-injection 154 | receiptMap: obj[i].partitionMaps[partitionId], 155 | //eslint-disable-next-line security/detect-object-injection 156 | txsMap: obj[i].partitionTxs[partitionId], 157 | txCount: txCount, 158 | } 159 | // console.log(partitionBlock) 160 | // console.log( 161 | // obj[i].partitionHashes[partitionId], 162 |
Crypto.hashObj(partitionBlock) 163 | // ) 164 | //eslint-disable-next-line security/detect-object-injection 165 | if (obj[i].partitionHashes[partitionId] !== Crypto.hashObj(partitionBlock)) { 166 | console.log( 167 | `The specified partitionHash and calculatedHash of ${i} are different in Archived Cyle ${obj.cycleRecord.counter}, ${partitionId}` 168 | ) 169 | } 170 | } 171 | } 172 | } 173 | } 174 | } 175 | return valueNotFound 176 | } 177 | -------------------------------------------------------------------------------- /src/shardeum/verifyAppReceiptData.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from '../Crypto' 2 | import { ArchiverReceipt, Receipt, SignedReceipt } from '../dbstore/receipts' 3 | import { Utils as StringUtils } from '@shardeum-foundation/lib-types' 4 | 5 | export type ShardeumReceipt = object & { 6 | amountSpent: string 7 | readableReceipt: { status: number } 8 | } 9 | /* Verifies the app-layer (Shardeum) receipt against the signed proposal: sanity-checks amountSpent/readableReceipt, cross-checks state-hash changes for no-op receipts, dedupes against an already-stored receipt, then recomputes the appReceiptData hash. failedReasons / nestedCounterMessages are out-params appended to for the caller. */ 10 | export const verifyAppReceiptData = async ( 11 | receipt: ArchiverReceipt, 12 | existingReceipt?: Receipt | null, 13 | failedReasons = [], 14 | nestedCounterMessages = [] 15 | ): Promise<{ valid: boolean; needToSave: boolean }> => { 16 | let result = { valid: false, needToSave: false } 17 | const { appReceiptData, globalModification } = receipt 18 | if (globalModification) return { valid: true, needToSave: true } /* global txs are always stored */ 19 | const signedReceipt = receipt.signedReceipt as SignedReceipt 20 | const newShardeumReceipt = appReceiptData.data as ShardeumReceipt 21 | if (!newShardeumReceipt.amountSpent || !newShardeumReceipt.readableReceipt) { 22 | failedReasons.push(`appReceiptData missing amountSpent or readableReceipt`) 23 | nestedCounterMessages.push(`appReceiptData missing amountSpent or readableReceipt`) 24 | return result 25 | } 26 | const { accountIDs, afterStateHashes, beforeStateHashes } = signedReceipt.proposal 27 | if ( 28 | newShardeumReceipt.amountSpent === '0x0' && 29 | newShardeumReceipt.readableReceipt.status
/* A receipt that spent nothing and failed should not have touched account state. */ === 0 && 30 | afterStateHashes.length > 0 31 | ) { 32 | for (let i = 0; i < accountIDs.length; i++) { 33 | if ( 34 | // eslint-disable-next-line security/detect-object-injection 35 | !beforeStateHashes[i] || 36 | // eslint-disable-next-line security/detect-object-injection 37 | !afterStateHashes[i] 38 | ) { 39 | failedReasons.push( 40 | `The account state hash before or after is missing in the receipt! ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 41 | ) 42 | nestedCounterMessages.push(`The account state hash before or after is missing in the receipt!`) 43 | } 44 | if ( 45 | // eslint-disable-next-line security/detect-object-injection 46 | beforeStateHashes[i] !== 47 | // eslint-disable-next-line security/detect-object-injection 48 | afterStateHashes[i] 49 | ) { 50 | failedReasons.push( 51 | `The receipt has 0 amountSpent and status 0 but has state updated accounts! ${receipt.tx.txId} , ${receipt.cycle} , ${receipt.tx.timestamp}` 52 | ) 53 | nestedCounterMessages.push( 54 | `The receipt has 0 amountSpent and status 0 but has state updated accounts!` 55 | ) 56 | break 57 | } 58 | } 59 | } /* NOTE(review): the mismatches above only append reasons — result is still set valid=true on the next line; confirm these checks are intended to be advisory only. */ 60 | result = { valid: true, needToSave: false } 61 | if (existingReceipt && existingReceipt.timestamp !== receipt.tx.timestamp) { 62 | const existingShardeumReceipt = existingReceipt.appReceiptData.data as ShardeumReceipt 63 | /** 64 | * E: existing receipt, N: new receipt, X: any value 65 | * E: status = 0, N: status = 1, E: amountSpent = 0, N: amountSpent = X, needToSave = true 66 | * E: status = 0, N: status = 1, E: amountSpent > 0, N: amountSpent > 0, needToSave = false (success and failed receipts with gas charged) 67 | * E: status = 0, N: status = 0, E: amountSpent = 0, N: amountSpent = 0, needToSave = false 68 | * E: status = 0, N: status = 0, E: amountSpent = 0, N: amountSpent > 0, needToSave = true 69 | * E: status = 0, N: status = 0, E: amountSpent > 0, N: amountSpent = 0, needToSave = false 70 | * E: status = 0, N: status = 0, E: amountSpent > 0, N:
amountSpent > 0, needToSave = false (both failed receipts with gas charged) 71 | * E: status = 1, N: status = 0, E: amountSpent = X, N: amountSpent = X, needToSave = false 72 | * E: status = 1, N: status = 1, E: amountSpent = X, N: amountSpent = X, needToSave = false (duplicate success receipt) 73 | * 74 | **/ 75 | // Added only logging of unexpected cases and needToSave = true cases ( check `else` condition ) 76 | if (existingShardeumReceipt.readableReceipt.status === 0) { 77 | if (newShardeumReceipt.readableReceipt.status === 1) { 78 | if (existingShardeumReceipt.amountSpent !== '0x0') { 79 | failedReasons.push( 80 | `Success and failed receipts with gas charged`, 81 | StringUtils.safeStringify(existingReceipt), 82 | StringUtils.safeStringify(receipt) 83 | ) 84 | } else result = { valid: true, needToSave: true } // Success receipt 85 | } else { 86 | if (existingShardeumReceipt.amountSpent !== '0x0' && newShardeumReceipt.amountSpent !== '0x0') { 87 | failedReasons.push( 88 | `Both failed receipts with gas charged`, 89 | StringUtils.safeStringify(existingReceipt), 90 | StringUtils.safeStringify(receipt) 91 | ) 92 | } else if (newShardeumReceipt.amountSpent !== '0x0') { 93 | // Failed receipt with gas charged 94 | result = { valid: true, needToSave: true } 95 | } 96 | } 97 | } else if (newShardeumReceipt.readableReceipt.status === 1) { 98 | failedReasons.push( 99 | `Duplicate success receipt`, 100 | StringUtils.safeStringify(existingReceipt), 101 | StringUtils.safeStringify(receipt) 102 | ) 103 | } 104 | // } 105 | } else result = { valid: true, needToSave: true } /* no existing receipt, or an existing receipt with the same timestamp → treat as new and save */ 106 | 107 | if (!validateAppReceiptData(appReceiptData, failedReasons, nestedCounterMessages)) { 108 | result = { valid: false, needToSave: false } 109 | return result 110 | } 111 | /* validateAppReceiptData (above) normalized byte fields in place, so this hash is computed over the canonical form — presumably matching what the proposer hashed; confirm against the proposer. */ 112 | const calculatedAppReceiptDataHash = calculateAppReceiptDataHash(appReceiptData) 113 | 114 | if (calculatedAppReceiptDataHash !== signedReceipt.proposal.appReceiptDataHash) { 115 | failedReasons.push( 116 |
`appReceiptData hash mismatch: ${calculatedAppReceiptDataHash} != ${signedReceipt.proposal.appReceiptDataHash}` 117 | ) 118 | nestedCounterMessages.push(`appReceiptData hash mismatch`) 119 | result = { valid: false, needToSave: false } 120 | } 121 | return result 122 | } 123 | /* Converts serialized byte objects (receipt.bitvector, log entries) back into Uint8Array IN PLACE; returns false (and records reasons) if the shape is unusable. The in-place mutation is load-bearing for the hash computed by the caller afterwards. */ const validateAppReceiptData = (appReceiptData: any, failedReasons = [], nestedCounterMessages = []): boolean => { 124 | try { 125 | if (appReceiptData.data && appReceiptData.data.receipt) { 126 | if (appReceiptData.data.receipt.bitvector) { 127 | appReceiptData.data.receipt.bitvector = Uint8Array.from( 128 | Object.values(appReceiptData.data.receipt.bitvector) 129 | ) 130 | } 131 | if (appReceiptData.data.receipt.logs && appReceiptData.data.receipt.logs.length > 0) { 132 | appReceiptData.data.receipt.logs = appReceiptData.data.receipt.logs.map((log) => { 133 | return log.map((log1) => { 134 | if (Array.isArray(log1)) { 135 | return log1.map((log2) => { 136 | log2 = Uint8Array.from(Object.values(log2)) 137 | return log2 138 | }) 139 | } else { 140 | log1 = Uint8Array.from(Object.values(log1)) 141 | return log1 142 | } 143 | }) 144 | }) 145 | } 146 | } 147 | return true 148 | } catch (err) { 149 | console.error(`validateAppReceiptData error: ${err}`) 150 | failedReasons.push(`validateAppReceiptData error: ${err}`) 151 | nestedCounterMessages.push(`validateAppReceiptData error`) 152 | return false 153 | } 154 | } 155 | 156 | // Use validateAppReceiptData to ensure appReceiptData is valid before calculating its hash with calculateAppReceiptDataHash 157 | const calculateAppReceiptDataHash = (appReceiptData: any): string => { 158 | return crypto.hashObj(appReceiptData) 159 | } -------------------------------------------------------------------------------- /src/Data/CycleParser.ts: -------------------------------------------------------------------------------- 1 | import * as NodeList from '../NodeList' 2 | import { P2P } from '@shardeum-foundation/lib-types' 3 | 4 | export enum NodeStatus { 5 |
ACTIVE = 'active', 6 | SYNCING = 'syncing', 7 | REMOVED = 'removed', 8 | } 9 | /* NOTE(review): generic type arguments (e.g. on OptionalExceptFor, Set, Map, Iterable, IteratorResult below) appear to have been stripped from this dump — restore from the original source. */ 10 | type OptionalExceptFor = Partial & Pick 11 | 12 | export interface Node extends NodeList.JoinedConsensor { 13 | curvePublicKey: string 14 | status: NodeStatus 15 | } 16 | 17 | export type Update = OptionalExceptFor 18 | 19 | export interface Change { 20 | added: NodeList.JoinedConsensor[] // order joinRequestTimestamp [OLD, ..., NEW] 21 | removed: Array // order doesn't matter 22 | updated: Update[] // order doesn't matter 23 | } 24 | /* Lazy reverse iterator over `thing`; non-array iterables are materialized first. */ 25 | export function reversed(thing: Iterable): Iterable { 26 | const arr = Array.isArray(thing) ? thing : Array.from(thing) 27 | let i = arr.length - 1 28 | 29 | const reverseIterator = { 30 | next: (): IteratorResult => { 31 | const done = i < 0 32 | // eslint-disable-next-line security/detect-object-injection 33 | const value = done ? undefined : arr[i] 34 | i-- 35 | return { value, done } 36 | }, 37 | } 38 | return { 39 | [Symbol.iterator]: () => reverseIterator, 40 | } 41 | } 42 | /* Squashes a stream of Changes fed NEWEST-to-OLDEST: a removed id is never re-added, only the newest update per id survives, and added entries are unshifted so final.added keeps OLD→NEW join order. */ 43 | export class ChangeSquasher { 44 | final: Change 45 | removedIds: Set 46 | seenUpdates: Map 47 | addedIds: Set 48 | constructor() { 49 | this.final = { 50 | added: [], 51 | removed: [], 52 | updated: [], 53 | } 54 | this.addedIds = new Set() 55 | this.removedIds = new Set() 56 | this.seenUpdates = new Map() 57 | } 58 | 59 | addChange(change: Change): void { 60 | for (const id of change.removed) { 61 | // Ignore if id is already removed 62 | if (this.removedIds.has(id)) continue 63 | // Mark this id as removed 64 | this.removedIds.add(id) 65 | } 66 | 67 | for (const update of change.updated) { 68 | // Ignore if update.id is already removed 69 | if (this.removedIds.has(update.id)) continue 70 | // Skip if it's already seen in the update 71 | if (this.seenUpdates.has(update.id)) continue 72 | // Mark this id as updated 73 | this.seenUpdates.set(update.id, update) 74 | // console.log('seenUpdates', this.seenUpdates, update) 75 | } 76 | 77 | for (const joinedConsensor of
reversed(change.added)) { 78 | // Ignore if it's already been added 79 | if (this.addedIds.has(joinedConsensor.id)) continue 80 | 81 | // Ignore if joinedConsensor.id is already removed 82 | if (this.removedIds.has(joinedConsensor.id)) { 83 | continue 84 | } 85 | // Check if this id has updates 86 | const update = this.seenUpdates.get(joinedConsensor.id) 87 | if (update) { 88 | // If so, put them into final.updated 89 | this.final.updated.unshift(update) 90 | this.seenUpdates.delete(joinedConsensor.id) 91 | } 92 | // Add joinedConsensor to final.added 93 | this.final.added.unshift(joinedConsensor) 94 | // Mark this id as added 95 | this.addedIds.add(joinedConsensor.id) 96 | } 97 | } 98 | } 99 | /* Translates one cycle record into a Change: activations become status updates, refreshed consensors are added/updated, joinedConsensors become adds, apoptosized/removed/appRemoved become removals. */ 100 | export function parseRecord(record: P2P.CycleCreatorTypes.CycleRecord): Change { 101 | // For all nodes described by activated, make an update to change their status to active 102 | const activated = record.activated.map((id: string) => ({ 103 | id, 104 | activeTimestamp: record.start, 105 | status: NodeStatus.ACTIVE, 106 | })) 107 | 108 | const refreshAdded: Change['added'] = [] 109 | const refreshUpdated: Change['updated'] = [] 110 | for (const refreshed of record.refreshedConsensors) { 111 | // const node = NodeList.nodes.get(refreshed.id) 112 | const node = NodeList.getNodeInfoById(refreshed.id) as NodeList.JoinedConsensor 113 | if (node) { 114 | // If it's in our node list, we update its counterRefreshed 115 | // (IMPORTANT: update counterRefreshed only if its greater than ours) 116 | if (record.counter > node.counterRefreshed) { 117 | refreshUpdated.push({ 118 | id: refreshed.id, 119 | counterRefreshed: record.counter, 120 | }) 121 | } 122 | } else { 123 | // If it's not in our node list, we add it...
124 | refreshAdded.push(NodeList.fromP2PTypesNode(refreshed)) 125 | // and immediately update its status to ACTIVE 126 | // (IMPORTANT: update counterRefreshed to the records counter) 127 | refreshUpdated.push({ 128 | id: refreshed.id, 129 | status: NodeStatus.ACTIVE, 130 | counterRefreshed: record.counter, 131 | }) 132 | } 133 | } 134 | // Logger.mainLogger.debug('parseRecord', record.counter, { 135 | // added: [...record.joinedConsensors], 136 | // removed: [...record.apoptosized], 137 | // updated: [...activated, ...refreshUpdated], 138 | // }) 139 | 140 | const added = (record.joinedConsensors || []).map((joinedConsensor: P2P.JoinTypes.JoinedConsensor) => 141 | NodeList.fromP2PTypesJoinedConsensor(joinedConsensor) 142 | ) 143 | /* NOTE(review): refreshAdded is populated above but never included in the returned Change — refreshed consensors missing from our node list get only a status update, no add; likely intended added: [...added, ...refreshAdded]. Confirm. */ 144 | return { 145 | added: [...added], 146 | removed: [...record.apoptosized, ...record.removed, ...record.appRemoved], 147 | updated: [...activated, ...refreshUpdated], 148 | } 149 | } 150 | 151 | export function parse(record: P2P.CycleCreatorTypes.CycleRecord): Change { 152 | const changes = parseRecord(record) 153 | // const mergedChange = deepmerge.all(changes) 154 | // return mergedChange 155 | return changes 156 | } 157 | /* Applies a Change to the global NodeList: joiners are grouped by cycleJoined and added as SYNCING, then every updated id is flipped to ACTIVE; removals are intentionally skipped (see the commented block below). */ 158 | export function applyNodeListChange(change: Change): void { 159 | // console.log('change', change) 160 | if (change.added.length > 0) { 161 | const nodesBycycleJoined: { [cycleJoined: number]: NodeList.JoinedConsensor[] } = {} 162 | for (const node of change.added) { 163 | const joinedConsensor: NodeList.JoinedConsensor = node 164 | const consensorInfo: object = { 165 | ip: joinedConsensor.externalIp, 166 | port: joinedConsensor.externalPort, 167 | publicKey: joinedConsensor.publicKey, 168 | id: joinedConsensor.id, 169 | } /* NOTE(review): consensorInfo carries only ip/port/publicKey/id yet is stored in a JoinedConsensor[] bucket — typed as `object` to sidestep the mismatch; confirm NodeList.addNodes needs only these four fields. */ 170 | if (!nodesBycycleJoined[joinedConsensor.cycleJoined]) { 171 | nodesBycycleJoined[joinedConsensor.cycleJoined] = [consensorInfo] 172 | } else nodesBycycleJoined[joinedConsensor.cycleJoined].push(consensorInfo) 173 | } 174 | 175 | for (const cycleJoined in nodesBycycleJoined) { 176 |
// eslint-disable-next-line security/detect-object-injection 177 | NodeList.addNodes(NodeList.NodeStatus.SYNCING, nodesBycycleJoined[cycleJoined]) 178 | } 179 | } 180 | // This is not needed though since no removed nodes are ever added to this list 181 | // If we ever add removed nodes to this list, we need to update to removeNodes by publicKey instead of id 182 | // Commenting out for now 183 | // if (change.removed.length > 0) { 184 | // NodeList.removeNodes(change.removed) 185 | // } 186 | if (change.updated.length > 0) { 187 | const activatedPublicKeys = change.updated.reduce((keys: string[], update: Update) => { 188 | const nodeInfo = NodeList.getNodeInfoById(update.id) 189 | if (nodeInfo) { 190 | keys.push(nodeInfo.publicKey) 191 | } 192 | return keys 193 | }, []) /* NOTE(review): this flips ALL updated ids to ACTIVE, including counterRefreshed-only updates from parseRecord — presumably safe because refreshed consensors are active by definition; confirm. */ 194 | NodeList.setStatus(NodeList.NodeStatus.ACTIVE, activatedPublicKeys) 195 | } 196 | } /* Expected active-node count after this cycle: current actives plus newly activated, minus every class of departure. */ 197 | export function activeNodeCount(cycle: P2P.CycleCreatorTypes.CycleRecord): number { 198 | return ( 199 | cycle.active + 200 | cycle.activated.length + 201 | -cycle.apoptosized.length + 202 | -cycle.removed.length + 203 | -cycle.appRemoved.length + 204 | -cycle.lost.length 205 | ) 206 | } 207 | /* Expected total (syncing + active) node count after this cycle; activated nodes are deliberately not re-counted because they were already counted in syncing. */ 208 | export function totalNodeCount(cycle: P2P.CycleCreatorTypes.CycleRecord): number { 209 | return ( 210 | cycle.syncing + 211 | cycle.joinedConsensors.length + 212 | cycle.active + 213 | // cycle.activated.length - // don't count activated because it was already counted in syncing 214 | -cycle.apoptosized.length + 215 | -cycle.removed.length + 216 | -cycle.appRemoved.length 217 | // -cycle.lost.length 218 | ) 219 | } --------------------------------------------------------------------------------