├── .gitignore ├── .vscode └── launch.json ├── README.md ├── package.json ├── src ├── LDK │ ├── LightningClient.ts │ ├── bitcoin_clients │ │ ├── BitcoinD.mts │ │ ├── ElectrumClient.mts │ │ ├── PolarClient.mts │ │ ├── TorClient.mts │ │ └── txid.json │ ├── init │ │ ├── LDKClientFactory.ts │ │ ├── initializeLDK.ts │ │ └── initializeWasm.ts │ ├── structs │ │ ├── MercuryChannelMessageHandler.ts │ │ ├── MercuryCustomMessageHandler.ts │ │ ├── MercuryEventHandler.ts │ │ ├── MercuryFeeEstimator.mts │ │ ├── MercuryFilter.ts │ │ ├── MercuryLogger.ts │ │ ├── MercuryOnionMessageHandler.ts │ │ ├── MercuryPersist.ts │ │ ├── MercuryPersister.ts │ │ ├── MercuryRouter.ts │ │ ├── MercuryRoutingMessageHandler.ts │ │ └── NodeLDKNet.mts │ ├── sync │ │ ├── Error.ts │ │ ├── EsploraSyncClient.ts │ │ └── FilterQueue.ts │ ├── types │ │ ├── ChannelTypes.ts │ │ ├── LightningClientInterface.ts │ │ └── PeerDetails.ts │ └── utils │ │ ├── Logger.ts │ │ ├── ldk-utils.ts │ │ └── utils.ts ├── db │ ├── database.ts │ └── db.ts ├── debug_lightning.ts ├── mercury-node.d.ts ├── routes │ ├── channelRoutes.ts │ ├── peerRoutes.ts │ └── serverRoutes.ts └── server.js ├── test ├── ElectrumClient.test.ts ├── TorClient.test.ts ├── channelRoutes.test.ts ├── db-mock.ts ├── db.test.ts ├── mocks │ └── MockLightningClient.ts ├── peerRoutes.test.ts └── serverRoutes.test.ts ├── tsconfig.json └── wallets └── .gitkeep /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | channel_manager_data.bin 3 | lightning-mock.db 4 | lightning.db 5 | network_graph_data.bin 6 | package-lock.json 7 | .ldk 8 | .scorer 9 | channels 10 | private_key.txt 11 | .env -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "name": "Debug Express", 6 | "type": "node", 7 | "request": "launch", 8 | "runtimeExecutable": "node", 9 | "runtimeArgs": [ 10 | "--loader", 11 | "ts-node/esm", 12 | "--experimental-specifier-resolution=node", 13 | "${workspaceFolder}/src/server.js" 14 | ], 15 | "cwd": "${workspaceFolder}", 16 | "protocol": "inspector", 17 | "skipFiles": ["/**/*.js"], 18 | "console": "integratedTerminal" 19 | } 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/layer2tech/mercury-node/cc25d4bfcd7f3148ac4e95dd85fb4a44a1e73ee9/README.md -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mercury-node", 3 | "jest": { 4 | "moduleDirectories": [ 5 | "node_modules", 6 | "src" 7 | ], 8 | "modulePaths": [ 9 | "src", 10 | "node_modules" 11 | ], 12 | "moduleFileExtensions": [ 13 | "js", 14 | "jsx", 15 | "ts", 16 | "mjs", 17 | "mts", 18 | "d.ts", 19 | "wasm" 20 | ], 21 | "modulePathIgnorePatterns": [], 22 | "moduleNameMapper": { 23 | "^(\\.{1,2}/.*)\\.js$": "$1", 24 | "^(\\.{1,2}/.*)\\.m?js$": "$1" 25 | }, 26 | "transform": { 27 | "^.+\\.(ts|tsx)$": [ 28 | "ts-jest", 29 | { 30 | "useESM": true 31 | } 32 | ], 33 | "^.+\\.m?[tj]sx?$": [ 34 | "ts-jest", 35 | { 36 | "useESM": true 37 | } 38 | ] 39 | }, 40 | "extensionsToTreatAsEsm": [ 41 | ".ts", 42 | ".mts" 43 | ] 44 | }, 45 | "version": "1.0.0", 46 | "description": 
"", 47 | "main": "src/server.js", 48 | "type": "module", 49 | "ava": { 50 | "extensions": { 51 | "ts": "module" 52 | }, 53 | "nonSemVerExperiments": { 54 | "configurableModuleFormat": true 55 | }, 56 | "nodeArguments": [ 57 | "--loader=ts-node/esm" 58 | ] 59 | }, 60 | "scripts": { 61 | "build": "tsc -p tsconfig.json", 62 | "dev": "npm run build && ts-node-dev --respawn --pretty --transpile-only src/server.js", 63 | "start": "npm run build && node --loader ts-node/esm --experimental-specifier-resolution=node ./src/server.js", 64 | "debug": "npm run build && node --loader ts-node/esm --experimental-specifier-resolution=node --experimental-modules ./src/debug_lightning.ts", 65 | "test": "npm run build && node --loader ts-node/esm --experimental-vm-modules node_modules/jest/bin/jest.js" 66 | }, 67 | "moduleFileExtensions": [ 68 | "js", 69 | ".mjs", 70 | "jsx", 71 | "ts", 72 | "d.ts", 73 | "wasm" 74 | ], 75 | "keywords": [], 76 | "author": "", 77 | "license": "ISC", 78 | "devDependencies": { 79 | "@electron/rebuild": "^3.2.10", 80 | "@types/cors": "^2.8.13", 81 | "@types/express": "^4.17.16", 82 | "@types/jest": "^29.5.1", 83 | "@types/json-bigint": "^1.0.1", 84 | "@types/node": "^18.13.0", 85 | "@types/request-promise-native": "^1.0.18", 86 | "@types/secp256k1": "^4.0.3", 87 | "@types/supertest": "^2.0.12", 88 | "@types/tiny-secp256k1": "^2.0.1", 89 | "bitcoinjs-lib": "^6.1.0", 90 | "express": "^4.18.2", 91 | "ts-jest": "^29.0.5", 92 | "ts-loader": "^9.4.2", 93 | "ts-node": "^10.9.1", 94 | "ts-node-dev": "^2.0.0", 95 | "webpack": "^5.75.0", 96 | "webpack-cli": "^5.0.1" 97 | }, 98 | "dependencies": { 99 | "@jest/globals": "^29.5.0", 100 | "@psf/bip32-utils": "^1.0.4", 101 | "@types/express": "^4.17.6", 102 | "async-mutex": "^0.3.2", 103 | "axios": "^1.2.2", 104 | "bech32": "^1.1.4", 105 | "body-parser": "^1.19.0", 106 | "chalk": "^5.2.0", 107 | "cors": "^2.8.5", 108 | "crypto": "^1.0.1", 109 | "default-shell": "^1.0.1", 110 | "dotenv": "^16.0.3", 111 | "ecpair": "^2.1.0", 112 | "electron-root-path": "^1.1.0", 113 | "es-module-shims": "^1.6.3", 114 | "esm": "^3.2.25", 115 | "express": "^4.17.3", 116 | "fs": "0.0.1-security", 117 | "jest": "^29.4.1", 118 | "json-bigint": "^1.0.0", 119 | "lightningdevkit": "^0.0.11-5.0", 120 | "nanoid": "^4.0.0", 121 | "netcat": "^1.5.0", 122 | "node-fetch": "^3.3.0", 123 | "node-tor-control": "0.0.2", 124 | "request": "^2.88.2", 125 | "request-promise": "^4.2.6", 126 | "request-promise-native": "^1.0.9", 127 | "secp256k1": "^5.0.0", 128 | "socks-proxy-agent": "^5.0.0", 129 | "sqlite3": "^5.1.4", 130 | "supertest": "^6.3.3", 131 | "tiny-secp256k1": "^2.2.1", 132 | "tor-control": "0.0.3", 133 | "ts-node": "^10.1.0", 134 | "uuid": "^8.3.2", 135 | "websocket": "^1.0.34", 136 | "wif": "^2.0.6", 137 | "winston": "^3.3.3" 138 | } 139 | } 140 | -------------------------------------------------------------------------------- /src/LDK/bitcoin_clients/BitcoinD.mts: -------------------------------------------------------------------------------- 1 | export interface BitcoinDaemonClientInterface { 2 | getBestBlockHash(): any; 3 | getBestBlockHeight(): any; 4 | getBlockHeader(height: number): any; 5 | getBlockHeader(hash: string): any; 6 | getTxIdData(txid: string): any; 7 | getHeaderByHash(hash: string): any; 8 | getBlockStatus(hash: string): any; 9 | getRawTransaction(txid: string): any; 10 | getTxOut(txid: string, vout: number): any; 11 | setTx(txid: string): any; 12 | getMerkleProofPosition(txid: string): any; 13 | } 14 | 
-------------------------------------------------------------------------------- /src/LDK/bitcoin_clients/ElectrumClient.mts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { BitcoinDaemonClientInterface } from "./BitcoinD.mjs"; 3 | import dotenv from "dotenv"; 4 | dotenv.config(); 5 | 6 | const TIMEOUT = 20000; 7 | 8 | // CHANGE THESE TO THE ESPLORA HOST 9 | const HOST = "http://" + process.env["HOSTNAME"]; 10 | const PORT = process.env["NODE_PORT"]; 11 | const USER = ""; 12 | const PASS = ""; 13 | 14 | // Custom Logger 15 | import { ChalkColor, Logger } from "../utils/Logger.js"; 16 | const DEBUG = new Logger(ChalkColor.Yellow, "ElectrumClient.ts"); 17 | 18 | class ElectrumClient implements BitcoinDaemonClientInterface { 19 | async getMerkleProofPosition(txid: string): Promise { 20 | DEBUG.log( 21 | "getMerkleProofPosition... txid->", 22 | "getMerkleProofPosition", 23 | txid 24 | ); 25 | try { 26 | let res = (await ElectrumClient.get(`tx/${txid}/merkle-proof`)).data; 27 | return res; 28 | } catch (e) { 29 | DEBUG.err("[ElectrumClient.mts]: Error getTxOut", e); 30 | } 31 | } 32 | 33 | // POST / tx; 34 | async setTx(txid: string): Promise { 35 | DEBUG.log("setTx...", "setTx"); 36 | try { 37 | let res = ElectrumClient.post("tx", txid); 38 | return res; 39 | } catch (e) { 40 | DEBUG.err("[ElectrumClient.ts]: Error setTx", e); 41 | } 42 | } 43 | /* 44 | Example output: 45 | {"spent":false} 46 | */ 47 | async getTxOut(txid: string, vout: number): Promise { 48 | DEBUG.log("getTxOut...", "getTxOut"); 49 | try { 50 | let res = (await ElectrumClient.get(`tx/${txid}/outspend/${vout}`)).data; 51 | return res; 52 | } catch (e) { 53 | DEBUG.err("[ElectrumClient.mts]: Error getTxOut", e); 54 | } 55 | } 56 | 57 | /* 58 | Example output: 59 | 020000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff0402360100ffffffff02807c814a000000001600143d27b06f0539ca6a16e4d521ec2ee7b9ab720fcd0000000000000000266a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf90120000000000000000000000000000000000000000000000000000000000000000000000000 60 | */ 61 | async getRawTransaction(txid: string): Promise { 62 | DEBUG.log("getRawTransaction...", "getRawTransaction", txid); 63 | try { 64 | let res = (await ElectrumClient.get(`tx/${txid}/hex`)).data; 65 | return res; 66 | } catch (e) { 67 | DEBUG.err("[ElectrumClient.mts]: Error Getting raw transaction", e); 68 | } 69 | } 70 | 71 | /* 72 | Example output: 73 | 00000030e856389b81d90c101f736098a4e023741d5b7e87d6cd0d709a2acbbe5c45f965aa24be79c071ea43c4c59d211ac764507daaa405b397cc475b1308984b1b265b94cf5364ffff7f2000000000 74 | */ 75 | async getHeaderByHash(hash: String) { 76 | DEBUG.log("getHeaderByHash...", "getHeaderByHash"); 77 | let res; 78 | try { 79 | res = (await ElectrumClient.get(`block/${hash}/header`)).data; 80 | DEBUG.log("returning res... 
->", "getHeaderByHash", res); 81 | return res; 82 | } catch (e) { 83 | DEBUG.err("[ElectrumClient.mts]: Error getHeaderByHash", e); 84 | } 85 | } 86 | 87 | /* 88 | Example output: 89 | {"in_best_chain":true,"height":320,"next_best":null} 90 | */ 91 | async getBlockStatus(hash: String) { 92 | DEBUG.log("getBlockStatus...", "getBlockStatus"); 93 | let res; 94 | try { 95 | res = (await ElectrumClient.get(`block/${hash}/status`)).data; 96 | DEBUG.log("returning block status ->", "getBlockStatus", res); 97 | return res; 98 | } catch (e) { 99 | DEBUG.err("[ElectrumClient.mts]: Error getBlockStatus", e); 100 | } 101 | } 102 | 103 | async getBestBlockHash() { 104 | DEBUG.log("getBestBlockHash...", "getBestBlockHash"); 105 | let res; 106 | try { 107 | res = (await ElectrumClient.get("blocks/tip/hash")).data; 108 | DEBUG.log("returning block hash ->", "getBestBlockHash", res); 109 | return res; 110 | } catch (e) { 111 | DEBUG.err("[ElectrumClient.mts]: Error Getting Block Height"); 112 | } 113 | } 114 | 115 | async getBestBlockHeight() { 116 | DEBUG.log("getBestBlockHeight...", "getBestBlockHeight"); 117 | let res; 118 | try { 119 | res = (await ElectrumClient.get("blocks/tip/height")).data; 120 | DEBUG.log("returning blockheight ->", "getBestBlockHeight", res); 121 | return res; 122 | } catch (e) { 123 | DEBUG.err("[ElectrumClient.mts]: Error Getting Block Height"); 124 | } 125 | } 126 | 127 | async getHashByHeight(height: number | string) { 128 | DEBUG.log("height entered ->", "getHashByHeight", height); 129 | if (typeof height === "string") { 130 | return height; 131 | } 132 | 133 | // First get the hash of the block height 134 | try { 135 | let block_hash = (await ElectrumClient.get(`block-height/${height}`)) 136 | .data; 137 | DEBUG.log("returning hash ->", "getHashByHeight", block_hash); 138 | return block_hash; 139 | } catch (e) { 140 | DEBUG.err("[ElectrumClient.mts]: Error Getting Current Block Hash"); 141 | } 142 | } 143 | 144 | /* 145 | Example output: 146 | { "id":"6308b34593df109d39b2c9dfd12ee181a57ce0b8d277c09ef423db6f644e37a3", 147 | "height":320,"version":805306368,"timestamp":1683214228,"tx_count":1,"size":250,"weight":892, 148 | "merkle_root":"5b261b4b9808135b47cc97b305a4aa7d5064c71a219dc5c443ea71c079be24aa", 149 | "previousblockhash":"65f9455cbecb2a9a700dcdd6877e5b1d7423e0a49860731f100cd9819b3856e8", 150 | "mediantime":1683214227,"nonce":0,"bits":545259519,"difficulty":0 } 151 | */ 152 | async getBlockHeader(height: number | string) { 153 | let currentBlockHash = await this.getHashByHeight(height); 154 | DEBUG.log("Get Latest Block Header", "getBlockHeader", height); 155 | 156 | try { 157 | let block_header = (await ElectrumClient.get(`block/${currentBlockHash}`)) 158 | .data; 159 | DEBUG.log("returning block header ->", "getBlockHeader", block_header); 160 | return block_header; 161 | } catch (e) { 162 | DEBUG.err("[ElectrumClient.mts]: Error in getting header: ", e); 163 | } 164 | } 165 | 166 | /* 167 | Example output: 168 | {"id":"6308b34593df109d39b2c9dfd12ee181a57ce0b8d277c09ef423db6f644e37a3","height":320,"version":805306368,"timestamp":1683214228,"tx_count":1,"size":250,"weight":892,"merkle_root":"5b261b4b9808135b47cc97b305a4aa7d5064c71a219dc5c443ea71c079be24aa","previousblockhash":"65f9455cbecb2a9a700dcdd6877e5b1d7423e0a49860731f100cd9819b3856e8","mediantime":1683214227,"nonce":0,"bits":545259519,"difficulty":0} 169 | */ 170 | async getTxIdData(txid: string) { 171 | DEBUG.log("txid->", "getTxIdData", txid); 172 | try { 173 | const res = (await 
ElectrumClient.get(`tx/${txid}`)).data; 174 | 175 | return { 176 | txid: res?.txid ?? "", 177 | vout: res?.vout ?? -1, 178 | sequence: res?.vin[0]?.sequence ?? -1, 179 | height: res?.status?.block_height ?? -1, 180 | confirmed: res?.status?.confirmed ?? false, 181 | hash: res?.status?.block_hash ?? -1, 182 | }; 183 | } catch (e) { 184 | DEBUG.err("[ElectrumClient.mts]: Error in getTxIdData", e); 185 | return null; 186 | } 187 | } 188 | 189 | static async get(endpoint: string, timeout_ms = TIMEOUT) { 190 | const url = HOST + ":" + PORT + "/" + endpoint; 191 | const config = { 192 | method: "get", 193 | url: url, 194 | headers: { Accept: "application/json" }, 195 | timeout: timeout_ms, 196 | }; 197 | 198 | return await axios(config); 199 | } 200 | 201 | static async post(endpoint: string, body: string, timeout_ms = TIMEOUT) { 202 | console.log("[ElectrumClient.ts/post]: body is equal to:", body); 203 | const url = HOST + ":" + PORT + "/" + endpoint; 204 | return await axios.post(url, body); 205 | } 206 | } 207 | 208 | export const GET_ROUTE = { 209 | PING: "/electrs/ping", 210 | //latestBlockHeader "/Electrs/block/:hash/header", 211 | BLOCK: "/electrs/block", 212 | BLOCKS_TIP_HASH: "/electrs/blocks/tip/hash", 213 | HEADER: "header", 214 | BLOCKS_TIP_HEIGHT: "/electrs/blocks/tip/height", 215 | //getTransaction /tx/:txid 216 | TX: "/electrs/tx", 217 | //getScriptHashListUnspent /scripthash/:hash/utxo 218 | SCRIPTHASH: "/electrs/scripthash", 219 | UTXO: "utxo", 220 | //getFeeEstimates 221 | FEE_ESTIMATES: "/electrs/fee-estimates", 222 | UTXO_SPENT: "/electrs/tx/:txid/outspend/:vout", 223 | }; 224 | Object.freeze(GET_ROUTE); 225 | 226 | export const POST_ROUTE = { 227 | //broadcast transaction 228 | TX: "/electrs/tx", 229 | }; 230 | Object.freeze(POST_ROUTE); 231 | 232 | export default ElectrumClient; 233 | -------------------------------------------------------------------------------- /src/LDK/bitcoin_clients/PolarClient.mts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | 3 | const TIMEOUT = 20000; 4 | 5 | // CHANGE THESE TO MATCH POLAR 6 | const HOST = "http://127.0.0.1"; 7 | const PORT = "18443"; 8 | const USER = "polaruser"; 9 | const PASS = "polarpass"; 10 | 11 | class PolarClient { 12 | endpoint; 13 | constructor(endpoint: string) { 14 | this.endpoint = endpoint; 15 | } 16 | 17 | async getBestBlockHash() { 18 | console.log("[PolarClient.mts]: getBestBlockHash..."); 19 | let res; 20 | try { 21 | res = (await PolarClient.get("rest/chaininfo.json")).data; 22 | } catch (e) { 23 | console.log("[PolarClient.mts]: Error Getting Block Height"); 24 | } 25 | if (res) { 26 | return res.bestblockhash; 27 | } 28 | } 29 | 30 | async getBlockHeight() { 31 | console.log("[PolarClient.mts]: getBlockHeight..."); 32 | let res; 33 | try { 34 | res = (await PolarClient.get("rest/chaininfo.json")).data; 35 | return res.blocks; 36 | } catch (e) { 37 | console.log("[PolarClient.mts]: Error Getting Block Height"); 38 | } 39 | } 40 | 41 | async getLatestBlockHeader(height: number) { 42 | let currentBlockHash; 43 | try { 44 | console.log( 45 | "[PolarClient.mts]: getLatestBlockHeader, block_height:", 46 | height 47 | ); 48 | currentBlockHash = ( 49 | await PolarClient.get(`rest/blockhashbyheight/${height}.json`) 50 | ).data.blockhash; 51 | } catch (e) { 52 | console.log("[PolarClient.mts]: Error Getting Current Block Hash"); 53 | } 54 | 55 | // return currentBlockHash 56 | console.log("[PolarClient.mts]: Get Latest Block Header..."); 57 | let res; 
58 | try { 59 | res = (await PolarClient.get(`rest/headers/1/${currentBlockHash}.hex`)) 60 | .data; 61 | } catch (e) { 62 | console.log("[PolarClient.mts]: Error in getting header: ", e); 63 | } 64 | 65 | if (res) { 66 | return res; 67 | } 68 | } 69 | 70 | async getTxIdData(txid: string) { 71 | let res = (await PolarClient.get(`rest/tx/${txid}.json`)).data; 72 | 73 | return { 74 | txid: res.txid, 75 | vout: res.vin[0].vout, 76 | sequence: res.vin[0].sequence, 77 | }; 78 | } 79 | 80 | async getUtxoSpentData(txid: string, vout: number) { 81 | throw new Error("Not yet implemented"); 82 | } 83 | 84 | static async get(endpoint: string, timeout_ms = TIMEOUT) { 85 | const url = HOST + ":" + PORT + "/" + endpoint; 86 | const config = { 87 | method: "get", 88 | url: url, 89 | headers: { Accept: "application/json" }, 90 | timeout: timeout_ms, 91 | }; 92 | 93 | return await axios(config); 94 | } 95 | 96 | static async post(endpoint: string, timeout_ms = TIMEOUT) { 97 | const options = { 98 | headers: { 99 | "Content-Type": "text/plain", 100 | }, 101 | data: { 102 | jsonrpc: "1.0", 103 | id: "curltest", 104 | method: "getblockchaininfo", 105 | }, 106 | }; 107 | 108 | axios 109 | .post( 110 | "http://" + USER + ":" + PASS + "@" + HOST + ":" + PORT + "/", 111 | options 112 | ) 113 | .then((response) => { 114 | console.log("[PolarClient.mts]: RESPONSE: ", response.data); 115 | return response.data; 116 | }) 117 | .catch((error) => { 118 | console.log("[PolarClient.mts]: ERROR: ", error); 119 | }); 120 | } 121 | } 122 | 123 | export const GET_ROUTE = { 124 | PING: "/electrs/ping", 125 | //latestBlockHeader "/Electrs/block/:hash/header", 126 | BLOCK: "/electrs/block", 127 | BLOCKS_TIP_HASH: "/electrs/blocks/tip/hash", 128 | HEADER: "header", 129 | BLOCKS_TIP_HEIGHT: "/electrs/blocks/tip/height", 130 | //getTransaction /tx/:txid 131 | TX: "/electrs/tx", 132 | //getScriptHashListUnspent /scripthash/:hash/utxo 133 | SCRIPTHASH: "/electrs/scripthash", 134 | UTXO: "utxo", 135 | //getFeeEstimates 136 | FEE_ESTIMATES: "/electrs/fee-estimates", 137 | UTXO_SPENT: "/electrs/tx/:txid/outspend/:vout", 138 | }; 139 | Object.freeze(GET_ROUTE); 140 | 141 | export const POST_ROUTE = { 142 | //broadcast transaction 143 | TX: "/electrs/tx", 144 | }; 145 | Object.freeze(POST_ROUTE); 146 | 147 | export default PolarClient; 148 | -------------------------------------------------------------------------------- /src/LDK/bitcoin_clients/TorClient.mts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { RawAxiosRequestConfig } from "axios"; 3 | import { BitcoinDaemonClientInterface } from "./BitcoinD.mjs"; 4 | 5 | const TIMEOUT = 20000; 6 | const HOST = "http://localhost"; 7 | const PORT = 3001; 8 | 9 | export const TOR_ENDPOINT = "http://localhost:3001"; 10 | 11 | // Custom Logger 12 | import { ChalkColor, Logger } from "../utils/Logger.js"; 13 | const DEBUG = new Logger(ChalkColor.Yellow, "TorClient.ts"); 14 | 15 | class TorClient implements BitcoinDaemonClientInterface { 16 | async getMerkleProofPosition(txid: string): Promise { 17 | DEBUG.log( 18 | "getMerkleProofPosition... 
txid->", 19 | "getMerkleProofPosition", 20 | txid 21 | ); 22 | try { 23 | let res = (await TorClient.get(`electrs/tx/${txid}/merkle-proof`)).data; 24 | return res; 25 | } catch (e) { 26 | DEBUG.err("[ElectrumClient.mts]: Error getTxOut", e); 27 | } 28 | } 29 | 30 | async setTx(txid: string): Promise { 31 | DEBUG.log("setTx...", "setTx"); 32 | try { 33 | let res = TorClient.post("electrs/tx", txid); 34 | return res; 35 | } catch (e) { 36 | DEBUG.err("[ElectrumClient.ts]: Error setTx", e); 37 | } 38 | } 39 | /* 40 | Example output: 41 | {"spent":false} 42 | */ 43 | async getTxOut(txid: string, vout: number): Promise { 44 | DEBUG.log("getTxOut...", "getTxOut"); 45 | try { 46 | let res = (await TorClient.get(`electrs/tx/${txid}/outspend/${vout}`)) 47 | .data; 48 | return res; 49 | } catch (e) { 50 | DEBUG.err("[ElectrumClient.mts]: Error getTxOut", e); 51 | } 52 | } 53 | async getRawTransaction(txid: string): Promise { 54 | DEBUG.log("getRawTransaction...", "getRawTransaction", txid); 55 | try { 56 | let res = (await TorClient.get(`electrs/tx/${txid}/hex`)).data; 57 | return res; 58 | } catch (e) { 59 | DEBUG.err("[ElectrumClient.mts]: Error Getting raw transaction", e); 60 | } 61 | } 62 | 63 | /* 64 | Example output: 65 | 00000030e856389b81d90c101f736098a4e023741d5b7e87d6cd0d709a2acbbe5c45f965aa24be79c071ea43c4c59d211ac764507daaa405b397cc475b1308984b1b265b94cf5364ffff7f2000000000 66 | */ 67 | async getHeaderByHash(hash: String) { 68 | DEBUG.log("getHeaderByHash...", "getHeaderByHash"); 69 | let res; 70 | try { 71 | res = (await TorClient.get(`electrs/block/${hash}/header`)).data; 72 | DEBUG.log("returning res... ->", "getHeaderByHash", res); 73 | return res; 74 | } catch (e) { 75 | DEBUG.err("[ElectrumClient.mts]: Error getHeaderByHash", e); 76 | } 77 | } 78 | 79 | /* 80 | Example output: 81 | {"in_best_chain":true,"height":320,"next_best":null} 82 | */ 83 | async getBlockStatus(hash: String) { 84 | DEBUG.log("getBlockStatus...", "getBlockStatus"); 85 | let res; 86 | try { 87 | res = (await TorClient.get(`electrs/block/${hash}/status`)).data; 88 | DEBUG.log("returning block status ->", "getBlockStatus", res); 89 | return res; 90 | } catch (e) { 91 | DEBUG.err("[ElectrumClient.mts]: Error getBlockStatus", e); 92 | } 93 | } 94 | 95 | async getBestBlockHash() { 96 | console.log("[TorClient.mts]: getBestBlockHash..."); 97 | let res; 98 | try { 99 | res = await TorClient.get(`${GET_ROUTE.BLOCKS_TIP_HASH}`); 100 | 101 | res = res && res.data; 102 | } catch (e) { 103 | console.log("[TorClient.mts]: Error Getting Block Height"); 104 | } 105 | if (res) { 106 | return res; 107 | } 108 | } 109 | 110 | async getBestBlockHeight() { 111 | console.log("[TorClient.mts]: getBlockHeight..."); 112 | let res; 113 | try { 114 | res = await TorClient.get(`${GET_ROUTE.BLOCKS_TIP_HEIGHT}`); 115 | 116 | res = res && res.data; 117 | } catch (e) { 118 | console.log("[TorClient.mts]: Error Getting Block Height"); 119 | } 120 | if (res) { 121 | return res; 122 | } 123 | } 124 | 125 | async getBlockHeader(height: number | string) { 126 | let currentBlockHash; 127 | try { 128 | console.log("[TorClient.mts]: Get latest block header..............."); 129 | console.log("[TorClient.mts]: block_height: ", height); 130 | console.log(`[TorClient.mts]: ${GET_ROUTE.BLOCKS_TIP_HASH}`); 131 | let res = await TorClient.get(`${GET_ROUTE.BLOCKS_TIP_HASH}`); 132 | 133 | currentBlockHash = res && res.data; 134 | } catch (e) { 135 | console.log("[TorClient.mts]: Error Getting Current Block Hash"); 136 | } 137 | 138 | 
console.log("[TorClient.mts]: Get Latest Block Header..."); 139 | let res; 140 | try { 141 | res = await TorClient.get(`/electrs/block/${currentBlockHash}/header`); 142 | 143 | res = res && res.data; 144 | } catch (e) { 145 | console.log("[TorClient.mts]: Error in getting header: ", e); 146 | } 147 | 148 | if (res) { 149 | return res; 150 | } 151 | } 152 | 153 | async getTxIdData(txid: string) { 154 | let res = (await TorClient.get(`${GET_ROUTE.TX}/${txid}`)).data; 155 | 156 | console.log(JSON.stringify(res)); 157 | 158 | return { 159 | txid: res?.txid ?? "", 160 | vout: res?.vout ?? -1, 161 | sequence: res?.vin[0]?.sequence ?? -1, 162 | height: res?.status?.block_height ?? -1, 163 | confirmed: res?.status?.confirmed ?? false, 164 | hash: res?.status?.block_hash ?? -1, 165 | }; 166 | } 167 | 168 | async getUtxoSpentData(txid: string, vout: number) { 169 | try { 170 | const res = ( 171 | await TorClient.get( 172 | `${GET_ROUTE.UTXO_SPENT}` 173 | .replace(":txid", txid) 174 | .replace(":vout", String(vout)) 175 | ) 176 | ).data; 177 | if (res) { 178 | return res; 179 | } 180 | throw new Error("Error fetching UTXO spent data"); 181 | } catch (e: any) { 182 | throw new Error(e); 183 | } 184 | } 185 | 186 | static async get(endpoint: string, timeout_ms = TIMEOUT) { 187 | const url = HOST + ":" + PORT + "/" + endpoint; 188 | const config = { 189 | method: "get", 190 | url: url, 191 | headers: { Accept: "application/json" }, 192 | timeout: timeout_ms, 193 | }; 194 | 195 | return await axios(config); 196 | } 197 | 198 | static async post(endpoint: string, body: string, timeout_ms = TIMEOUT) { 199 | console.log("[ElectrumClient.ts/post]: body is equal to:", body); 200 | const url = HOST + ":" + PORT + "/" + endpoint; 201 | return await axios.post(url, body); 202 | } 203 | } 204 | 205 | export const GET_ROUTE = { 206 | PING: "/electrs/ping", 207 | //latestBlockHeader "/Electrs/block/:hash/header", 208 | BLOCK: "/electrs/block", 209 | BLOCKS_TIP_HASH: "/electrs/blocks/tip/hash", 210 | HEADER: "header", 211 | BLOCKS_TIP_HEIGHT: "/electrs/blocks/tip/height", 212 | //getTransaction /tx/:txid 213 | TX: "/electrs/tx", 214 | //getScriptHashListUnspent /scripthash/:hash/utxo 215 | SCRIPTHASH: "/electrs/scripthash", 216 | UTXO: "utxo", 217 | //getFeeEstimates 218 | FEE_ESTIMATES: "/electrs/fee-estimates", 219 | UTXO_SPENT: "/electrs/tx/:txid/outspend/:vout", 220 | }; 221 | Object.freeze(GET_ROUTE); 222 | 223 | export const POST_ROUTE = { 224 | //broadcast transaction 225 | TX: "/electrs/tx", 226 | }; 227 | Object.freeze(POST_ROUTE); 228 | 229 | export default TorClient; 230 | -------------------------------------------------------------------------------- /src/LDK/bitcoin_clients/txid.json: -------------------------------------------------------------------------------- 1 | { 2 | "txid": "b1d5d298febf7d1ce1cd068f9685bc1791f882e14622e76c9f4ad20a3ecf576b", 3 | "version": 2, 4 | "locktime": 0, 5 | "vin": [ 6 | { 7 | "txid": "0000000000000000000000000000000000000000000000000000000000000000", 8 | "vout": 4294967295, 9 | "prevout": null, 10 | "scriptsig": "02360100", 11 | "scriptsig_asm": "OP_PUSHBYTES_2 3601 OP_0", 12 | "witness": [ 13 | "0000000000000000000000000000000000000000000000000000000000000000" 14 | ], 15 | "is_coinbase": true, 16 | "sequence": 4294967295 17 | } 18 | ], 19 | "vout": [ 20 | { 21 | "scriptpubkey": "00143d27b06f0539ca6a16e4d521ec2ee7b9ab720fcd", 22 | "scriptpubkey_asm": "OP_0 OP_PUSHBYTES_20 3d27b06f0539ca6a16e4d521ec2ee7b9ab720fcd", 23 | "scriptpubkey_type": "v0_p2wpkh", 24 | 
"scriptpubkey_address": "bcrt1q85nmqmc9889x59hy65s7cth8hx4hyr7d0zf09a", 25 | "value": 1250000000 26 | }, 27 | { 28 | "scriptpubkey": "6a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf9", 29 | "scriptpubkey_asm": "OP_RETURN OP_PUSHBYTES_36 aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf9", 30 | "scriptpubkey_type": "op_return", 31 | "value": 0 32 | } 33 | ], 34 | "size": 169, 35 | "weight": 568, 36 | "fee": 0, 37 | "status": { 38 | "confirmed": true, 39 | "block_height": 310, 40 | "block_hash": "214b12d96223a516a345aefcf2ba7dd34d1c8e02edfaa3e5f6adf178355c996c", 41 | "block_time": 1683213797 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/LDK/init/LDKClientFactory.ts: -------------------------------------------------------------------------------- 1 | import LightningClient from "../LightningClient"; 2 | import { initializeLDK } from "./initializeLDK"; 3 | import { MockLightningClient } from "../../../test/mocks/MockLightningClient"; 4 | 5 | class LDKClientFactory { 6 | private static instance: LDKClientFactory; 7 | private client: LightningClient | MockLightningClient | null; 8 | 9 | private constructor() { 10 | this.client = null; 11 | } 12 | 13 | public static getInstance(): LDKClientFactory { 14 | if (!LDKClientFactory.instance) { 15 | LDKClientFactory.instance = new LDKClientFactory(); 16 | } 17 | return LDKClientFactory.instance; 18 | } 19 | 20 | public async createLDKClient( 21 | wallet_name: string, 22 | bitcoind_client: string = "prod" 23 | ): Promise { 24 | console.log( 25 | "[LDKClientFactory/createLDKClient]: bitcoind_client settings: ", 26 | bitcoind_client 27 | ); 28 | 29 | if (!this.client) { 30 | if (bitcoind_client === "mock") { 31 | this.client = new MockLightningClient(); 32 | return; 33 | } 34 | try { 35 | const initLDK = await initializeLDK(wallet_name, bitcoind_client); 36 | if (initLDK) { 37 | this.client = new LightningClient(initLDK); 38 | } else { 39 | throw new Error( 40 | "[LDKClientFactory/createLDKClient]: initLDK undefined \n" 41 | ); 42 | } 43 | } catch (e) { 44 | throw new Error( 45 | `[LDKClientFactory/createLDKClient]: Couldn't await initializeLDK(bitcoind_client); \n ${e} \n` 46 | ); 47 | } 48 | } 49 | } 50 | 51 | public isInitialized() { 52 | return this.client !== null; 53 | } 54 | 55 | public destroy() { 56 | this.client = null; 57 | } 58 | 59 | public getLDKClient(): LightningClient | MockLightningClient { 60 | if (!this.client) { 61 | throw new Error( 62 | "[LDKClientFactory/getLDKClient]: Can't getLDKClient - LDKClient is not instantiated." 
63 | ); 64 | } 65 | 66 | return this.client; 67 | } 68 | } 69 | 70 | export default LDKClientFactory.getInstance(); 71 | -------------------------------------------------------------------------------- /src/LDK/init/initializeLDK.ts: -------------------------------------------------------------------------------- 1 | import { 2 | PeerManager, 3 | FeeEstimator, 4 | Logger, 5 | BroadcasterInterface, 6 | Network, 7 | BestBlock, 8 | NetworkGraph, 9 | Persist, 10 | EventHandler, 11 | Filter, 12 | ChainMonitor, 13 | KeysManager, 14 | UserConfig, 15 | ChannelHandshakeConfig, 16 | ChainParameters, 17 | ChannelManager, 18 | IgnoringMessageHandler, 19 | Option_FilterZ, 20 | ProbabilisticScorer, 21 | ProbabilisticScoringParameters, 22 | ChannelMonitor, 23 | DefaultRouter, 24 | LockableScore, 25 | Persister, 26 | UtilMethods, 27 | TwoTuple_BlockHashChannelManagerZ, 28 | TwoTuple_BlockHashChannelMonitorZ, 29 | OutPoint, 30 | Result_OutPointDecodeErrorZ_OK, 31 | Result_OutPointDecodeErrorZ, 32 | Result_C2Tuple_BlockHashChannelManagerZDecodeErrorZ, 33 | Result_C2Tuple_BlockHashChannelManagerZDecodeErrorZ_OK, 34 | Result_C2Tuple_BlockHashChannelManagerZDecodeErrorZ_Err, 35 | } from "lightningdevkit"; 36 | 37 | import fs from "fs"; 38 | import crypto from "crypto"; 39 | 40 | import MercuryFeeEstimator from "../structs/MercuryFeeEstimator.mjs"; 41 | import MercuryLogger from "../structs/MercuryLogger.js"; 42 | // @ts-ignore 43 | import MercuryEventHandler from "../structs/MercuryEventHandler.js"; 44 | // import MercuryFilter from "../structs/MercuryFilter.js"; - removed 45 | import LightningClientInterface from "../types/LightningClientInterface.js"; 46 | import ElectrumClient from "../bitcoin_clients/ElectrumClient.mjs"; 47 | import TorClient from "../bitcoin_clients/TorClient.mjs"; 48 | import MercuryPersist from "../structs/MercuryPersist.js"; 49 | import MercuryPersister from "../structs/MercuryPersister.js"; 50 | import EsploraSyncClient from "../sync/EsploraSyncClient.js"; 51 | import { 52 | ChannelMonitorRead, 53 | readChannelsFromDictionary, 54 | } from "../utils/ldk-utils.js"; 55 | import { uint8ArrayToHexString } from "../utils/utils.js"; 56 | 57 | import { ChalkColor, Logger as UtilLogger } from "../../LDK/utils/Logger.js"; 58 | const DEBUG = new UtilLogger(ChalkColor.Blue, "initializeLDK.ts"); 59 | 60 | export async function initializeLDK( 61 | wallet_name: string, 62 | electrum: string = "dev" 63 | ) { 64 | try { 65 | DEBUG.log("walletName:" + wallet_name); 66 | DEBUG.log("electrum:" + electrum); 67 | 68 | let walletDirectory = "./wallets/" + wallet_name; 69 | 70 | DEBUG.log("started initializeLDK"); 71 | 72 | // Initialize the LDK data directory if necessary. 73 | const ldk_data_dir = walletDirectory + "/.ldk/"; 74 | DEBUG.log("Trying to find ldk_data_dir:" + ldk_data_dir); 75 | 76 | if (!fs.existsSync(ldk_data_dir)) { 77 | fs.mkdirSync(ldk_data_dir); 78 | } 79 | 80 | // Initialize our bitcoind client. 
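
// [Editorial annotation, not in the repository source] The `electrum` argument passed by
// the caller selects the chain backend: "prod" uses TorClient, which targets the
// Esplora-style /electrs routes served at http://localhost:3001, while any other value
// falls back to ElectrumClient, whose host and port are read from the HOSTNAME and
// NODE_PORT environment variables (see ElectrumClient.mts above).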
81 | let bitcoind_client: TorClient | ElectrumClient; 82 | DEBUG.logD("env for bitcoind_client: ", electrum); 83 | if (electrum === "prod") { 84 | DEBUG.log("Using TorClient for bitcoind_client"); 85 | bitcoind_client = new TorClient(); 86 | } else { 87 | DEBUG.log("Using ElectrumClient for bitcoind_client "); 88 | bitcoind_client = new ElectrumClient(); 89 | } 90 | 91 | // Check that the bitcoind we've connected to is running the network we expect 92 | let network; 93 | if (electrum === "prod") { 94 | network = Network.LDKNetwork_Bitcoin; 95 | } else if (electrum === "test") { 96 | network = Network.LDKNetwork_Testnet; 97 | } else { 98 | network = Network.LDKNetwork_Regtest; 99 | } 100 | 101 | // ## Setup 102 | // Step 1: Initialize the FeeEstimator 103 | DEBUG.log("Step 1: Initialize the FeeEstimator"); 104 | const feeEstimator = FeeEstimator.new_impl(new MercuryFeeEstimator()); 105 | 106 | // Step 2: Initialize the Logger 107 | DEBUG.log("Step 2: Initialize the Logger"); 108 | const logger = Logger.new_impl(new MercuryLogger()); 109 | 110 | // Step 3: broadcast interface 111 | DEBUG.log("Step 3: Initialize the BroadcasterInterface and broadcast"); 112 | const txBroadcaster = BroadcasterInterface.new_impl({ 113 | async broadcast_transaction(tx: Uint8Array) { 114 | DEBUG.log("Tx Broadcast: " + uint8ArrayToHexString(tx)); 115 | await bitcoind_client.setTx(uint8ArrayToHexString(tx)); 116 | }, 117 | }); 118 | const txBroadcasted = new Promise((resolve, reject) => { 119 | txBroadcaster.broadcast_transaction = async (tx: Uint8Array) => { 120 | DEBUG.log("Tx Broadcast: " + uint8ArrayToHexString(tx)); 121 | await bitcoind_client.setTx(uint8ArrayToHexString(tx)); 122 | resolve(tx); 123 | }; 124 | }); 125 | 126 | // Step 4: Initialize Persist 127 | DEBUG.log("Step 4: Initialize Persist"); 128 | const persist = Persist.new_impl(new MercuryPersist(wallet_name)); 129 | const persister = Persister.new_impl(new MercuryPersister(wallet_name)); 130 | 131 | // Step 5: Initialize the ChainMonitor 132 | DEBUG.log("Step 5: Initialize the ChainMonitor, filter and sync client"); 133 | // Our sync client 134 | const syncClient = new EsploraSyncClient(bitcoind_client); 135 | const filter = Filter.new_impl(syncClient); 136 | const chainMonitor: ChainMonitor = ChainMonitor.constructor_new( 137 | Option_FilterZ.constructor_some(filter), 138 | txBroadcaster, 139 | logger, 140 | feeEstimator, 141 | persist 142 | ); 143 | const chainWatch = chainMonitor.as_Watch(); 144 | 145 | // Step 6: Initialize the KeysManager 146 | DEBUG.log("Step 6: Initialize the KeysManager"); 147 | const keys_seed_path = ldk_data_dir + "keys_seed"; 148 | var seed: any; 149 | if (!fs.existsSync(keys_seed_path)) { 150 | seed = crypto.randomBytes(32); 151 | fs.writeFileSync(keys_seed_path, seed); 152 | } else { 153 | seed = fs.readFileSync(keys_seed_path); 154 | } 155 | 156 | const current_time = Date.now(); 157 | const keysManager = KeysManager.constructor_new( 158 | seed, 159 | BigInt(Math.floor(Date.now() / 1000)), 160 | current_time.valueOf() 161 | ); 162 | 163 | let entropy_source = keysManager.as_EntropySource(); 164 | let node_signer = keysManager.as_NodeSigner(); 165 | let signer_provider = keysManager.as_SignerProvider(); 166 | 167 | // Step 7: Read ChannelMonitor state from disk 168 | DEBUG.log("Step 7: Read ChannelMonitor state from disk"); 169 | DEBUG.log("reading channel monitor data..."); 170 | let channel_monitor_data: ChannelMonitorRead[] = []; 171 | if (!fs.existsSync(`wallets/${wallet_name}/channels`)) { 172 | 
fs.mkdirSync(`wallets/${wallet_name}/channels`); 173 | } 174 | if (!fs.existsSync(walletDirectory + "/channels/channel_lookup.json")) { 175 | fs.writeFileSync( 176 | walletDirectory + "/channels/channel_lookup.json", 177 | JSON.stringify([{}]) 178 | ); 179 | } 180 | if (fs.existsSync(walletDirectory + "channels/channel_lookup.json")) { 181 | try { 182 | channel_monitor_data = readChannelsFromDictionary( 183 | walletDirectory + "/channels/channel_lookup.json" 184 | ); 185 | } catch (e) { 186 | console.error("error:" + e); 187 | } 188 | } 189 | 190 | // Step 8: Poll for the best chain tip, which may be used by the channel manager & spv client 191 | DEBUG.log("Step 8: Poll for the best chain tip"); 192 | 193 | // Step 9: Initialize Network Graph, routing ProbabilisticScorer 194 | DEBUG.log("Step 9: Initialize Network Graph, routing ProbabilisticScorer"); 195 | const genesisBlock = BestBlock.constructor_from_network(network); 196 | const genesisBlockHash = genesisBlock.block_hash(); 197 | const networkGraph = NetworkGraph.constructor_new(network, logger); 198 | 199 | const ldk_scorer_dir = walletDirectory + "/.scorer/"; 200 | if (!fs.existsSync(ldk_scorer_dir)) { 201 | fs.mkdirSync(ldk_scorer_dir); 202 | } 203 | let scorer_params = ProbabilisticScoringParameters.constructor_default(); 204 | let scorer = ProbabilisticScorer.constructor_new( 205 | scorer_params, 206 | networkGraph, 207 | logger 208 | ); 209 | 210 | let locked_score = LockableScore.new_impl({ 211 | lock() { 212 | return scorer.as_Score(); 213 | }, 214 | }); 215 | 216 | // Step 10: Create Router 217 | DEBUG.log("Step 10: Create Router"); 218 | 219 | let default_router = DefaultRouter.constructor_new( 220 | networkGraph, 221 | logger, 222 | seed, 223 | locked_score 224 | ); 225 | 226 | let router = default_router.as_Router(); 227 | 228 | // Step 11: Initialize the ChannelManager 229 | DEBUG.log("Step 11: Initialize the ChannelManager"); 230 | const config = UserConfig.constructor_default(); 231 | 232 | DEBUG.log("Call GET block_height, block_hash, block_header"); 233 | let block_height: number = await bitcoind_client.getBestBlockHeight(); 234 | let block_hash: string = await bitcoind_client.getBestBlockHash(); 235 | let block_header = await bitcoind_client.getBlockHeader(block_height); 236 | 237 | DEBUG.log("intiialize chain parameters"); 238 | const params = ChainParameters.constructor_new( 239 | network, 240 | BestBlock.constructor_new(Buffer.from(block_hash, "hex"), block_height) 241 | ); 242 | 243 | const channel_monitor_mut_references: ChannelMonitor[] = []; 244 | let channelManager: any; 245 | DEBUG.log("At ChannelManager create/restore"); 246 | if (fs.existsSync(walletDirectory + "/channel_manager_data.bin")) { 247 | DEBUG.log("Loading the channel manager from disk..."); 248 | const f = fs.readFileSync(`${walletDirectory}/channel_manager_data.bin`); 249 | 250 | try { 251 | DEBUG.log("create channel_monitor_references"); 252 | channel_monitor_data.forEach((channel_monitor: ChannelMonitorRead) => { 253 | let val: any = 254 | UtilMethods.constructor_C2Tuple_BlockHashChannelMonitorZ_read( 255 | channel_monitor.bytes, 256 | entropy_source, 257 | signer_provider 258 | ); 259 | if (val.is_ok()) { 260 | let read_channelMonitor: TwoTuple_BlockHashChannelMonitorZ = 261 | val.res; 262 | let channel_monitor = read_channelMonitor.get_b(); 263 | channel_monitor_mut_references.push(channel_monitor); 264 | } 265 | }); 266 | DEBUG.log("try and read the channel manager"); 267 | let readManager: any; 268 | readManager = 269 | 
UtilMethods.constructor_C2Tuple_BlockHashChannelManagerZ_read( 270 | f, 271 | entropy_source, 272 | node_signer, 273 | signer_provider, 274 | feeEstimator, 275 | chainMonitor.as_Watch(), 276 | txBroadcaster, 277 | router, 278 | logger, 279 | config, 280 | channel_monitor_mut_references 281 | ); 282 | DEBUG.log("read channel manager constructed successfully"); 283 | if ( 284 | readManager instanceof 285 | Result_C2Tuple_BlockHashChannelManagerZDecodeErrorZ_OK && 286 | readManager.is_ok() 287 | ) { 288 | DEBUG.log("readManager is_ok"); 289 | let read_channelManager: TwoTuple_BlockHashChannelManagerZ = 290 | readManager.res; 291 | channelManager = read_channelManager.get_b(); 292 | DEBUG.log( 293 | "read_channelManager.get_b() passed as channelManager successfully" 294 | ); 295 | } else if ( 296 | readManager instanceof 297 | Result_C2Tuple_BlockHashChannelManagerZDecodeErrorZ_Err 298 | ) { 299 | DEBUG.log( 300 | "Error occured in reading channel manager received a Decode Error" 301 | ); 302 | console.table(readManager.err); 303 | } else { 304 | throw Error("Couldn't recreate channel manager from disk \n"); 305 | } 306 | } catch (e) { 307 | throw Error("Error occured reading channel manager:" + e); 308 | } 309 | } else { 310 | DEBUG.log("Create fresh channel manager"); 311 | // fresh manager 312 | channelManager = ChannelManager.constructor_new( 313 | feeEstimator, 314 | chainWatch, 315 | txBroadcaster, 316 | router, 317 | logger, 318 | entropy_source, 319 | node_signer, 320 | signer_provider, 321 | config, 322 | params 323 | ); 324 | } 325 | 326 | if (channelManager === undefined) { 327 | throw new Error("[initializeLDK.ts]: Channel Manager is still undefined"); 328 | } 329 | 330 | const channelHandshakeConfig = ChannelHandshakeConfig.constructor_default(); 331 | 332 | // Step 12: Sync ChainMonitor and ChannelManager to chain tip 333 | DEBUG.log("Step 12: Sync ChainMonitor and ChannelManager to chain tip"); 334 | await syncClient.sync([ 335 | channelManager.as_Confirm(), 336 | chainMonitor.as_Confirm(), 337 | ]); 338 | 339 | // Step 13: Give ChannelMonitors to ChainMonitor 340 | DEBUG.log("Step 13: Give ChannelMonitors to ChainMonitor"); 341 | if (channel_monitor_mut_references.length > 0) { 342 | let outpoints_mut: OutPoint[] = []; 343 | 344 | channel_monitor_data.forEach((channel_monitor: ChannelMonitorRead) => { 345 | // Rebuild OutPoint from the first Uint8Array in the tuple 346 | const outpointResult: Result_OutPointDecodeErrorZ = 347 | OutPoint.constructor_read(channel_monitor.outpoint); 348 | if (outpointResult.is_ok()) { 349 | const outpoint: OutPoint = (( 350 | outpointResult 351 | )).res; 352 | outpoints_mut.push(outpoint); 353 | } 354 | }); 355 | 356 | // ensure outpoints_mut and channel_monitor_mut are the same length 357 | if (outpoints_mut.length !== channel_monitor_mut_references.length) { 358 | throw Error("No equal amounts of outpoints to channel monitor."); 359 | } 360 | 361 | // give chainWatch the output and serialized form of channel to watch 362 | for (let i = 0; i < outpoints_mut.length; i++) { 363 | const outpoint = outpoints_mut[i]; 364 | const serializedByte = channel_monitor_mut_references[i]; 365 | if (outpoint && serializedByte) { 366 | chainWatch.watch_channel(outpoint, serializedByte); 367 | } 368 | } 369 | } 370 | 371 | // Step 14: Optional: Initialize the P2PGossipSync 372 | DEBUG.log("Step 14: Optional: Initialize the P2PGossipSync - TODO"); 373 | 374 | // Step 15: Initialize the PeerManager 375 | DEBUG.log("Step 15: Initialize the PeerManager"); 376 | const 
routingMessageHandler = 377 | IgnoringMessageHandler.constructor_new().as_RoutingMessageHandler(); 378 | let channelMessageHandler; 379 | if (channelManager) { 380 | channelMessageHandler = channelManager.as_ChannelMessageHandler(); 381 | } 382 | const customMessageHandler = 383 | IgnoringMessageHandler.constructor_new().as_CustomMessageHandler(); 384 | const onionMessageHandler = 385 | IgnoringMessageHandler.constructor_new().as_OnionMessageHandler(); 386 | const nodeSecret = new Uint8Array(32); 387 | for (var i = 0; i < 32; i++) nodeSecret[i] = 42; 388 | const ephemeralRandomData = new Uint8Array(32); 389 | 390 | const peerManager = 391 | channelMessageHandler && 392 | PeerManager.constructor_new( 393 | channelMessageHandler, 394 | routingMessageHandler, 395 | onionMessageHandler, 396 | Date.now(), 397 | ephemeralRandomData, 398 | logger, 399 | customMessageHandler, 400 | node_signer 401 | ); 402 | 403 | // ## Running LDK 404 | // Step 16: Initialize networking 405 | DEBUG.log("Step 16: Initialize networking - TODO"); 406 | 407 | // Step 17: Connect and Disconnect Blocks 408 | DEBUG.log("Step 17: Connect and Disconnect Blocks"); 409 | let channel_manager_listener = channelManager; 410 | let chain_monitor_listener = chainMonitor; 411 | let bitcoind_block_source = bitcoind_client; 412 | 413 | /* 414 | const chain_poller = new ChainPoller(bitcoind_block_source, network); 415 | const chain_listener = [chain_monitor_listener, channel_manager_listener]; 416 | const spv_client = new SpvClient( 417 | chain_tip, 418 | chain_poller, 419 | cache, 420 | chain_listener 421 | ); 422 | 423 | setInterval(async () => { 424 | await spv_client.poll_best_tip(); 425 | }, 1000); 426 | */ 427 | 428 | // check on interval 429 | 430 | // Step 18: Handle LDK Events 431 | DEBUG.log("Step 18: Handle LDK Events"); 432 | let eventHandler; 433 | 434 | if (channelManager) { 435 | let mercuryEventHandler = new MercuryEventHandler( 436 | channelManager, 437 | electrum 438 | ); 439 | eventHandler = EventHandler.new_impl(mercuryEventHandler); 440 | } 441 | 442 | // Step 19: Persist ChannelManager and NetworkGraph 443 | DEBUG.log("Step 19: Persist ChannelManager and NetworkGraph"); 444 | persister.persist_manager(channelManager); 445 | persister.persist_graph(networkGraph); 446 | 447 | // ************************************************************************************************ 448 | // Step 20: Background Processing 449 | DEBUG.log("Step 20: Background Processing"); 450 | 451 | // Regularly reconnect to channel peers. 452 | // peerManager?.timer_tick_occurred() - use this, checks for disconnected peers 453 | 454 | // Regularly broadcast our node_announcement. This is only required (or possible) if we have 455 | // some public channels, and is only useful if we have public listen address(es) to announce. 456 | // In a production environment, this should occur only after the announcement of new channels 457 | // to avoid churn in the global network graph. 
458 | // peerManager?.broadcast_node_announcement() 459 | 460 | // ************************************************************************************************ 461 | 462 | // Pass everything to initLDK 463 | if (chainMonitor && channelManager && peerManager && eventHandler) { 464 | const LDKInit: LightningClientInterface = { 465 | walletName: wallet_name, 466 | feeEstimator: feeEstimator, 467 | bitcoind_client: bitcoind_client, 468 | logger: logger, 469 | txBroadcasted: txBroadcasted, 470 | txBroadcaster: txBroadcaster, 471 | network: network, 472 | genesisBlock: genesisBlock, 473 | genesisBlockHash: genesisBlockHash, 474 | networkGraph: networkGraph, 475 | filter: filter, 476 | persist: persist, 477 | persister: persister, 478 | eventHandler: eventHandler, 479 | router: router, 480 | chainMonitor: chainMonitor, 481 | chainWatch: chainWatch, 482 | keysManager: keysManager, 483 | config: config, 484 | channelHandshakeConfig: channelHandshakeConfig, 485 | params: params, 486 | channelManager: channelManager, 487 | peerManager: peerManager, 488 | txdata: [], 489 | currentConnections: [], 490 | blockHeight: undefined, 491 | latestBlockHeader: undefined, 492 | netHandler: undefined, 493 | syncClient: syncClient, 494 | }; 495 | return LDKInit; 496 | } 497 | 498 | // If we can't return a LDKInit then throw 499 | throw new Error( 500 | `Unable to initialize the LDK, check values-> chainMonitor:${chainMonitor}, channelManager:${channelManager}, peerManager:${peerManager}, eventHandler:${eventHandler} \n` 501 | ); 502 | } catch (e) { 503 | throw new Error( 504 | `[initializeLDK.ts]: Unable to initalize the LDK, error occured \n ${e} \n` 505 | ); 506 | } 507 | } 508 | -------------------------------------------------------------------------------- /src/LDK/init/initializeWasm.ts: -------------------------------------------------------------------------------- 1 | import * as ldk from "lightningdevkit"; 2 | import fs from "fs"; 3 | 4 | export default async function initializeWasm() { 5 | try { 6 | const wasm_file = await fs.promises.readFile( 7 | "./node_modules/lightningdevkit/liblightningjs.wasm" 8 | ); 9 | await ldk.initializeWasmFromBinary(wasm_file); 10 | } catch (e) { 11 | throw new Error(`[initialiseWasm.ts]: InitialiseWasmError: ${e}`); 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryChannelMessageHandler.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ChannelMessageHandler, 3 | InitFeatures, 4 | OpenChannel, 5 | } from "lightningdevkit"; 6 | 7 | class MercuryChannelMessageHandler extends ChannelMessageHandler { 8 | // TODO: Lookup why this has been made into a custom class 9 | /* 10 | override handle_open_channel( 11 | their_node_id: Uint8Array, 12 | msg: OpenChannel 13 | ): void { 14 | 15 | } */ 16 | } 17 | 18 | export default MercuryChannelMessageHandler; 19 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryCustomMessageHandler.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CustomMessageHandler, 3 | Result_NoneLightningErrorZ, 4 | Type, 5 | } from "lightningdevkit"; 6 | 7 | class MercuryCustomMessageHandler extends CustomMessageHandler { 8 | override handle_custom_message( 9 | msg: Type, 10 | sender_node_id: Uint8Array 11 | ): Result_NoneLightningErrorZ { 12 | return Result_NoneLightningErrorZ.constructor_ok(); 13 | } 14 | } 15 | 16 | export default 
MercuryCustomMessageHandler; 17 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryEventHandler.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Event, 3 | EventsProvider, 4 | Event_FundingGenerationReady, 5 | // Event_PaymentReceived, 6 | Event_PaymentSent, 7 | Event_PaymentPathFailed, 8 | Event_PendingHTLCsForwardable, 9 | Event_SpendableOutputs, 10 | Event_PaymentForwarded, 11 | Event_ChannelClosed, 12 | Event_OpenChannelRequest, 13 | Event_ChannelPending, 14 | Result_NoneAPIErrorZ, 15 | Result_NoneAPIErrorZ_OK, 16 | EventHandlerInterface, 17 | ChannelManager, 18 | Event_ChannelReady, 19 | Event_PaymentClaimed, 20 | Option_PaymentFailureReasonZ_Some, 21 | Result_PaymentPreimageAPIErrorZ_OK, 22 | Result_PaymentPreimageAPIErrorZ, 23 | PaymentPurpose_InvoicePayment, 24 | PaymentPurpose, 25 | PaymentPurpose_SpontaneousPayment, 26 | Result_PaymentSecretAPIErrorZ, 27 | Event_PaymentClaimable, 28 | Event_PaymentPathSuccessful, 29 | Event_HTLCHandlingFailed, 30 | } from "lightningdevkit"; 31 | 32 | import * as bitcoin from "bitcoinjs-lib"; 33 | import { 34 | uint8ArrayToHexString, 35 | hexToUint8Array, 36 | validateSigFunction, 37 | } from "../utils/utils.js"; 38 | import { ECPairFactory } from "ecpair"; 39 | import * as ecc from "tiny-secp256k1"; 40 | import crypto from "crypto"; 41 | import chalk from "chalk"; 42 | import { Transaction } from "bitcoinjs-lib"; 43 | import fs from "fs"; 44 | import { 45 | saveChannelIdToDb, 46 | saveEventDataToDb, 47 | replaceTempChannelIdInDb, 48 | } from "../utils/ldk-utils.js"; 49 | import { ChalkColor, Logger as UtilLogger } from "../../LDK/utils/Logger.js"; 50 | const DEBUG = new UtilLogger(ChalkColor.Red, "MercuryEventHandler.ts"); 51 | 52 | const ECPair = ECPairFactory(ecc); 53 | 54 | enum HTLCStatus { 55 | Pending, 56 | Succeeded, 57 | Failed, 58 | } 59 | 60 | class MillisatAmount { 61 | value: bigint | undefined; 62 | constructor(value: bigint | undefined) { 63 | this.value = value; 64 | } 65 | } 66 | 67 | interface PaymentInfo { 68 | preimage: any; 69 | secret: any; 70 | status: HTLCStatus; 71 | amt_msat: MillisatAmount; 72 | } 73 | 74 | class MercuryEventHandler implements EventHandlerInterface { 75 | channelManager: ChannelManager; 76 | network: any; 77 | 78 | static vout: any; 79 | static txid: any; 80 | static sequence: any; 81 | 82 | payments!: Map; 83 | static value: number; 84 | static privateKey: Buffer; 85 | 86 | constructor(_channelManager: ChannelManager, electrum: string) { 87 | DEBUG.log("Constructor of MercuryEventHandler", "constructor"); 88 | this.channelManager = _channelManager; 89 | if (electrum === "prod") { 90 | this.network = bitcoin.networks.bitcoin; 91 | } else if (electrum === "testnet") { 92 | this.network = bitcoin.networks.testnet; 93 | } else { 94 | this.network = bitcoin.networks.regtest; 95 | } 96 | 97 | if (MercuryEventHandler.privateKey !== undefined) { 98 | try { 99 | let electrum_wallet = ECPair.fromPrivateKey( 100 | MercuryEventHandler.privateKey, 101 | { 102 | network: this.network, 103 | } 104 | ); 105 | const p2wpkh = bitcoin.payments.p2wpkh({ 106 | pubkey: electrum_wallet.publicKey, 107 | network: this.network, 108 | }); 109 | DEBUG.log("Pay to this address: " + p2wpkh.address); 110 | } catch (e) { 111 | DEBUG.log("Error on reading private key" + e, "constructor"); 112 | } 113 | } 114 | 115 | this.payments = new Map(); 116 | } 117 | 118 | handle_event(e: any) { 119 | saveEventDataToDb(e); 120 | switch 
(true) { 121 | case e instanceof Event_FundingGenerationReady: 122 | this.handleFundingGenerationReadyEvent_Auto(e); 123 | break; 124 | case e instanceof Event_ChannelPending: 125 | this.handleChannelPendingEvent(e); 126 | break; 127 | case e instanceof Event_PaymentClaimed: 128 | this.handlePaymentClaimed(e); 129 | break; 130 | case e instanceof Event_PaymentClaimable: 131 | this.handlePaymentClaimable(e); 132 | break; 133 | case e instanceof Event_PaymentSent: 134 | this.handlePaymentSentEvent(e); 135 | break; 136 | case e instanceof Event_PaymentPathFailed: 137 | this.handlePaymentPathFailedEvent(e); 138 | break; 139 | case e instanceof Event_PendingHTLCsForwardable: 140 | this.handlePendingHTLCsForwardableEvent(e); 141 | break; 142 | case e instanceof Event_SpendableOutputs: 143 | this.handleSpendableOutputsEvent(e); 144 | break; 145 | case e instanceof Event_PaymentForwarded: 146 | this.handlePaymentForwardedEvent(e); 147 | break; 148 | case e instanceof Event_OpenChannelRequest: 149 | this.handleOpenChannelRequestEvent(e); 150 | break; 151 | case e instanceof Event_ChannelClosed: 152 | this.handleChannelClosedEvent(e); 153 | break; 154 | case e instanceof Event_ChannelReady: 155 | this.handleChannelReadyEvent(e); 156 | break; 157 | case e instanceof Event_PaymentPathSuccessful: 158 | this.handlePaymentPathSuccessful(e); 159 | break; 160 | case e instanceof Event_HTLCHandlingFailed: 161 | this.handleHTLCHandlingFailed(e); 162 | break; 163 | default: 164 | console.debug("[MercuryEventHandler.ts]: Event not handled: ", e); 165 | } 166 | } 167 | handleHTLCHandlingFailed(e: any) {} 168 | 169 | handlePaymentPathSuccessful(e: Event_PaymentPathSuccessful) {} 170 | 171 | handlePaymentClaimable(e: Event_PaymentClaimable) { 172 | const { payment_hash, amount_msat, purpose } = e; 173 | DEBUG.log( 174 | `received payment from payment hash ${uint8ArrayToHexString( 175 | payment_hash 176 | )} of ${amount_msat} millisatoshis`, 177 | "handlePaymentClaimable" 178 | ); 179 | let payment_preimage: PaymentPurpose; 180 | if (purpose instanceof PaymentPurpose_InvoicePayment) { 181 | DEBUG.log( 182 | "purpose is instance of PaymentPurpose_InvoicePayment", 183 | "handlePaymentClaimable" 184 | ); 185 | payment_preimage = PaymentPurpose.constructor_invoice_payment( 186 | purpose.payment_preimage, 187 | purpose.payment_secret 188 | ); 189 | this.channelManager.claim_funds(purpose.payment_preimage); 190 | } else if (purpose instanceof PaymentPurpose_SpontaneousPayment) { 191 | DEBUG.log( 192 | "purpose is instance of PaymentPurpose_SpontaneousPayment", 193 | "handlePaymentClaimable" 194 | ); 195 | payment_preimage = PaymentPurpose.constructor_spontaneous_payment( 196 | purpose.spontaneous_payment 197 | ); 198 | this.channelManager.claim_funds(purpose.spontaneous_payment); 199 | } 200 | } 201 | 202 | handlePaymentClaimed(e: Event_PaymentClaimed) { 203 | const { payment_hash, purpose, amount_msat, receiver_node_id, clone_ptr } = 204 | e; 205 | console.log( 206 | `[MercuryEventHandler.ts]: EVENT: claimed payment from payment hash ${uint8ArrayToHexString( 207 | payment_hash 208 | )} of ${amount_msat} millisatoshis` 209 | ); 210 | const { payment_preimage, payment_secret } = (() => { 211 | if (purpose instanceof PaymentPurpose_InvoicePayment) { 212 | return { 213 | payment_preimage: purpose.payment_preimage, 214 | payment_secret: purpose.payment_secret, 215 | }; 216 | } else if (purpose instanceof PaymentPurpose_SpontaneousPayment) { 217 | return { 218 | payment_preimage: purpose.spontaneous_payment, 219 | 
payment_secret: null, 220 | }; 221 | } else { 222 | throw new Error("Invalid payment purpose"); 223 | } 224 | })(); 225 | 226 | if (this.payments.has(e.payment_hash)) { 227 | const payment = this.payments.get(e.payment_hash); 228 | if (payment) { 229 | payment.status = HTLCStatus.Succeeded; 230 | payment.preimage = payment_preimage; 231 | payment.secret = payment_secret; 232 | } 233 | } else { 234 | this.payments.set(e.payment_hash, { 235 | preimage: payment_preimage, 236 | secret: payment_secret, 237 | status: HTLCStatus.Succeeded, 238 | amt_msat: new MillisatAmount(amount_msat), 239 | }); 240 | } 241 | 242 | console.log(payment_preimage, payment_secret); 243 | } 244 | 245 | setChannelManager(channelManager: ChannelManager) { 246 | this.channelManager = channelManager; 247 | } 248 | 249 | static validateTx(txData: any): void { 250 | // validate txData 251 | if (!txData || typeof txData !== "object") { 252 | throw new Error("Invalid transaction data provided"); 253 | } 254 | if (txData.vout === undefined) { 255 | throw new Error("Invalid vout was set in txid"); 256 | } 257 | if (txData.txid === undefined) { 258 | throw new Error("Invalid txid was set in txid"); 259 | } 260 | if (txData.sequence === undefined) { 261 | throw new Error("Invalid sequence was set in txid"); 262 | } 263 | } 264 | 265 | static setInputTx(txData: any, payment_address: string) { 266 | this.validateTx(txData); 267 | let matchingVoutIndex = -1; // Initialize with -1 if no match is found 268 | let amount = 0; 269 | 270 | for (let i = 0; i < txData.vout.length; i++) { 271 | if (txData.vout[i].scriptpubkey_address === payment_address) { 272 | matchingVoutIndex = i; 273 | amount = txData.vout[i].value; 274 | break; // Exit the loop once a match is found 275 | } 276 | } 277 | 278 | if (matchingVoutIndex === -1) { 279 | throw new Error( 280 | `No matching vout found for payment address: ${payment_address}` 281 | ); 282 | } 283 | 284 | // vout is equal to the one with the same payment address we passed in 285 | MercuryEventHandler.vout = matchingVoutIndex; 286 | MercuryEventHandler.value = amount; 287 | MercuryEventHandler.txid = txData.txid; 288 | MercuryEventHandler.sequence = txData.sequence; 289 | } 290 | 291 | resetInputTx() { 292 | MercuryEventHandler.vout = null; 293 | MercuryEventHandler.txid = null; 294 | MercuryEventHandler.sequence = null; 295 | } 296 | 297 | handleFundingGenerationReadyEvent_Manual( 298 | event: Event_FundingGenerationReady 299 | ) { 300 | const { 301 | temporary_channel_id, 302 | counterparty_node_id, 303 | channel_value_satoshis, 304 | output_script, 305 | } = event; 306 | 307 | // create funding transaction 308 | const witness_pos = output_script.length + 58; 309 | const funding_tx = new Uint8Array(witness_pos + 7); 310 | funding_tx[0] = 2; // 4-byte tx version 2 311 | funding_tx[4] = 0; 312 | funding_tx[5] = 1; // segwit magic bytes 313 | funding_tx[6] = 1; // 1-byte input count 1 314 | // 36 bytes previous outpoint all-0s 315 | funding_tx[43] = 0; // 1-byte input script length 0 316 | funding_tx[44] = 0xff; 317 | funding_tx[45] = 0xff; 318 | funding_tx[46] = 0xff; 319 | funding_tx[47] = 0xff; // 4-byte nSequence 320 | funding_tx[48] = 1; // one output 321 | const channelValueBuffer = Buffer.alloc(8); 322 | const channelValueNumber = parseInt(channel_value_satoshis.toString(), 10); 323 | channelValueBuffer.writeUInt32LE(channelValueNumber, 0); 324 | funding_tx.set(channelValueBuffer, 49); 325 | funding_tx[57] = output_script.length; // 1-byte output script length 326 | 
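// Bytes 58.. carry the output script supplied by LDK in Event_FundingGenerationReady;
// the funding output must pay to exactly this script (the channel's 2-of-2 P2WSH).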
funding_tx.set(output_script, 58); 327 | funding_tx[witness_pos] = 1; 328 | funding_tx[witness_pos + 1] = 1; 329 | funding_tx[witness_pos + 2] = 0xff; // one witness element of size 1 with contents 0xff 330 | funding_tx[witness_pos + 3] = 0; 331 | funding_tx[witness_pos + 4] = 0; 332 | funding_tx[witness_pos + 5] = 0; 333 | funding_tx[witness_pos + 6] = 0; // lock time 0 334 | 335 | console.log( 336 | "[MercuryEventHandler.ts]: funding_tx->", 337 | uint8ArrayToHexString(funding_tx) 338 | ); 339 | 340 | let fund = this.channelManager.funding_transaction_generated( 341 | temporary_channel_id, 342 | counterparty_node_id, 343 | funding_tx 344 | ); 345 | } 346 | 347 | validator = (pubkey: Buffer, msghash: Buffer, signature: Buffer): boolean => 348 | ECPair.fromPublicKey(pubkey).verify(msghash, signature); 349 | 350 | validateFundingEvent(output_script: Uint8Array) { 351 | // validate event 352 | if ( 353 | output_script.length !== 34 && 354 | output_script[0] !== 0 && 355 | output_script[1] !== 32 356 | ) { 357 | return; 358 | } 359 | } 360 | 361 | async handleFundingGenerationReadyEvent_Auto( 362 | event: Event_FundingGenerationReady 363 | ) { 364 | const { 365 | temporary_channel_id, 366 | counterparty_node_id, 367 | channel_value_satoshis, 368 | output_script, 369 | } = event; 370 | 371 | if (MercuryEventHandler.privateKey === undefined) 372 | throw Error("[MercuryEventHandler.ts]: private key is undefined"); 373 | 374 | let electrum_wallet = ECPair.fromPrivateKey( 375 | MercuryEventHandler.privateKey, 376 | { 377 | network: this.network, 378 | } 379 | ); 380 | if (electrum_wallet === undefined) 381 | throw Error("[MercuryEventHandler.ts]: electrum wallet is undefined"); 382 | 383 | // Create the psbt transaction 384 | const psbt = new bitcoin.Psbt({ network: this.network }); 385 | psbt.setVersion(2); 386 | psbt.setLocktime(0); 387 | const p2wpkh = bitcoin.payments.p2wpkh({ 388 | pubkey: electrum_wallet.publicKey, 389 | network: this.network, 390 | }); 391 | let address = p2wpkh.address; 392 | if (address === undefined) throw Error("No address found."); 393 | 394 | console.log( 395 | chalk.red( 396 | "[MercuryEventHandler.ts]: SEND TO THIS ADDRESS --------->", 397 | address 398 | ) 399 | ); 400 | 401 | // validation again 402 | if (p2wpkh.output === undefined) { 403 | throw Error("[MercuryEventHandler.ts]: p2wpkh output is undefined"); 404 | } 405 | if (MercuryEventHandler.txid === null) { 406 | throw Error("[MercuryEventHandler.ts]: No TXID was set"); 407 | } 408 | if (MercuryEventHandler.vout === null) { 409 | throw Error("[MercuryEventHandler.ts]: No VOUT was set"); 410 | } 411 | if (MercuryEventHandler.sequence === null) { 412 | throw Error("[MercuryEventHandler.ts]: No sequence was set"); 413 | } 414 | 415 | let funding_output = parseInt(channel_value_satoshis.toString(), 10); 416 | 417 | psbt.addInput({ 418 | hash: MercuryEventHandler.txid, 419 | index: MercuryEventHandler.vout, 420 | witnessUtxo: { 421 | script: bitcoin.address.toOutputScript(address, this.network), 422 | value: MercuryEventHandler.value, 423 | }, 424 | }); 425 | psbt.addOutput({ 426 | script: Buffer.from(output_script), 427 | value: funding_output, 428 | }); 429 | psbt.signAllInputs(electrum_wallet); 430 | psbt.validateSignaturesOfAllInputs(this.validator); 431 | psbt.finalizeAllInputs(); 432 | 433 | let funding: Transaction = psbt.extractTransaction(); 434 | let funding_tx: Uint8Array = funding.toBuffer(); 435 | 436 | try { 437 | DEBUG.log( 438 | "Sending the funding transaction to the channel manager", 439 | 
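// Note: funding_transaction_generated() below only hands the signed funding tx to LDK;
// LDK broadcasts it once the counterparty's funding_signed message has arrived.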
"handleFundingGenerationReadyEvent_Auto" 440 | ); 441 | // Send the funding transaction to the channel manager 442 | let result: any = this.channelManager.funding_transaction_generated( 443 | temporary_channel_id, 444 | counterparty_node_id, 445 | funding_tx 446 | ); 447 | 448 | DEBUG.log("RESULT WAS->" + result); 449 | } catch (e) { 450 | console.error( 451 | "[MercuryEventHandler.ts]: error occured in funding transaction generated method.." 452 | ); 453 | } 454 | } 455 | 456 | handleChannelReadyEvent(e: Event_ChannelReady) { 457 | DEBUG.log(`Channel ready ${e}`, "handleChannelReadyEvent"); 458 | DEBUG.log( 459 | `EVENT: Channel ${uint8ArrayToHexString( 460 | e.channel_id 461 | )} with peer ${uint8ArrayToHexString( 462 | e.counterparty_node_id 463 | )} is ready to be used!` 464 | ); 465 | } 466 | 467 | handleChannelPendingEvent(event: Event_ChannelPending) { 468 | const { 469 | channel_id, 470 | user_channel_id, 471 | former_temporary_channel_id, 472 | counterparty_node_id, 473 | funding_txo, 474 | } = event; 475 | const node_id = uint8ArrayToHexString(counterparty_node_id); 476 | const pubkey = node_id.split("@")[0]; 477 | 478 | const channel_id_str = uint8ArrayToHexString(channel_id); 479 | const temp_channel_id_str = uint8ArrayToHexString( 480 | former_temporary_channel_id 481 | ); 482 | if (pubkey !== undefined && channel_id_str !== undefined) { 483 | saveChannelIdToDb(channel_id_str, pubkey); 484 | replaceTempChannelIdInDb(channel_id_str, temp_channel_id_str); 485 | } 486 | } 487 | 488 | handlePaymentSentEvent(e: Event_PaymentSent) { 489 | console.log( 490 | `[MercuryEventHandler.ts]: Payment with preimage '${uint8ArrayToHexString( 491 | e.payment_preimage 492 | )}' sent.` 493 | ); 494 | } 495 | 496 | handlePaymentPathFailedEvent(e: Event_PaymentPathFailed) { 497 | console.log( 498 | `[MercuryEventHandler.ts]: Payment with payment hash '${uint8ArrayToHexString( 499 | e.payment_hash 500 | )}' failed.` 501 | ); 502 | } 503 | 504 | handlePendingHTLCsForwardableEvent(e: Event_PendingHTLCsForwardable) { 505 | this.channelManager.process_pending_htlc_forwards(); 506 | } 507 | 508 | handleSpendableOutputsEvent(e: Event_SpendableOutputs) { 509 | // var tx = this.keyManager.spend_spendable_outputs( 510 | // e.outputs, 511 | // [], 512 | // Hex.decode(refundAddress), 513 | // feeEstimator.get_est_sat_per_1000_weight( 514 | // ConfirmationTarget.LDKConfirmationTarget_HighPriority 515 | // ) 516 | // ); 517 | // if (tx instanceof Result_TransactionNoneZ.Result_TransactionNoneZ_OK) { 518 | // chainBackend.publish(tx.res); 519 | // } 520 | } 521 | 522 | handlePaymentForwardedEvent(event: Event_PaymentForwarded) { 523 | const { 524 | prev_channel_id, //: Uint8Array; 525 | next_channel_id, //: Uint8Array; 526 | fee_earned_msat, //: Option_u64Z; 527 | claim_from_onchain_tx, //: boolean; 528 | } = event; 529 | 530 | console.log( 531 | "[MercuryEventHandler.ts]: Received payment forwarded event", 532 | event 533 | ); 534 | } 535 | 536 | handleOpenChannelRequestEvent(event: Event_OpenChannelRequest) { 537 | const { 538 | temporary_channel_id, // Uint8Array 539 | counterparty_node_id, // Uint8Array 540 | funding_satoshis, // bigint 541 | push_msat, // bigint 542 | channel_type, // ChannelTypeFeatures 543 | } = event; 544 | 545 | console.log( 546 | "[MercuryEventHandler.ts]: Received open channel request:", 547 | event 548 | ); 549 | } 550 | 551 | handleChannelClosedEvent(event: Event_ChannelClosed) { 552 | console.log("[MercuryEventHandler.ts]: Event Channel Closed!", event); 553 | } 554 | } 555 | 556 | 
export default MercuryEventHandler; 557 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryFeeEstimator.mts: -------------------------------------------------------------------------------- 1 | import { ConfirmationTarget, FeeEstimator, FeeEstimatorInterface } from "lightningdevkit"; 2 | 3 | var feerate_fast = 253; // estimate fee rate in BTC/kB 4 | var feerate_medium = 253; // estimate fee rate in BTC/kB 5 | var feerate_slow = 253; // estimate fee rate in BTC/kB 6 | 7 | class MercuryFeeEstimator implements FeeEstimatorInterface{ 8 | get_est_sat_per_1000_weight(confirmation_target: ConfirmationTarget): number { 9 | switch (confirmation_target) { 10 | case ConfirmationTarget.LDKConfirmationTarget_Background: 11 | // insert code to retireve a background feerate 12 | return feerate_slow; 13 | case ConfirmationTarget.LDKConfirmationTarget_Normal: 14 | // 15 | return feerate_medium; 16 | case ConfirmationTarget.LDKConfirmationTarget_HighPriority: 17 | // 18 | return feerate_fast; 19 | default: 20 | return 253; 21 | } 22 | } 23 | } 24 | 25 | export default MercuryFeeEstimator; 26 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryFilter.ts: -------------------------------------------------------------------------------- 1 | import { FilterInterface, WatchedOutput } from "lightningdevkit"; 2 | 3 | class MercuryFilter implements FilterInterface { 4 | register_tx(txid: Uint8Array, script_pubkey: Uint8Array): void {} 5 | 6 | register_output(output: WatchedOutput): void {} 7 | } 8 | 9 | export default MercuryFilter; 10 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryLogger.ts: -------------------------------------------------------------------------------- 1 | import { Level, LoggerInterface } from "lightningdevkit"; 2 | 3 | class MercuryLogger implements LoggerInterface{ 4 | log(record: any){ 5 | if (record.get_level() == Level.LDKLevel_Gossip) return; 6 | console.log(record.get_module_path() + ": " + record.get_args()); 7 | } 8 | } 9 | 10 | export default MercuryLogger; -------------------------------------------------------------------------------- /src/LDK/structs/MercuryOnionMessageHandler.ts: -------------------------------------------------------------------------------- 1 | import { OnionMessage, OnionMessageHandler } from "lightningdevkit"; 2 | 3 | class MercuryOnionMessageHandler extends OnionMessageHandler { 4 | override handle_onion_message( 5 | peer_node_id: Uint8Array, 6 | msg: OnionMessage 7 | ): void { 8 | // do something here 9 | } 10 | } 11 | 12 | export default MercuryOnionMessageHandler; 13 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryPersist.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ChannelMonitor, 3 | ChannelMonitorUpdate, 4 | ChannelMonitorUpdateStatus, 5 | MonitorUpdateId, 6 | OutPoint, 7 | PersistInterface, 8 | } from "lightningdevkit"; 9 | import fs from "fs"; 10 | import path from "path"; 11 | 12 | class MercuryPersist implements PersistInterface { 13 | private rootPath: string; 14 | private channelsDict = []; 15 | 16 | private CHANNELS_DICT_FILE = "channel_lookup.json"; 17 | 18 | constructor(_walletName: string) { 19 | this.rootPath = "./wallets/" + _walletName + "/channels"; 20 | this.loadChannelsDict(); 21 | } 22 | 23 | private loadChannelsDict() { 24 | const dictPath = 
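// channel_lookup.json maps each channel id to its channelMonitor_*.dat / channelId_*.dat
// files under ./wallets/<walletName>/channels.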
path.join(this.rootPath, this.CHANNELS_DICT_FILE); 25 | try { 26 | const dictString = fs.readFileSync(dictPath, "utf8"); 27 | this.channelsDict = JSON.parse(dictString); 28 | } catch (err) { 29 | if (!fs.existsSync(dictPath)) { 30 | fs.writeFileSync(dictPath, "[]"); // Create an empty file if it doesn't exist 31 | console.log(`Created channels dictionary file: ${dictPath}`); 32 | this.channelsDict = []; 33 | } else { 34 | console.warn(`Failed to load channels dictionary: ${err}`); 35 | } 36 | } 37 | } 38 | 39 | private saveChannelsDict() { 40 | const dictPath = path.join(this.rootPath, this.CHANNELS_DICT_FILE); 41 | fs.writeFileSync(dictPath, JSON.stringify(this.channelsDict), "utf8"); 42 | } 43 | 44 | private getNextFileName(): string { 45 | const count = Object.keys(this.channelsDict).length + 1; 46 | return `${count}.dat`; 47 | } 48 | 49 | private getChannelFileName(channelId: OutPoint): string | null { 50 | const channelIdStr = channelId.to_channel_id().toString(); 51 | 52 | try { 53 | const channelLookupData = fs.readFileSync( 54 | this.rootPath + "/channel_lookup.json", 55 | "utf8" 56 | ); 57 | const channelLookup = JSON.parse(channelLookupData); 58 | 59 | for (const entry of channelLookup) { 60 | if (entry.key === channelIdStr) { 61 | return entry.monitor_file_name; 62 | } 63 | } 64 | } catch (error) { 65 | console.error("Error reading channel_lookup.json:", error); 66 | } 67 | 68 | return null; // Return null if no matching entry is found or an error occurs 69 | } 70 | 71 | private getHighestFileCounter(lookup: Array): number { 72 | let highestCounter = 0; 73 | lookup.forEach((entry) => { 74 | const monitor_file_name_parts = entry.monitor_file_name.split("_"); 75 | const id_file_name_parts = entry.id_file_name.split("_"); 76 | if ( 77 | monitor_file_name_parts[0] === "channelMonitor" && 78 | id_file_name_parts[0] === "channelId" 79 | ) { 80 | const fileCounter = parseInt(monitor_file_name_parts[1]); 81 | if (fileCounter > highestCounter) { 82 | highestCounter = fileCounter; 83 | } 84 | } 85 | }); 86 | return highestCounter; 87 | } 88 | 89 | private createLookupFile() { 90 | const lookup: Array = []; 91 | 92 | if (!fs.existsSync(this.rootPath)) { 93 | fs.mkdirSync(this.rootPath); 94 | } 95 | fs.writeFileSync( 96 | this.rootPath + "/channel_lookup.json", 97 | JSON.stringify(lookup) 98 | ); 99 | } 100 | 101 | persist_new_channel( 102 | channel_id: OutPoint, 103 | data: ChannelMonitor, 104 | update_id: MonitorUpdateId 105 | ): ChannelMonitorUpdateStatus { 106 | try { 107 | const channel_monitor_bytes = data.write(); // serialize the channel monitor data 108 | const channel_id_bytes = channel_id.write(); // serialize the channel ID 109 | 110 | // check if lookup file exists, and create it if it doesn't 111 | if (!fs.existsSync(this.rootPath + "/channel_lookup.json")) { 112 | this.createLookupFile(); 113 | } 114 | 115 | // read existing lookup file 116 | const lookup = JSON.parse( 117 | fs.readFileSync(this.rootPath + "/channel_lookup.json").toString() 118 | ); 119 | 120 | // check if channel ID already exists in lookup table 121 | const existingEntryIndex = lookup.findIndex( 122 | (entry: any) => entry.key === channel_id.to_channel_id().toString() 123 | ); 124 | if (existingEntryIndex >= 0) { 125 | console.log( 126 | `Channel ID ${channel_id 127 | .to_channel_id() 128 | .toString()} already exists in lookup table` 129 | ); 130 | 131 | // Replace the existing channelmonitor bytes with the new one 132 | const existingEntry = lookup[existingEntryIndex]; 133 | const existingMonitorFilePath 
= existingEntry.monitor_file_name; 134 | fs.writeFileSync(existingMonitorFilePath, channel_monitor_bytes); 135 | 136 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_Completed; 137 | } 138 | 139 | // generate file names based on the highest existing file counter in the lookup table 140 | let fileCounter = 0; 141 | lookup.forEach((entry: any) => { 142 | const parts = entry.monitor_file_name.split("_"); 143 | if (parts.length === 2 && parts[0] === "channels/channelMonitor") { 144 | const counter = parseInt(parts[1].replace(".dat", ""), 10); 145 | if (!isNaN(counter) && counter > fileCounter) { 146 | fileCounter = counter; 147 | } 148 | } 149 | }); 150 | fileCounter += 1; 151 | const monitor_file_name = `${this.rootPath}/channelMonitor_${fileCounter}.dat`; 152 | const id_file_name = `${this.rootPath}/channelId_${fileCounter}.dat`; 153 | 154 | // save the channel monitor data to a file 155 | fs.writeFileSync(monitor_file_name, channel_monitor_bytes); 156 | 157 | // save the channel ID to a file 158 | fs.writeFileSync(id_file_name, channel_id_bytes); 159 | 160 | let key = channel_id.to_channel_id().toString(); 161 | // save the file names to the lookup file 162 | const newEntry = { key, monitor_file_name, id_file_name }; 163 | lookup.push(newEntry); // add new entry to the lookup array 164 | fs.writeFileSync( 165 | this.rootPath + "/channel_lookup.json", 166 | JSON.stringify(lookup) 167 | ); // write updated lookup back to file 168 | 169 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_Completed; 170 | } catch (e) { 171 | console.error("Error occurred in persist_new_channel", e); 172 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_PermanentFailure; 173 | } 174 | } 175 | 176 | update_persisted_channel( 177 | channel_id: OutPoint, 178 | update: ChannelMonitorUpdate, 179 | data: ChannelMonitor, 180 | update_id: MonitorUpdateId 181 | ): ChannelMonitorUpdateStatus { 182 | try { 183 | const channelIdStr = channel_id.to_channel_id().toString(); 184 | const file_name = this.getChannelFileName(channel_id); 185 | if (!file_name) { 186 | console.error(`Could not find file name for channel ${channelIdStr}`); 187 | console.error(`couldn't find filename: ${file_name}`); 188 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_PermanentFailure; 189 | } 190 | const channel_monitor_bytes = data.write(); 191 | fs.writeFileSync(file_name, channel_monitor_bytes); 192 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_Completed; 193 | } catch (e) { 194 | console.error("Error occurred in update_persisted_channel", e); 195 | return ChannelMonitorUpdateStatus.LDKChannelMonitorUpdateStatus_PermanentFailure; 196 | } 197 | } 198 | } 199 | 200 | export default MercuryPersist; 201 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryPersister.ts: -------------------------------------------------------------------------------- 1 | import fs from "fs"; 2 | import { 3 | ChannelManager, 4 | NetworkGraph, 5 | PersisterInterface, 6 | Result_NoneErrorZ, 7 | WriteableScore, 8 | } from "lightningdevkit"; 9 | 10 | class MercuryPersister implements PersisterInterface { 11 | private rootPath: string; 12 | 13 | constructor(_walletName: string) { 14 | this.rootPath = "./wallets/" + _walletName; 15 | 16 | if (!fs.existsSync(this.rootPath)) { 17 | fs.mkdirSync(this.rootPath); 18 | } 19 | } 20 | 21 | persist_manager(channel_manager: ChannelManager): Result_NoneErrorZ { 22 | let data = 
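// ChannelManager.write() serializes the full manager state; the write below is made
// atomic by writing to a temp file first and then renaming it into place.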
channel_manager.write(); 23 | let tempFilePath = this.rootPath + "/channel_manager_data_temp.bin"; 24 | try { 25 | const buffer = Buffer.from(data); 26 | 27 | // Write data to a temporary file 28 | fs.writeFileSync(tempFilePath, buffer); 29 | 30 | // Rename the temporary file to the final file name 31 | fs.renameSync(tempFilePath, this.rootPath + "/channel_manager_data.bin"); 32 | 33 | return Result_NoneErrorZ.constructor_ok(); 34 | } catch (e: any) { 35 | // Handle any errors and delete the temporary file if it exists 36 | if (fs.existsSync(tempFilePath)) { 37 | fs.unlinkSync(tempFilePath); 38 | } 39 | return Result_NoneErrorZ.constructor_err(e); 40 | } 41 | } 42 | persist_graph(network_graph: NetworkGraph): Result_NoneErrorZ { 43 | let data = network_graph.write(); 44 | let tempFilePath = this.rootPath + "/network_graph_data_temp.bin"; 45 | try { 46 | const buffer = Buffer.from(data); 47 | 48 | // Write data to a temporary file 49 | fs.writeFileSync(tempFilePath, buffer); 50 | 51 | // Rename the temporary file to the final file name 52 | fs.renameSync(tempFilePath, this.rootPath + "/network_graph_data.bin"); 53 | 54 | return Result_NoneErrorZ.constructor_ok(); 55 | } catch (e: any) { 56 | // Handle any errors and delete the temporary file if it exists 57 | if (fs.existsSync(tempFilePath)) { 58 | fs.unlinkSync(tempFilePath); 59 | } 60 | return Result_NoneErrorZ.constructor_err(e); 61 | } 62 | } 63 | 64 | persist_scorer(scorer: WriteableScore): Result_NoneErrorZ { 65 | let data = scorer.write(); 66 | let tempFilePath = this.rootPath + "/writable_score_data_temp.bin"; 67 | try { 68 | // write to disk 69 | const buffer = Buffer.from(data); 70 | 71 | // Write data to a temporary file 72 | fs.writeFileSync(tempFilePath, buffer); 73 | 74 | // Rename the temporary file to the final file name 75 | fs.renameSync(tempFilePath, this.rootPath + "/writable_score_data.bin"); 76 | 77 | return Result_NoneErrorZ.constructor_ok(); 78 | } catch (e: any) { 79 | // Handle any errors and delete the temporary file if it exists 80 | if (fs.existsSync(tempFilePath)) { 81 | fs.unlinkSync(tempFilePath); 82 | } 83 | return Result_NoneErrorZ.constructor_err(e); 84 | } 85 | } 86 | } 87 | 88 | export default MercuryPersister; 89 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryRouter.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ChannelDetails, 3 | InFlightHtlcs, 4 | Result_RouteLightningErrorZ, 5 | RouteParameters, 6 | RouterInterface, 7 | } from "lightningdevkit"; 8 | 9 | class MercuryRouter implements RouterInterface { 10 | find_route( 11 | payer: Uint8Array, 12 | route_params: RouteParameters, 13 | first_hops: ChannelDetails[], 14 | inflight_htlcs: InFlightHtlcs 15 | ): Result_RouteLightningErrorZ { 16 | throw new Error("Method not implemented."); 17 | } 18 | find_route_with_id( 19 | payer: Uint8Array, 20 | route_params: RouteParameters, 21 | first_hops: ChannelDetails[], 22 | inflight_htlcs: InFlightHtlcs, 23 | _payment_hash: Uint8Array, 24 | _payment_id: Uint8Array 25 | ): Result_RouteLightningErrorZ { 26 | throw new Error("Method not implemented."); 27 | } 28 | } 29 | 30 | export default MercuryRouter; 31 | -------------------------------------------------------------------------------- /src/LDK/structs/MercuryRoutingMessageHandler.ts: -------------------------------------------------------------------------------- 1 | import { 2 | NodeAnnouncement, 3 | Result_boolLightningErrorZ, 4 | 
RoutingMessageHandler, 5 | } from "lightningdevkit"; 6 | 7 | class MercuryRoutingMessageHandler extends RoutingMessageHandler { 8 | /* 9 | override handle_node_announcement( 10 | msg: NodeAnnouncement 11 | ): Result_boolLightningErrorZ {}*/ 12 | } 13 | 14 | export default MercuryRoutingMessageHandler; 15 | -------------------------------------------------------------------------------- /src/LDK/structs/NodeLDKNet.mts: -------------------------------------------------------------------------------- 1 | import * as ldk from "lightningdevkit"; 2 | import * as net from "net"; 3 | 4 | /** 5 | * Handles TCP connections using Node.JS's 'net' module given an `ldk.PeerManager`. 6 | */ 7 | export class NodeLDKNet { 8 | private ping_timer; 9 | private servers: net.Server[]; 10 | public constructor(public peer_manager: ldk.PeerManager) { 11 | this.ping_timer = setInterval(function () { 12 | peer_manager.timer_tick_occurred(); 13 | peer_manager.process_events(); 14 | }, 10_000); 15 | this.servers = []; 16 | } 17 | 18 | /** 19 | * Disconnects all connections and releases all resources for this net handler. 20 | */ 21 | public stop() { 22 | clearInterval(this.ping_timer); 23 | for (const server of this.servers) { 24 | server.close(); 25 | } 26 | this.peer_manager.disconnect_all_peers(); 27 | } 28 | 29 | /** 30 | * Processes any pending events for the PeerManager, sending queued messages. 31 | * You should call this (or peer_manager.process_events()) any time you take an action which 32 | * is likely to generate messages to send (eg send a payment, processing payment forwards, 33 | * etc). 34 | */ 35 | public process_events() { 36 | this.peer_manager.process_events(); 37 | } 38 | 39 | private descriptor_count = BigInt(0); 40 | private get_descriptor(socket: net.Socket): ldk.SocketDescriptor { 41 | const this_index = this.descriptor_count; 42 | this.descriptor_count += BigInt(1); 43 | 44 | socket.setNoDelay(true); 45 | 46 | const this_pm = this.peer_manager; 47 | var sock_write_waiting = false; 48 | 49 | let descriptor = ldk.SocketDescriptor.new_impl({ 50 | send_data(data: Uint8Array, resume_read: boolean): number { 51 | if (resume_read) socket.resume(); 52 | 53 | if (sock_write_waiting) return 0; 54 | const written = socket.write(data); 55 | if (!written) sock_write_waiting = true; 56 | return data.length; 57 | }, 58 | disconnect_socket(): void { 59 | socket.destroy(); 60 | }, 61 | eq(other: ldk.SocketDescriptor): boolean { 62 | return other.hash() == this.hash(); 63 | }, 64 | hash(): bigint { 65 | return this_index; 66 | }, 67 | } as ldk.SocketDescriptorInterface); 68 | 69 | socket.on("drain", function () { 70 | if (sock_write_waiting) { 71 | if (!this_pm.write_buffer_space_avail(descriptor).is_ok()) { 72 | descriptor.disconnect_socket(); 73 | } 74 | } 75 | }); 76 | 77 | socket.on("data", function (data) { 78 | const res = this_pm.read_event(descriptor, data); 79 | if (!res.is_ok()) descriptor.disconnect_socket(); 80 | else if ((res as ldk.Result_boolPeerHandleErrorZ_OK).res) socket.pause(); 81 | this_pm.process_events(); 82 | }); 83 | 84 | socket.on("close", function () { 85 | this_pm.socket_disconnected(descriptor); 86 | }); 87 | socket.on("error", function () { 88 | this_pm.socket_disconnected(descriptor); 89 | }); 90 | 91 | return descriptor; 92 | } 93 | 94 | private static v4_addr_from_ip(ip: string, port: number): ldk.NetAddress { 95 | const sockaddr = ip.split(".").map(parseFloat); 96 | return ldk.NetAddress.constructor_ipv4(new Uint8Array(sockaddr), port); 97 | } 98 | private static 
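// Expands an IPv6 literal (including the "::" shorthand) into the 16-byte big-endian
// form expected by ldk.NetAddress.constructor_ipv6.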
v6_addr_from_ip(ip: string, port: number): ldk.NetAddress { 99 | const sockaddr = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; 100 | const halves = ip.split("::"); // either one or two elements 101 | if (halves[0]) { 102 | const first_half = halves[0].split(":"); 103 | for (var idx = 0; idx < first_half.length; idx++) { 104 | const v = parseInt(first_half[idx] || "0", 16); 105 | sockaddr[idx * 2] = v >> 8; 106 | sockaddr[idx * 2 + 1] = v & 0xff; 107 | } 108 | } 109 | if (halves.length == 2) { 110 | if (halves[1]) { 111 | const second_half = halves[1].split(":"); 112 | for (var idx = 0; idx < second_half.length; idx++) { 113 | const v = parseInt( 114 | second_half[second_half.length - idx - 1] || "0", 115 | 16 116 | ); 117 | sockaddr[14 - idx * 2] = v >> 8; 118 | sockaddr[15 - idx * 2] = v & 0xff; 119 | } 120 | } 121 | } 122 | 123 | return ldk.NetAddress.constructor_ipv6(new Uint8Array(sockaddr), port); 124 | } 125 | 126 | private static get_addr_from_socket( 127 | socket: net.Socket 128 | ): ldk.Option_NetAddressZ { 129 | const addr = socket.remoteAddress; 130 | if (addr === undefined) return ldk.Option_NetAddressZ.constructor_none(); 131 | if (net.isIPv4(addr) && socket.remotePort) { 132 | return ldk.Option_NetAddressZ.constructor_some( 133 | NodeLDKNet.v4_addr_from_ip(addr, socket.remotePort) 134 | ); 135 | } 136 | if (net.isIPv6(addr) && socket.remotePort) { 137 | return ldk.Option_NetAddressZ.constructor_some( 138 | NodeLDKNet.v6_addr_from_ip(addr, socket.remotePort) 139 | ); 140 | } 141 | return ldk.Option_NetAddressZ.constructor_none(); 142 | } 143 | 144 | /** 145 | * Binds a listener on the given host and port, accepting incoming connections. 146 | */ 147 | public async bind_listener(host: string, port: number) { 148 | const this_handler = this; 149 | const server = net.createServer(function (incoming_sock: net.Socket) { 150 | const descriptor = this_handler.get_descriptor(incoming_sock); 151 | const res = this_handler.peer_manager.new_inbound_connection( 152 | descriptor, 153 | NodeLDKNet.get_addr_from_socket(incoming_sock) 154 | ); 155 | if (!res.is_ok()) descriptor.disconnect_socket(); 156 | }); 157 | const servers_list = this.servers; 158 | return new Promise((resolve, reject) => { 159 | server.on("error", function () { 160 | reject(); 161 | server.close(); 162 | }); 163 | server.on("listening", function () { 164 | servers_list.push(server); 165 | resolve(); 166 | }); 167 | server.listen(port, host); 168 | }); 169 | } 170 | 171 | /** 172 | * Establishes an outgoing connection to the given peer at the given host and port. 173 | * 174 | * Note that the peer will not appear in the PeerManager peers list until the socket has 175 | * connected and the initial handshake completes. 
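   *
   * Example (illustrative): `await net_handler.connect_peer("127.0.0.1", 9735, pubkey_bytes)`,
   * where `pubkey_bytes` is the peer's 33-byte node id as a Uint8Array.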
176 | */ 177 | async connect_peer(host: string, port: number, peer_node_id: Uint8Array) { 178 | const this_handler = this; 179 | const sock = new net.Socket(); 180 | const res = new Promise((resolve, reject) => { 181 | sock.on("connect", function () { 182 | resolve(); 183 | }); 184 | sock.on("error", function () { 185 | reject(); 186 | }); 187 | }); 188 | sock.connect(port, host, function () { 189 | const descriptor = this_handler.get_descriptor(sock); 190 | const res = this_handler.peer_manager.new_outbound_connection( 191 | peer_node_id, 192 | descriptor, 193 | NodeLDKNet.get_addr_from_socket(sock) 194 | ); 195 | if (!res.is_ok()) descriptor.disconnect_socket(); 196 | else { 197 | const bytes = (res as ldk.Result_CVec_u8ZPeerHandleErrorZ_OK).res; 198 | const send_res = descriptor.send_data(bytes, true); 199 | console.assert(send_res == bytes.length); 200 | } 201 | }); 202 | return res; 203 | } 204 | } 205 | -------------------------------------------------------------------------------- /src/LDK/sync/Error.ts: -------------------------------------------------------------------------------- 1 | export class TxSyncError extends Error { 2 | static from(err: unknown): string | undefined { 3 | throw new Error("Error occured" + err); 4 | } 5 | constructor(message: string) { 6 | super(message); 7 | } 8 | } 9 | 10 | export class InternalError extends Error { 11 | static Inconsistency: InternalError; 12 | constructor(message: string) { 13 | super(message); 14 | } 15 | } 16 | 17 | export const enum TxSyncErrorType { 18 | Failed = "Failed to conduct transaction sync.", 19 | } 20 | 21 | export const enum InternalErrorType { 22 | Failed = "Failed to conduct transaction sync.", 23 | Inconsistency = "Encountered an inconsistency during transaction sync.", 24 | } 25 | 26 | export function createTxSyncError(type: TxSyncErrorType): TxSyncError { 27 | return new TxSyncError(type); 28 | } 29 | 30 | export function createInternalError(type: InternalErrorType): InternalError { 31 | return new InternalError(type); 32 | } 33 | -------------------------------------------------------------------------------- /src/LDK/sync/EsploraSyncClient.ts: -------------------------------------------------------------------------------- 1 | import { 2 | Confirm, 3 | FilterInterface, 4 | OutPoint, 5 | TwoTuple_usizeTransactionZ, 6 | WatchedOutput, 7 | } from "lightningdevkit"; 8 | import ElectrumClient from "../bitcoin_clients/ElectrumClient.mjs"; 9 | import TorClient from "../bitcoin_clients/TorClient.mjs"; 10 | import { FilterQueue, SyncState, newSyncState } from "./FilterQueue.js"; 11 | import { Mutex } from "async-mutex"; 12 | import { hexToUint8Array, uint8ArrayToHexString } from "../utils/utils.js"; 13 | import { Transaction } from "bitcoinjs-lib"; 14 | import { BitcoinDaemonClientInterface } from "../bitcoin_clients/BitcoinD.mjs"; 15 | import { InternalError, TxSyncError } from "./Error.js"; 16 | 17 | // Custom Logging 18 | import { ChalkColor, Logger } from "../utils/Logger.js"; 19 | import chalk from "chalk"; 20 | const DEBUG = new Logger(ChalkColor.Magenta, "EsploraSyncClient.ts"); 21 | 22 | interface ConfirmedTx { 23 | txs: [[0, Transaction]]; 24 | block_header: any; 25 | block_height: any; 26 | pos: number; 27 | } 28 | 29 | export default class EsploraSyncClient implements FilterInterface { 30 | bitcoind_client: ElectrumClient | TorClient; 31 | lock: Mutex; 32 | filter_queue: FilterQueue; 33 | sync_state!: SyncState; 34 | 35 | constructor(_bitcoind_client: any) { 36 | this.bitcoind_client = _bitcoind_client; 37 | 
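// register_tx / register_output calls are queued here and folded into sync_state at the
// start of the next sync() pass (see FilterQueue.processQueues).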
this.filter_queue = new FilterQueue(); 38 | this.lock = new Mutex(); 39 | this.sync_state = newSyncState(); 40 | } 41 | 42 | static from_client(bitcoind_client: BitcoinDaemonClientInterface) { 43 | return new EsploraSyncClient(bitcoind_client); 44 | } 45 | 46 | register_tx(txid: Uint8Array, script_pubkey: Uint8Array) { 47 | let tx_string = uint8ArrayToHexString(txid); 48 | let reversed_txid = this.reverse_txid(tx_string) ?? tx_string; 49 | 50 | this.lock.acquire().then((release) => { 51 | this.filter_queue.transactions.add(reversed_txid); // saved as hex string rather than uint8array 52 | release(); 53 | }); 54 | } 55 | 56 | register_output(output: WatchedOutput) { 57 | this.lock.acquire().then((release) => { 58 | this.filter_queue.outputs.set(output.get_outpoint(), output); 59 | release(); 60 | }); 61 | } 62 | 63 | async sync(confirmables: Confirm[]) { 64 | DEBUG.log("Starting transaction sync.", "sync"); 65 | let tip_hash = await this.bitcoind_client.getBestBlockHash(); 66 | let sync_state = this.sync_state; // Check this, no lock? 67 | 68 | while (true) { 69 | let pending_registrations = this.filter_queue.processQueues(sync_state); 70 | let tip_is_new = tip_hash !== sync_state.last_sync_hash; 71 | 72 | if (!sync_state.pending_sync && !pending_registrations && !tip_is_new) { 73 | DEBUG.log("Nothing to do. Exiting sync loop.", "sync"); 74 | break; 75 | } else { 76 | if (tip_is_new) { 77 | DEBUG.log("if tip_is_new", "sync"); 78 | try { 79 | let unconfirmed_txs = await this.get_unconfirmed_transactions( 80 | confirmables 81 | ); 82 | let check_tip_hash = await this.bitcoind_client.getBestBlockHash(); 83 | 84 | if (check_tip_hash !== tip_hash) { 85 | DEBUG.log("if check_tip_hash !== tip_hash -> continue", "sync"); 86 | tip_hash = check_tip_hash; 87 | continue; 88 | } 89 | 90 | this.sync_unconfirmed_transactions( 91 | sync_state, 92 | confirmables, 93 | unconfirmed_txs 94 | ); 95 | } catch (err) { 96 | // (Semi-)permanent failure, retry later. 97 | DEBUG.log("Failed during transaction sync, aborting.", "sync", err); 98 | sync_state.pending_sync = true; 99 | return new Error("" + err); // Check me 100 | } 101 | 102 | try { 103 | await this.sync_best_block_updated(confirmables, tip_hash); 104 | } catch (err) { 105 | if ( 106 | err instanceof InternalError && // Check me 107 | err === InternalError.Inconsistency // Check me 108 | ) { 109 | // Immediately restart syncing when we encounter any inconsistencies. 110 | DEBUG.log( 111 | "Encountered inconsistency during transaction sync, restarting.", 112 | "sync" 113 | ); 114 | sync_state.pending_sync = true; 115 | continue; 116 | } else { 117 | // (Semi-)permanent failure, retry later. 
118 | sync_state.pending_sync = true; 119 | throw new Error("Sync Error" + err); // Check me 120 | } 121 | } 122 | } 123 | 124 | DEBUG.log("Continue", "sync"); 125 | try { 126 | let confirmed_txs = await this.get_confirmed_transactions(sync_state); 127 | let check_tip_hash = await this.bitcoind_client.getBestBlockHash(); 128 | 129 | if (check_tip_hash !== tip_hash) { 130 | tip_hash = check_tip_hash; 131 | continue; 132 | } 133 | 134 | this.sync_confirmed_transactions( 135 | sync_state, 136 | confirmables, 137 | confirmed_txs 138 | ); 139 | } catch (err) { 140 | DEBUG.log("Failed during transaction sync, aborting.", "sync", err); 141 | } 142 | 143 | sync_state.last_sync_hash = tip_hash; 144 | sync_state.pending_sync = false; 145 | } 146 | } 147 | 148 | DEBUG.log("***** sync complete *****", "sync"); 149 | return true; 150 | } 151 | 152 | async sync_best_block_updated( 153 | confirmables: Confirm[], // chainMonitor.asConfirm(), channelManager.asConfirm() 154 | tipHash: string //BlockHash 155 | ): Promise { 156 | DEBUG.log("confirmables, tipHash", "sync_best_block_updated"); 157 | 158 | // Inform the interface of the new block. 159 | const tipHeader = await this.bitcoind_client.getHeaderByHash(tipHash); 160 | const tipStatus = await this.bitcoind_client.getBlockStatus(tipHash); 161 | 162 | DEBUG.log("tipHeader->", "sync_best_block_updated", tipHeader); 163 | DEBUG.log("tipStatus->", "sync_best_block_updated", tipStatus); 164 | 165 | if (tipStatus.in_best_chain) { 166 | DEBUG.log("tipStatus.in_best_chain -> true", "sync_best_block_updated"); 167 | if (tipStatus.height !== undefined) { 168 | DEBUG.log( 169 | "tipStatus.in_best_chain -> tipStatus.height !== undefined", 170 | "sync_best_block_updated" 171 | ); 172 | confirmables.forEach((c) => { 173 | DEBUG.log( 174 | "c.best_block_updated(confirmables)", 175 | "sync_best_block_updated", 176 | hexToUint8Array(tipHeader) + " " + tipStatus.height 177 | ); 178 | c.best_block_updated(hexToUint8Array(tipHeader), tipStatus.height); 179 | }); 180 | } 181 | } else { 182 | DEBUG.err("InternalError.Inconsistency"); 183 | } 184 | return; 185 | } 186 | 187 | async sync_confirmed_transactions( 188 | sync_state: SyncState, 189 | confirmables: Confirm[], 190 | confirmed_txs: ConfirmedTx[] 191 | ): Promise { 192 | DEBUG.log("*********", "sync_confirmed_transactions"); 193 | 194 | for (const ctx of confirmed_txs) { 195 | let transaction = ctx.txs[0][1]; 196 | for (const c of confirmables) { 197 | const txdata = [ 198 | TwoTuple_usizeTransactionZ.constructor_new( 199 | ctx.pos, 200 | transaction.toBuffer() 201 | ), 202 | ]; 203 | 204 | let hex_block_header = await this.bitcoind_client.getHeaderByHash( 205 | ctx.block_header.id 206 | ); 207 | 208 | console.log( 209 | chalk.magentaBright( 210 | `[EsploraSyncClient.ts/sync_confirmed_transactions]: c.transactions_confirmed(${hexToUint8Array( 211 | hex_block_header 212 | )}, ${txdata}, ${ctx.block_height})` 213 | ) 214 | ); 215 | 216 | c.transactions_confirmed( 217 | hexToUint8Array(hex_block_header), 218 | txdata, 219 | ctx.block_height 220 | ); 221 | } 222 | sync_state.watched_transactions.delete(transaction.toHex()); 223 | 224 | for (const input of ctx.txs[0][1].ins) { 225 | sync_state.watched_outputs.delete( 226 | OutPoint.constructor_new(input.hash, input.index) 227 | ); 228 | } 229 | } 230 | } 231 | 232 | async get_confirmed_transactions( 233 | sync_state: SyncState 234 | ): Promise { 235 | let confirmed_txs: ConfirmedTx[] = []; 236 | 237 | for (const txid of sync_state.watched_transactions) { 238 | DEBUG.log( 
239 | "const txid of sync_state.watched_transactions, txid: ", 240 | "get_confirmed_transactions", 241 | txid 242 | ); 243 | 244 | // reverse byte txid here 245 | let txid_data = await this.bitcoind_client.getTxIdData(txid); 246 | 247 | DEBUG.log( 248 | "block_hash found->", 249 | "get_confirmed_transactions", 250 | txid_data?.hash 251 | ); 252 | DEBUG.log( 253 | "block_height found->", 254 | "get_confirmed_transactions", 255 | txid_data?.height 256 | ); 257 | 258 | const confirmed_tx = await this.get_confirmed_tx( 259 | txid, 260 | txid_data?.hash, 261 | txid_data?.height 262 | ); 263 | if (confirmed_tx) { 264 | confirmed_txs.push(confirmed_tx); 265 | } 266 | } 267 | 268 | for (const [, output] of sync_state.watched_outputs) { 269 | let hex_tx = uint8ArrayToHexString(output.get_outpoint().get_txid()); 270 | 271 | // check me 272 | const output_status = await this.bitcoind_client.getTxOut( 273 | hex_tx, 274 | output.get_outpoint().get_index() 275 | ); 276 | 277 | if (output_status && output_status.txid && output_status.status) { 278 | const { txid: spending_txid, status: spending_tx_status } = 279 | output_status; 280 | 281 | const confirmed_tx = await this.get_confirmed_tx( 282 | spending_txid, 283 | spending_tx_status.block_hash, 284 | spending_tx_status.block_height 285 | ); 286 | 287 | if (confirmed_tx) { 288 | confirmed_txs.push(confirmed_tx); 289 | } 290 | } 291 | } 292 | 293 | confirmed_txs.sort((tx1, tx2) => { 294 | return tx1.block_height - tx2.block_height || tx1.pos - tx2.pos; 295 | }); 296 | 297 | return confirmed_txs; 298 | } 299 | 300 | reverse_txid(txid: string) { 301 | let reverse_txid = txid 302 | .match(/[a-fA-F0-9]{2}/g) 303 | ?.reverse() 304 | .join(""); 305 | return reverse_txid; 306 | } 307 | 308 | async get_confirmed_tx( 309 | txid: string, 310 | block_hash: string | undefined, 311 | block_height: number | undefined 312 | ): Promise { 313 | if (block_hash !== undefined && block_height !== undefined) { 314 | console.log( 315 | chalk.magenta( 316 | "[EsploraSyncClient.ts/get_confirmed_tx]: block_hash for block header->", 317 | block_hash 318 | ) 319 | ); 320 | 321 | const block_header = await this.bitcoind_client.getBlockHeader( 322 | block_hash 323 | ); 324 | if (!block_header) { 325 | return undefined; 326 | } 327 | 328 | console.log( 329 | chalk.magenta( 330 | "[EsploraSyncClient.ts/get_confirmed_tx]: txid for getRawTransaction->", 331 | txid 332 | ) 333 | ); 334 | 335 | const tx_hex = await this.bitcoind_client.getRawTransaction(txid); 336 | if (!tx_hex) { 337 | return undefined; 338 | } 339 | 340 | const tx = Transaction.fromHex(tx_hex); 341 | 342 | const merkel_proof = await this.bitcoind_client.getMerkleProofPosition( 343 | txid 344 | ); 345 | 346 | return { 347 | block_header, 348 | txs: [[0, tx]], 349 | block_height, 350 | pos: merkel_proof.pos, 351 | }; 352 | } 353 | 354 | const txout = await this.bitcoind_client.getTxOut(txid, 0); 355 | if (!txout || !txout.confirmations) { 356 | return undefined; 357 | } 358 | 359 | const tx_hex = await this.bitcoind_client.getRawTransaction(txid); 360 | if (!tx_hex) { 361 | return undefined; 362 | } 363 | 364 | const tx = Transaction.fromHex(tx_hex); 365 | 366 | return { 367 | block_header: undefined, 368 | txs: [[0, tx]], 369 | block_height: txout.confirmations, 370 | }; 371 | } 372 | 373 | async get_unconfirmed_transactions(confirmables: Confirm[]) { 374 | // Query the interface for relevant txids and check whether the relevant blocks are still 375 | // in the best chain, mark them unconfirmed otherwise 376 | const 
relevantTxids = new Set( 377 | confirmables 378 | .map((c) => c.get_relevant_txids()) 379 | .flat() 380 | .map((tuple) => [ 381 | uint8ArrayToHexString(tuple.get_a()), 382 | uint8ArrayToHexString(tuple.get_b()), 383 | ]) 384 | ); 385 | const unconfirmedTxs: string[] | any = []; 386 | for (const [txid, blockHashOpt] of relevantTxids) { 387 | DEBUG.log( 388 | "const [txid, blockHashOpt] of relevantTxids", 389 | "get_unconfirmed_transactions", 390 | txid + " blockHashOpt:" + blockHashOpt 391 | ); 392 | if (blockHashOpt !== undefined) { 393 | let reverse_blockhash = this.reverse_txid(blockHashOpt); 394 | const blockStatus = await this.bitcoind_client.getBlockStatus( 395 | reverse_blockhash + "" 396 | ); 397 | if (!blockStatus.in_best_chain) { 398 | DEBUG.log( 399 | "!blockStatus.in_best_chain adding unconfirmedTx", 400 | "get_unconfirmed_transactions", 401 | txid 402 | ); 403 | let reverse_txid = this.reverse_txid(txid + ""); 404 | unconfirmedTxs.push(reverse_txid); 405 | } 406 | } 407 | } 408 | return unconfirmedTxs; 409 | } 410 | 411 | sync_unconfirmed_transactions( 412 | sync_state: SyncState, 413 | confirmables: Confirm[], 414 | unconfirmed_txs: string[] 415 | ): void { 416 | DEBUG.log( 417 | "unconfirmed_txs passed in:", 418 | "sync_unconfirmed_transactions", 419 | unconfirmed_txs 420 | ); 421 | for (const txid of unconfirmed_txs) { 422 | for (const c of confirmables) { 423 | console.log( 424 | chalk.magentaBright( 425 | `[EsploraSyncClient.ts/sync_unconfirmed_transactions]: c.transaction_unconfirmed(hexToUint8Array(txid)) ${hexToUint8Array( 426 | txid 427 | )}` 428 | ) 429 | ); 430 | c.transaction_unconfirmed(hexToUint8Array(txid)); //convert back to uint8array 431 | } 432 | sync_state.watched_transactions.add(txid); 433 | } 434 | } 435 | } 436 | -------------------------------------------------------------------------------- /src/LDK/sync/FilterQueue.ts: -------------------------------------------------------------------------------- 1 | import { OutPoint, WatchedOutput } from "lightningdevkit"; 2 | 3 | // export interface Txid extends Uint8Array {} 4 | //export interface BlockHash extends Object {} 5 | 6 | // Represents the current state. 7 | export interface SyncState { 8 | // Transactions that were previously processed, but must not be forgotten 9 | // yet since they still need to be monitored for confirmation on-chain. 10 | watched_transactions: Set; 11 | // Outputs that were previously processed, but must not be forgotten yet as 12 | // as we still need to monitor any spends on-chain. 13 | watched_outputs: Map; 14 | // The tip hash observed during our last sync. 15 | last_sync_hash: string | null; // BlockHash 16 | // Indicates whether we need to resync, e.g., after encountering an error. 17 | pending_sync: boolean; 18 | } 19 | 20 | export function newSyncState(): SyncState { 21 | return { 22 | watched_transactions: new Set(), 23 | watched_outputs: new Map(), 24 | last_sync_hash: null, 25 | pending_sync: false, 26 | }; 27 | } 28 | 29 | export class FilterQueue { 30 | // Transactions that were registered via the Filter interface and have to be processed. 31 | transactions: Set; 32 | // Outputs that were registered via the Filter interface and have to be processed. 33 | outputs: Map; 34 | 35 | constructor() { 36 | this.transactions = new Set(); 37 | this.outputs = new Map(); 38 | } 39 | 40 | // Processes the transaction and output queues and adds them to the given SyncState. 41 | // Returns true if new items had been registered. 
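// Called at the top of every EsploraSyncClient.sync() pass so that filters registered
// since the last pass are picked up before confirmations are checked.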
42 | processQueues(syncState: SyncState): boolean { 43 | let pendingRegistrations = false; 44 | 45 | if (this.transactions.size > 0) { 46 | pendingRegistrations = true; 47 | for (const txid of this.transactions) { 48 | syncState.watched_transactions.add(txid); 49 | } 50 | this.transactions.clear(); 51 | } 52 | 53 | if (this.outputs.size > 0) { 54 | pendingRegistrations = true; 55 | for (const [outPoint, watchedOutput] of this.outputs) { 56 | syncState.watched_outputs.set(outPoint, watchedOutput); 57 | } 58 | this.outputs.clear(); 59 | } 60 | return pendingRegistrations; 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/LDK/types/ChannelTypes.ts: -------------------------------------------------------------------------------- 1 | export enum ChannelType{ 2 | Private = 'Private', 3 | Public = 'Public' 4 | } -------------------------------------------------------------------------------- /src/LDK/types/LightningClientInterface.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ChannelMessageHandler, 3 | CustomMessageHandler, 4 | OnionMessageHandler, 5 | PeerManager, 6 | RoutingMessageHandler, 7 | Event_FundingGenerationReady, 8 | FeeEstimator, 9 | Logger, 10 | BroadcasterInterface, 11 | Network, 12 | BestBlock, 13 | NetworkGraph, 14 | Persist, 15 | EventHandler, 16 | Filter, 17 | ChainMonitor, 18 | KeysManager, 19 | UserConfig, 20 | ChannelHandshakeConfig, 21 | ChainParameters, 22 | ChannelManager, 23 | IgnoringMessageHandler, 24 | Persister, 25 | Router, 26 | } from "lightningdevkit"; 27 | import ElectrumClient from "../bitcoin_clients/ElectrumClient.mjs"; 28 | import TorClient from "../bitcoin_clients/TorClient.mjs"; 29 | import MercuryEventHandler from "../structs/MercuryEventHandler"; 30 | import EsploraSyncClient from "../sync/EsploraSyncClient.js"; 31 | 32 | export default interface LightningClientInterface { 33 | walletName: string; 34 | feeEstimator: FeeEstimator; 35 | bitcoind_client: TorClient | ElectrumClient; // Electrum for dev, Tor for prod 36 | logger: Logger; 37 | txBroadcasted: any; 38 | txBroadcaster: BroadcasterInterface; 39 | network: any; 40 | genesisBlock: any; 41 | genesisBlockHash: any; 42 | networkGraph: NetworkGraph; 43 | filter: Filter; 44 | persist: Persist; 45 | persister: Persister; 46 | eventHandler: EventHandler; 47 | chainMonitor: ChainMonitor; 48 | chainWatch: any; 49 | keysManager: KeysManager; 50 | config: UserConfig; 51 | channelHandshakeConfig: ChannelHandshakeConfig; 52 | params: ChainParameters; 53 | channelManager: ChannelManager; 54 | peerManager: PeerManager; 55 | txdata: any; 56 | router: Router; 57 | currentConnections: Array; // array of current peer connections 58 | blockHeight: number | undefined; 59 | latestBlockHeader: Uint8Array | undefined; 60 | netHandler: any; 61 | syncClient: EsploraSyncClient; 62 | } 63 | -------------------------------------------------------------------------------- /src/LDK/types/PeerDetails.ts: -------------------------------------------------------------------------------- 1 | export default interface PeerDetails { 2 | pubkey: Uint8Array, 3 | host: string, 4 | port: number, 5 | id: number 6 | } -------------------------------------------------------------------------------- /src/LDK/utils/Logger.ts: -------------------------------------------------------------------------------- 1 | import chalk, { ChalkInstance } from "chalk"; 2 | 3 | export enum ChalkColor { 4 | Green = "green", 5 | Yellow = "yellow", 6 | 
Magenta = "magenta", 7 | Blue = "blue", 8 | Red = "red", 9 | Cyan = "cyan", 10 | bgCyan = "bgCyan", 11 | } 12 | 13 | export class Logger { 14 | color: ChalkColor; 15 | filename: any; 16 | 17 | getColor(): ChalkColor { 18 | return this.color; 19 | } 20 | 21 | constructor(color: ChalkColor, filename: string) { 22 | this.color = color; 23 | this.filename = filename; 24 | } 25 | 26 | logD(msg: string = "", data = "") { 27 | const chalkColor = this.getColor(); 28 | console.log(chalk[chalkColor](`[${this.filename}]: ${msg} ${data}`)); 29 | } 30 | 31 | log(msg: string = "", func: string = "", data: any = "") { 32 | const chalkColor = this.getColor(); 33 | 34 | if (func === "") { 35 | console.log(chalk[chalkColor](`[${this.filename}]: ${msg} ${data}`)); 36 | } else { 37 | console.log( 38 | chalk[chalkColor](`[${this.filename}/${func}]: ${msg} ${data}`) 39 | ); 40 | } 41 | } 42 | 43 | err(msg: string, err: any = "") { 44 | console.log(chalk.red(msg + err)); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/LDK/utils/ldk-utils.ts: -------------------------------------------------------------------------------- 1 | import LDKClientFactory from "../init/LDKClientFactory"; 2 | import { getDatabase } from "../../db/db"; 3 | import fs from "fs"; 4 | import { uint8ArrayToHexString, stringifyEvent } from "./utils"; 5 | import { ChannelDetails, Option_u64Z_Some } from "lightningdevkit"; 6 | 7 | export const closeConnections = () => { 8 | console.log("[ldk-utils.ts]: Closing all the connections"); 9 | try { 10 | let LDK = LDKClientFactory.getLDKClient(); 11 | LDK.netHandler?.stop(); 12 | } catch (e) { 13 | console.error("Trying to get reference to undefined LDKClientFactory"); 14 | } 15 | }; 16 | 17 | export const validateInvoiceBody = ( 18 | amount_in_sats: any, 19 | invoice_expiry_secs: any, 20 | description: any 21 | ) => { 22 | if (amount_in_sats === undefined) { 23 | throw new Error("Undefined amount_in_sats given."); 24 | } else if (typeof amount_in_sats !== "number" || isNaN(amount_in_sats)) { 25 | throw new Error( 26 | "Invalid amount_in_sats given. Must be a number that can be converted to a BigInt." 27 | ); 28 | } 29 | 30 | if (invoice_expiry_secs === undefined) { 31 | throw new Error("Undefined invoice_expiry_secs given."); 32 | } else if ( 33 | typeof invoice_expiry_secs !== "number" || 34 | isNaN(invoice_expiry_secs) 35 | ) { 36 | throw new Error("Invalid invoice_expiry_secs given. Must be a number."); 37 | } 38 | 39 | if (description === undefined) { 40 | throw new Error("Undefined description given."); 41 | } else if (typeof description !== "string") { 42 | throw new Error("Invalid description given. 
Must be a string."); 43 | } 44 | }; 45 | 46 | // This function is called from peerRoutes.ts /create-channel request 47 | export const savePeerAndChannelToDatabase = async ( 48 | amount: number, 49 | pubkey: string, 50 | host: string, 51 | port: number, 52 | channel_name: string, 53 | wallet_name: string, 54 | channelType: boolean, 55 | privkey: string, // Private key from txid address 56 | paid: boolean, 57 | payment_address: string // index of input 58 | ) => { 59 | console.log("[ldk-utils.ts] - savePeerAndChannelToDatabase"); 60 | console.log( 61 | `[ldk-utils.ts] - values: amount:${amount}, 62 | pubkey:${pubkey}, host:${host}, port:${port}, channel_name:${channel_name}, 63 | wallet_name:${wallet_name}, channelType:${channelType}, 64 | privkey:${privkey}, paid:${paid}, payment_address:${payment_address}` 65 | ); 66 | 67 | // Save the peer 68 | try { 69 | const result = await saveNewPeerToDB(host, port, pubkey); 70 | console.log(`[ldk-utils.ts] - result: ${JSON.stringify(result)}`); 71 | var peer_id = result.peer_id; 72 | if (!peer_id) throw "[ldk-utils.ts] Error: PEER_ID undefined"; 73 | } catch (err) { 74 | console.log(err); 75 | throw err; 76 | } 77 | console.log("[ldk-utils.ts]: Peer created, saveds its id: ", peer_id); 78 | 79 | let channel_id = null; 80 | let result; 81 | // Save the channel 82 | try { 83 | result = await saveNewChannelToDB( 84 | channel_name, 85 | amount, 86 | 0, 87 | channelType, 88 | wallet_name, 89 | peer_id, 90 | privkey, 91 | paid, 92 | payment_address 93 | ); 94 | console.log("[ldk-utils.ts]:" + result); 95 | if (result && result.channel_id) { 96 | console.log(result); 97 | channel_id = result.channel_id; 98 | console.log("Channel Created, saved its id: ", channel_id); 99 | } 100 | } catch (err) { 101 | console.log("[ldk-utils.ts]:" + err); 102 | throw err; 103 | } 104 | console.log("[ldk-utils.ts]: Channel Created, saved its id: ", channel_id); 105 | 106 | return result; 107 | }; 108 | 109 | export const saveChannelFundingToDatabase = async ( 110 | amount: number, 111 | paid: boolean, 112 | txid: string, 113 | vout: number, 114 | addr: string 115 | ) => { 116 | console.log("[ldk-utils.ts]: saveChannelFundingToDatabase"); 117 | try { 118 | const result = await saveTxDataToDB(amount, paid, txid, vout, addr); 119 | return result; 120 | } catch (err) { 121 | console.log("[ldk-utils.ts]: " + err); 122 | throw err; 123 | } 124 | }; 125 | 126 | export const saveNewPeerToDB = ( 127 | host: string, 128 | port: number, 129 | pubkey: string 130 | ): Promise<{ 131 | channel_id?: { 132 | status: number; 133 | message?: string; 134 | error?: string; 135 | peer_id?: number; 136 | }; 137 | status?: number; 138 | message?: string; 139 | error?: string; 140 | peer_id?: number; 141 | }> => { 142 | console.log("[ldk-utils.ts] - saveNewPeerToDB"); 143 | return new Promise(async (resolve, reject) => { 144 | const db = await getDatabase(); 145 | db.get( 146 | `SELECT id FROM peers WHERE host = ? AND port = ? 
AND pubkey = ?`, 147 | [host, port, pubkey], 148 | (err: any, row: any) => { 149 | if (err) { 150 | console.log( 151 | `[ldk-utils.ts->saveNewPeerToDB] - Error occurred during select: ${err}` 152 | ); 153 | reject({ status: 500, error: "Failed to query database" }); 154 | } else if (row) { 155 | console.log( 156 | `[ldk-utils.ts->saveNewPeerToDB] - Error occurred peer exists in database: ${row}` 157 | ); 158 | resolve({ 159 | status: 409, 160 | message: "Peer already exists in the database", 161 | peer_id: row.id, 162 | }); 163 | } else { 164 | db.run( 165 | `INSERT INTO peers (host, port, pubkey) VALUES (?,?,?)`, 166 | [host, port, pubkey], 167 | function (this: any, err: any, row: any) { 168 | console.log( 169 | `[ldk-utils.ts->saveNewPeerToDB] - inserting into peers: host:${host}, port:${port}, pubkey:${pubkey}` 170 | ); 171 | if (err) { 172 | console.log( 173 | `[ldk-utils.ts] - Error occurred during insert: ${err}` 174 | ); 175 | reject({ 176 | status: 500, 177 | error: "Failed to insert peers into database", 178 | }); 179 | } else { 180 | console.log( 181 | `[ldk-utils.ts->saveNewPeerToDB] - Successful insert` 182 | ); 183 | const lastID = this.lastID; 184 | console.log( 185 | `[ldk-utils.ts->saveNewPeerToDB] - result:${lastID}` 186 | ); 187 | if (lastID !== undefined) { 188 | resolve({ 189 | status: 201, 190 | message: "Peer added to database", 191 | peer_id: lastID, 192 | }); 193 | } else { 194 | reject({ 195 | status: 500, 196 | error: "Failed to retrieve peer ID", 197 | }); 198 | } 199 | } 200 | } 201 | ); 202 | } 203 | } 204 | ); 205 | }); 206 | }; 207 | 208 | export const saveNewChannelToDB = ( 209 | name: string, 210 | amount: number, 211 | push_msat: number, 212 | channelType: boolean, 213 | wallet_name: string, 214 | peer_id: number, 215 | privkey: string, // Private key from txid address 216 | paid: boolean, 217 | payment_address: string 218 | ): Promise<{ 219 | status: number; 220 | message?: string; 221 | error?: string; 222 | channel_id?: number; 223 | }> => { 224 | console.log("[ldk-utils.ts] - saveNewChannelToDB"); 225 | return new Promise(async (resolve, reject) => { 226 | let channelId: number; 227 | const db = await getDatabase(); 228 | db.get( 229 | `SELECT id FROM channels WHERE peer_id = ?`, 230 | [peer_id], 231 | (err: any, row: any) => { 232 | if (err) { 233 | reject({ 234 | status: 500, 235 | error: "Failed to query database" + err, 236 | }); 237 | } else if (row) { 238 | resolve({ 239 | status: 409, 240 | message: "Channel already exists with this peer", 241 | }); 242 | } else { 243 | const insertData = `INSERT INTO channels (name, amount, push_msat, public, wallet_name, peer_id, privkey, paid, payment_address) VALUES (?,?,?,?,?,?,?,?,?)`; 244 | db.run( 245 | insertData, 246 | [ 247 | name, 248 | amount, 249 | push_msat, 250 | channelType, 251 | wallet_name, 252 | peer_id, 253 | privkey, 254 | paid, 255 | payment_address, 256 | ], 257 | function (err: any, result: any) { 258 | if (err) { 259 | reject({ 260 | status: 500, 261 | error: "Failed to insert channel into database" + err, 262 | }); 263 | } else { 264 | db.get( 265 | `SELECT last_insert_rowid() as channel_id`, 266 | (err: any, row: any) => { 267 | if (err) { 268 | reject({ 269 | status: 500, 270 | error: "Failed to get last inserted channel ID", 271 | }); 272 | } else { 273 | channelId = row.channel_id; 274 | resolve({ 275 | status: 201, 276 | message: "Channel saved successfully", 277 | channel_id: channelId, 278 | }); 279 | } 280 | } 281 | ); 282 | } 283 | } 284 | ); 285 | } 286 | } 287 | ); 288 | 
}); 289 | }; 290 | 291 | export const saveTxDataToDB = ( 292 | amount: number, 293 | paid: boolean, 294 | txid: string, 295 | vout: number, 296 | addr: string 297 | ): Promise<{ 298 | status: number; 299 | message?: string; 300 | error?: string; 301 | channel_id: number; 302 | channel_type: boolean; 303 | push_msat: number; 304 | priv_key: string; 305 | }> => { 306 | console.log("[ldk-utils.ts] - insertTxDataToDB"); 307 | console.log( 308 | `[ldk-utils.ts] - values: amount:${amount}, paid:${paid}, txid:${txid}, vout:${vout}, addr:${addr}` 309 | ); 310 | return new Promise(async (resolve, reject) => { 311 | const updateData = 312 | "UPDATE channels SET amount=?, paid=?, txid=?, vout=? WHERE payment_address=?"; 313 | const db = await getDatabase(); 314 | db.run( 315 | updateData, 316 | [amount, paid, txid, vout, addr], 317 | function (err: any, result: any) { 318 | if (err) { 319 | reject({ 320 | status: 500, 321 | error: "Failed to insert tx data into database " + err, 322 | }); 323 | } 324 | } 325 | ); 326 | 327 | console.log("Tx data inserted"); 328 | const getData = `SELECT id, public, push_msat, privkey FROM channels WHERE payment_address=?`; 329 | db.get(getData, [addr], (err: any, row: any) => { 330 | if (err) { 331 | reject({ 332 | status: 500, 333 | error: "Failed to get channel data " + err, 334 | }); 335 | } else if (!row) { 336 | reject({ 337 | status: 404, 338 | error: "No channel found for payment address " + addr, 339 | }); 340 | } else { 341 | resolve({ 342 | status: 201, 343 | message: "Channel saved and updated successfully", 344 | channel_id: row.id, 345 | channel_type: row.public, 346 | push_msat: row.push_msat, 347 | priv_key: row.privkey, 348 | }); 349 | } 350 | }); 351 | }); 352 | }; 353 | 354 | export const saveChannelIdToDb = async (channelId: string, pubkey: string) => { 355 | console.log("[ldk-utils.ts] - saveChannelIdToDB"); 356 | console.log( 357 | `[ldk-utils.ts] - values: channelId:${channelId}, pubkey:${pubkey}` 358 | ); 359 | const updateData = 360 | "UPDATE channels SET channel_id = ? 
WHERE peer_id = ( SELECT id FROM peers WHERE pubkey = ?)"; 361 | const db = await getDatabase(); 362 | db.run(updateData, [channelId, pubkey], function (err: any, result: any) { 363 | if (err) { 364 | console.log("Error in saving channelId to db: " + err); 365 | } 366 | }); 367 | }; 368 | 369 | export const saveEventDataToDb = async (event: any) => { 370 | console.log("[ldk-utils.ts] - saveEventDataToDB"); 371 | const event_type = Object.getPrototypeOf(event).constructor.name; 372 | const event_data = stringifyEvent(event); 373 | let channel_id_hex; 374 | if ( 375 | event && 376 | (event.channel_id || event.temporary_channel_id || event.via_channel_id) 377 | ) { 378 | const channel_id = 379 | event.channel_id || event.temporary_channel_id || event.via_channel_id; 380 | channel_id_hex = uint8ArrayToHexString(channel_id); 381 | } else { 382 | if (event.path) { 383 | const hops = event.path.get_hops(); 384 | const short_channel_id = hops[0].get_short_channel_id(); 385 | if (short_channel_id) { 386 | const channels: ChannelDetails[] = 387 | LDKClientFactory.getLDKClient().getChannels(); 388 | console.log("SHORT CHANNEL ID", short_channel_id); 389 | channels.forEach((channel) => { 390 | if ( 391 | (channel.get_outbound_payment_scid() as Option_u64Z_Some).some === 392 | short_channel_id 393 | ) { 394 | channel_id_hex = uint8ArrayToHexString(channel.get_channel_id()); 395 | } 396 | }); 397 | } 398 | } 399 | } 400 | 401 | console.log("CHANNEL_ID_HEX", channel_id_hex); 402 | if (channel_id_hex) { 403 | const insertEventData = `INSERT INTO events (event_type, event_data, channel_id_hex) VALUES (?, ?, ?)`; 404 | const db = await getDatabase(); 405 | db.run( 406 | insertEventData, 407 | [event_type, event_data, channel_id_hex], 408 | function (err: any) { 409 | if (err) { 410 | console.log("Error in saving event to db: " + err); 411 | } 412 | console.log("Data inserted successfully."); 413 | } 414 | ); 415 | } 416 | }; 417 | 418 | export const replaceTempChannelIdInDb = async ( 419 | channel_id: string, 420 | temp_channel_id: string 421 | ) => { 422 | console.log("[ldk-utils.ts] - replaceTempChannelIdInDb"); 423 | const updateData = 424 | "UPDATE events SET channel_id_hex = ? 
WHERE channel_id_hex = ?"; 425 | const db = await getDatabase(); 426 | db.run( 427 | updateData, 428 | [channel_id, temp_channel_id], 429 | function (err: any, result: any) { 430 | if (err) { 431 | console.log("Error in replacing channelId to db: " + err); 432 | } 433 | } 434 | ); 435 | }; 436 | 437 | export const checkIfChannelExists = (pubkey: string): Promise => { 438 | return new Promise(async (resolve, reject) => { 439 | const db = await getDatabase(); 440 | db.get( 441 | `SELECT channel_id FROM channels WHERE peer_id = (SELECT id FROM peers WHERE pubkey = ?)`, 442 | [pubkey], 443 | (err: any, row: any) => { 444 | if (err) { 445 | reject(err); 446 | } else { 447 | if (row && row.channel_id) { 448 | resolve(true); 449 | } else { 450 | resolve(false); 451 | } 452 | } 453 | } 454 | ); 455 | }); 456 | }; 457 | 458 | export const deleteChannelById = ( 459 | channelId: number 460 | ): Promise<{ 461 | status: number; 462 | message?: string; 463 | error?: string; 464 | }> => { 465 | return new Promise(async (resolve, reject) => { 466 | const db = await getDatabase(); 467 | db.run( 468 | `DELETE FROM channels WHERE id = ?`, 469 | [channelId], 470 | function (err: any) { 471 | if (err) { 472 | reject({ 473 | status: 500, 474 | error: "Failed to delete channel from database", 475 | }); 476 | } else { 477 | resolve({ 478 | status: 200, 479 | message: "Channel deleted successfully", 480 | }); 481 | } 482 | } 483 | ); 484 | }); 485 | }; 486 | 487 | export const deleteChannelByPaymentAddr = ( 488 | addr: string 489 | ): Promise<{ 490 | status: number; 491 | message?: string; 492 | error?: string; 493 | }> => { 494 | return new Promise(async (resolve, reject) => { 495 | const db = await getDatabase(); 496 | db.run( 497 | `DELETE FROM channels WHERE payment_address = ?`, 498 | [addr], 499 | function (err: any) { 500 | if (err) { 501 | reject({ 502 | status: 500, 503 | error: "Failed to delete channel from database", 504 | }); 505 | } else { 506 | resolve({ 507 | status: 200, 508 | message: "Channel deleted successfully", 509 | }); 510 | } 511 | } 512 | ); 513 | }); 514 | }; 515 | 516 | export class ChannelMonitorRead { 517 | outpoint: Uint8Array; 518 | bytes: Uint8Array; 519 | 520 | constructor(outpoint: Uint8Array, bytes: Uint8Array) { 521 | this.outpoint = outpoint; 522 | this.bytes = bytes; 523 | } 524 | } 525 | export function readChannelsFromDictionary(file: string): ChannelMonitorRead[] { 526 | let channels: ChannelMonitorRead[] = []; 527 | try { 528 | if (!fs.existsSync(file)) { 529 | throw Error("File not found"); 530 | } 531 | const dict = JSON.parse(fs.readFileSync(file, "utf-8")); 532 | 533 | if (!Array.isArray(dict)) { 534 | throw Error("Invalid dictionary format"); 535 | } 536 | 537 | for (const obj of dict) { 538 | if (!obj.monitor_file_name || !obj.id_file_name) { 539 | throw Error("Invalid object in dictionary"); 540 | } 541 | 542 | if (!fs.existsSync(obj.monitor_file_name)) { 543 | throw Error("File not found: " + obj.monitor_file_name); 544 | } 545 | 546 | if (!fs.existsSync(obj.id_file_name)) { 547 | throw Error("File not found: " + obj.id_file_name); 548 | } 549 | 550 | const channelmonitorbytes_read = fs.readFileSync(obj.monitor_file_name); 551 | const outpointbytes_read = fs.readFileSync(obj.id_file_name); 552 | 553 | const channelmonitor_object: ChannelMonitorRead = new ChannelMonitorRead( 554 | outpointbytes_read, 555 | channelmonitorbytes_read 556 | ); 557 | channels.push(channelmonitor_object); 558 | } 559 | } catch (e) { 560 | throw e; 561 | } 562 | return channels; 563 | } 
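// For reference, readChannelsFromDictionary expects the lookup file (for example the channels/channel_lookup.json that serverRoutes' createAllFolders seeds with "[]") to contain an array of entries shaped like:
//   [ { "monitor_file_name": "<path to the serialized ChannelMonitor bytes>", "id_file_name": "<path to the serialized funding outpoint bytes>" } ]
// Only these two keys are read; the file paths themselves are application-defined, so the names shown here are placeholders.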
564 | -------------------------------------------------------------------------------- /src/LDK/utils/utils.ts: -------------------------------------------------------------------------------- 1 | import * as secp256k1 from "secp256k1"; 2 | 3 | export function hexToBytes(hex: String) { 4 | if (hex === undefined) return; 5 | let bytes = []; 6 | for (let c = 0; c < hex.length; c += 2) { 7 | bytes.push(parseInt(hex.substr(c, 2), 16)); 8 | } 9 | var bytesUint8Array: Uint8Array = new Uint8Array(bytes); 10 | // bytes = new Uint8Array(bytes) 11 | return bytesUint8Array; 12 | } 13 | 14 | export function hexToUint8Array(hex: string) { 15 | let matchHex = hex.match(/.{1,2}/g); 16 | 17 | try { 18 | if (matchHex) 19 | return new Uint8Array(matchHex.map((byte) => parseInt(byte, 16))); 20 | else throw "err"; 21 | } catch (e) { 22 | throw new Error(`Conversion Hex -> Uint8Array Err`); 23 | } 24 | } 25 | 26 | export function uint8ArrayToHexString(arr: Uint8Array) { 27 | return Buffer.from(arr.buffer).toString("hex"); 28 | } 29 | 30 | export const validateSigFunction = ( 31 | publicKey: Buffer, 32 | signature: Buffer, 33 | data: Buffer 34 | ): boolean => { 35 | // Extract the public key in compressed form 36 | const publicKeyBuffer = publicKey.slice(1); 37 | 38 | // Verify the signature against the public key and data 39 | const verified = secp256k1.ecdsaVerify(data, signature, publicKeyBuffer); 40 | 41 | // Return the result of the verification 42 | return verified; 43 | }; 44 | 45 | export const stringifyEvent = (event: any) => { 46 | let eventString = `${event.constructor.name} {\n`; 47 | 48 | for (const key in event) { 49 | if (event[key] instanceof Uint8Array) { 50 | const hexString = uint8ArrayToHexString(event[key]); 51 | eventString += ` ${key}: ${hexString},\n`; 52 | } else { 53 | eventString += ` ${key}: ${event[key]},\n`; 54 | } 55 | } 56 | 57 | eventString += '}'; 58 | return eventString; 59 | } 60 | 61 | export const convertToMillisats = (sats: number) => { 62 | return sats * 1000; 63 | } -------------------------------------------------------------------------------- /src/db/database.ts: -------------------------------------------------------------------------------- 1 | // to persist channels 2 | import sqlite from "sqlite3"; 3 | const sqlite3 = sqlite.verbose(); 4 | 5 | const isDev = false; 6 | 7 | const sampleDataPeersTable = [ 8 | { 9 | node: "WalletOfSatoshi.com", 10 | host: "170.75.163.209", 11 | port: "9735", 12 | pubkey: 13 | "035e4ff418fc8b5554c5d9eea66396c227bd429a3251c8cbc711002ba215bfc226", 14 | }, 15 | { 16 | node: "ACINQ", 17 | host: "3.33.236.230", 18 | port: "9735", 19 | pubkey: 20 | "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", 21 | }, 22 | { 23 | node: "CoinGate", 24 | host: "3.124.63.44", 25 | port: "9735", 26 | pubkey: 27 | "0242a4ae0c5bef18048fbecf995094b74bfb0f7391418d71ed394784373f41e4f3", 28 | }, 29 | ]; 30 | 31 | const sampleDataChannelsTable = [ 32 | { 33 | name: "channel1", 34 | amount: 100000, 35 | push_msat: 444, 36 | public: true, 37 | wallet_name: "satoshi", 38 | peer_id: 1, 39 | privkey: "testprivkey1", 40 | txid: "testtxid1", 41 | vout: 0, 42 | paid: true, 43 | payment_address: "tb324524asda23asdsad234esdaxdasd12312311", 44 | }, 45 | { 46 | name: "testChannel", 47 | amount: 100000, 48 | push_msat: 444, 49 | public: true, 50 | wallet_name: "ldk1", 51 | peer_id: 2, 52 | privkey: "testprivkey2", 53 | txid: "testtxid2", 54 | vout: 1, 55 | paid: false, 56 | payment_address: "tbdsfsdrererd12fdgdfg3123145asdsa23a1", 57 | }, 58 | { 59 | name: 
"p2p", 60 | amount: 100000, 61 | push_msat: 444, 62 | public: false, 63 | wallet_name: "LDK3", 64 | peer_id: 3, 65 | privkey: "testprivkey3", 66 | txid: "testtxid3", 67 | vout: 2, 68 | paid: true, 69 | payment_address: "tb3245242sadsadwe3242sadasghgvh1", 70 | }, 71 | ]; 72 | 73 | const sampleDataEventsTable = [ 74 | { 75 | event_type: "Event_FundingGenerationReady", 76 | event_data: `Event_FundingGenerationReady { 77 | ptrs_to: [object Object], 78 | ptr: 4304647200, 79 | temporary_channel_id: 614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7, 80 | counterparty_node_id: 0227e0e3a9198601964d77a5b2d9a2b21ffff59a85a85031d61c6bb27b2ece2075, 81 | channel_value_satoshis: 100000, 82 | output_script: 0020c3e6cce8fdbb4cfedde222d6669255d44566e37a553d05c3b1b06a365b0a634a, 83 | user_channel_id: 1329227995784915872903807060280344576, 84 | }`, 85 | channel_id_hex: "614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7" 86 | }, 87 | { 88 | event_type: "Event_ChannelPending", 89 | event_data: `Event_ChannelPending { 90 | ptrs_to: [object Object], 91 | ptr: 4304686320, 92 | channel_id: e8a6f2a4f7cc9a8952622542d660934e02dcfcfc088e59710c1a1a43bed3053d, 93 | user_channel_id: 1329227995784915872903807060280344576, 94 | former_temporary_channel_id: 614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7, 95 | counterparty_node_id: 0227e0e3a9198601964d77a5b2d9a2b21ffff59a85a85031d61c6bb27b2ece2075, 96 | funding_txo: [object Object], 97 | }`, 98 | channel_id_hex: "e8a6f2a4f7cc9a8952622542d660934e02dcfcfc088e59710c1a1a43bed3053d" 99 | } 100 | ]; 101 | 102 | // Connect/create the SQLite database 103 | export const createDatabase = () => { 104 | return new Promise((resolve, reject) => { 105 | const db = new sqlite3.Database("lightning.db", async (err) => { 106 | if (err) { 107 | console.error(err.message); 108 | } 109 | console.log("[database.ts]: Connected to/Created the SQLite database."); 110 | 111 | //////////////////////////////////////////////////////////// 112 | //////// peerlist table //////////////////////////////////// 113 | //////////////////////////////////////////////////////////// 114 | 115 | const createPeersTable = () => { 116 | return new Promise((resolve, reject) => { 117 | const createPeersTableQuery = `CREATE TABLE IF NOT EXISTS peers ( 118 | id INTEGER PRIMARY KEY AUTOINCREMENT, 119 | node TEXT, 120 | pubkey TEXT NOT NULL, 121 | host TEXT NOT NULL, 122 | port INTEGER NOT NULL 123 | )`; 124 | db.run(createPeersTableQuery, (err) => { 125 | if (err) { 126 | console.error(err.message); 127 | reject(err); 128 | } 129 | console.log("[database.ts]: Table 'peers' created or already exist"); 130 | resolve(); 131 | }); 132 | }); 133 | }; 134 | 135 | const createChannelsTable = () => { 136 | return new Promise((resolve, reject) => { 137 | // Create the 'channels' table if it doesn't exist 138 | const createChannelsTableQuery = `CREATE TABLE IF NOT EXISTS channels ( 139 | id INTEGER PRIMARY KEY AUTOINCREMENT, 140 | name TEXT, 141 | amount REAL NOT NULL, 142 | push_msat INTEGER NOT NULL, 143 | public BOOL NOT NULL, 144 | wallet_name TEXT, 145 | peer_id INTEGER UNIQUE, 146 | privkey TEXT NOT NULL, 147 | txid TEXT, 148 | vout INTEGER, 149 | paid BOOL NOT NULL, 150 | payment_address TEXT, 151 | channel_id TEXT, 152 | FOREIGN KEY (peer_id) REFERENCES peer(id) 153 | )`; 154 | 155 | db.run(createChannelsTableQuery, (err) => { 156 | if (err) { 157 | console.error(err.message); 158 | reject(err); 159 | } 160 | console.log("[database.ts]: Table 'channels' created or already exists"); 
161 | resolve(); 162 | }); 163 | }); 164 | }; 165 | 166 | // Create the 'events' table if it doesn't exist 167 | const createEventsTable = () => { 168 | return new Promise((resolve, reject) => { 169 | const createEventsTable = `CREATE TABLE IF NOT EXISTS events ( 170 | id INTEGER PRIMARY KEY AUTOINCREMENT, 171 | event_type TEXT NOT NULL, 172 | event_data TEXT NOT NULL, 173 | channel_id_hex INTEGER NOT NULL, 174 | notification_seen BOOL NOT NULL DEFAULT 0 175 | )`; 176 | db.run(createEventsTable, (err) => { 177 | if (err) { 178 | console.error(err.message); 179 | reject(err); 180 | } 181 | console.log("[database.ts]: Table 'events' created or already exist"); 182 | resolve(); 183 | }); 184 | }); 185 | }; 186 | 187 | try { 188 | // Create tables sequentially using async/await 189 | await createPeersTable(); 190 | await createChannelsTable(); 191 | await createEventsTable(); 192 | 193 | if (isDev) { 194 | // Insert some sample data into the 'peers' table if there's no data 195 | db.get("SELECT count(*) as count FROM peers", (err, row: any) => { 196 | if (err) { 197 | console.error(err.message); 198 | } 199 | if (row.count === 0) { 200 | console.log( 201 | "[database.ts]: Inserting sample data for table peers ..." 202 | ); 203 | 204 | const insertData = `INSERT INTO peers (node, host, port, pubkey) VALUES (?,?,?,?)`; 205 | sampleDataPeersTable.forEach((data) => { 206 | db.run(insertData, [ 207 | data.node, 208 | data.host, 209 | data.port, 210 | data.pubkey, 211 | ]); 212 | }); 213 | } else { 214 | console.log( 215 | "[database.ts]: Table 'peers' already contains data, skipping the sample data insertion." 216 | ); 217 | } 218 | }); 219 | 220 | // Insert some sample data into the 'channels' table if there's no data 221 | db.get("SELECT count(*) as count FROM channels", (err, row: any) => { 222 | if (err) { 223 | console.error(err.message); 224 | } 225 | if (row.count === 0) { 226 | console.log( 227 | "[database.ts]: Inserting sample data for table channels ..." 228 | ); 229 | 230 | const insertData = `INSERT INTO channels (name, amount, push_msat, public, wallet_name, peer_id, privkey, txid, vout, paid, payment_address) VALUES (?,?,?,?,?,?,?,?,?,?,?)`; 231 | sampleDataChannelsTable.forEach((data) => { 232 | db.run(insertData, [ 233 | data.name, 234 | data.amount, 235 | data.push_msat, 236 | data.public, 237 | data.wallet_name, 238 | data.peer_id, 239 | data.privkey, 240 | data.txid, 241 | data.vout, 242 | data.paid, 243 | data.payment_address, 244 | ]); 245 | }); 246 | } else { 247 | console.log( 248 | "[database.ts]: Table 'channels' already contains data, skipping the sample data insertion." 249 | ); 250 | } 251 | }); 252 | 253 | // Insert some sample data into the 'events' table if there's no data 254 | db.get("SELECT count(*) as count FROM events", (err, row: any) => { 255 | if (err) { 256 | console.error(err.message); 257 | } 258 | if (row.count === 0) { 259 | console.log( 260 | "[database.ts]: Inserting sample data for table events ..." 261 | ); 262 | 263 | const insertData = `INSERT INTO events (event_type, event_data, channel_id_hex) VALUES (?,?,?)`; 264 | sampleDataEventsTable.forEach((data) => { 265 | db.run(insertData, [ 266 | data.event_type, 267 | data.event_data, 268 | data.channel_id_hex, 269 | ]); 270 | }); 271 | } else { 272 | console.log( 273 | "[database.ts]: Table 'events' already contains data, skipping the sample data insertion." 
274 | ); 275 | } 276 | }); 277 | 278 | } 279 | 280 | console.log("[database.ts]: Table creation complete"); 281 | resolve(db); 282 | } catch (error) { 283 | console.error("Error creating tables:", error); 284 | reject(error); 285 | } 286 | }); 287 | }); 288 | }; 289 | -------------------------------------------------------------------------------- /src/db/db.ts: -------------------------------------------------------------------------------- 1 | import sqlite from "sqlite3"; 2 | import { createMockDatabase } from "../../test/db-mock"; 3 | import { createDatabase } from "./database"; 4 | 5 | let db: sqlite.Database | null = null; // Global variable to store the db object 6 | 7 | export const getDatabase = async () => { 8 | if (db) { 9 | return db; 10 | } 11 | if (process.env["NODE_ENV"] === "test") { 12 | db = await createMockDatabase(); 13 | } else { 14 | db = await createDatabase(); 15 | } 16 | return db; 17 | }; -------------------------------------------------------------------------------- /src/debug_lightning.ts: -------------------------------------------------------------------------------- 1 | import initialiseWasm from "./LDK/init/initializeWasm.js"; 2 | import LDKClientFactory from "./LDK/init/LDKClientFactory.js"; 3 | import { hexToUint8Array } from "./LDK/utils/utils"; 4 | import crypto from "crypto"; 5 | import express from "express"; 6 | import cors from "cors"; 7 | import bodyParser from "body-parser"; 8 | import serverRoutes from "./routes/serverRoutes"; 9 | import peerRoutes from "./routes/peerRoutes"; 10 | import channelRoutes from "./routes/channelRoutes"; 11 | import { 12 | closeConnections, 13 | savePeerAndChannelToDatabase, 14 | } from "./LDK/utils/ldk-utils"; 15 | import { ChannelDetails } from "lightningdevkit"; 16 | import { ChalkColor, Logger } from "./LDK/utils/Logger.js"; 17 | const DEBUG = new Logger(ChalkColor.Cyan, "debug_lightning.ts"); 18 | import dotenv from "dotenv"; 19 | dotenv.config(); 20 | import fs from "fs"; 21 | 22 | export async function debug_lightning() { 23 | DEBUG.log("initialiseWasm"); 24 | await initialiseWasm(); 25 | 26 | DEBUG.log("createLDKClient with dev"); 27 | await LDKClientFactory.createLDKClient("dev"); 28 | 29 | DEBUG.log("getLDKClient"); 30 | const LightningClient = await LDKClientFactory.getLDKClient(); 31 | 32 | DEBUG.log("start LDK"); 33 | await LightningClient.start(); 34 | 35 | DEBUG.log("call updateBestBlockHeight"); 36 | let bestBlockHeight = await LightningClient.updateBestBlockHeight(); 37 | DEBUG.log("bestBlockHeight:", "", bestBlockHeight); 38 | 39 | DEBUG.log("call updateBestBlockHash"); 40 | let bestBlockHash = await LightningClient.updateBestBlockHash(); 41 | DEBUG.log("bestBlockHash:", "", bestBlockHash); 42 | 43 | // Counterparty LND Node details 44 | const pubkeyHex = process.env["PUBKEY_HEX"]; 45 | const hostname = process.env["HOSTNAME"]; 46 | const portRead = process.env["PORT"]; 47 | 48 | if (!pubkeyHex || !hostname || !portRead) { 49 | throw new Error("Required environment variables are not set."); 50 | } 51 | const port = parseInt(portRead); 52 | 53 | // Connect to the peer node 54 | DEBUG.log("Connect to Peer"); 55 | await LightningClient.connectToPeer(pubkeyHex, hostname, port); 56 | 57 | // Connect to the channel 58 | let pubkey = hexToUint8Array(pubkeyHex); 59 | 60 | if (pubkey) { 61 | let privateKey = loadPrivateKeyFromFile("private_key.txt"); 62 | if (privateKey === "") { 63 | privateKey = crypto.randomBytes(32).toString("hex"); 64 | } 65 | 66 | let hostInfo = { 67 | host: pubkeyHex, 68 | port, 69 | 
channel_name: "Mercury Channel", 70 | wallet_name: "LightningWallet", 71 | privkey: privateKey, 72 | }; 73 | 74 | // Set the TXID for the funding generation event 75 | // bcrt1qa0h3k6mfhjxedelag752k04lkj245e47kaullm 76 | // txid: 5557bd457de22fb0950cf6364da8ecb0d15ee9c478f874071e5a85fab0978a5f 77 | 78 | // MUST ONLY BE CALLED ONCE - calling it twice opens a new channel 79 | 80 | await LightningClient.createChannel( 81 | pubkey, 82 | 100000, 83 | 0, 84 | false, 85 | "511c7848172a3dba8d3e15c27f03ba3f94a7096f94772fef190867744698ce99", 86 | "bcrt1qa0h3k6mfhjxedelag752k04lkj245e47kaullm", 87 | hostInfo 88 | ); 89 | 90 | // Sample create invoice 91 | const receiveInvoice = await LightningClient.createInvoice( 92 | BigInt(36900), 93 | "Lunch Meal", 94 | 36000 95 | ); 96 | DEBUG.log("Lightning Invoice for receiving: ", "", receiveInvoice); 97 | 98 | // Send a payment to an invoice 99 | /* 100 | DEBUG.log( 101 | "Sending payment to: lnbcrt500u1pjxcl69sp5ujmw5t5v962rczhk8l25fq76x46sx3l65gswp0gq9at78eps8xzqpp5y0pvvjn8jmj4dwkz5y950rd7mnl7tqhd05e769fpya4c3apwc8cqdqdx5cxkgrnv968xxqyjw5qcqp29qyysgqhq62thmesgkf79m2srntqslle04lvxr3tgchfcsgm5jgan67zzfjf7dxwuk7npv5u4ztfz6hs6u7jv8cdcv35mn4tr6rdkmjqe6alwgp3q77z2" 102 | ); 103 | LightningClient.sendPayment( 104 | "lnbcrt500u1pjxcl69sp5ujmw5t5v962rczhk8l25fq76x46sx3l65gswp0gq9at78eps8xzqpp5y0pvvjn8jmj4dwkz5y950rd7mnl7tqhd05e769fpya4c3apwc8cqdqdx5cxkgrnv968xxqyjw5qcqp29qyysgqhq62thmesgkf79m2srntqslle04lvxr3tgchfcsgm5jgan67zzfjf7dxwuk7npv5u4ztfz6hs6u7jv8cdcv35mn4tr6rdkmjqe6alwgp3q77z2" 105 | );*/ 106 | } 107 | 108 | // Close a channel 109 | //await LightningClient.forceCloseChannel("ef382090de601be8d62439d80def437503bb5a5e5c2ddc7a5aa27c4a7f3d3618");*/ 110 | } 111 | 112 | const loadPrivateKeyFromFile = (privateKeyFilePath: string): string => { 113 | let privateKey: string = ""; 114 | 115 | // Check if the private key file exists 116 | if (fs.existsSync(privateKeyFilePath)) { 117 | // Private key file exists, read the contents 118 | const privateKeyBuffer = fs 119 | .readFileSync(privateKeyFilePath) 120 | .toString("hex"); 121 | privateKey = privateKeyBuffer; 122 | 123 | DEBUG.log("Private key:", "loadPrivateKeyFromFile", privateKey); 124 | DEBUG.log("privateKeyBuffer", "loadPrivateKeyFromFile", privateKeyBuffer); 125 | } else { 126 | console.error("private key file doesn't exist"); 127 | return ""; 128 | } 129 | return privateKey; 130 | }; 131 | 132 | // Constants 133 | const PORT = 3003; 134 | 135 | // Express app 136 | const app = express(); 137 | 138 | // Middlewares 139 | app.use(cors()); 140 | app.use(bodyParser.json()); 141 | 142 | // Routes 143 | app.use("/", serverRoutes); 144 | app.use("/peer", peerRoutes); 145 | app.use("/channel", channelRoutes); 146 | 147 | // Starting the express server 148 | app.listen(PORT, async () => { 149 | debug_lightning(); 150 | }); 151 | 152 | // Exit handlers 153 | const onExit = () => { 154 | // code to be executed on exit, e.g. close connections, cleanup resources 155 | DEBUG.log("Exiting the application"); 156 | closeConnections(); 157 | }; 158 | 159 | const onSigInt = () => { 160 | // code to be executed on sigint, e.g. 
close connections, cleanup resources 161 | DEBUG.log("Application interrupted"); 162 | closeConnections(); 163 | process.exit(); 164 | }; 165 | 166 | process.on("exit", onExit); 167 | process.on("SIGINT", onSigInt); 168 | 169 | export default app; 170 | -------------------------------------------------------------------------------- /src/mercury-node.d.ts: -------------------------------------------------------------------------------- 1 | declare module "mercury-node" { 2 | const server: any; 3 | export { server }; 4 | } 5 | -------------------------------------------------------------------------------- /src/routes/channelRoutes.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import { getDatabase } from "../db/db.js"; 3 | 4 | import LDKClientFactory from "../LDK/init/LDKClientFactory.js"; 5 | import { hexToUint8Array, uint8ArrayToHexString } from "../LDK/utils/utils.js"; 6 | import { 7 | ChannelDetails, 8 | Option_u32Z, 9 | Option_u32Z_None, 10 | Option_u32Z_Some, 11 | Option_u64Z_Some, 12 | } from "lightningdevkit"; 13 | 14 | const router = express.Router(); 15 | 16 | interface Channel { 17 | id: number; 18 | hexId: string; 19 | name: string; 20 | amount: number; 21 | push_msat: number; 22 | wallet_name: string; 23 | peer_id: string; 24 | privkey: string; 25 | txid: string; 26 | vout: number; 27 | paid: boolean; 28 | payment_address: string; 29 | } 30 | 31 | router.get("/nodeID", async function (req, res) { 32 | // Get the Node ID of our wallet 33 | 34 | try { 35 | const nodeId = LDKClientFactory.getLDKClient().getOurNodeId(); 36 | const hexNodeId = uint8ArrayToHexString(nodeId); 37 | res.json({ nodeID: hexNodeId }); 38 | } catch (e) { 39 | res.status(500).json({ message: "Error: " + e }); 40 | } 41 | }); 42 | 43 | router.get("/balance", async function (req, res) { 44 | try { 45 | // For all usable channels, add up the balance and return it. Balances are u64 msat values, so accumulate as BigInt and serialize as a string. 46 | let activeChannels = LDKClientFactory.getLDKClient().getUsableChannels(); 47 | let total_balance = BigInt(0); 48 | activeChannels.forEach((chn: ChannelDetails) => { 49 | total_balance += BigInt(chn.get_balance_msat()); 50 | }); 51 | res.json({ balance: total_balance.toString() }); 52 | } catch (e) { res.status(500).json({ message: "Error: " + e }); } 53 | }); 54 | 55 | // This is live channels that the LDK adapter has open - different to channels persisted in database.
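// Illustrative shape of one element returned by GET /channel/liveChannels (the values below are made up):
//   { channel_hexId: "e8a6f2a4...", usable: true, ready: true, counterparty_hexId: "0227e0e3...", funding_txo: "5557bd45...", balance_msat: "100000000", amount_in_satoshis: "100000", public: false, confirmations: 6, confirmations_required: 3 }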
56 | router.get("/liveChannels", async function (req, res) { 57 | try { 58 | const channels: ChannelDetails[] = 59 | LDKClientFactory.getLDKClient().getChannels(); 60 | let activeChannels = LDKClientFactory.getLDKClient().getUsableChannels(); 61 | console.log("active channels:", activeChannels); 62 | console.log("channels: ", channels); 63 | 64 | let jsonChannels = []; 65 | if (channels && channels.length > 0) { 66 | for (const channel of channels) { 67 | jsonChannels.push({ 68 | //id: channel.get_channel_id().toString(), 69 | channel_hexId: uint8ArrayToHexString(channel.get_channel_id()), 70 | usable: channel.get_is_usable(), 71 | ready: channel.get_is_channel_ready(), 72 | counterparty_hexId: uint8ArrayToHexString( 73 | channel.get_counterparty().get_node_id() 74 | ), 75 | funding_txo: uint8ArrayToHexString( 76 | channel.get_funding_txo().get_txid() 77 | ), 78 | balance_msat: channel.get_balance_msat().toString(), 79 | amount_in_satoshis: channel.get_channel_value_satoshis().toString(), 80 | public: channel.get_is_public(), 81 | confirmations: (channel.get_confirmations() as Option_u32Z_Some).some, 82 | confirmations_required: ( 83 | channel.get_confirmations_required() as Option_u32Z_Some 84 | ).some, 85 | }); 86 | console.log(channel.get_short_channel_id()); 87 | } 88 | res.json(jsonChannels); 89 | } else { 90 | res.json([]); 91 | } 92 | } catch (e) { res.status(500).json({ message: "Error: " + e }); } 93 | }); 94 | 95 | router.post("/createChannel", async (req, res) => { 96 | const { 97 | pubkey, 98 | amount, 99 | push_msat, 100 | channelType, 101 | host, 102 | port, 103 | channel_name, 104 | wallet_name, 105 | privkey, 106 | paid, 107 | payment_address, 108 | funding_txid, 109 | } = req.body; 110 | 111 | if ( 112 | pubkey === undefined || 113 | amount === undefined || 114 | push_msat === undefined || 115 | channelType === undefined || 116 | host === undefined || 117 | port === undefined || 118 | channel_name === undefined || 119 | wallet_name === undefined || 120 | privkey === undefined || 121 | paid === undefined || 122 | payment_address === undefined || 123 | funding_txid === undefined 124 | ) { 125 | res.status(500).send("Missing required parameters"); 126 | } else { 127 | const isPublic = channelType === "Public"; // convert the "Public"/"Private" string to the boolean createChannel expects
128 | try { 129 | if (pubkey.length === 66) { // a 33-byte compressed pubkey is 66 hex characters 130 | const connection = await LDKClientFactory.getLDKClient().createChannel( 131 | hexToUint8Array(pubkey), 132 | amount, 133 | push_msat, 134 | isPublic, 135 | funding_txid, 136 | payment_address, 137 | { 138 | host, 139 | port, 140 | channel_name, 141 | wallet_name, 142 | privkey, 143 | } 144 | ); 145 | if (connection) { 146 | res.status(200).send("Created Channel on LDK"); 147 | } else { 148 | res.status(500).send("Failed to create Channel"); 149 | } 150 | } else { res.status(400).send("Invalid pubkey: expected a 33-byte public key as 66 hex characters"); } 151 | } catch (e) { 152 | res.status(500).send("Error creating channel on LDK"); 153 | } 154 | } 155 | }); 156 | 157 | router.get("/usableChannels", async function (req, res) { 158 | const activeChannels: ChannelDetails[] = 159 | LDKClientFactory.getLDKClient().getUsableChannels(); 160 | 161 | let jsonChannels: any = []; 162 | activeChannels.forEach((channel: ChannelDetails) => { 163 | const confirmations = channel.get_confirmations() as Option_u32Z_Some; 164 | jsonChannels.push({ 165 | channel_hexId: uint8ArrayToHexString(channel.get_channel_id()), 166 | balance_msat: channel.get_balance_msat().toString(), 167 | counterparty_hexId: uint8ArrayToHexString( 168 | channel.get_counterparty().get_node_id() 169 | ), 170 | funding_txo: uint8ArrayToHexString(channel.get_funding_txo().get_txid()), 171 | amount_in_satoshis: channel.get_channel_value_satoshis().toString(), 172 | public: channel.get_is_public(), 173 | confirmations: confirmations?.some, 174 | }); 175 | }); 176 | res.json(jsonChannels); 177 | }); 178 | 179 | // This gets all the channels from the database of all wallets 180 | router.get("/allChannels", async function (req, res) { 181 | try { 182 | const db = await getDatabase(); 183 | db.all("SELECT * FROM channels", (err: any, rows: any) => { 184 | if (err) { 185 | res.status(500).json({ error: err.message }); return; 186 | } 187 | res.json(rows); 188 | }); 189 | } catch (e) { res.status(500).json({ message: "Error: " + e }); } 190 | }); 191 | 192 | router.get("/allEvents", async function (req, res) { 193 | try { 194 | const db = await getDatabase(); 195 | db.all("SELECT * FROM events", (err: any, rows: any) => { 196 | if (err) { 197 | res.status(500).json({ error: err.message }); return; 198 | } 199 | res.json(rows); 200 | }); 201 | } catch (e) { res.status(500).json({ message: "Error: " + e }); } 202 | }); 203 | 204 | router.get("/loadEvents/:wallet_name", async function (req, res) { 205 | try { 206 | const wallet_id = req.params.wallet_name; 207 | const selectData = ` 208 | SELECT * 209 | FROM events 210 | WHERE channel_id_hex = ( 211 | SELECT channel_id 212 | FROM channels 213 | WHERE wallet_name = ? 214 | ); 215 | `; 216 | const db = await getDatabase(); 217 | db.all(selectData, [wallet_id], (err: any, rows: any) => { 218 | if (err) { 219 | console.log(err.message); 220 | res.status(500).json({ error: err.message }); 221 | return; 222 | } 223 | if (rows && rows.length > 0) { 224 | res.status(200).json(rows); 225 | } else { 226 | res.json([]); // empty channels 227 | } 228 | }); 229 | } catch (e) { res.status(500).json({ message: "Error: " + e }); } 230 | }); 231 | 232 | router.get("/loadUnnotifiedEvents/:wallet_name", async function (req, res) { 233 | try { 234 | const wallet_id = req.params.wallet_name; 235 | const selectData = ` 236 | SELECT id, channel_id_hex, event_type, event_data 237 | FROM events 238 | WHERE channel_id_hex = ( 239 | SELECT channel_id 240 | FROM channels 241 | WHERE wallet_name = ? 
242 | ) AND notification_seen = 0; 243 | `; 244 | const db = await getDatabase(); 245 | db.all(selectData, [wallet_id], (err: any, rows: any) => { 246 | if (err) { 247 | res.status(500).json({ error: err.message }); 248 | return; 249 | } 250 | if (rows && rows.length > 0) { 251 | res.status(200).json(rows); 252 | } else { 253 | res.json([]); // empty channels 254 | } 255 | }); 256 | } catch (e) {} 257 | }); 258 | 259 | router.post("/setEventNotificationSeen", async (req, res) => { 260 | const { id } = req.body; 261 | 262 | if (!Number.isInteger(parseInt(id))) { 263 | res.status(400).json({ error: "Invalid event ID" }); 264 | return; 265 | } 266 | 267 | const updateData = `UPDATE events SET notification_seen = 1 WHERE id=?`; 268 | const db = await getDatabase(); 269 | db.run(updateData, [id], function (err: any) { 270 | if (err) { 271 | res.status(500).json({ error: err.message }); 272 | return; 273 | } 274 | res.json({ message: "Event notification status updated successfully" }); 275 | }); 276 | }); 277 | 278 | // load channels by wallet name e.g. -> localhost:3003/channel/loadChannels/vLDK 279 | router.get("/loadChannels/:wallet_name", async (req, res) => { 280 | try { 281 | const wallet_id = req.params.wallet_name; 282 | const selectData = ` 283 | SELECT channels.*, peers.node, peers.pubkey, peers.host, peers.port 284 | FROM channels 285 | INNER JOIN peers ON channels.peer_id = peers.id 286 | WHERE channels.wallet_name = ? 287 | `; 288 | const db = await getDatabase(); 289 | db.all(selectData, [wallet_id], (err: any, rows: any) => { 290 | if (err) { 291 | console.log(err.message); 292 | res.status(500).json({ error: err.message }); 293 | return; 294 | } 295 | if (rows && rows.length > 0) { 296 | res.status(200).json(rows); 297 | } else { 298 | res.json([]); // empty channels 299 | } 300 | }); 301 | } catch (e) { 302 | res 303 | .status(500) 304 | .json({ message: "couldn't load a channel that doesn't exist" }); 305 | } 306 | }); 307 | 308 | // This updates the name of a channel by id 309 | router.put("/updateChannelName/:id", async (req, res) => { 310 | // update the name of a channel by id 311 | const { name } = req.body; 312 | 313 | if (!Number.isInteger(parseInt(req.params.id))) { 314 | res.status(400).json({ error: "Invalid channel ID" }); 315 | return; 316 | } 317 | 318 | const updateData = `UPDATE channels SET name=? WHERE id=?`; 319 | const db = await getDatabase(); 320 | db.run(updateData, [name, req.params.id], function (err: any) { 321 | if (err) { 322 | res.status(500).json({ error: err.message }); 323 | return; 324 | } 325 | res.json({ message: "Channel name updated successfully" }); 326 | }); 327 | }); 328 | 329 | // This updates the paid value of a channel by id 330 | router.put("/updateChannelPaid/:id", async (req, res) => { 331 | // update the paid value of a channel by id 332 | const { paid } = req.body; 333 | 334 | if (!Number.isInteger(parseInt(req.params.id))) { 335 | res.status(400).json({ error: "Invalid channel ID" }); 336 | return; 337 | } 338 | 339 | const updateData = `UPDATE channels SET paid=? 
WHERE id=?`; 340 | const db = await getDatabase(); 341 | db.run(updateData, [paid, req.params.id], function (err: any) { 342 | if (err) { 343 | res.status(500).json({ error: err.message }); 344 | return; 345 | } 346 | res.json({ message: "Channel paid value updated successfully" }); 347 | }); 348 | }); 349 | 350 | // This updates an entire channel by id 351 | router.put("/updateChannel/:id", async (req, res) => { 352 | // update a channel by id 353 | const { 354 | name, 355 | amount, 356 | push_msat = 0, 357 | wallet_name, 358 | peer_id, 359 | privkey, 360 | txid, 361 | vout, 362 | paid = false, 363 | payment_address, 364 | } = req.body; 365 | 366 | if (!Number.isInteger(parseInt(req.params.id))) { 367 | res.status(400).json({ error: "Invalid channel ID" }); 368 | return; 369 | } 370 | 371 | const updateData = `UPDATE channels SET name=?, amount=?, push_msat=?, wallet_name=?, peer_id=?, privkey=?, txid=?, vout=?, paid=?, payment_address=? WHERE id=?`; 372 | const db = await getDatabase(); 373 | db.run( 374 | updateData, 375 | [ 376 | name, 377 | amount, 378 | push_msat, 379 | wallet_name, 380 | peer_id, 381 | privkey, 382 | txid, 383 | vout, 384 | paid, 385 | payment_address, 386 | req.params.id, 387 | ], 388 | function (err: any) { 389 | if (err) { 390 | res.status(500).json({ error: err.message }); 391 | return; 392 | } 393 | res.json({ message: "Channel updated successfully" }); 394 | } 395 | ); 396 | }); 397 | 398 | // This removes duplicate channels from the database (keeps the lowest id of each identical group) 399 | router.get("/removeDuplicateChannels", async (req, res) => { 400 | const query = ` 401 | DELETE FROM channels 402 | WHERE id NOT IN ( 403 | -- keep one row (the lowest id) per identical group so only true duplicates are deleted 404 | SELECT MIN(id) 405 | FROM channels 406 | GROUP BY name, amount, push_msat, wallet_name, peer_id, privkey, txid, vout, paid, payment_address 407 | ) 408 | AND id IS NOT NULL 409 | `; 410 | const db = await getDatabase(); 411 | db.run(query, [], function (err: any) { 412 | if (err) { 413 | console.error(err); 414 | res.status(500).json({ error: err.message }); 415 | return; 416 | } 417 | 418 | res.json({ message: "Duplicate channels removed successfully" }); 419 | }); 420 | }); 421 | 422 | // takes hexadecimal format of channelId 423 | router.delete("/forceCloseChannel/:id", async (req, res) => { 424 | try { 425 | const channel_id = req.params.id; 426 | const closeChannelReq = 427 | await LDKClientFactory.getLDKClient().forceCloseChannel(channel_id); 428 | if (closeChannelReq) { 429 | res.status(200).json({ status: 200, message: "Success" }); 430 | } else { 431 | res.status(500).json({ error: "Failed to force close channel" }); 432 | } 433 | } catch (e) { 434 | console.log("Error ", e); 435 | res.status(500).json({ error: e }); 436 | } 437 | }); 438 | 439 | // takes hexadecimal format of channelId 440 | router.delete("/mutualCloseChannel/:id", async (req, res) => { 441 | try { 442 | const channel_id = req.params.id; 443 | const closeChannelReq = 444 | await LDKClientFactory.getLDKClient().mutualCloseChannel(channel_id); 445 | if (closeChannelReq) { 446 | res.status(200).json({ status: 200, message: "Success" }); 447 | } else { 448 | res.status(500).json({ error: "Failed to mutual close channel" }); 449 | } 450 | } catch (e) { 451 | console.log("Error ", e); 452 | res.status(500).json({ error: e }); 453 | } 454 | }); 455 | 456 | router.delete("/deleteChannelByPaymentAddr/:addr", async (req, res) => { 457 | // delete channel by payment address 458 | const deleteData = `DELETE FROM channels WHERE payment_address=?`; 459 | const db = await getDatabase(); 460 | 
db.run(deleteData, [req.params.addr], function (err: any) { 461 | if (err) { 462 | res.status(500).json({ error: err.message }); 463 | return; 464 | } 465 | res.json({ message: "Data deleted successfully" }); 466 | }); 467 | }); 468 | 469 | export default router; 470 | -------------------------------------------------------------------------------- /src/routes/peerRoutes.ts: -------------------------------------------------------------------------------- 1 | // handle all peer logic on server 2 | import express from "express"; 3 | import { 4 | ChainMonitor, 5 | PeerManager, 6 | TwoTuple_PublicKeyCOption_NetAddressZZ, 7 | } from "lightningdevkit"; 8 | const router = express.Router(); 9 | import { getDatabase } from "../db/db"; 10 | import LDKClientFactory from "../LDK/init/LDKClientFactory"; 11 | import { hexToUint8Array, uint8ArrayToHexString } from "../LDK/utils/utils"; 12 | import { 13 | savePeerAndChannelToDatabase, 14 | saveChannelFundingToDatabase, 15 | } from "../LDK/utils/ldk-utils"; 16 | 17 | router.get("/liveChainMonitors", async (req, res) => { 18 | /* - TODO: Broken, has circular JSON error 19 | let chainMonitor: ChainMonitor | null = 20 | await LDKClientFactory.getLDKClient().getChainMonitor(); 21 | if (chainMonitor) { 22 | res.status(200).json({ chainMonitors: chainMonitor.list_monitors() }); 23 | } else { 24 | res.status(500).json("Failed to get chain monitor"); 25 | }*/ 26 | }); 27 | 28 | router.get("/livePeers", async (req, res) => { 29 | let peerManager: PeerManager | null = 30 | await LDKClientFactory.getLDKClient().getPeerManager(); 31 | if (peerManager) { 32 | let peer_node_ids: TwoTuple_PublicKeyCOption_NetAddressZZ[] | any = 33 | peerManager.get_peer_node_ids(); 34 | 35 | let peer_ids = []; 36 | 37 | console.log(peer_node_ids.length); 38 | 39 | for (var i = 0; i < peer_node_ids.length; i++) { 40 | console.log( 41 | "[peerRoutes.ts/livePeers]: get a ->", 42 | uint8ArrayToHexString(peer_node_ids[i]?.get_a()) 43 | ); 44 | peer_ids.push({ 45 | id: i + 1, 46 | pubkey: uint8ArrayToHexString(peer_node_ids[i]?.get_a()), 47 | }); 48 | } 49 | res.status(200).json(peer_ids); 50 | } else { 51 | res.status(500).json({ status: 500, message: "Failed to get peermanager" }); 52 | } 53 | }); 54 | 55 | let count = 1; 56 | router.post("/connectToPeer", async (req, res) => { 57 | const { pubkey, host, port } = req.body; 58 | 59 | console.log("//////////////////////////////////////////////////////"); 60 | console.log("//////////////////////////////////////////////////////"); 61 | console.log("//////////////////////////////////////////////////////"); 62 | console.log("an attempt to connect to peer has been made", count); 63 | count++; 64 | console.log("values found:", pubkey); 65 | console.log("//////////////////////////////////////////////////////"); 66 | console.log("//////////////////////////////////////////////////////"); 67 | console.log("//////////////////////////////////////////////////////"); 68 | 69 | if (pubkey === undefined || host === undefined || port === undefined) { 70 | res.status(500).json({ 71 | status: 500, 72 | message: "Missing required parameters", 73 | }); 74 | } else { 75 | // try and connect to a peer, return success if it can, fail if it can't 76 | try { 77 | const connection = await LDKClientFactory.getLDKClient().connectToPeer( 78 | pubkey, 79 | host, 80 | port 81 | ); 82 | if (connection) { 83 | res.status(200).json({ 84 | status: 200, 85 | message: "Connected to peer", 86 | }); 87 | } else { 88 | res.status(500).json({ 89 | status: 500, 90 | message: "Failed to 
connect to peer", 91 | }); 92 | } 93 | } catch (e) { 94 | if ( 95 | e instanceof Error && 96 | e.message.includes("already tried to connect to this peer") 97 | ) { 98 | res.status(500).json({ 99 | status: 500, 100 | message: "You're already connected to this peer!", 101 | }); 102 | } else { 103 | res.status(500).json({ 104 | status: 500, 105 | message: "Error connecting to peer", 106 | }); 107 | } 108 | } 109 | } 110 | }); 111 | 112 | // Saves the channel to the database. 113 | router.post("/savePeerAndChannelToDb", async (req, res) => { 114 | console.log("[peerRoutes.ts]->router.post/savePeerAndChannelToDb"); 115 | 116 | const { 117 | amount, 118 | pubkey, 119 | host, 120 | port, 121 | channel_name, 122 | wallet_name, 123 | channelType, 124 | privkey, 125 | paid, 126 | payment_address, 127 | } = req.body; 128 | 129 | console.log( 130 | "[peerRoutes.ts]->router.post/savePeerAndChannelToDb->values" + amount, 131 | pubkey, 132 | host, 133 | port, 134 | channel_name, 135 | wallet_name, 136 | channelType, 137 | privkey, 138 | paid, 139 | payment_address 140 | ); 141 | 142 | console.log( 143 | "[peerRoutes.ts]->router.post/savePeerAndChannelToDb-> Set channelType" 144 | ); 145 | channelType === "Public" ? true : false; 146 | 147 | try { 148 | const result = await savePeerAndChannelToDatabase( 149 | amount, 150 | pubkey, 151 | host, 152 | port, 153 | channel_name, 154 | wallet_name, 155 | channelType, 156 | privkey, 157 | paid, 158 | payment_address 159 | ); 160 | 161 | if (result && result.status === 409) { 162 | res.status(409).json({ 163 | status: 409, 164 | message: result.message, 165 | }); 166 | } else if (result && result.channel_id) { 167 | res.status(200).json({ 168 | status: 200, 169 | message: "Saved peer and channel to database.", 170 | channel_id: result.channel_id, 171 | }); 172 | } else { 173 | res.status(500).json({ 174 | status: 500, 175 | message: "Error: Failed to save peer and channel to database.", 176 | }); 177 | } 178 | } catch (e: any) { 179 | res 180 | .status(500) 181 | .json({ status: 500, message: "Couldn't insert into DB: " + e?.message }); 182 | } 183 | }); 184 | 185 | router.post("/setTxData", async (req, res) => { 186 | const { txid, payment_address } = req.body; 187 | 188 | console.log("[peerRoutes.ts]->setTxData" + txid); 189 | 190 | if (txid === undefined) { 191 | console.log("No TXID was found."); 192 | res.status(500).json({ 193 | status: 500, 194 | message: "No txid specified", 195 | }); 196 | } else { 197 | try { 198 | await LDKClientFactory.getLDKClient().setEventTxData( 199 | txid, 200 | payment_address 201 | ); 202 | res.status(200).json({ 203 | status: 200, 204 | message: "Txid was set correctly.", 205 | }); 206 | } catch (e) { 207 | res.status(500).json({ 208 | status: 500, 209 | message: "Error occured during setting the txid", 210 | }); 211 | } 212 | } 213 | }); 214 | 215 | router.post("/saveChannelPaymentInfoToDb", async (req, res) => { 216 | const { amount, paid, txid, vout, address } = req.body; 217 | 218 | console.log( 219 | "[peerRoutes.ts]->saveChannelPaymentInfoToDb " + amount, 220 | paid, 221 | txid, 222 | vout, 223 | address 224 | ); 225 | 226 | if (address === undefined) { 227 | console.log( 228 | "[peerRoutes.ts]->saveChannelPaymentInfoToDb-> No address specified" 229 | ); 230 | res.status(500).json({ 231 | status: 500, 232 | message: "No address was posted to peer/saveChannelPaymentInfoToDb", 233 | }); 234 | } else { 235 | try { 236 | await saveChannelFundingToDatabase(amount, paid, txid, vout, address); 237 | res 238 | .status(200) 239 | 
.json({ status: 200, message: "Channel funding saved to DB" }); 240 | } catch (e: any) { 241 | res.status(500).json({ 242 | status: 500, 243 | message: "Couldn't save channel payment info" + e?.message, 244 | }); 245 | } 246 | } 247 | }); 248 | 249 | // gives you peer details with the peer_id 250 | router.get("/getPeer/:peer_id", async (req, res) => { 251 | const peer_id = req.params.peer_id; 252 | const selectData = "SELECT node, pubkey, host, port FROM peers WHERE id = ?"; 253 | const db = await getDatabase(); 254 | db.get(selectData, [peer_id], (err: any, row: any) => { 255 | if (err) { 256 | res.status(500).json({ error: err.message }); 257 | return; 258 | } 259 | if (row) { 260 | res.json(row); 261 | } else { 262 | res.status(404).json({ error: "Peer not found" }); 263 | } 264 | }); 265 | }); 266 | 267 | router.get("/default_peerlist", async function (req, res) { 268 | // sample public list 269 | let data = [ 270 | { 271 | id: 1, 272 | node: "WalletOfSatoshi.com", 273 | host: "170.75.163.209", 274 | port: "9735", 275 | pubkey: 276 | "035e4ff418fc8b5554c5d9eea66396c227bd429a3251c8cbc711002ba215bfc226", 277 | }, 278 | { 279 | id: 2, 280 | node: "ACINQ", 281 | host: "3.33.236.230", 282 | port: "9735", 283 | pubkey: 284 | "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", 285 | }, 286 | { 287 | id: 3, 288 | node: "CoinGate", 289 | host: "3.124.63.44", 290 | port: "9735", 291 | pubkey: 292 | "0242a4ae0c5bef18048fbecf995094b74bfb0f7391418d71ed394784373f41e4f3", 293 | }, 294 | ]; 295 | res.status(200).json(data); 296 | }); 297 | 298 | // get the peerlist that's stored in the database 299 | router.get("/peers", async function (req, res) { 300 | try { 301 | const db = await getDatabase(); 302 | db.all("SELECT * FROM peers", (err: any, rows: any) => { 303 | if (err) { 304 | throw err; 305 | } 306 | res.json(rows); 307 | }); 308 | } catch (e) {} 309 | }); 310 | 311 | export default router; 312 | -------------------------------------------------------------------------------- /src/routes/serverRoutes.ts: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | const router = express.Router(); 3 | import { closeConnections, validateInvoiceBody } from "../LDK/utils/ldk-utils"; 4 | import LDKClientFactory from "../LDK/init/LDKClientFactory"; 5 | import { convertToMillisats } from "../LDK/utils/utils"; 6 | import fs from "fs"; 7 | 8 | const createAllFolders = (walletName: string) => { 9 | const rootPath = "wallets/" + walletName + "/"; 10 | 11 | if (!fs.existsSync(rootPath)) { 12 | fs.mkdirSync(rootPath); 13 | } 14 | 15 | if (!fs.existsSync(rootPath + "./.ldk")) { 16 | fs.mkdirSync(rootPath + "./.ldk"); 17 | } 18 | 19 | if (!fs.existsSync(rootPath + "./.scorer")) { 20 | fs.mkdirSync(rootPath + "./.scorer"); 21 | } 22 | 23 | if (!fs.existsSync(rootPath + "./channels")) { 24 | fs.mkdirSync(rootPath + "./channels"); 25 | } 26 | 27 | if (!fs.existsSync(rootPath + "/channels/channel_lookup.json")) { 28 | fs.writeFileSync(rootPath + "/channels/channel_lookup.json", "[]"); 29 | } 30 | }; 31 | 32 | router.post("/startLDK", async function (req, res) { 33 | // initialize an LDK with the network set 34 | const { wallet_name, network } = req.body; 35 | 36 | // create folders that must exist for LDK to exist 37 | createAllFolders(wallet_name); 38 | 39 | console.log("wallet value is:", wallet_name); 40 | console.log("network value is:", network); 41 | 42 | // validate network values 43 | // validate network values 44 | const 
validNetworks = ["dev", "prod", "test", "mock"]; 45 | 46 | if (!validNetworks.includes(network)) { 47 | return res.status(500).json("Invalid network given for initLDK"); 48 | } 49 | 50 | try { 51 | if (LDKClientFactory.isInitialized()) { 52 | res.status(500).json("LDK already initialized."); 53 | } else { 54 | console.log("[Server.ts]: Finished initialiseWasm"); 55 | await LDKClientFactory.createLDKClient(wallet_name, network); // prod/test/dev 56 | console.log("[Server.ts]: Finished create LDK"); 57 | const LightningClient = LDKClientFactory.getLDKClient(); 58 | console.log("[Server.ts]: Starting LDK Client"); 59 | await LightningClient.start(); 60 | console.log("[Server.ts]: LDK Client started"); 61 | res 62 | .status(200) 63 | .json( 64 | "Started LDK with network " + 65 | network + 66 | " LDK: " + 67 | LightningClient.getOurNodeId() 68 | ); 69 | } 70 | } catch (e) { 71 | console.error(`Error occurred setting up LDK \n ${e} \n`); 72 | res.status(500).json({ message: "Error occurred when starting LDK: " + e }); 73 | } 74 | }); 75 | 76 | router.get("/closeLDK", async function (req, res) { 77 | // Closing all connections 78 | closeConnections(); 79 | 80 | try { 81 | // Close all intervals 82 | const LightningClient = LDKClientFactory.getLDKClient(); 83 | LightningClient.stop(); 84 | LDKClientFactory.destroy(); 85 | } catch (e) { 86 | console.error("Error occurred stopping LDK"); 87 | } 88 | 89 | res.status(200).json({ message: "Connections closed" }); 90 | }); 91 | 92 | router.post("/generateInvoice", async function (req, res) { 93 | try { 94 | let { amount_in_sats, invoice_expiry_secs, description } = req.body; 95 | amount_in_sats = Number(amount_in_sats); 96 | // make sure we have valid object 97 | validateInvoiceBody(amount_in_sats, invoice_expiry_secs, description); 98 | 99 | let invoice = await LDKClientFactory.getLDKClient().createInvoice( 100 | BigInt(convertToMillisats(amount_in_sats)), 101 | invoice_expiry_secs, 102 | description 103 | ); 104 | res.status(201).json({ status: 200, invoice }); 105 | } catch (err) { 106 | const err_msg = `Bad request: ${err}`; 107 | console.log(err_msg); res.status(400).json({ error: err_msg }); 108 | } 109 | }); 110 | 111 | router.post("/sendPayment", async function (req, res) { 112 | // send a payment with values posted into this route -> 113 | const invoice_str = req.body.invoice; 114 | try { 115 | const payment_res = await LDKClientFactory.getLDKClient().sendPayment( 116 | invoice_str 117 | ); 118 | if (payment_res) { 119 | res.status(200).json({ message: "Payment successful" }); 120 | } else { 121 | res.status(500).json({ error: "Payment failed" }); 122 | } 123 | } catch (err: any) { 124 | // Handle the error 125 | console.log(err); 126 | res.status(500).json({ error: err }); 127 | } 128 | }); 129 | 130 | export default router; 131 | -------------------------------------------------------------------------------- /src/server.js: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import cors from "cors"; 3 | import bodyParser from "body-parser"; 4 | import serverRoutes from "./routes/serverRoutes"; 5 | import peerRoutes from "./routes/peerRoutes"; 6 | import channelRoutes from "./routes/channelRoutes"; 7 | import { closeConnections } from "./LDK/utils/ldk-utils"; 8 | import initialiseWasm from "./LDK/init/initializeWasm"; 9 | 10 | await initialiseWasm(); 11 | 12 | // Constants 13 | const PORT = 3003; 14 | 15 | // Express app 16 | const app = express(); 17 | 18 | // Middlewares 19 | app.use(cors()); 20 | app.use(bodyParser.json()); 
21 | 22 | // Routes 23 | app.use("/", serverRoutes); 24 | app.use("/peer", peerRoutes); 25 | app.use("/channel", channelRoutes); 26 | 27 | // Starting the express server 28 | async function startServer() { 29 | // Check if the port is already in use by probing it; a connection error just means it is free 30 | const net = await import("net"); 31 | const socket = net.createConnection(PORT); 32 | socket.once("connect", () => { 33 | console.log(`Port ${PORT} is already in use.`); 34 | socket.destroy(); 35 | }); 36 | socket.once("error", () => socket.destroy()); 37 | // Start the express server 38 | app.listen(PORT, async () => { 39 | /* PRODUCTION CODE */ 40 | console.log( 41 | `[Server.ts]: lightning-adapter listening at http://localhost:${PORT}` 42 | ); 43 | }); 44 | } 45 | 46 | // Exit handlers 47 | const onExit = () => { 48 | // code to be executed on exit, e.g. close connections, cleanup resources 49 | console.log("[Server.ts]: Exiting the application"); 50 | closeConnections(); 51 | }; 52 | 53 | const onSigInt = () => { 54 | // code to be executed on sigint, e.g. close connections, cleanup resources 55 | console.log("[Server.ts]: Application interrupted"); 56 | closeConnections(); 57 | process.exit(); 58 | }; 59 | 60 | process.on("exit", onExit); 61 | process.on("SIGINT", onSigInt); 62 | 63 | // Force close the port if it is in use 64 | const fs = await import("fs"); 65 | const path = await import("path"); 66 | const portFile = path.join(process.cwd(), "port.pid"); 67 | if (fs.existsSync(portFile)) { 68 | const pid = fs.readFileSync(portFile, "utf8").trim(); 69 | if (pid) { 70 | console.log(`Killing process ${pid}`); 71 | try { 72 | process.kill(Number(pid), "SIGKILL"); 73 | } catch (error) { 74 | console.error("Error killing process:", error); 75 | } 76 | } 77 | } 78 | 79 | startServer(); 80 | -------------------------------------------------------------------------------- /test/ElectrumClient.test.ts: -------------------------------------------------------------------------------- 1 | import ElectrumClient from "../src/LDK/bitcoin_clients/ElectrumClient.mjs"; 2 | 3 | describe("Electrum Client", () => { 4 | let electrumClient: ElectrumClient; 5 | let bestBlockHash: any; 6 | let blockHeight: any; 7 | let latestBlockHeader: any; 8 | let txid: any; 9 | 10 | beforeAll(async () => { 11 | electrumClient = new ElectrumClient(); 12 | }); 13 | 14 | it("getBestBlockHash", async () => { 15 | bestBlockHash = await electrumClient.getBestBlockHash(); 16 | expect(bestBlockHash).toEqual(expect.any(String)); 17 | }); 18 | 19 | it("getBlockHeight", async () => { 20 | blockHeight = await electrumClient.getBestBlockHeight(); 21 | expect(blockHeight).toEqual(expect.any(Number)); 22 | }); 23 | 24 | it("getLatestBlockHeader", async () => { 25 | latestBlockHeader = await electrumClient.getBlockHeader(blockHeight); 26 | expect(latestBlockHeader).toEqual(expect.any(String)); 27 | }); 28 | 29 | it("getTxIdData", async () => { 30 | txid = (await ElectrumClient.get(`rest/block/${bestBlockHash}.json`)).data 31 | .tx[0].txid; 32 | const response = await electrumClient.getTxIdData(txid); 33 | expect(response).toHaveProperty("txid"); 34 | expect(response).toHaveProperty("vout"); 35 | expect(response).toHaveProperty("sequence"); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /test/TorClient.test.ts: -------------------------------------------------------------------------------- 1 | import TorClient, { 2 | TOR_ENDPOINT, 3 | } from "../src/LDK/bitcoin_clients/TorClient.mjs"; 4 | 5 | describe("Tor Client", () => { 6 | let torClient: TorClient; 7 | let bestBlockHash: any; 8 |
let blockHeight: any; 9 | let latestBlockHeader: any; 10 | let txid: any; 11 | 12 | beforeAll(async () => { 13 | torClient = new TorClient(); 14 | }); 15 | 16 | it("getBestBlockHash", async () => { 17 | bestBlockHash = await torClient.getBestBlockHash(); 18 | expect(bestBlockHash).toEqual(expect.any(String)); 19 | }); 20 | 21 | it("getBlockHeight", async () => { 22 | blockHeight = await torClient.getBestBlockHeight(); 23 | expect(blockHeight).toEqual(expect.any(Number)); 24 | }); 25 | 26 | it("getLatestBlockHeader", async () => { 27 | latestBlockHeader = await torClient.getBlockHeader(blockHeight); 28 | expect(latestBlockHeader).toEqual(expect.any(String)); 29 | }); 30 | 31 | it("getTxIdData", async () => { 32 | txid = ( 33 | await TorClient.get( 34 | `${TOR_ENDPOINT}/electrs/block/${bestBlockHash}/txids` 35 | ) 36 | ).data[0]; 37 | const response = await torClient.getTxIdData(txid); 38 | expect(response).toHaveProperty("txid"); 39 | expect(response).toHaveProperty("vout"); 40 | expect(response).toHaveProperty("sequence"); 41 | }); 42 | 43 | it("getUtxoSpentData", async () => { 44 | const response = await torClient.getUtxoSpentData(txid, 0); 45 | expect(response).toHaveProperty("spent"); 46 | }); 47 | }); 48 | -------------------------------------------------------------------------------- /test/channelRoutes.test.ts: -------------------------------------------------------------------------------- 1 | import request from "supertest"; 2 | import express from "express"; 3 | import router from "../src/routes/channelRoutes"; 4 | import { describe, expect, it, beforeAll } from "@jest/globals"; 5 | import LDKClientFactory from "../src/LDK/init/LDKClientFactory"; 6 | import { uint8ArrayToHexString } from "../src/LDK/utils/utils"; 7 | import { MOCK_DATA } from "./mocks/MockLightningClient"; 8 | 9 | describe("Channel Routes", () => { 10 | let app: any; 11 | beforeAll(async () => { 12 | app = express(); 13 | app.use(express.json()); 14 | app.use(router); 15 | await LDKClientFactory.createLDKClient(MOCK_DATA.WALLET_NAME,"mock"); 16 | }); 17 | 18 | it("GET /nodeID", async () => { 19 | const response = await request(app).get("/nodeID"); 20 | 21 | expect(response.body).toEqual({ 22 | nodeID: uint8ArrayToHexString(MOCK_DATA.NODE_ID), 23 | }); 24 | }); 25 | 26 | it("GET /balance", async () => { 27 | const response = await request(app).get("/balance"); 28 | 29 | expect(response.statusCode).toBe(200); 30 | expect(response.body).toEqual({ balance: 0 }); 31 | }); 32 | 33 | it("GET /liveChannels", async () => { 34 | const response = await request(app).get("/liveChannels"); 35 | 36 | expect(response.statusCode).toBe(200); 37 | expect(response.body).toEqual([]); 38 | }); 39 | 40 | it("POST /createChannel with valid parameters", async () => { 41 | const response = await request(app).post("/createChannel").send({ 42 | pubkey: MOCK_DATA.PUBKEY, 43 | amount: MOCK_DATA.AMOUNT, 44 | push_msat: MOCK_DATA.PUSH_MSAT, 45 | channelType: MOCK_DATA.CHANNEL_TYPE, 46 | host: MOCK_DATA.HOST, 47 | port: MOCK_DATA.PORT, 48 | channel_name: MOCK_DATA.CHANNEL_NAME, 49 | wallet_name: MOCK_DATA.WALLET_NAME, 50 | privkey: MOCK_DATA.PRIVKEY, 51 | paid: MOCK_DATA.PAID, 52 | payment_address: MOCK_DATA.PAYMENT_ADDRESS, 53 | funding_txid: MOCK_DATA.FUNDING_TXID 54 | }); 55 | 56 | expect(response.statusCode).toBe(200); 57 | expect(response.text).toBe("Created Channel on LDK"); 58 | }); 59 | 60 | it("POST /createChannel with invalid parameters", async () => { 61 | const response = await request(app).post("/createChannel").send({}); 62 | 63 | 
expect(response.statusCode).toBe(500); 64 | expect(response.text).toBe("Missing required parameters"); 65 | }); 66 | 67 | it("GET /usableChannels", async () => { 68 | const response = await request(app).get("/liveChannels"); 69 | 70 | expect(response.statusCode).toBe(200); 71 | expect(response.body).toEqual([]); 72 | }); 73 | 74 | it("GET /allChannels", async () => { 75 | const response = await request(app).get("/allChannels"); 76 | 77 | expect(response.statusCode).toBe(200); 78 | expect(response.body).toEqual(expect.any(Array)); 79 | }); 80 | 81 | it("GET /allEvents", async () => { 82 | const response = await request(app).get("/allEvents"); 83 | 84 | expect(response.statusCode).toBe(200); 85 | expect(response.body).toEqual(expect.any(Array)); 86 | }); 87 | 88 | it("GET /loadEvents should return 200 and the list of events for a given wallet name", async () => { 89 | const response = await request(app).get("/loadEvents/ldk1"); 90 | 91 | expect(response.statusCode).toBe(200); 92 | expect(response.body).toEqual(expect.any(Array)); 93 | }); 94 | 95 | it("GET /loadEvents if the wallet with the given name does not exist", async () => { 96 | const response = await request(app).get("/loadEvents/nonexistentWallet"); 97 | 98 | expect(response.statusCode).toBe(200); 99 | expect(response.body).toEqual([]); 100 | }); 101 | 102 | it("GET /loadUnnotifiedEvents should return 200 and the list of events for a given wallet name", async () => { 103 | const response = await request(app).get("/loadUnnotifiedEvents/ldk1"); 104 | 105 | expect(response.statusCode).toBe(200); 106 | expect(response.body).toEqual(expect.any(Array)); 107 | }); 108 | 109 | it("GET /loadUnnotifiedEvents if the wallet with the given name does not exist", async () => { 110 | const response = await request(app).get("/loadUnnotifiedEvents/nonexistentWallet"); 111 | 112 | expect(response.statusCode).toBe(200); 113 | expect(response.body).toEqual([]); 114 | }); 115 | 116 | it("POST /setEventNotificationSeen with valid parameters", async () => { 117 | const response = await request(app).post("/setEventNotificationSeen").send({ 118 | id: 1 119 | }); 120 | 121 | expect(response.statusCode).toBe(200); 122 | expect(response.body).toEqual({ 123 | message: "Event notification status updated successfully" 124 | }); 125 | }); 126 | 127 | it("POST /setEventNotificationSeen with invalid parameters", async () => { 128 | const response = await request(app).post("/setEventNotificationSeen").send({}); 129 | 130 | expect(response.statusCode).toBe(400); 131 | expect(response.body).toEqual({ 132 | error: "Invalid event ID" 133 | }); 134 | }); 135 | 136 | it("GET /loadChannels should return 200 and the list of channels for a given wallet name", async () => { 137 | const response = await request(app).get("/loadChannels/ldk1"); 138 | 139 | expect(response.statusCode).toBe(200); 140 | expect(response.body).toEqual(expect.any(Array)); 141 | }); 142 | 143 | it("GET /loadChannels if the wallet with the given name does not exist", async () => { 144 | const response = await request(app).get("/loadChannels/nonexistentWallet"); 145 | 146 | expect(response.statusCode).toBe(200); 147 | expect(response.body).toEqual([]); 148 | }); 149 | 150 | it("PUT /updateChannelName should update a channel name by id", async () => { 151 | const response = await request(app).put("/updateChannelName/1").send({ 152 | name: MOCK_DATA.CHANNEL_NAME, 153 | }); 154 | 155 | expect(response.statusCode).toBe(200); 156 | expect(response.body).toEqual({ 157 | message: "Channel name updated 
successfully", 158 | }); 159 | }); 160 | 161 | it("PUT /updateChannelName with invalid channel id", async () => { 162 | const response = await request(app).put("/updateChannelName/abc").send({ 163 | name: MOCK_DATA.CHANNEL_NAME, 164 | }); 165 | 166 | expect(response.statusCode).toBe(400); 167 | expect(response.body).toEqual({ error: "Invalid channel ID" }); 168 | }); 169 | 170 | it("PUT /updateChannelPaid should update a channel paid status by id", async () => { 171 | const response = await request(app).put("/updateChannelName/1").send({ 172 | paid: MOCK_DATA.PAID, 173 | }); 174 | 175 | expect(response.statusCode).toBe(200); 176 | expect(response.body).toEqual({ 177 | message: "Channel name updated successfully", 178 | }); 179 | }); 180 | 181 | it("PUT /updateChannelPaid with invalid channel id", async () => { 182 | const response = await request(app).put("/updateChannelPaid/abc").send({ 183 | paid: MOCK_DATA.PAID, 184 | }); 185 | 186 | expect(response.statusCode).toBe(400); 187 | expect(response.body).toEqual({ error: "Invalid channel ID" }); 188 | }); 189 | 190 | it("PUT /updateChannel should update a channel by id", async () => { 191 | const response = await request(app).put("/updateChannel/1").send({ 192 | name: MOCK_DATA.CHANNEL_NAME, 193 | amount: MOCK_DATA.AMOUNT, 194 | push_msat: MOCK_DATA.PUSH_MSAT, 195 | wallet_name: MOCK_DATA.WALLET_NAME, 196 | peer_id: 5, 197 | privkey: MOCK_DATA.PRIVKEY, 198 | txid: MOCK_DATA.TXID, 199 | vout: MOCK_DATA.VOUT, 200 | paid: MOCK_DATA.PAID, 201 | payment_address: MOCK_DATA.PAYMENT_ADDRESS, 202 | }); 203 | 204 | expect(response.statusCode).toBe(200); 205 | expect(response.body).toEqual({ message: "Channel updated successfully" }); 206 | }); 207 | 208 | it("PUT /updateChannel with invalid channel id", async () => { 209 | const response = await request(app).put("/updateChannel/abc").send({}); 210 | 211 | expect(response.statusCode).toBe(400); 212 | expect(response.body).toEqual({ error: "Invalid channel ID" }); 213 | }); 214 | 215 | it("GET /removeDuplicateChannels", async () => { 216 | const response = await request(app).get("/removeDuplicateChannels"); 217 | 218 | expect(response.statusCode).toBe(200); 219 | expect(response.body).toEqual({ 220 | message: "Duplicate channels removed successfully", 221 | }); 222 | }); 223 | 224 | it("DELETE /forceCloseChannel", async () => { 225 | const response = await request(app).delete( 226 | `/forceCloseChannel/${MOCK_DATA.CHANNEL_ID}` 227 | ); 228 | 229 | expect(response.statusCode).toBe(200); 230 | expect(response.body).toEqual({ message: "Success", status: 200 }); 231 | }); 232 | 233 | it("DELETE /mutualCloseChannel", async () => { 234 | const response = await request(app).delete( 235 | `/mutualCloseChannel/${MOCK_DATA.CHANNEL_ID}` 236 | ); 237 | 238 | expect(response.statusCode).toBe(200); 239 | expect(response.body).toEqual({ message: "Success", status: 200 }); 240 | }); 241 | 242 | it("DELETE /deleteChannelByPaymentAddr", async () => { 243 | const response = await request(app).delete( 244 | `/deleteChannelByPaymentAddr/${MOCK_DATA.PAYMENT_ADDRESS}` 245 | ); 246 | 247 | expect(response.statusCode).toBe(200); 248 | expect(response.body).toEqual({ message: "Data deleted successfully" }); 249 | }); 250 | }); 251 | -------------------------------------------------------------------------------- /test/db-mock.ts: -------------------------------------------------------------------------------- 1 | // to persist channels 2 | import sqlite from "sqlite3"; 3 | const sqlite3 = sqlite.verbose(); 4 | 5 | const isDev = 
true; 6 | 7 | const sampleDataPeersTable = [ 8 | { 9 | node: "WalletOfSatoshi.com", 10 | host: "170.75.163.209", 11 | port: "9735", 12 | pubkey: 13 | "035e4ff418fc8b5554c5d9eea66396c227bd429a3251c8cbc711002ba215bfc226", 14 | }, 15 | { 16 | node: "ACINQ", 17 | host: "3.33.236.230", 18 | port: "9735", 19 | pubkey: 20 | "03864ef025fde8fb587d989186ce6a4a186895ee44a926bfc370e2c366597a3f8f", 21 | }, 22 | { 23 | node: "CoinGate", 24 | host: "3.124.63.44", 25 | port: "9735", 26 | pubkey: 27 | "0242a4ae0c5bef18048fbecf995094b74bfb0f7391418d71ed394784373f41e4f3", 28 | }, 29 | ]; 30 | 31 | const sampleDataChannelsTable = [ 32 | { 33 | name: "channel1", 34 | amount: 100000, 35 | push_msat: 444, 36 | public: true, 37 | wallet_name: "satoshi", 38 | peer_id: 1, 39 | privkey: "testprivkey1", 40 | txid: "testtxid1", 41 | vout: 0, 42 | paid: true, 43 | payment_address: "tb324524asda23asdsad234esdaxdasd12312311", 44 | }, 45 | { 46 | name: "testChannel", 47 | amount: 100000, 48 | push_msat: 444, 49 | public: true, 50 | wallet_name: "ldk1", 51 | peer_id: 2, 52 | privkey: "testprivkey2", 53 | txid: "testtxid2", 54 | vout: 1, 55 | paid: false, 56 | payment_address: "tbdsfsdrererd12fdgdfg3123145asdsa23a1", 57 | }, 58 | { 59 | name: "p2p", 60 | amount: 100000, 61 | push_msat: 444, 62 | public: false, 63 | wallet_name: "LDK3", 64 | peer_id: 3, 65 | privkey: "testprivkey3", 66 | txid: "testtxid3", 67 | vout: 2, 68 | paid: true, 69 | payment_address: "tb3245242sadsadwe3242sadasghgvh1", 70 | }, 71 | ]; 72 | 73 | const sampleDataEventsTable = [ 74 | { 75 | event_type: "Event_FundingGenerationReady", 76 | event_data: `Event_FundingGenerationReady { 77 | ptrs_to: [object Object], 78 | ptr: 4304647200, 79 | temporary_channel_id: 614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7, 80 | counterparty_node_id: 0227e0e3a9198601964d77a5b2d9a2b21ffff59a85a85031d61c6bb27b2ece2075, 81 | channel_value_satoshis: 100000, 82 | output_script: 0020c3e6cce8fdbb4cfedde222d6669255d44566e37a553d05c3b1b06a365b0a634a, 83 | user_channel_id: 1329227995784915872903807060280344576, 84 | }`, 85 | channel_id_hex: "614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7" 86 | }, 87 | { 88 | event_type: "Event_ChannelPending", 89 | event_data: `Event_ChannelPending { 90 | ptrs_to: [object Object], 91 | ptr: 4304686320, 92 | channel_id: e8a6f2a4f7cc9a8952622542d660934e02dcfcfc088e59710c1a1a43bed3053d, 93 | user_channel_id: 1329227995784915872903807060280344576, 94 | former_temporary_channel_id: 614d54affc469907359be53607ed79a51f13e5b6d745a40ec015639d1390e1a7, 95 | counterparty_node_id: 0227e0e3a9198601964d77a5b2d9a2b21ffff59a85a85031d61c6bb27b2ece2075, 96 | funding_txo: [object Object], 97 | }`, 98 | channel_id_hex: "e8a6f2a4f7cc9a8952622542d660934e02dcfcfc088e59710c1a1a43bed3053d" 99 | } 100 | ]; 101 | 102 | // Connect/create the SQLite database 103 | export const createMockDatabase = () => { 104 | return new Promise((resolve, reject) => { 105 | const db = new sqlite3.Database("lightning-mock.db", async (err) => { 106 | if (err) { 107 | console.error(err.message); 108 | } 109 | console.log("[db-mock.ts]: Connected to/Created the SQLite database."); 110 | 111 | //////////////////////////////////////////////////////////// 112 | //////// peerlist table //////////////////////////////////// 113 | //////////////////////////////////////////////////////////// 114 | 115 | const createPeersTable = () => { 116 | return new Promise((resolve, reject) => { 117 | const createPeersTableQuery = `CREATE TABLE IF NOT EXISTS peers ( 118 | id 
INTEGER PRIMARY KEY AUTOINCREMENT, 119 | node TEXT, 120 | pubkey TEXT NOT NULL, 121 | host TEXT NOT NULL, 122 | port INTEGER NOT NULL 123 | )`; 124 | db.run(createPeersTableQuery, (err) => { 125 | if (err) { 126 | console.error(err.message); 127 | reject(err); 128 | } 129 | console.log("[db-mock.ts]: Table 'peers' created or already exist"); 130 | resolve(); 131 | }); 132 | }); 133 | }; 134 | 135 | const createChannelsTable = () => { 136 | return new Promise((resolve, reject) => { 137 | // Create the 'channels' table if it doesn't exist 138 | const createChannelsTableQuery = `CREATE TABLE IF NOT EXISTS channels ( 139 | id INTEGER PRIMARY KEY AUTOINCREMENT, 140 | name TEXT, 141 | amount REAL NOT NULL, 142 | push_msat INTEGER NOT NULL, 143 | public BOOL NOT NULL, 144 | wallet_name TEXT, 145 | peer_id INTEGER UNIQUE, 146 | privkey TEXT NOT NULL, 147 | txid TEXT, 148 | vout INTEGER, 149 | paid BOOL NOT NULL, 150 | payment_address TEXT, 151 | channel_id TEXT, 152 | FOREIGN KEY (peer_id) REFERENCES peer(id) 153 | )`; 154 | 155 | db.run(createChannelsTableQuery, (err) => { 156 | if (err) { 157 | console.error(err.message); 158 | reject(err); 159 | } 160 | console.log("[db-mock.ts]: Table 'channels' created or already exists"); 161 | resolve(); 162 | }); 163 | }); 164 | }; 165 | 166 | // Create the 'events' table if it doesn't exist 167 | const createEventsTable = () => { 168 | return new Promise((resolve, reject) => { 169 | const createEventsTable = `CREATE TABLE IF NOT EXISTS events ( 170 | id INTEGER PRIMARY KEY AUTOINCREMENT, 171 | event_type TEXT NOT NULL, 172 | event_data TEXT NOT NULL, 173 | channel_id_hex INTEGER NOT NULL, 174 | notification_seen BOOL NOT NULL DEFAULT 0 175 | )`; 176 | db.run(createEventsTable, (err) => { 177 | if (err) { 178 | console.error(err.message); 179 | reject(err); 180 | } 181 | console.log("[db-mock.ts]: Table 'events' created or already exist"); 182 | resolve(); 183 | }); 184 | }); 185 | }; 186 | 187 | try { 188 | // Create tables sequentially using async/await 189 | await createPeersTable(); 190 | await createChannelsTable(); 191 | await createEventsTable(); 192 | 193 | if (isDev) { 194 | // Insert some sample data into the 'peers' table if there's no data 195 | db.get("SELECT count(*) as count FROM peers", (err, row: any) => { 196 | if (err) { 197 | console.error(err.message); 198 | } 199 | if (row.count === 0) { 200 | console.log( 201 | "[db-mock.ts]: Inserting sample data for table peers ..." 202 | ); 203 | 204 | const insertData = `INSERT INTO peers (node, host, port, pubkey) VALUES (?,?,?,?)`; 205 | sampleDataPeersTable.forEach((data) => { 206 | db.run(insertData, [ 207 | data.node, 208 | data.host, 209 | data.port, 210 | data.pubkey, 211 | ]); 212 | }); 213 | } else { 214 | console.log( 215 | "[db-mock.ts]: Table 'peers' already contains data, skipping the sample data insertion." 216 | ); 217 | } 218 | }); 219 | 220 | // Insert some sample data into the 'channels' table if there's no data 221 | db.get("SELECT count(*) as count FROM channels", (err, row: any) => { 222 | if (err) { 223 | console.error(err.message); 224 | } 225 | if (row.count === 0) { 226 | console.log( 227 | "[db-mock.ts]: Inserting sample data for table channels ..." 
228 | ); 229 | 230 | const insertData = `INSERT INTO channels (name, amount, push_msat, public, wallet_name, peer_id, privkey, txid, vout, paid, payment_address) VALUES (?,?,?,?,?,?,?,?,?,?,?)`; 231 | sampleDataChannelsTable.forEach((data) => { 232 | db.run(insertData, [ 233 | data.name, 234 | data.amount, 235 | data.push_msat, 236 | data.public, 237 | data.wallet_name, 238 | data.peer_id, 239 | data.privkey, 240 | data.txid, 241 | data.vout, 242 | data.paid, 243 | data.payment_address, 244 | ]); 245 | }); 246 | } else { 247 | console.log( 248 | "[db-mock.ts]: Table 'channels' already contains data, skipping the sample data insertion." 249 | ); 250 | } 251 | }); 252 | 253 | // Insert some sample data into the 'events' table if there's no data 254 | db.get("SELECT count(*) as count FROM events", (err, row: any) => { 255 | if (err) { 256 | console.error(err.message); 257 | } 258 | if (row.count === 0) { 259 | console.log( 260 | "[db-mock.ts]: Inserting sample data for table events ..." 261 | ); 262 | 263 | const insertData = `INSERT INTO events (event_type, event_data, channel_id_hex) VALUES (?,?,?)`; 264 | sampleDataEventsTable.forEach((data) => { 265 | db.run(insertData, [ 266 | data.event_type, 267 | data.event_data, 268 | data.channel_id_hex, 269 | ]); 270 | }); 271 | } else { 272 | console.log( 273 | "[db-mock.ts]: Table 'events' already contains data, skipping the sample data insertion." 274 | ); 275 | } 276 | }); 277 | 278 | } 279 | 280 | console.log("[db-mock.ts]: Table creation complete"); 281 | resolve(db); 282 | } catch (error) { 283 | console.error("Error creating tables:", error); 284 | reject(error); 285 | } 286 | }); 287 | }); 288 | }; 289 | -------------------------------------------------------------------------------- /test/db.test.ts: -------------------------------------------------------------------------------- 1 | // describe("Check tables data in newly created DB", () => { 2 | 3 | // test("Check if all tables present", (done) => { 4 | // db.all("SHOW TABLES", (err, rows) => { 5 | // if (err) { 6 | // console.log(err); 7 | // } 8 | // expect(rows.length).toBe(3); 9 | // expect(rows[0].name).toBe("wallets"); 10 | // expect(rows[1].name).toBe("peers"); 11 | // expect(rows[2].name).toBe("channels"); 12 | // done(); 13 | // }); 14 | // }); 15 | 16 | // test("Check that sample data is in the wallets table", (done) => { 17 | // const db = require("./db-mock.js"); 18 | // db.get("SELECT * FROM wallets", (err, rows) => { 19 | 20 | // if (err) { 21 | // console.log(err); 22 | // } 23 | // expect(rows.length).toBe(3); 24 | // expect(rows[0].name).toBe("Mainnet Wallet 1"); 25 | // expect(rows[1].name).toBe("Testnet Wallet 1"); 26 | // expect(rows[2].name).toBe("Testnet Wallet 2"); 27 | // done(); 28 | // }, 10000); 29 | // db.close(); 30 | // }); 31 | 32 | // test("Check that sample data is in the channels table", (done) => { 33 | // const db = require("./db-mock.js"); 34 | // db.get("SELECT * FROM channels", (err, rows) => { 35 | // if (err) { 36 | // console.log(err); 37 | // } 38 | // expect(rows.length).toBe(3); 39 | // expect(rows[0].name).toBe("channel1"); 40 | // expect(rows[1].name).toBe("testChannel"); 41 | // expect(rows[2].name).toBe("p2p"); 42 | // done(); 43 | // }, 10000); 44 | // db.close(); 45 | // }); 46 | 47 | // afterAll(() => { 48 | // db.close(); 49 | // console.log("ALL FINISHES") 50 | // }); 51 | // jest.setTimeout(2*60*1000); 52 | // }); 53 | -------------------------------------------------------------------------------- 
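Note: test/db.test.ts above is entirely commented out, and the checks it sketches rely on the MySQL-style SHOW TABLES statement and a wallets table that the SQLite mock in test/db-mock.ts never creates. A minimal replacement sketch follows; it is hypothetical rather than part of the repository, and it assumes createMockDatabase() resolves with the underlying sqlite3 Database handle, as test/db-mock.ts suggests.

import { createMockDatabase } from "./db-mock";

describe("mock database schema", () => {
  it("creates the peers, channels and events tables", (done) => {
    createMockDatabase().then((db: any) => {
      // SQLite keeps table names in sqlite_master; there is no SHOW TABLES.
      db.all(
        "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%'",
        (err: any, rows: any[]) => {
          if (err) return done(err);
          const names = rows.map((r) => r.name);
          expect(names).toEqual(
            expect.arrayContaining(["peers", "channels", "events"])
          );
          db.close();
          done();
        }
      );
    });
  });
});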
/test/mocks/MockLightningClient.ts: -------------------------------------------------------------------------------- 1 | import { ChannelType } from "../../src/LDK/types/ChannelTypes"; 2 | 3 | export const MOCK_DATA = { 4 | NODE_ID: new Uint8Array([0x00, 0x01, 0x02, 0x03]), 5 | BLOCK_HASH: 6 | "0000000000009997191fe3ae4b646a4bb417d4417791310037ef4f5deeb7cc57", 7 | INVOICE: 8 | "lnbc100u1pj96z8epp5fc3vjyg5uudzx2eeykwutsed3jl97sc3s30emzem3mzf7995d40qdq5g9kxy7fqd9h8vmmfvdjscqzzsxqyz5vqsp5a9auggr2fwcmemp4lk50dcmquer39p74deut6g7rv9zpfc5kdu5q9qyyssq00qlqx98q9usndf8gxrzgajcrpl7dvffwv2tzu28ld7x9nph57rn9hk8tn9tuqhwhzgzkcd004rcl84hln7j30ef6u8w2j39hd656rgpxf37nr", 9 | CHANNELS: [], 10 | PUBKEY: "03534237af8affcf708cfe553b59fafa3a8420a4aaf1b2861d6e52df967976b53b", 11 | HOST: "127.0.0.1", 12 | PORT: 9735, 13 | CHANNEL_ID: 14 | "d6449fa9c9f4dd120411825c8357fe4a8f85a1f789776f786e264414c66cb439", 15 | CHANNEL_NAME: "test-channel", 16 | AMOUNT: 100000, 17 | PUSH_MSAT: 1000, 18 | CHANNEL_TYPE: ChannelType.Public, 19 | PRIVKEY: "cRrhJwXVBPHdbSRsZo31SU24zoFmy4Jsr8H1aMwRTDn3qb67zG1r", 20 | PAID: true, 21 | TXID: "6cf30a3fc3a32774494a9b04d06459f1ffd05382cf9e4e943675bea74c99a64c", 22 | VOUT: 1, 23 | PAYMENT_ADDRESS: "tb324524asda23asdsad234esdaxdasd12312311", 24 | INVOICE_EXPIRY_SECS: 3600, 25 | FUNDING_TXID: 26 | "1ec1ab323d1acad8bea1e24b4a9dbf5f82963838d255b8473255c07aa2b78892", 27 | WALLET_NAME: "Test Wallet" 28 | }; 29 | 30 | export class MockLightningClient { 31 | netHandler: any; 32 | start() {} 33 | stop() {} 34 | updateBestBlockHeight() { 35 | return 1; 36 | } 37 | updateBestBlockHash() { 38 | return MOCK_DATA.BLOCK_HASH; 39 | } 40 | getChannels() { 41 | return MOCK_DATA.CHANNELS; 42 | } 43 | getOurNodeId() { 44 | return MOCK_DATA.NODE_ID; 45 | } 46 | connectToPeer() { 47 | return true; 48 | } 49 | setEventTxData() {} 50 | createInvoice() { 51 | return MOCK_DATA.INVOICE; 52 | } 53 | getUsableChannels() { 54 | return []; 55 | } 56 | createChannel() { 57 | return true; 58 | } 59 | mutualCloseChannel(pubkey: string) { 60 | return true; 61 | } 62 | forceCloseChannel(pubkey: string) { 63 | return true; 64 | } 65 | getChainMonitor() { 66 | return null; 67 | } 68 | getPeerManager() { 69 | return null; 70 | } 71 | sendPayment() { 72 | return true; 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /test/peerRoutes.test.ts: -------------------------------------------------------------------------------- 1 | import request from "supertest"; 2 | import express from "express"; 3 | import router from "../src/routes/peerRoutes.js"; 4 | import LDKClientFactory from "../src/LDK/init/LDKClientFactory"; 5 | import { MOCK_DATA } from "./mocks/MockLightningClient"; 6 | 7 | describe("Peer Routes", () => { 8 | let app: any; 9 | beforeAll(async () => { 10 | app = express(); 11 | app.use(express.json()); 12 | app.use(router); 13 | await LDKClientFactory.createLDKClient(MOCK_DATA.WALLET_NAME, "mock"); 14 | }); 15 | 16 | // it("GET /liveChainMonitors", async () => { 17 | // const response = await request(app).get("/liveChainMonitors"); 18 | 19 | // expect(response.statusCode).toBe(500); 20 | // expect(response.body).toEqual("Failed to get chain monitor"); 21 | // }); 22 | 23 | it("GET /livePeers", async () => { 24 | const response = await request(app).get("/livePeers"); 25 | 26 | expect(response.statusCode).toBe(500); 27 | expect(response.body).toEqual({ status: 500, message: "Failed to get peermanager" }); 28 | }); 29 | 30 | 31 | it("POST /connectToPeer with valid parameters", 
async () => { 32 | const res = await request(app).post("/connectToPeer").send({ 33 | pubkey: MOCK_DATA.PUBKEY, 34 | host: MOCK_DATA.HOST, 35 | port: MOCK_DATA.PORT, 36 | }); 37 | expect(res.statusCode).toBe(200); 38 | expect(res.body).toEqual({ status: 200, message: "Connected to peer" }); 39 | }); 40 | 41 | it("POST /connectToPeer with missing parameters", async () => { 42 | const res = await request(app).post("/connectToPeer").send({}); 43 | 44 | expect(res.statusCode).toBe(500); 45 | expect(res.body).toEqual({ status: 500, message: "Missing required parameters" }); 46 | }); 47 | 48 | it("POST /savePeerAndChannelToDb with valid parameters", async () => { 49 | const res = await request(app).post("/savePeerAndChannelToDb").send({ 50 | amount: MOCK_DATA.AMOUNT, 51 | pubkey: MOCK_DATA.PUBKEY, 52 | host: MOCK_DATA.HOST, 53 | port: MOCK_DATA.PORT, 54 | channel_name: MOCK_DATA.CHANNEL_NAME, 55 | wallet_name: MOCK_DATA.WALLET_NAME, 56 | channelType: MOCK_DATA.CHANNEL_TYPE, 57 | privkey: MOCK_DATA.PRIVKEY, 58 | paid: MOCK_DATA.PAID, 59 | payment_addr: MOCK_DATA.PAYMENT_ADDRESS 60 | }); 61 | 62 | expect(res.statusCode === 200 || res.statusCode === 409).toBeTruthy(); 63 | }); 64 | 65 | it("POST /savePeerAndChannelToDb with invalid parameters", async () => { 66 | const res = await request(app).post("/savePeerAndChannelToDb").send({}); 67 | 68 | expect(res.statusCode).toBe(500); 69 | expect(res.body).toEqual({ status: 500, message: "Couldn't insert into DB: undefined" }); 70 | }); 71 | 72 | it("POST /setTxData", async () => { 73 | const res = await request(app).post("/setTxData").send({ 74 | txid: MOCK_DATA.TXID, 75 | }); 76 | 77 | expect(res.statusCode).toBe(200); 78 | expect(res.body).toEqual({ status: 200, message: "Txid was set correctly." }); 79 | }); 80 | 81 | it("POST /saveChannelPaymentInfoToDb with valid parameters", async () => { 82 | const res = await request(app).post("/saveChannelPaymentInfoToDb").send({ 83 | amount: MOCK_DATA.AMOUNT, 84 | paid: MOCK_DATA.PAID, 85 | txid: MOCK_DATA.TXID, 86 | vout: MOCK_DATA.VOUT, 87 | address: MOCK_DATA.PAYMENT_ADDRESS 88 | }); 89 | 90 | expect(res.statusCode).toBe(200); 91 | expect(res.body).toEqual({ status: 200, message: "Channel funding saved to DB" }); 92 | }); 93 | 94 | it("POST /saveChannelPaymentInfoToDb with invalid parameters", async () => { 95 | const res = await request(app).post("/saveChannelPaymentInfoToDb").send({}); 96 | 97 | expect(res.statusCode).toBe(500); 98 | expect(res.body).toEqual({ status: 500, message: "No address was posted to peer/saveChannelPaymentInfoToDb" }); 99 | }); 100 | 101 | it("GET /getPeer returns a peer if found", async () => { 102 | const response = await request(app).get("/getPeer/1"); 103 | 104 | expect(response.statusCode).toBe(200); 105 | expect(response.body).toHaveProperty("node"); 106 | expect(response.body).toHaveProperty("host"); 107 | expect(response.body).toHaveProperty("port"); 108 | expect(response.body).toHaveProperty("pubkey"); 109 | }); 110 | 111 | it("GET /getPeer returns 404 if peer is not found", async () => { 112 | const response = await request(app).get("/getPeer/not-found"); 113 | 114 | expect(response.statusCode).toBe(404); 115 | expect(response.body).toEqual({ error: "Peer not found" }); 116 | }); 117 | 118 | it("GET /default_peerlist", async () => { 119 | const response = await request(app).get("/default_peerlist"); 120 | 121 | expect(response.statusCode).toBe(200); 122 | expect(response.body).toEqual(expect.any(Array)); 123 | }); 124 | 125 | it("GET /peers", async () => { 126 | const 
response = await request(app).get("/peers"); 127 | 128 | expect(response.statusCode).toBe(200); 129 | expect(response.body).toEqual(expect.any(Array)); 130 | }); 131 | }); 132 | -------------------------------------------------------------------------------- /test/serverRoutes.test.ts: -------------------------------------------------------------------------------- 1 | import request from "supertest"; 2 | import express from "express"; 3 | import router from "../src/routes/serverRoutes"; 4 | import LDKClientFactory from "../src/LDK/init/LDKClientFactory"; 5 | import { MOCK_DATA } from "./mocks/MockLightningClient"; 6 | import { jest, describe, expect, it, beforeAll } from "@jest/globals"; 7 | 8 | jest.mock("../src/LDK/utils/ldk-utils.ts", () => ({ 9 | closeConnections: jest.fn(), 10 | })); 11 | 12 | describe("GET Routes", () => { 13 | let app: any; 14 | beforeAll(async () => { 15 | app = express(); 16 | app.use(express.json()); 17 | app.use(router); 18 | await LDKClientFactory.createLDKClient(MOCK_DATA.WALLET_NAME, "mock"); 19 | }); 20 | 21 | it('POST /startLDK should return LDK is already initialized', async () => { 22 | const validNetwork = 'mock'; 23 | 24 | const response = await request(app) 25 | .post('/startLDK') 26 | .send({ network: validNetwork }); 27 | 28 | expect(response.status).toBe(500); 29 | expect(response.body).toBe('LDK already initialized.'); 30 | }); 31 | 32 | it("GET /closeLDK should call the closeConnections function and stop LightningClient", async () => { 33 | const response = await request(app).get("/closeLDK"); 34 | 35 | expect(response.statusCode).toBe(200); 36 | expect(response.body).toEqual({ message: "Connections closed" }); 37 | }); 38 | 39 | it('POST /startLDK should start LDK with valid network', async () => { 40 | const validNetwork = 'mock'; 41 | 42 | const response = await request(app) 43 | .post('/startLDK') 44 | .send({ network: validNetwork }); 45 | 46 | expect(response.status).toBe(200); 47 | expect(response.body).toMatch(/Started LDK with network/); 48 | }); 49 | 50 | it("POST /generateInvoice", async () => { 51 | const response = await request(app).post("/generateInvoice").send({ 52 | amount_in_sats: MOCK_DATA.AMOUNT, 53 | invoice_expiry_secs: MOCK_DATA.INVOICE_EXPIRY_SECS, 54 | description: "" 55 | }); 56 | 57 | expect(response.statusCode).toBe(201); 58 | expect(response.body).toEqual({ status: 200, invoice: MOCK_DATA.INVOICE }); 59 | }); 60 | 61 | it("POST /sendPayment", async () => { 62 | const response = await request(app).post("/sendPayment").send({ 63 | invoice: MOCK_DATA.INVOICE, 64 | }); 65 | 66 | expect(response.statusCode).toBe(200); 67 | expect(response.body).toEqual({ "message": "Payment successful" }); 68 | }); 69 | }); 70 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | // This is an alias to @tsconfig/node16: https://github.com/tsconfig/bases 3 | "extends": "ts-node/node16/tsconfig.json", 4 | // Most ts-node options can be specified here using their programmatic names. 5 | "ts-node": { 6 | // It is faster to skip typechecking. 7 | // Remove if you want ts-node to do typechecking. 8 | "transpileOnly": true, 9 | "esm": true, 10 | "files": true, 11 | "compilerOptions": { 12 | // compilerOptions specified here will override those declared below, 13 | // but *only* in ts-node. Useful if you want ts-node and tsc to use 14 | // different options with a single tsconfig.json.
15 | } 16 | }, 17 | "compilerOptions": { 18 | // typescript options here 19 | "target": "ESNext", 20 | "module": "ES2020", 21 | "lib": [ 22 | "ES2020", 23 | "ES2021" 24 | ], 25 | // "moduleSuffixes": [".ts", ".mts" , ""], 26 | "removeComments": true, 27 | "strict": true, 28 | "allowJs": true, 29 | "skipLibCheck": true, 30 | "allowSyntheticDefaultImports": true, 31 | "forceConsistentCasingInFileNames": true, 32 | "esModuleInterop": true, 33 | "noEmit": true, 34 | "noFallthroughCasesInSwitch": true, 35 | /* Experimental Options */ 36 | "experimentalDecorators": true, 37 | "emitDecoratorMetadata": true, 38 | "sourceMap": true, 39 | "stripInternal": true, 40 | "declaration": true, 41 | "noImplicitAny": true, 42 | "strictNullChecks": true, 43 | "strictFunctionTypes": true, 44 | "strictBindCallApply": true, 45 | "strictPropertyInitialization": true, 46 | "noImplicitThis": true, 47 | "useUnknownInCatchVariables": true, 48 | "alwaysStrict": true, 49 | "noUnusedLocals": false, 50 | "noUnusedParameters": false, 51 | "exactOptionalPropertyTypes": true, 52 | "noImplicitReturns": true, 53 | "noUncheckedIndexedAccess": true, 54 | "noImplicitOverride": true, 55 | "noPropertyAccessFromIndexSignature": true, 56 | "allowUnusedLabels": false, 57 | "allowUnreachableCode": false 58 | } 59 | } -------------------------------------------------------------------------------- /wallets/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/layer2tech/mercury-node/cc25d4bfcd7f3148ac4e95dd85fb4a44a1e73ee9/wallets/.gitkeep --------------------------------------------------------------------------------
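A minimal usage sketch of the HTTP API, for orientation only: it assumes the server from src/server.js is running on its default port 3003 (npm run start), that POST /startLDK reads wallet_name and network from the JSON body as the handler in src/routes/serverRoutes.ts suggests, and that the runtime is Node 18+ so the global fetch is available. The wallet name "ldk1" and the amounts are example values, not fixtures from the repository.

const BASE_URL = "http://localhost:3003";

async function demo(): Promise<void> {
  // Initialise the LDK client for a wallet on one of the accepted networks
  const startRes = await fetch(`${BASE_URL}/startLDK`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ wallet_name: "ldk1", network: "dev" }),
  });
  console.log(await startRes.json());

  // Ask the running node for a BOLT11 invoice
  const invoiceRes = await fetch(`${BASE_URL}/generateInvoice`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      amount_in_sats: 1000,
      invoice_expiry_secs: 3600,
      description: "demo invoice",
    }),
  });
  console.log(await invoiceRes.json());
}

demo().catch(console.error);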