├── .gitignore ├── config.template.toml ├── deps ├── binding.gyp └── sqlite-extensions │ └── xfl.c ├── package-lock.json ├── package.json ├── readme.md ├── src ├── app │ ├── cache.js │ ├── crawl.js │ ├── ledger.js │ ├── main.js │ └── server.js ├── cache │ ├── icons.js │ ├── todo.js │ ├── tokens.js │ └── worker.js ├── cmd │ ├── backup.js │ └── rebuild-cache.js ├── crawl │ ├── crawlers │ │ ├── bithomp.js │ │ ├── domains.js │ │ ├── gravatar.js │ │ ├── index.js │ │ ├── tokenlists.js │ │ ├── twitter.js │ │ ├── xrpscan.js │ │ └── xumm.js │ ├── init.js │ └── schedule.js ├── db │ ├── codecs │ │ ├── address.js │ │ ├── index.js │ │ └── xfl.js │ ├── helpers │ │ ├── balances.js │ │ ├── common.js │ │ ├── heads.js │ │ ├── ledgers.js │ │ ├── nftoffers.js │ │ ├── props.js │ │ ├── tokenexchanges.js │ │ ├── tokenmetrics.js │ │ └── tokenoffers.js │ ├── index.js │ └── schemas │ │ ├── cache.json │ │ └── core.json ├── etl │ ├── backfill.js │ ├── derivatives │ │ ├── index.js │ │ └── marketcaps.js │ ├── events │ │ ├── index.js │ │ ├── ledgerstats.js │ │ ├── nftexchanges.js │ │ └── tokenexchanges.js │ ├── snapshot.js │ ├── state │ │ ├── accounts.js │ │ ├── index.js │ │ ├── nftoffers.js │ │ ├── nfts.js │ │ ├── tokenoffers.js │ │ └── tokens.js │ └── sync.js ├── lib │ ├── config.js │ ├── fetch.js │ ├── ipc.js │ ├── url.js │ └── version.js ├── run.js ├── srv │ ├── api.js │ ├── http.js │ ├── procedures │ │ ├── ledger.js │ │ ├── server.js │ │ └── token.js │ ├── sanitizers │ │ ├── common.js │ │ └── token.js │ ├── server.js │ ├── worker.js │ └── ws.js └── xrpl │ ├── blackhole.js │ ├── ledger.js │ ├── node.js │ ├── nodepool.js │ ├── snapshot.js │ └── stream.js └── test ├── live ├── cases │ ├── crawl.bithomp.js │ ├── crawl.domains.js │ ├── crawl.gravatar.js │ ├── crawl.tokenlists.js │ ├── crawl.twitter.js │ ├── crawl.xrpscan.js │ ├── crawl.xumm.js │ ├── icon.cache.js │ └── toml.read.js └── run.js └── unit ├── db.codecs.test.js ├── db.helpers.test.js ├── env.js ├── fetch.test.js ├── icon-cache.test.js ├── prop-diff.test.js ├── prop-rank.test.js └── xrpl.test.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | deps/build 4 | config.toml -------------------------------------------------------------------------------- /config.template.toml: -------------------------------------------------------------------------------- 1 | [NODE] 2 | # The file directory where the node can store its persistent data 3 | data_dir = "" 4 | # How many ledger objects per ledger_data request to fetch 5 | snapshot_chunk_size = 64000 6 | # How many ledgers to fetch in advance before writing them to the database 7 | stream_queue_size = 100 8 | # Any ledgers before this sequence will be ignored 9 | # For full-history livenet, this value should be 32570, beause the first 32569 ledgers were lost 10 | backfill_to_ledger = 32570 11 | 12 | 13 | [[SOURCE.XRPLS]] 14 | # A public websocket endpoint provided by a rippled or clio node 15 | # The node will pull all relevant ledger data from there 16 | url = "wss://xrplcluster.com" 17 | connections = 2 18 | 19 | [[SOURCE.XRPLS]] 20 | url = "wss://s2.ripple.com" 21 | connections = 2 22 | 23 | [[SOURCE.XRPLS]] 24 | url = "wss://s1.ripple.com" 25 | 26 | 27 | 28 | [[SOURCE.TOKENLISTS]] 29 | # Tokenlists are manually published lists containing token metadata according to XLS-26 30 | # https://github.com/XRPLF/XRPL-Standards/discussions/71 31 | # You can add as many as you want 32 | # The id field is used for ranking. Read below at "source_ranking" 33 | # The trust_level field defines the maximum possible trust level the list can set for any token 34 | id = "xrplmeta" 35 | url = "https://xrplmeta.org/tokens.toml" 36 | fetch_interval = 30 37 | trust_level = 3 38 | 39 | 40 | [SOURCE.ISSUERDOMAIN] 41 | # All issuing accounts with the Domain field set are being automatically crawled for 42 | # DOMAIN/.well-known/xrp-ledger.toml. If the issuer published metadata about their token 43 | # according to XLS-26, the data will be copied and presented by the server. 44 | concurrency = 3 45 | connection_timeout = 10 46 | fetch_interval = 600 47 | 48 | 49 | [SOURCE.XRPSCAN] 50 | # xrpscan.com provides usernames, social media links and verifications of XRPL accounts. 51 | # https://docs.xrpscan.com/api-doc.html#get-names 52 | fetch_interval = 600 53 | 54 | 55 | [SOURCE.GRAVATAR] 56 | # gravatar.com provides avatar images for XRPL accounts that are linked via the on-chain Account "EmailHash" field. 57 | fetch_interval = 43200 58 | max_requests_per_minute = 60 59 | 60 | 61 | # [SOURCE.XUMM] 62 | # xumm.app provides a list of curated assets, KYC status and avatar images. 63 | # It is required to obtain an API key to use this service. 64 | # https://apps.xumm.dev 65 | # 66 | # api_key = "paste_here" 67 | # api_secret = "paste_here" 68 | # fetch_interval_assets = 60 69 | # fetch_interval_kyc = 43200 70 | # fetch_interval_avatar = 43200 71 | # max_requests_per_minute = 30 72 | 73 | 74 | # [SOURCE.BITHOMP] 75 | # bithomp.com provides icons, usernames, website and social media links for XRPL accounts. 76 | # It is required to obtain an API key for this service. 77 | # https://bithomp.com/developer 78 | # 79 | # refresh_interval = 600 80 | # api_key = "" 81 | 82 | 83 | # [SOURCE.TWITTER] 84 | # twitter.com can provide icons, display names, website links and descriptions for both XRPL accounts and tokens. 85 | # It is required to obtain an API key for this service. 86 | # https://developer.twitter.com/en/docs/twitter-api/getting-started/getting-access-to-the-twitter-api 87 | # 88 | # bearer_token = "paste_here" 89 | # fetch_interval = 3600 90 | # max_requests_per_minute = 60 91 | 92 | 93 | 94 | [API] 95 | # The server listens on the specified port for incoming HTTP (REST + WebSocket) connections. 96 | port = 4080 97 | 98 | # Set this to the URL under which your node is reachable. 99 | # This is required for the API to correctly return the URLs of locally cached media files, such as token icons. 100 | # public_url = "https://example.com" 101 | 102 | # Multiple sources can give conflicting data for the same field. This array defines who's data has precedence, 103 | # from highest to lowest. Example for the values below: 104 | # if XUMM, Bithomp and Twitter all define an icon for a token, the one from XUMM will be used. 105 | source_ranking = [ 106 | 'ledger', # on-chain values, such as the "Domain" field 107 | 'tokenlist', # .toml files published by trusted entities 108 | 'issuer/domain', # .toml files published by the token issuer, linked via the "Domain" field 109 | 'xumm/curated', # names and icons, manually set by Xumm 110 | 'xumm/avatar', # user defined icons from Xumm 111 | 'gravatar', 112 | 'bithomp', 113 | 'xrpscan', 114 | 'twitter' 115 | ] -------------------------------------------------------------------------------- /deps/binding.gyp: -------------------------------------------------------------------------------- 1 | { 2 | 'targets': [ 3 | { 4 | 'target_name': 'sqlite-xfl', 5 | 'dependencies': ['../node_modules/better-sqlite3/deps/sqlite3.gyp:sqlite3'], 6 | 'sources': ['sqlite-extensions/xfl.c'], 7 | } 8 | ] 9 | } -------------------------------------------------------------------------------- /deps/sqlite-extensions/xfl.c: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | SQLITE_EXTENSION_INIT1 6 | 7 | 8 | static int64_t const minMantissa = 1000000000000000ull; 9 | static int64_t const maxMantissa = 9999999999999999ull; 10 | static int32_t const minExponent = -96; 11 | static int32_t const maxExponent = 80; 12 | 13 | static int64_t const mantissaMask = 0b111111111111111111111111111111111111111111111111111111; 14 | static int64_t const exponentMask = 0b11111111; 15 | 16 | static int64_t const INVALID_FLOAT = -1ll; 17 | 18 | 19 | static bool xfl_is_negative(int64_t float1) 20 | { 21 | return ((float1 >> 62U) & 1ULL) == 0; 22 | } 23 | 24 | static int32_t xfl_get_exponent(int64_t float1) 25 | { 26 | if (float1 < 0) 27 | return INVALID_FLOAT; 28 | 29 | if (float1 == 0) 30 | return 0; 31 | 32 | if (float1 < 0) 33 | return INVALID_FLOAT; 34 | 35 | uint64_t exponent = (uint64_t)float1; 36 | 37 | exponent >>= 54U; 38 | exponent &= 0xFFU; 39 | 40 | if(xfl_is_negative(float1)){ 41 | exponent ^= exponentMask; 42 | } 43 | 44 | return (int32_t)exponent - 97; 45 | } 46 | 47 | static int64_t xfl_get_mantissa(int64_t float1) 48 | { 49 | if (float1 < 0) 50 | return INVALID_FLOAT; 51 | 52 | if (float1 == 0) 53 | return 0; 54 | 55 | if (float1 < 0) 56 | return INVALID_FLOAT; 57 | 58 | int64_t mantissa = float1 - ((((int64_t)float1) >> 54U) << 54U); 59 | 60 | if(xfl_is_negative(float1)){ 61 | mantissa = -(mantissa ^ mantissaMask); 62 | } 63 | 64 | return mantissa; 65 | } 66 | 67 | 68 | static int64_t xfl_make(int64_t mantissa, int32_t exponent) 69 | { 70 | if (mantissa == 0) 71 | return 0; 72 | 73 | bool neg = mantissa < 0; 74 | 75 | if (neg) 76 | mantissa *= -1; 77 | 78 | while (mantissa < minMantissa) 79 | { 80 | mantissa *= 10; 81 | exponent--; 82 | 83 | if (exponent < minExponent) 84 | return INVALID_FLOAT; 85 | } 86 | 87 | while (mantissa > maxMantissa) 88 | { 89 | mantissa /= 10; 90 | exponent++; 91 | 92 | if (exponent > maxExponent) 93 | return INVALID_FLOAT; 94 | } 95 | 96 | exponent = exponent - minExponent + 1; 97 | 98 | int64_t out = 0; 99 | 100 | if(neg){ 101 | exponent = exponent ^ exponentMask; 102 | mantissa = mantissa ^ mantissaMask; 103 | }else{ 104 | out = 1; 105 | } 106 | 107 | out <<= 8; 108 | out |= exponent; 109 | out <<= 54; 110 | out |= mantissa; 111 | 112 | return out; 113 | } 114 | 115 | static int64_t xfl_invert_sign(int64_t float1) 116 | { 117 | return xfl_make( 118 | -xfl_get_mantissa(float1), 119 | xfl_get_exponent(float1) 120 | ); 121 | } 122 | 123 | static int64_t xfl_sum(int64_t a, int64_t b) 124 | { 125 | int64_t am = xfl_get_mantissa(a); 126 | int32_t ae = xfl_get_exponent(a); 127 | int64_t bm = xfl_get_mantissa(b); 128 | int32_t be = xfl_get_exponent(b); 129 | 130 | if(am == 0) 131 | return b; 132 | 133 | if(bm == 0) 134 | return a; 135 | 136 | while (ae < be){ 137 | am /= 10; 138 | ae++; 139 | } 140 | 141 | while (be < ae){ 142 | bm /= 10; 143 | be++; 144 | } 145 | 146 | am += bm; 147 | 148 | return xfl_make(am, ae); 149 | } 150 | 151 | static int64_t xfl_sub(int64_t a, int64_t b) 152 | { 153 | return xfl_sum(a, xfl_invert_sign(b)); 154 | } 155 | 156 | 157 | 158 | 159 | static void sum_step(sqlite3_context* ctx, int argc, sqlite3_value* argv[]) 160 | { 161 | if(sqlite3_value_type(argv[0]) != SQLITE_INTEGER){ 162 | sqlite3_result_error(ctx, "xfl_sum() only works with integers", -1); 163 | return; 164 | } 165 | 166 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context( 167 | ctx, 168 | sizeof(sqlite3_int64) 169 | ); 170 | 171 | if(current_sum){ 172 | *current_sum = xfl_sum( 173 | *current_sum, 174 | sqlite3_value_int64(argv[0]) 175 | ); 176 | } 177 | } 178 | 179 | static void sum_inverse(sqlite3_context* ctx, int argc, sqlite3_value* argv[]) 180 | { 181 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context( 182 | ctx, 183 | sizeof(sqlite3_int64) 184 | ); 185 | 186 | *current_sum = xfl_sub( 187 | *current_sum, 188 | sqlite3_value_int64(argv[0]) 189 | ); 190 | } 191 | 192 | static void sum_value(sqlite3_context* ctx) 193 | { 194 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context(ctx, 0); 195 | sqlite3_int64 value = 0; 196 | 197 | if(current_sum){ 198 | value = *current_sum; 199 | } 200 | 201 | sqlite3_result_int64(ctx, value); 202 | } 203 | 204 | 205 | #ifdef _WIN32 206 | __declspec(dllexport) 207 | #endif 208 | 209 | int sqlite3_extension_init(sqlite3* db, char** err_msg, const sqlite3_api_routines* api) { 210 | SQLITE_EXTENSION_INIT2(api) 211 | 212 | if (err_msg != 0) 213 | *err_msg = 0; 214 | 215 | sqlite3_create_window_function( 216 | db, 217 | "xfl_sum", 218 | 1, 219 | SQLITE_UTF8, 220 | 0, 221 | sum_step, 222 | sum_value, 223 | sum_value, 224 | sum_inverse, 225 | 0 226 | ); 227 | 228 | return SQLITE_OK; 229 | } 230 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "xrplmeta", 3 | "version": "2.11.0-alpha", 4 | "type": "module", 5 | "bin": { 6 | "xrplmeta": "./src/run.js" 7 | }, 8 | "scripts": { 9 | "start": "node src/run.js", 10 | "postinstall": "npm run build-deps", 11 | "build-deps": "node-gyp rebuild --directory=deps --release", 12 | "test": "mocha test/unit/*.test.js", 13 | "livetest": "node test/live/run.js" 14 | }, 15 | "dependencies": { 16 | "@mwni/events": "3.0.0", 17 | "@mwni/log": "3.2.0", 18 | "@mwni/workers": "1.1.0", 19 | "@structdb/sqlite": "1.6.1-alpha", 20 | "@xrplkit/socket": "2.1.0", 21 | "@xrplkit/time": "1.0.0", 22 | "@xrplkit/tokens": "1.0.1", 23 | "@xrplkit/toml": "1.0.0", 24 | "@xrplkit/txmeta": "1.4.1", 25 | "@xrplkit/xfl": "2.1.1", 26 | "@xrplkit/xls26": "2.4.0", 27 | "@koa/router": "13.1.0", 28 | "koa": "3.0.0", 29 | "koa-easy-ws": "2.1.0", 30 | "koa-json": "2.0.2", 31 | "koa-send": "5.0.1", 32 | "better-sqlite3": "11.10.0", 33 | "limiter": "2.0.1", 34 | "node-abort-controller": "3.1.1", 35 | "node-fetch": "3.3.2", 36 | "minimist": "1.2.8", 37 | "ripple-address-codec": "5.0.0", 38 | "sharp": "0.34.2", 39 | "ws": "8.18.2" 40 | }, 41 | "devDependencies": { 42 | "chai": "^5.1.1", 43 | "mocha": "^10.7.3" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | 2 | # The XRPL Meta Node 3 | 4 | This is a Javascript implementation of the [XRPL Meta](https://xrplmeta.org) project. 5 | 6 | XRPL Meta collects metadata about digital assets on the XRP Ledger. It makes the data available via a JSON REST and WebSocket API, just like [rippled](https://github.com/XRPLF/rippled). It connects to one or multiple rippled or [clio](https://github.com/XRPLF/clio) nodes and tracks updates in real time. Historical data is being backfilled. 7 | 8 | 9 | 10 | ## Technical Overview 11 | 12 | On the first launch 13 | - The server will create its SQLite database files in the config specified data directory 14 | - It will then create a full snapshot of the most recent ledger 15 | 16 | From there on 17 | - It will sync itself with the live transaction stream 18 | - Backfill ledger history simultaneously 19 | - Scrape additional metadata sources, such as [Bithomp](https://bithomp.com), [XRP Scan](https://xrpscan.com) and [Xumm](https://xumm.dev) 20 | 21 | 22 | 23 | ## The Config File 24 | 25 | When starting the node for the first time, it will automatically create a directory called `.xrplmeta` in the user's home directory. A copy of the [default configuration file](https://github.com/xrplmeta/node/blob/develop/config.template.toml) will be put there, and used. 26 | 27 | Alternatively, you can specify which config file to use using 28 | 29 | node src/run --config /path/to/config.toml 30 | 31 | The config file uses "stanzas" for configuring each relevant component, such as the [public server API](https://github.com/xrplmeta/node/tree/develop/src/srv) and the [crawlers](https://github.com/xrplmeta/node/tree/develop/src/crawl/crawlers). Delete or comment the respective stanza to disable the component. 32 | 33 | Review the comments in [default configuration file](https://github.com/xrplmeta/node/blob/develop/config.template.toml) for further explanation of the individual parameters. 34 | 35 | 36 | 37 | ## API Documentation 38 | 39 | https://xrplmeta.org/docs 40 | 41 | The node will listen for incoming HTTP connections on the port specified in the config file. These can either serve a REST query, or be upgraded to a WebSocket connection. 42 | 43 | 44 | 45 | ## Install for production use 46 | 47 | Install the public NPM package: 48 | 49 | npm install -g xrplmeta 50 | 51 | This will add the `xrplmeta` command to your PATH. Simply run this command to start the server. A template configuration file will be placed in your user directory. It is recommended to adjust this config. 52 | 53 | 54 | 55 | ## Install for development 56 | 57 | Clone this repository and install the dependencies: 58 | 59 | npm install 60 | 61 | The development node can be started using: 62 | 63 | node src/run 64 | 65 | 66 | 67 | ## Requirements 68 | 69 | - Node.js version +14 70 | 71 | - An internet connection 72 | 73 | - More than 3 GB of disk storage -------------------------------------------------------------------------------- /src/app/cache.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startMetaCacheWorker, startIconCacheWorker } from '../cache/worker.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | log.info('starting cache worker') 9 | 10 | await spawn(':runMetaCacheWorker', { ctx }) 11 | await spawn(':runIconCacheWorker', { ctx }) 12 | } 13 | 14 | export async function runMetaCacheWorker({ ctx }){ 15 | if(ctx.log) 16 | log.pipe(ctx.log) 17 | 18 | return await startMetaCacheWorker({ 19 | ctx: { 20 | ...ctx, 21 | db: await openDB({ 22 | ctx, 23 | coreReadOnly: true 24 | }) 25 | } 26 | }) 27 | } 28 | 29 | export async function runIconCacheWorker({ ctx }){ 30 | if(ctx.log) 31 | log.pipe(ctx.log) 32 | 33 | return await startIconCacheWorker({ 34 | ctx: { 35 | ...ctx, 36 | db: await openDB({ 37 | ctx, 38 | coreReadOnly: true 39 | }) 40 | } 41 | }) 42 | } -------------------------------------------------------------------------------- /src/app/crawl.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startCrawlers } from '../crawl/init.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | await spawn(':runCrawl', { ctx }) 9 | } 10 | 11 | 12 | export async function runCrawl({ ctx }){ 13 | if(ctx.log) 14 | log.pipe(ctx.log) 15 | 16 | log.info('starting crawlers') 17 | 18 | return await startCrawlers({ 19 | ctx: { 20 | ...ctx, 21 | db: await openDB({ ctx }) 22 | } 23 | }) 24 | } -------------------------------------------------------------------------------- /src/app/ledger.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { createPool } from '../xrpl/nodepool.js' 4 | import { openDB } from '../db/index.js' 5 | import { createSnapshot } from '../etl/snapshot.js' 6 | import { startSync } from '../etl/sync.js' 7 | import { startBackfill } from '../etl/backfill.js' 8 | 9 | 10 | export async function run({ ctx }){ 11 | ctx = { 12 | ...ctx, 13 | xrpl: createPool(ctx.config.source.xrpls), 14 | } 15 | 16 | await spawn(':runSnapshot', { ctx }) 17 | 18 | spawn(':runSync', { ctx }) 19 | .then(task => task.onceInSync()) 20 | .then(() => spawn(':runBackfill', { ctx })) 21 | } 22 | 23 | 24 | export async function runSnapshot({ ctx }){ 25 | if(ctx.log) 26 | log.pipe(ctx.log) 27 | 28 | return await createSnapshot({ 29 | ctx: { 30 | ...ctx, 31 | db: await openDB({ ctx }) 32 | } 33 | }) 34 | } 35 | 36 | export async function runSync({ ctx }){ 37 | if(ctx.log) 38 | log.pipe(ctx.log) 39 | 40 | log.info('starting sync') 41 | 42 | return await startSync({ 43 | ctx: { 44 | ...ctx, 45 | db: await openDB({ ctx }) 46 | } 47 | }) 48 | } 49 | 50 | export async function runBackfill({ ctx }){ 51 | if(ctx.log) 52 | log.pipe(ctx.log) 53 | 54 | log.info('starting backfill') 55 | 56 | return await startBackfill({ 57 | ctx: { 58 | ...ctx, 59 | db: await openDB({ ctx }) 60 | } 61 | }) 62 | } -------------------------------------------------------------------------------- /src/app/main.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { run as runLedgerApp } from './ledger.js' 3 | import { run as runCrawlApp } from './crawl.js' 4 | import { run as runCacheApp } from './cache.js' 5 | import { run as runServerApp } from './server.js' 6 | import createIPC from '../lib/ipc.js' 7 | 8 | 9 | export default async function({ config, args }){ 10 | const ctx = { 11 | ipc: createIPC(), 12 | config, 13 | log, 14 | } 15 | 16 | 17 | if(!args['only-server']){ 18 | await runLedgerApp({ ctx }) 19 | .catch(error => { 20 | log.error(`ledger app crashed due to fatal error:`) 21 | log.error(error) 22 | process.exit(1) 23 | }) 24 | 25 | log.info(`bootstrap complete`) 26 | 27 | runCrawlApp({ ctx }) 28 | .catch(error => { 29 | log.error(`crawl app crashed due to fatal error:`) 30 | log.error(error) 31 | log.warn(`attempting to continue without it`) 32 | }) 33 | 34 | runCacheApp({ ctx }) 35 | .catch(error => { 36 | log.error(`cache app crashed due to fatal error:`) 37 | log.error(error) 38 | log.warn(`attempting to continue without it`) 39 | }) 40 | } 41 | 42 | runServerApp({ ctx }) 43 | .catch(error => { 44 | log.error(`server app crashed:`) 45 | log.error(error) 46 | log.warn(`attempting to continue without it`) 47 | }) 48 | 49 | 50 | return { 51 | async terminate(){ 52 | log.info(`shutting down`) 53 | process.exit() 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /src/app/server.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startServer } from '../srv/server.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | if(!ctx.config.api){ 9 | log.warn(`config is missing [API] stanza: disabling server`) 10 | return 11 | } 12 | 13 | await spawn(':runServer', { ctx }) 14 | } 15 | 16 | 17 | export async function runServer({ ctx }){ 18 | if(ctx.log) 19 | log.pipe(ctx.log) 20 | 21 | log.info('starting server') 22 | 23 | return await startServer({ 24 | ctx: { 25 | ...ctx, 26 | db: await openDB({ ctx }) 27 | } 28 | }) 29 | } -------------------------------------------------------------------------------- /src/cache/icons.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import sharp from 'sharp' 4 | import log from '@mwni/log' 5 | import { createHash } from 'crypto' 6 | import { unixNow } from '@xrplkit/time' 7 | import { readAccountProps, readTokenProps } from '../db/helpers/props.js' 8 | import { validate as validateURL } from '../lib/url.js' 9 | import { createFetch } from '../lib/fetch.js' 10 | import { getAccountId, getTokenId } from '../db/helpers/common.js' 11 | import { getCommonTokenCacheFields } from './tokens.js' 12 | 13 | 14 | const mimeTypes = { 15 | 'image/jpeg': 'jpg', 16 | 'image/png': 'png', 17 | 'image/gif': 'gif', 18 | 'image/webp': 'webp', 19 | 'image/svg+xml': 'svg' 20 | } 21 | 22 | export const iconSizes = [ 23 | 512, 24 | 256, 25 | 128, 26 | 64 27 | ] 28 | 29 | export async function updateIconCacheFor({ ctx, token, account }){ 30 | let user 31 | let targetTokens 32 | 33 | if(token){ 34 | token.id = getTokenId({ ctx, token }) 35 | user = { 36 | userType: 'token', 37 | userId: token.id 38 | } 39 | targetTokens = [token] 40 | }else if(account){ 41 | account.id = getAccountId({ ctx, account }) 42 | user = { 43 | userType: 'account', 44 | userId: account.id 45 | } 46 | targetTokens = ctx.db.core.tokens.readMany({ 47 | where: { 48 | issuer: account 49 | } 50 | }) 51 | }else 52 | throw new Error(`must specify either "token" or "account"`) 53 | 54 | 55 | let using = ctx.db.cache.iconUsers.readMany({ 56 | where: { 57 | ...user 58 | }, 59 | include: { 60 | icon: true 61 | } 62 | }) 63 | 64 | let props = token 65 | ? readTokenProps({ ctx, token }) 66 | : readAccountProps({ ctx, account }) 67 | 68 | let urls = props 69 | .filter(prop => prop.key === 'icon') 70 | .map(prop => prop.value) 71 | .filter((url, i, urls) => urls.indexOf(url) === i) 72 | .filter(validateURL) 73 | 74 | log.debug(`got ${urls.length} icon URL(s) for ${token ? `token ${token.id}` : `account ${account.id}`}`) 75 | 76 | for(let url of urls){ 77 | let cache = ctx.db.cache.icons.readOne({ 78 | where: { 79 | url 80 | } 81 | }) 82 | 83 | if(!cache){ 84 | cache = ctx.db.cache.icons.createOne({ 85 | data: { 86 | url 87 | } 88 | }) 89 | } 90 | 91 | ctx.db.cache.iconUsers.createOne({ 92 | data: { 93 | icon: cache, 94 | ...user 95 | } 96 | }) 97 | 98 | let lifetime = cache.error 99 | ? 60 * 10 100 | : (ctx.config.cache?.icons?.lifetime || 60 * 60) 101 | 102 | if(!cache.timeUpdated || cache.timeUpdated < unixNow() - lifetime){ 103 | try{ 104 | let { hash, fileType } = await downloadAndProcessIcon({ ctx, url }) 105 | 106 | cache = ctx.db.cache.icons.updateOne({ 107 | data: { 108 | hash, 109 | fileType, 110 | timeUpdated: unixNow() 111 | }, 112 | where: { 113 | id: cache.id 114 | } 115 | }) 116 | }catch(error){ 117 | log.debug(`failed to download ${url}: ${error.message}`) 118 | 119 | ctx.db.cache.icons.updateOne({ 120 | data: { 121 | timeUpdated: unixNow(), 122 | error: error.message 123 | }, 124 | where: { 125 | id: cache.id 126 | } 127 | }) 128 | 129 | continue 130 | } 131 | }else{ 132 | log.debug(`icon ${url} not yet due for renewal`) 133 | } 134 | 135 | for(let token of targetTokens){ 136 | linkCachedIconToTokenCache({ 137 | ctx, 138 | token, 139 | cachedIcon: cache 140 | }) 141 | } 142 | } 143 | 144 | let previouslyUsedUrls = using 145 | .map(use => use.icon.url) 146 | .filter((url, i, urls) => urls.indexOf(url) === i) 147 | 148 | let removedUsing = using 149 | .filter(use => !urls.includes(use.icon.url)) 150 | 151 | let removedUrls = previouslyUsedUrls 152 | .filter(url => !urls.includes(url)) 153 | 154 | for(let url of removedUrls){ 155 | for(let token of targetTokens){ 156 | unlinkCachedIconFromTokenCache({ ctx, token, url }) 157 | } 158 | } 159 | 160 | for(let use of removedUsing){ 161 | ctx.db.cache.iconUsers.deleteOne({ 162 | where: { 163 | id: use.id 164 | } 165 | }) 166 | } 167 | 168 | for(let url of previouslyUsedUrls){ 169 | let userCount = ctx.db.cache.iconUsers.count({ 170 | where: { 171 | icon:{ 172 | url 173 | } 174 | } 175 | }) 176 | 177 | if(Number(userCount) === 0){ 178 | log.debug(`icon "${url}" has no more users - deleting it`) 179 | deleteIcon({ ctx, url }) 180 | } 181 | } 182 | } 183 | 184 | 185 | function linkCachedIconToTokenCache({ ctx, token, cachedIcon }){ 186 | let tokenCache = ctx.db.cache.tokens.readOne({ 187 | where: { 188 | token: token.id 189 | } 190 | }) 191 | 192 | if(!tokenCache){ 193 | ctx.db.cache.tokens.createOne({ 194 | data: { 195 | ...getCommonTokenCacheFields({ ctx, token }), 196 | cachedIcons: { 197 | [cachedIcon.url]: `${cachedIcon.hash}.${cachedIcon.fileType}` 198 | } 199 | } 200 | }) 201 | }else{ 202 | ctx.db.cache.tokens.updateOne({ 203 | data: { 204 | cachedIcons: { 205 | ...tokenCache.cachedIcons, 206 | [cachedIcon.url]: `${cachedIcon.hash}.${cachedIcon.fileType}` 207 | } 208 | }, 209 | where: { 210 | id: tokenCache.id 211 | } 212 | }) 213 | } 214 | 215 | log.debug(`linked cached icon ${cachedIcon.url} -> ${cachedIcon.hash}.${cachedIcon.fileType} to token ${token.id}`) 216 | } 217 | 218 | function unlinkCachedIconFromTokenCache({ ctx, token, url }){ 219 | let tokenCache = ctx.db.cache.tokens.readOne({ 220 | where: { 221 | token: token.id 222 | } 223 | }) 224 | 225 | ctx.db.cache.tokens.updateOne({ 226 | data: { 227 | cachedIcons: { 228 | ...tokenCache.cachedIcon, 229 | [url]: undefined 230 | } 231 | }, 232 | where: { 233 | id: tokenCache.id 234 | } 235 | }) 236 | 237 | log.debug(`unlinked cached icon ${url} from token ${token.id}`) 238 | } 239 | 240 | async function downloadAndProcessIcon({ ctx, url }){ 241 | let fetch = createFetch() 242 | let res = await fetch(url, { raw: true }) 243 | let mime = res.headers.get('content-type') 244 | let fileType = mimeTypes[mime] 245 | 246 | if(!fileType) 247 | throw new Error(`unsupported format: ${mime}`) 248 | 249 | let buffer = Buffer.from(await res.arrayBuffer()) 250 | let hash = createHash('md5') 251 | .update(buffer) 252 | .digest('hex') 253 | .slice(0, 10) 254 | .toUpperCase() 255 | 256 | let makePath = suffix => getCachedIconPath({ ctx, hash, suffix, fileType }) 257 | 258 | fs.writeFileSync( 259 | makePath(), 260 | buffer 261 | ) 262 | 263 | if(fileType !== 'svg'){ 264 | for(let size of iconSizes){ 265 | await sharp(buffer) 266 | .png() 267 | .resize(size, size, { fit: 'cover' }) 268 | .toFile(makePath(`@${size}`)) 269 | } 270 | } 271 | 272 | log.debug(`downloaded ${url} (hash ${hash})`) 273 | 274 | return { hash, fileType } 275 | } 276 | 277 | function deleteIcon({ ctx, url }){ 278 | let icon = ctx.db.cache.icons.readOne({ 279 | where: { 280 | url 281 | } 282 | }) 283 | 284 | fs.rmSync( 285 | getCachedIconPath({ ctx, ...icon }), 286 | { force: true } 287 | ) 288 | 289 | if(icon.fileType !== 'svg'){ 290 | for(let size of iconSizes){ 291 | fs.rmSync( 292 | getCachedIconPath({ ctx, ...icon, suffix: `@${size}` }), 293 | { force: true } 294 | ) 295 | } 296 | } 297 | 298 | ctx.db.cache.icons.deleteOne({ 299 | where: { 300 | id: icon.id 301 | } 302 | }) 303 | } 304 | 305 | function getIconCacheDir({ ctx }){ 306 | let dir = path.join(ctx.config.node.dataDir, 'media', 'icons') 307 | 308 | if(!fs.existsSync(dir)) 309 | fs.mkdirSync(dir, { recursive: true }) 310 | 311 | return dir 312 | } 313 | 314 | export function getCachedIconPath({ ctx, hash, suffix, fileType }){ 315 | return path.join(getIconCacheDir({ ctx }), `${hash}${suffix || ''}.${fileType}`) 316 | } -------------------------------------------------------------------------------- /src/cache/todo.js: -------------------------------------------------------------------------------- 1 | import { getAccountId, getTokenId } from '../db/helpers/common.js' 2 | 3 | export function markCacheDirtyForAccountProps({ ctx, account }){ 4 | if(ctx.backwards) 5 | return 6 | 7 | ctx.db.cache.todos.createOne({ 8 | data: { 9 | task: 'account.props', 10 | subject: getAccountId({ ctx, account }) 11 | } 12 | }) 13 | } 14 | 15 | export function markCacheDirtyForTokenProps({ ctx, token }){ 16 | if(ctx.backwards) 17 | return 18 | 19 | ctx.db.cache.todos.createOne({ 20 | data: { 21 | task: 'token.props', 22 | subject: getTokenId({ ctx, token }) 23 | } 24 | }) 25 | } 26 | 27 | export function markCacheDirtyForTokenMetrics({ ctx, token, metrics }){ 28 | if(ctx.backwards) 29 | return 30 | 31 | let subject = getTokenId({ ctx, token }) 32 | 33 | for(let metric of Object.keys(metrics)){ 34 | ctx.db.cache.todos.createOne({ 35 | data: { 36 | task: `token.metrics.${metric}`, 37 | subject 38 | } 39 | }) 40 | } 41 | } 42 | 43 | export function markCacheDirtyForTokenExchanges({ ctx, token }){ 44 | if(ctx.backwards) 45 | return 46 | 47 | if(token.currency === 'XRP') 48 | return 49 | 50 | ctx.db.cache.todos.createOne({ 51 | data: { 52 | task: 'token.exchanges', 53 | subject: getTokenId({ ctx, token }) 54 | } 55 | }) 56 | } 57 | 58 | export function markCacheDirtyForTokenIcons({ ctx, token }){ 59 | ctx.db.cache.todos.createOne({ 60 | data: { 61 | task: 'token.icons', 62 | subject: getTokenId({ ctx, token }) 63 | } 64 | }) 65 | } 66 | 67 | export function markCacheDirtyForAccountIcons({ ctx, account }){ 68 | ctx.db.cache.todos.createOne({ 69 | data: { 70 | task: 'account.icons', 71 | subject: getAccountId({ ctx, account }) 72 | } 73 | }) 74 | } -------------------------------------------------------------------------------- /src/cache/tokens.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { sub, mul, div, min, gt } from '@xrplkit/xfl' 3 | import { currencyHexToUTF8 } from '@xrplkit/tokens' 4 | import { readLedgerAt, readMostRecentLedger } from '../db/helpers/ledgers.js' 5 | import { readTokenMetrics } from '../db/helpers/tokenmetrics.js' 6 | import { readTokenExchangeAligned, readTokenExchangeCount, readTokenExchangeUniqueTakerCount, readTokenVolume } from '../db/helpers/tokenexchanges.js' 7 | import { readAccountProps, readTokenProps } from '../db/helpers/props.js' 8 | 9 | 10 | const maxChangePercent = 999999999 11 | const metricInts = ['trustlines', 'holders'] 12 | 13 | 14 | 15 | export function updateCacheForTokenProps({ ctx, token }){ 16 | if(ctx.backwards) 17 | return 18 | 19 | let props = readTokenProps({ ctx, token }) 20 | let tokenName = props.find(prop => prop.key === 'name')?.value 21 | let changedCache = ctx.db.cache.tokens.createOne({ 22 | data: { 23 | ...getCommonTokenCacheFields({ ctx, token }), 24 | tokenName, 25 | tokenProps: props, 26 | trustLevel: Math.max( 27 | 0, 28 | ...props 29 | .filter(({ key }) => key === 'trust_level') 30 | .map(({ value }) => value) 31 | ) 32 | }, 33 | returnUnchanged: false 34 | }) 35 | 36 | if(changedCache){ 37 | dispatchTokenUpdate({ ctx, token, subject: 'tokenProps' }) 38 | } 39 | } 40 | 41 | export function updateCacheForAccountProps({ ctx, account }){ 42 | if(ctx.backwards) 43 | return 44 | 45 | let props = readAccountProps({ 46 | ctx, 47 | account 48 | }) 49 | 50 | let issuerName = props.find(prop => prop.key === 'name')?.value 51 | 52 | let tokens = ctx.db.core.tokens.readMany({ 53 | where: { 54 | issuer: account 55 | } 56 | }) 57 | 58 | for(let token of tokens){ 59 | if(!token.issuer) 60 | continue 61 | 62 | let changedCache = ctx.db.cache.tokens.createOne({ 63 | data: { 64 | ...getCommonTokenCacheFields({ ctx, token }), 65 | issuerName, 66 | issuerProps: props 67 | }, 68 | returnUnchanged: false 69 | }) 70 | 71 | if(changedCache){ 72 | dispatchTokenUpdate({ ctx, token, subject: 'issuerProps' }) 73 | } 74 | 75 | updateCacheForTokenProps({ ctx, token }) 76 | } 77 | } 78 | 79 | export function updateCacheForTokenMetrics({ ctx, token, metrics }){ 80 | if(ctx.backwards) 81 | return 82 | 83 | let cache = {} 84 | let sequences = getCommonLedgerSequences({ ctx }) 85 | 86 | let currentValues = readTokenMetrics({ 87 | ctx, 88 | token, 89 | metrics, 90 | ledgerSequence: sequences.current 91 | }) 92 | 93 | let pre24hValues = readTokenMetrics({ 94 | ctx, 95 | token, 96 | metrics, 97 | ledgerSequence: sequences.pre24h 98 | }) 99 | 100 | let pre7dValues = readTokenMetrics({ 101 | ctx, 102 | token, 103 | metrics, 104 | ledgerSequence: sequences.pre7d 105 | }) 106 | 107 | for(let key of Object.keys(metrics)){ 108 | let current = currentValues[key] || 0 109 | let pre24h = pre24hValues[key] || 0 110 | let pre7d = pre7dValues[key] || 0 111 | let delta24h = sub(current, pre24h) 112 | let delta7d = sub(current, pre7d) 113 | 114 | let percent24h = gt(pre24h, 0) 115 | ? Number(min(mul(div(delta24h, pre24h), 100), maxChangePercent)) 116 | : 0 117 | 118 | let percent7d = gt(pre7d, 0) 119 | ? Number(min(mul(div(delta7d, pre7d), 100), maxChangePercent)) 120 | : 0 121 | 122 | if(metricInts.includes(key)){ 123 | delta24h = Number(delta24h) 124 | delta7d = Number(delta7d) 125 | }else{ 126 | current = current.toString() 127 | delta24h = delta24h.toString() 128 | delta7d = delta7d.toString() 129 | } 130 | 131 | cache[key] = current 132 | cache[`${key}Delta24H`] = delta24h 133 | cache[`${key}Percent24H`] = percent24h 134 | cache[`${key}Delta7D`] = delta7d 135 | cache[`${key}Percent7D`] = percent7d 136 | } 137 | 138 | let changedCache = ctx.db.cache.tokens.createOne({ 139 | data: { 140 | ...getCommonTokenCacheFields({ ctx, token }), 141 | ...cache 142 | }, 143 | returnUnchanged: false 144 | }) 145 | 146 | if(changedCache){ 147 | dispatchTokenUpdate({ ctx, token, subject: 'metrics' }) 148 | } 149 | } 150 | 151 | export function updateCacheForTokenExchanges({ ctx, token }){ 152 | if(ctx.backwards) 153 | return 154 | 155 | if(token.currency === 'XRP') 156 | return 157 | 158 | let sequences = getCommonLedgerSequences({ ctx }) 159 | 160 | let current = readTokenExchangeAligned({ 161 | ctx, 162 | base: token, 163 | quote: { 164 | currency: 'XRP' 165 | }, 166 | ledgerSequence: sequences.current, 167 | skipDust: true 168 | })?.price || 0 169 | 170 | let pre24h = readTokenExchangeAligned({ 171 | ctx, 172 | base: token, 173 | quote: { 174 | currency: 'XRP' 175 | }, 176 | ledgerSequence: sequences.pre24h, 177 | skipDust: true 178 | })?.price || 0 179 | 180 | let pre7d = readTokenExchangeAligned({ 181 | ctx, 182 | base: token, 183 | quote: { 184 | currency: 'XRP' 185 | }, 186 | ledgerSequence: sequences.pre7d, 187 | skipDust: true 188 | })?.price || 0 189 | 190 | let delta24h = sub(current, pre24h) 191 | let delta7d = sub(current, pre7d) 192 | 193 | let percent24h = gt(pre24h, 0) 194 | ? Number(min(mul(div(delta24h, pre24h), 100), maxChangePercent)) 195 | : 0 196 | 197 | let percent7d = gt(pre7d, 0) 198 | ? Number(min(mul(div(delta7d, pre7d), 100), maxChangePercent)) 199 | : 0 200 | 201 | let volume24H = readTokenVolume({ 202 | ctx, 203 | base: token, 204 | quote: { 205 | id: 1, 206 | currency: 'XRP' 207 | }, 208 | sequenceStart: sequences.pre24h, 209 | sequenceEnd: sequences.current 210 | }) 211 | 212 | let volume7D = readTokenVolume({ 213 | ctx, 214 | base: token, 215 | quote: { 216 | id: 1, 217 | currency: 'XRP' 218 | }, 219 | sequenceStart: sequences.pre7d, 220 | sequenceEnd: sequences.current 221 | }) 222 | 223 | let exchanges24H = readTokenExchangeCount({ 224 | ctx, 225 | base: token, 226 | quote: { 227 | id: 1, 228 | currency: 'XRP' 229 | }, 230 | sequenceStart: sequences.pre24h, 231 | sequenceEnd: sequences.current 232 | }) 233 | 234 | let exchanges7D = readTokenExchangeCount({ 235 | ctx, 236 | base: token, 237 | quote: { 238 | id: 1, 239 | currency: 'XRP' 240 | }, 241 | sequenceStart: sequences.pre7d, 242 | sequenceEnd: sequences.current 243 | }) 244 | 245 | let takers24H = readTokenExchangeUniqueTakerCount({ 246 | ctx, 247 | base: token, 248 | quote: { 249 | id: 1, 250 | currency: 'XRP' 251 | }, 252 | sequenceStart: sequences.pre24h, 253 | sequenceEnd: sequences.current 254 | }) 255 | 256 | let takers7D = readTokenExchangeUniqueTakerCount({ 257 | ctx, 258 | base: token, 259 | quote: { 260 | id: 1, 261 | currency: 'XRP' 262 | }, 263 | sequenceStart: sequences.pre7d, 264 | sequenceEnd: sequences.current 265 | }) 266 | 267 | let changedCache = ctx.db.cache.tokens.createOne({ 268 | data: { 269 | ...getCommonTokenCacheFields({ ctx, token }), 270 | price: current.toString(), 271 | pricePercent24H: percent24h, 272 | pricePercent7D: percent7d, 273 | volume24H: volume24H.toString(), 274 | volume7D: volume7D.toString(), 275 | exchanges24H, 276 | exchanges7D, 277 | takers24H, 278 | takers7D 279 | }, 280 | returnUnchanged: false 281 | }) 282 | 283 | if(changedCache){ 284 | dispatchTokenUpdate({ ctx, token, subject: 'metrics' }) 285 | } 286 | } 287 | 288 | export function getCommonTokenCacheFields({ ctx, token }){ 289 | if(!token.id || !token.issuer || !token.issuer.address) 290 | token = ctx.db.core.tokens.readOne({ 291 | where: token, 292 | include: { 293 | issuer: true 294 | } 295 | }) 296 | 297 | return { 298 | token: token.id, 299 | tokenCurrencyHex: token.currency, 300 | tokenCurrencyUtf8: currencyHexToUTF8(token.currency), 301 | issuerAddress: token.issuer.address 302 | } 303 | } 304 | 305 | function getCommonLedgerSequences({ ctx }){ 306 | let currentLedger = readMostRecentLedger({ ctx }) 307 | 308 | return { 309 | current: currentLedger.sequence, 310 | pre24h: readLedgerAt({ 311 | ctx, 312 | time: currentLedger.closeTime - 60 * 60 * 24, 313 | clamp: true 314 | }).sequence, 315 | pre7d: readLedgerAt({ 316 | ctx, 317 | time: currentLedger.closeTime - 60 * 60 * 24 * 7, 318 | clamp: true 319 | }).sequence 320 | } 321 | } 322 | 323 | function dispatchTokenUpdate({ ctx, token, subject }){ 324 | if(!ctx.ipc) 325 | return 326 | 327 | ctx.ipc.emit({ 328 | tokenUpdate: { 329 | token, 330 | subject 331 | } 332 | }) 333 | } -------------------------------------------------------------------------------- /src/cache/worker.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | import { 4 | updateCacheForAccountProps, 5 | updateCacheForTokenExchanges, 6 | updateCacheForTokenMetrics, 7 | updateCacheForTokenProps 8 | } from './tokens.js' 9 | import { updateIconCacheFor } from './icons.js' 10 | 11 | 12 | export async function startMetaCacheWorker({ ctx }){ 13 | let running = true 14 | 15 | ;(async () => { 16 | while(running){ 17 | let todo = ctx.db.cache.todos.readOne({ 18 | where: { 19 | NOT: { 20 | task: { 21 | in: [ 22 | 'account.icons', 23 | 'token.icons', 24 | ] 25 | } 26 | } 27 | } 28 | }) 29 | 30 | if(!todo){ 31 | await wait(25) 32 | continue 33 | } 34 | 35 | try{ 36 | switch(todo.task){ 37 | case 'account.props': { 38 | updateCacheForAccountProps({ 39 | ctx, 40 | account: { 41 | id: todo.subject 42 | } 43 | }) 44 | break 45 | } 46 | case 'token.props': { 47 | updateCacheForTokenProps({ 48 | ctx, 49 | token: { 50 | id: todo.subject 51 | } 52 | }) 53 | break 54 | } 55 | case 'token.exchanges': { 56 | updateCacheForTokenExchanges({ 57 | ctx, 58 | token: { 59 | id: todo.subject 60 | } 61 | }) 62 | break 63 | } 64 | case 'token.metrics.trustlines': { 65 | updateCacheForTokenMetrics({ 66 | ctx, 67 | token: { 68 | id: todo.subject 69 | }, 70 | metrics: { 71 | trustlines: true 72 | } 73 | }) 74 | break 75 | } 76 | case 'token.metrics.holders': { 77 | updateCacheForTokenMetrics({ 78 | ctx, 79 | token: { 80 | id: todo.subject 81 | }, 82 | metrics: { 83 | holders: true 84 | } 85 | }) 86 | break 87 | } 88 | case 'token.metrics.supply': { 89 | updateCacheForTokenMetrics({ 90 | ctx, 91 | token: { 92 | id: todo.subject 93 | }, 94 | metrics: { 95 | supply: true 96 | } 97 | }) 98 | break 99 | } 100 | case 'token.metrics.marketcap': { 101 | updateCacheForTokenMetrics({ 102 | ctx, 103 | token: { 104 | id: todo.subject 105 | }, 106 | metrics: { 107 | marketcap: true 108 | } 109 | }) 110 | break 111 | } 112 | } 113 | }catch(error){ 114 | log.warn(`cache update for token ${todo.subject} failed: ${error.stack || error.message || error}`) 115 | } 116 | 117 | ctx.db.cache.todos.deleteOne({ 118 | where: { 119 | id: todo.id 120 | } 121 | }) 122 | 123 | let remainingCount = ctx.db.cache.todos.count({ 124 | where: { 125 | NOT: { 126 | task: { 127 | in: [ 128 | 'account.icons', 129 | 'token.icons', 130 | ] 131 | } 132 | } 133 | } 134 | }) 135 | 136 | log.accumulate.info({ 137 | text: [`processed %cacheTasksProcessed cache updates in %time (${remainingCount} remaining)`], 138 | data: { cacheTasksProcessed: 1 } 139 | }) 140 | 141 | await wait(1) 142 | } 143 | })() 144 | 145 | return { 146 | stop(){ 147 | running = false 148 | } 149 | } 150 | } 151 | 152 | export async function startIconCacheWorker({ ctx }){ 153 | let running = true 154 | 155 | ;(async () => { 156 | while(running){ 157 | let todo = ctx.db.cache.todos.readOne({ 158 | where: { 159 | task: { 160 | in: [ 161 | 'account.icons', 162 | 'token.icons', 163 | ] 164 | } 165 | } 166 | }) 167 | 168 | if(!todo){ 169 | await wait(1000) 170 | continue 171 | } 172 | 173 | switch(todo.task){ 174 | case 'account.icons': { 175 | updateIconCacheFor({ 176 | ctx, 177 | account: { 178 | id: todo.subject 179 | } 180 | }) 181 | break 182 | } 183 | case 'token.icons': { 184 | updateIconCacheFor({ 185 | ctx, 186 | token: { 187 | id: todo.subject 188 | } 189 | }) 190 | break 191 | } 192 | } 193 | 194 | ctx.db.cache.todos.deleteOne({ 195 | where: { 196 | id: todo.id 197 | } 198 | }) 199 | 200 | let remainingCount = ctx.db.cache.todos.count({ 201 | where: { 202 | task: { 203 | in: [ 204 | 'account.icons', 205 | 'token.icons', 206 | ] 207 | } 208 | } 209 | }) 210 | 211 | log.accumulate.info({ 212 | text: [`processed %iconCacheTasksProcessed icon cache updates in %time (${remainingCount} remaining)`], 213 | data: { iconCacheTasksProcessed: 1 } 214 | }) 215 | 216 | await wait(1) 217 | } 218 | })() 219 | 220 | return { 221 | stop(){ 222 | running = false 223 | } 224 | } 225 | } -------------------------------------------------------------------------------- /src/cmd/backup.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { openDB } from '../db/index.js' 3 | 4 | 5 | export default async function({ config, destinationFile }){ 6 | let { database } = await openDB({ ctx: { config } }) 7 | 8 | try{ 9 | await database.backup({ 10 | lockDatabase: true, 11 | destinationFile, 12 | progress: v => log.info(`backup progress: ${Math.round(v * 10000)/100} %`) 13 | }) 14 | }catch(error){ 15 | log.info(`backup failed:\n`, error) 16 | return 17 | } 18 | 19 | log.info(`backup finished sucessfully`) 20 | } -------------------------------------------------------------------------------- /src/cmd/rebuild-cache.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { openDB } from '../db/index.js' 3 | import { updateIconCacheFor } from '../cache/icons.js' 4 | import { 5 | updateCacheForTokenProps, 6 | updateCacheForAccountProps, 7 | updateCacheForTokenExchanges, 8 | updateCacheForTokenMetrics, 9 | } from '../cache/tokens.js' 10 | 11 | 12 | export default async function({ config, args }){ 13 | const ctx = { 14 | config, 15 | log, 16 | db: await openDB({ 17 | ctx: { config }, 18 | coreReadOnly: true 19 | }) 20 | } 21 | 22 | let tokens 23 | 24 | if(args.token){ 25 | let [currency, issuer] = args.token.split(':') 26 | 27 | tokens = [ctx.db.core.tokens.readOne({ 28 | where: { 29 | currency, 30 | issuer: { 31 | address: issuer 32 | } 33 | } 34 | })] 35 | 36 | if(!tokens[0]) 37 | throw new Error(`token "${args.token}" not found`) 38 | }else{ 39 | tokens = ctx.db.core.tokens.readMany().slice(1) // first is XRP 40 | 41 | if(args.clean){ 42 | log.time.info(`cache.wipe`, `wiping current cache`) 43 | ctx.db.cache.tokens.deleteMany() 44 | ctx.db.cache.icons.deleteMany() 45 | ctx.db.cache.iconUsers.deleteMany() 46 | ctx.db.cache.todos.deleteMany() 47 | log.time.info(`cache.wipe`, `wiped cache in %`) 48 | } 49 | } 50 | 51 | log.time.info(`cache.tokens`, `rebuilding for`, tokens.length, `token(s)`) 52 | 53 | for(let i=0; i { 38 | log.info(`fetching services list...`) 39 | 40 | let accounts = [] 41 | 42 | let { data } = await fetch('services') 43 | let services = data.services 44 | 45 | log.info(`got`, services.length, `services`) 46 | 47 | for(let service of services){ 48 | for(let { address } of service.addresses){ 49 | let weblinks = undefined 50 | 51 | if(service.socialAccounts && service.socialAccounts.length > 0){ 52 | weblinks = Object.entries(service.socialAccounts).map( 53 | ([key, handle]) => ({ 54 | url: socialMediaUrls[key].replace('%', handle), 55 | type: 'socialmedia' 56 | }) 57 | ) 58 | } 59 | 60 | accounts.push({ 61 | address, 62 | props: { 63 | name: service.name, 64 | domain: service.domain, 65 | weblinks, 66 | }, 67 | }) 68 | } 69 | } 70 | 71 | diffMultiAccountProps({ 72 | ctx, 73 | accounts, 74 | source: 'bithomp/services' 75 | }) 76 | 77 | log.info(`updated`, accounts.length, `issuers`) 78 | } 79 | }) 80 | } 81 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/domains.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { parse as parseURL } from 'url' 4 | import { sanitize as sanitizeURL } from '../../lib/url.js' 5 | import { scheduleIterator } from '../schedule.js' 6 | import { createFetch } from '../../lib/fetch.js' 7 | import { clearAccountProps, clearTokenProps, readAccountProps, writeAccountProps, writeTokenProps } from '../../db/helpers/props.js' 8 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 9 | import { reduceProps } from '../../srv/procedures/token.js' 10 | 11 | 12 | const tomlStandardPath = '.well-known/xrp-ledger.toml' 13 | 14 | 15 | export default async function({ ctx }){ 16 | let config = ctx.config.source.issuerdomain 17 | 18 | if(!config || config.disabled){ 19 | throw new Error(`disabled by config`) 20 | } 21 | 22 | let fetch = createFetch({ 23 | timeout: config.connectionTimeout || 20 24 | }) 25 | 26 | while(true){ 27 | await scheduleIterator({ 28 | ctx, 29 | type: 'issuer', 30 | task: 'domains', 31 | interval: config.fetchInterval, 32 | concurrency: 3, 33 | routine: async ({ id, address }) => { 34 | let { domain } = reduceProps({ 35 | props: readAccountProps({ 36 | ctx, 37 | account: { id } 38 | }), 39 | sourceRanking: [ 40 | 'tokenlist', 41 | 'ledger', 42 | 'issuer/domain', 43 | 'xumm', 44 | 'bithomp', 45 | 'xrpscan', 46 | 'twitter' 47 | ] 48 | }) 49 | 50 | if(domain){ 51 | try{ 52 | var xls26 = await fetchToml({ domain, fetch }) 53 | }catch(error){ 54 | log.debug(`issuer (${address}): ${error.message}`) 55 | return 56 | }finally{ 57 | log.accumulate.info({ 58 | text: [`%xrplTomlLookups xrp-ledger.toml lookups in %time`], 59 | data: { 60 | xrplTomlLookups: 1 61 | } 62 | }) 63 | } 64 | 65 | let publishedIssuers = 0 66 | let publishedTokens = 0 67 | 68 | for(let { address: issuer, ...props } of xls26.issuers){ 69 | if(issuer !== address) 70 | continue 71 | 72 | delete props.trust_level 73 | 74 | writeAccountProps({ 75 | ctx, 76 | account: { 77 | address: issuer 78 | }, 79 | props, 80 | source: `issuer/domain/${address}` 81 | }) 82 | 83 | publishedIssuers++ 84 | } 85 | 86 | for(let { currency, issuer, ...props } of xls26.tokens){ 87 | if(issuer !== address) 88 | continue 89 | 90 | delete props.trust_level 91 | 92 | writeTokenProps({ 93 | ctx, 94 | token: { 95 | currency: currencyUTF8ToHex(currency), 96 | issuer: { 97 | address: issuer 98 | } 99 | }, 100 | props, 101 | source: `issuer/domain/${address}` 102 | }) 103 | 104 | publishedTokens++ 105 | } 106 | 107 | log.debug(`issuer (${address}) valid xls26:`, xls26) 108 | 109 | if(publishedIssuers || publishedTokens){ 110 | log.accumulate.info({ 111 | text: [`%domainIssuersUpdated issuers and %domainTokensUpdated tokens updated in %time`], 112 | data: { 113 | domainIssuersUpdated: publishedIssuers, 114 | domainTokensUpdated: publishedTokens, 115 | } 116 | }) 117 | } 118 | }else{ 119 | clearAccountProps({ 120 | ctx, 121 | account: { id }, 122 | source: `issuer/domain/${address}` 123 | }) 124 | 125 | for(let token of ctx.db.core.tokens.readMany({ 126 | where: { 127 | issuer: { id } 128 | } 129 | })){ 130 | clearTokenProps({ 131 | ctx, 132 | token, 133 | source: `issuer/domain/${address}` 134 | }) 135 | } 136 | } 137 | } 138 | }) 139 | } 140 | } 141 | 142 | export async function fetchToml({ domain, fetch }){ 143 | let { protocol, host, pathname } = parseURL(domain) 144 | 145 | if(protocol && protocol !== 'https:' && protocol !== 'http:') 146 | throw new Error(`unsupported protocol: ${domain}`) 147 | 148 | if(!host) 149 | host = '' 150 | 151 | if(!pathname) 152 | pathname = '' 153 | 154 | let tomlUrls = (protocol ? [protocol] : ['https:', 'http:']) 155 | .map(protocol => `${protocol}//${host}${pathname}/${tomlStandardPath}`) 156 | .map(sanitizeURL) 157 | 158 | for(let tomlUrl of tomlUrls){ 159 | log.debug(`fetching ${tomlUrl}`) 160 | 161 | try{ 162 | let { status, data } = await fetch(tomlUrl) 163 | 164 | if(status !== 200) 165 | throw new Error(`HTTP ${status}`) 166 | 167 | return parseXLS26(data) 168 | }catch(error){ 169 | if(tomlUrl === tomlUrls.at(-1)) 170 | throw new Error( 171 | error.message.includes(tomlUrl) 172 | ? error.message 173 | : `${tomlUrl} -> ${error.message}` 174 | ) 175 | } 176 | } 177 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/gravatar.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { writeAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.source.gravatar 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = new createFetch({ 15 | baseUrl: 'https://www.gravatar.com', 16 | ratelimit: config.maxRequestsPerMinute 17 | }) 18 | 19 | while(true){ 20 | await scheduleIterator({ 21 | ctx, 22 | type: 'issuer', 23 | task: 'gravatar', 24 | interval: config.fetchInterval, 25 | routine: async ({ id, address, emailHash }) => { 26 | let icon 27 | 28 | if(emailHash){ 29 | log.debug(`checking avatar for ${address}`) 30 | 31 | let { status } = await fetch(`avatar/${emailHash.toLowerCase()}?d=404`) 32 | 33 | if(status === 200){ 34 | icon = `https://www.gravatar.com/avatar/${emailHash.toLowerCase()}` 35 | }else if(status !== 404){ 36 | throw `HTTP ${status}` 37 | } 38 | 39 | log.debug(`avatar for ${address}: ${icon}`) 40 | } 41 | 42 | writeAccountProps({ 43 | ctx, 44 | account: { id }, 45 | props: { 46 | icon 47 | }, 48 | source: 'gravatar/avatar' 49 | }) 50 | 51 | log.accumulate.info({ 52 | text: [`%gravatarsChecked avatars checked in %time`], 53 | data: { 54 | gravatarsChecked: 1 55 | } 56 | }) 57 | } 58 | }) 59 | } 60 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/index.js: -------------------------------------------------------------------------------- 1 | import domains from './domains.js' 2 | import tokenlists from './tokenlists.js' 3 | import xumm from './xumm.js' 4 | import bithomp from './bithomp.js' 5 | import xrpscan from './xrpscan.js' 6 | import gravatar from './gravatar.js' 7 | import twitter from './twitter.js' 8 | 9 | export default [ 10 | { name: 'domains', start: domains }, 11 | { name: 'tokenlists', start: tokenlists }, 12 | { name: 'xumm', start: xumm }, 13 | { name: 'bithomp', start: bithomp }, 14 | { name: 'xrpscan', start: xrpscan }, 15 | { name: 'gravatar', start: gravatar }, 16 | { name: 'twitter', start: twitter }, 17 | ] -------------------------------------------------------------------------------- /src/crawl/crawlers/tokenlists.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { scheduleGlobal } from '../schedule.js' 4 | import { createFetch } from '../../lib/fetch.js' 5 | import { diffMultiAccountProps, diffMultiTokenProps } from '../../db/helpers/props.js' 6 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 7 | 8 | 9 | export default async function({ ctx }){ 10 | let configs = ctx.config.source.tokenlists 11 | 12 | if(!configs || configs.length == 0){ 13 | throw new Error(`disabled by config`) 14 | } 15 | 16 | await Promise.all( 17 | configs 18 | .filter(config => !config.disabled) 19 | .map(config => crawlList({ ctx, ...config })) 20 | ) 21 | } 22 | 23 | async function crawlList({ ctx, id, url, fetchInterval = 600, trustLevel = 0, ignoreAdvisories = false }){ 24 | let fetch = createFetch({ 25 | baseUrl: url 26 | }) 27 | 28 | while(true){ 29 | await scheduleGlobal({ 30 | ctx, 31 | task: `tokenlist.${id}`, 32 | interval: fetchInterval, 33 | routine: async () => { 34 | log.info(`reading ${url}`) 35 | 36 | let tokens = [] 37 | let accounts = [] 38 | 39 | let { status, data } = await fetch() 40 | 41 | if(status !== 200){ 42 | throw `${url}: HTTP ${response.status}` 43 | } 44 | 45 | try{ 46 | var { issuers: declaredIssuers, tokens: declaredTokens, issues, advisories } = parseXLS26(data) 47 | }catch(error){ 48 | console.log(error) 49 | throw error 50 | } 51 | 52 | if(issues.length > 0){ 53 | log.debug(`tokenlist [${id}] has issues: ${ 54 | issues 55 | .map(issue => ` - ${issue}`) 56 | .join(`\n`) 57 | }`) 58 | } 59 | 60 | for(let { address, ...props } of declaredIssuers){ 61 | if(props.hasOwnProperty('trust_level')) 62 | props.trust_level = Math.min(props.trust_level, trustLevel) 63 | 64 | accounts.push({ 65 | address, 66 | props 67 | }) 68 | } 69 | 70 | for(let { currency, issuer, ...props } of declaredTokens){ 71 | if(props.hasOwnProperty('trust_level')) 72 | props.trust_level = Math.min(props.trust_level, trustLevel) 73 | 74 | tokens.push({ 75 | currency: currencyUTF8ToHex(currency), 76 | issuer: { 77 | address: issuer 78 | }, 79 | props 80 | }) 81 | } 82 | 83 | let advisoryUpdates = 0 84 | 85 | if(!ignoreAdvisories && trustLevel > 0){ 86 | let groupedAdvisories = {} 87 | 88 | for(let { address, ...props } of advisories){ 89 | if(!groupedAdvisories[address]) 90 | groupedAdvisories[address] = [] 91 | 92 | groupedAdvisories[address].push(props) 93 | } 94 | 95 | for(let [address, advisories] of Object.entries(groupedAdvisories)){ 96 | advisoryUpdates++ 97 | accounts.push({ 98 | address, 99 | props: { 100 | advisories 101 | } 102 | }) 103 | } 104 | } 105 | 106 | diffMultiAccountProps({ 107 | ctx, 108 | accounts, 109 | source: `tokenlist/${id}` 110 | }) 111 | 112 | diffMultiTokenProps({ 113 | ctx, 114 | tokens, 115 | source: `tokenlist/${id}` 116 | }) 117 | 118 | log.info(`tokenlist [${id}] synced (issuers: ${issues.length} tokens: ${tokens.length} advisories: ${advisoryUpdates})`) 119 | } 120 | }) 121 | } 122 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/twitter.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleBatchedIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { writeAccountProps, writeTokenProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.source.twitter 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = new createFetch({ 15 | baseUrl: 'https://api.twitter.com/2', 16 | headers: { 17 | authorization: `Bearer ${config.bearerToken}` 18 | }, 19 | ratelimit: config.maxRequestsPerMinute 20 | }) 21 | 22 | while(true){ 23 | await scheduleBatchedIterator({ 24 | ctx, 25 | type: 'token', 26 | task: 'twitter', 27 | interval: config.fetchInterval, 28 | batchSize: 100, 29 | where: { 30 | OR: [ 31 | { 32 | props: { 33 | key: 'weblinks' 34 | } 35 | }, 36 | { 37 | issuer: { 38 | props: { 39 | key: 'weblinks' 40 | } 41 | } 42 | } 43 | ] 44 | }, 45 | include: { 46 | issuer: true 47 | }, 48 | accumulate: (tasks, token) => { 49 | if(!token.issuer) 50 | return 51 | 52 | let issuerWeblinks = ctx.db.core.accountProps.readMany({ 53 | where: { 54 | account: token.issuer, 55 | key: 'weblinks' 56 | } 57 | }) 58 | 59 | let tokenWeblinks = ctx.db.core.tokenProps.readMany({ 60 | where: { 61 | token, 62 | key: 'weblinks' 63 | } 64 | }) 65 | 66 | for(let prop of [...issuerWeblinks, ...tokenWeblinks]){ 67 | let link = prop.value 68 | .filter(link => link.type !== 'support') 69 | .find(link => link.url.includes('twitter.com')) 70 | 71 | if(!link) 72 | continue 73 | 74 | let handle = link.url.split('/')[3] 75 | 76 | if(!handle) 77 | continue 78 | 79 | if(!/^[A-Za-z0-9_]{1,15}$/.test(handle)) 80 | continue 81 | 82 | let task = tasks.find(task => task.handle === handle) 83 | 84 | if(!task){ 85 | tasks.push(task = { 86 | handle, 87 | items: [], 88 | issuers: [], 89 | tokens: [] 90 | }) 91 | } 92 | 93 | task.items.push(token) 94 | 95 | if(prop.token){ 96 | task.tokens.push(prop.token) 97 | }else{ 98 | task.issuers.push(prop.account) 99 | } 100 | } 101 | 102 | return tasks 103 | }, 104 | commit: async tasks => { 105 | log.info(`got batch of`, tasks.length, `twitter profiles to fetch`) 106 | 107 | let usernamesQuery = tasks 108 | .map(({ handle }) => handle) 109 | .join(',') 110 | 111 | let { status, data: {data, errors} } = await fetch( 112 | 'users/by?user.fields=name,profile_image_url,description,entities,public_metrics' 113 | + `&usernames=${encodeURIComponent(usernamesQuery)}` 114 | ) 115 | 116 | if(status !== 200) 117 | throw `HTTP ${status}` 118 | 119 | if(!data){ 120 | throw errors[0] 121 | } 122 | 123 | log.info(`fetched`, data.length, `profiles`) 124 | 125 | let updatedTokens = 0 126 | let updatedAccounts = 0 127 | 128 | for(let { handle, tokens, issuers } of tasks){ 129 | let profile = data.find(entry => entry.username.toLowerCase() === handle.toLowerCase()) 130 | let props = { 131 | followers: undefined, 132 | name: undefined, 133 | icon: undefined, 134 | description: undefined, 135 | domain: undefined 136 | } 137 | 138 | 139 | if(profile){ 140 | props.followers = profile.public_metrics.followers_count 141 | props.name = profile.name 142 | props.description = profile.description 143 | props.icon = profile.profile_image_url 144 | ? profile.profile_image_url.replace('_normal', '') 145 | : undefined 146 | 147 | if(profile.entities?.url?.urls){ 148 | props.domain = profile.entities.url.urls[0].expanded_url 149 | .replace(/^https?:\/\//, '') 150 | .replace(/\/$/, '') 151 | } 152 | 153 | if(profile.entities?.description?.urls){ 154 | let offset = 0 155 | 156 | for(let { start, end, expanded_url } of profile.entities.description.urls){ 157 | props.description = props.description.slice(0, start + offset) + expanded_url + props.description.slice(end + offset) 158 | offset += expanded_url.length - (end - start) 159 | } 160 | } 161 | } 162 | 163 | for(let token of tokens){ 164 | writeTokenProps({ 165 | ctx, 166 | token, 167 | props, 168 | source: 'twitter/profile' 169 | }) 170 | 171 | updatedTokens++ 172 | } 173 | 174 | for(let account of issuers){ 175 | writeAccountProps({ 176 | ctx, 177 | account, 178 | props, 179 | source: 'twitter/profile' 180 | }) 181 | 182 | updatedAccounts++ 183 | } 184 | } 185 | 186 | log.info(`updated`, updatedAccounts, `issuers and`, updatedTokens, `tokens`) 187 | } 188 | }) 189 | } 190 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/xrpscan.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleGlobal } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { diffMultiAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.source.xrpscan 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = createFetch({ 15 | baseUrl: 'https://api.xrpscan.com/api/v1' 16 | }) 17 | 18 | while(true){ 19 | await scheduleGlobal({ 20 | ctx, 21 | task: 'xrpscan.well-known', 22 | interval: config.fetchInterval, 23 | routine: async () => { 24 | log.info(`fetching well-known list...`) 25 | 26 | let accounts = [] 27 | let { data } = await fetch('names/well-known') 28 | 29 | log.info(`got`, data.length, `well known`) 30 | 31 | for(let { account, name, domain, twitter } of data){ 32 | let weblinks = undefined 33 | 34 | if(twitter){ 35 | weblinks = [{ 36 | url: `https://twitter.com/${twitter}`, 37 | type: `socialmedia` 38 | }] 39 | } 40 | 41 | accounts.push({ 42 | address: account, 43 | props: { 44 | name, 45 | domain, 46 | weblinks 47 | }, 48 | }) 49 | } 50 | 51 | diffMultiAccountProps({ 52 | ctx, 53 | accounts, 54 | source: 'xrpscan/well-known' 55 | }) 56 | 57 | log.info(`updated`, accounts.length, `issuers`) 58 | } 59 | }) 60 | } 61 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/xumm.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleGlobal, scheduleIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { diffMultiAccountProps, diffMultiTokenProps, writeAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.source.xumm 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetchApi = createFetch({ 15 | baseUrl: 'https://xumm.app/api/v1/platform/', 16 | headers: { 17 | 'x-api-key': config.apiKey, 18 | 'x-api-secret': config.apiSecret 19 | }, 20 | ratelimit: config.maxRequestsPerMinute 21 | }) 22 | 23 | let fetchAvatar = createFetch({ 24 | baseUrl: 'https://xumm.app/avatar/', 25 | ratelimit: config.maxRequestsPerMinute 26 | }) 27 | 28 | await Promise.all([ 29 | crawlAssets({ 30 | ctx, 31 | fetch: fetchApi, 32 | interval: config.fetchIntervalAssets 33 | }), 34 | crawlKyc({ 35 | ctx, 36 | fetch: fetchApi, 37 | interval: config.fetchIntervalKyc 38 | }), 39 | crawlAvatar({ 40 | ctx, 41 | fetch: fetchAvatar, 42 | interval: config.fetchIntervalAvatar 43 | }) 44 | ]) 45 | } 46 | 47 | async function crawlAssets({ ctx, fetch, interval }){ 48 | while(true){ 49 | await scheduleGlobal({ 50 | ctx, 51 | task: 'xumm.curated', 52 | interval, 53 | routine: async () => { 54 | log.info(`fetching curated asset list...`) 55 | 56 | let tokens = [] 57 | let accounts = [] 58 | 59 | let { data } = await fetch('curated-assets') 60 | 61 | if(!data?.details){ 62 | log.warn(`got malformed XUMM curated asset list:`, data) 63 | throw new Error(`malformed response`) 64 | } 65 | 66 | log.info(`got ${Object.values(data.details).length} curated assets`) 67 | 68 | for(let issuer of Object.values(data.details)){ 69 | if(issuer.info_source.type !== 'native') 70 | continue 71 | 72 | for(let currency of Object.values(issuer.currencies)){ 73 | accounts.push({ 74 | address: currency.issuer, 75 | props: { 76 | name: issuer.name.length > 0 77 | ? issuer.name 78 | : undefined, 79 | domain: issuer.domain, 80 | icon: issuer.avatar, 81 | trust_level: issuer.shortlist ? 3 : 2 82 | } 83 | }) 84 | 85 | tokens.push({ 86 | currency: currency.currency, 87 | issuer: { 88 | address: currency.issuer 89 | }, 90 | props: { 91 | name: currency.name > 0 92 | ? currency.name 93 | : undefined, 94 | icon: currency.avatar, 95 | trust_level: ( 96 | currency.info_source.type === 'native' 97 | ? (currency.shortlist ? 3 : 2) 98 | : 1 99 | ) 100 | } 101 | }) 102 | } 103 | } 104 | 105 | diffMultiAccountProps({ 106 | ctx, 107 | accounts, 108 | source: 'xumm/curated' 109 | }) 110 | 111 | diffMultiTokenProps({ 112 | ctx, 113 | tokens, 114 | source: 'xumm/curated' 115 | }) 116 | 117 | log.info(`updated`, tokens.length, `tokens and`, accounts.length, `issuers`) 118 | } 119 | }) 120 | } 121 | } 122 | 123 | 124 | async function crawlKyc({ ctx, fetch, interval }){ 125 | while(true){ 126 | await scheduleIterator({ 127 | ctx, 128 | type: 'issuer', 129 | task: 'xumm.kyc', 130 | interval, 131 | routine: async ({ id, address }) => { 132 | log.debug(`checking KYC for ${address}`) 133 | 134 | let { data } = await fetch(`kyc-status/${address}`) 135 | 136 | writeAccountProps({ 137 | ctx, 138 | account: { id }, 139 | props: { 140 | kyc: data.kycApproved 141 | }, 142 | source: 'xumm/kyc' 143 | }) 144 | 145 | log.debug(`KYC for ${address}: ${data.kycApproved}`) 146 | 147 | log.accumulate.info({ 148 | text: [`%kycChecked KYC checked in %time`], 149 | data: { 150 | kycChecked: 1 151 | } 152 | }) 153 | } 154 | }) 155 | } 156 | } 157 | 158 | async function crawlAvatar({ ctx, fetch, interval }){ 159 | while(true){ 160 | await scheduleIterator({ 161 | ctx, 162 | type: 'issuer', 163 | task: 'xumm.avatar', 164 | interval, 165 | routine: async ({ id, address }) => { 166 | log.debug(`checking avatar for ${address}`) 167 | 168 | let { headers } = await fetch( 169 | `${address}.png`, 170 | { redirect: 'manual' } 171 | ) 172 | 173 | let avatar = headers.get('location') 174 | ? headers.get('location').split('?')[0] 175 | : undefined 176 | 177 | writeAccountProps({ 178 | ctx, 179 | account: { id }, 180 | props: { 181 | icon: avatar 182 | }, 183 | source: 'xumm/avatar' 184 | }) 185 | 186 | log.debug(`avatar for ${address}: ${avatar}`) 187 | 188 | log.accumulate.info({ 189 | text: [`%avatarsChecked avatars checked in %time`], 190 | data: { 191 | avatarsChecked: 1 192 | } 193 | }) 194 | } 195 | }) 196 | } 197 | } -------------------------------------------------------------------------------- /src/crawl/init.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import crawlers from './crawlers/index.js' 5 | 6 | 7 | export async function startCrawlers({ ctx }){ 8 | if(ctx.config.crawl?.disabled){ 9 | log.warn(`skipping all crawlers (disabled by config)`) 10 | return 11 | } 12 | 13 | for(let { name } of crawlers){ 14 | spawn(':spawnCrawler', { ctx, name }) 15 | } 16 | } 17 | 18 | export async function spawnCrawler({ ctx, name }){ 19 | let { start } = crawlers.find(crawler => crawler.name === name) 20 | let crashed = false 21 | 22 | log.pipe(ctx.log) 23 | 24 | ctx = { 25 | ...ctx, 26 | db: await openDB({ ctx }) 27 | } 28 | 29 | start({ ctx }) 30 | .catch(error => { 31 | log.warn(`skipping crawler [${name}]:`, error.message) 32 | crashed = true 33 | }) 34 | 35 | await Promise.resolve() 36 | 37 | if(!crashed){ 38 | log.info(`started crawler [${name}]`) 39 | }else{ 40 | process.exit() 41 | } 42 | } -------------------------------------------------------------------------------- /src/crawl/schedule.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { unixNow, wait } from '@xrplkit/time' 3 | 4 | 5 | export async function scheduleGlobal({ ctx, task, interval, routine }){ 6 | let duration = 0 7 | let previousOperation = ctx.db.core.operations.readOne({ 8 | where: { 9 | subjectType: 'global', 10 | subjectId: 0, 11 | task 12 | } 13 | }) 14 | 15 | if(previousOperation) 16 | duration = interval - unixNow() + previousOperation.time 17 | 18 | if(duration > 0) 19 | log.debug(`${task}:`, `waiting ${duration} seconds for next operation`) 20 | 21 | await wait(duration * 1000 + 1) 22 | 23 | try{ 24 | await routine() 25 | 26 | ctx.db.core.operations.createOne({ 27 | data: { 28 | subjectType: 'global', 29 | subjectId: 0, 30 | task, 31 | time: unixNow() 32 | } 33 | }) 34 | }catch(error){ 35 | log.warn(`scheduled task "${task}" failed:\n`, error.stack || error.message || error) 36 | await wait(4000) 37 | } 38 | } 39 | 40 | export async function scheduleIterator({ ctx, type, where, include, task, interval, concurrency = 1, routine }){ 41 | let { table, ids } = collectItemIds({ ctx, type, where }) 42 | 43 | log.debug(`${task}:`, ids.length, `items[${table}] to iterate`) 44 | 45 | await Promise.all( 46 | Array(concurrency) 47 | .fill(0) 48 | .map(async () => { 49 | while(ids.length > 0){ 50 | let id = ids.shift() 51 | let item = ctx.db.core[table].readOne({ 52 | where: { 53 | id 54 | }, 55 | include 56 | }) 57 | 58 | let previousOperation = ctx.db.core.operations.readOne({ 59 | where: { 60 | subjectType: type, 61 | subjectId: item.id, 62 | task, 63 | time: { 64 | greaterThan: unixNow() - interval 65 | } 66 | } 67 | }) 68 | 69 | if(previousOperation) 70 | continue 71 | 72 | try{ 73 | await routine(item) 74 | }catch(error){ 75 | log.warn(`scheduled task "${task}" failed for item:\n`, error.stack || error.message || error) 76 | await wait(3000) 77 | } 78 | 79 | ctx.db.core.operations.createOne({ 80 | data: { 81 | subjectType: type, 82 | subjectId: item.id, 83 | task, 84 | time: unixNow() 85 | } 86 | }) 87 | } 88 | }) 89 | ) 90 | 91 | await wait(1) 92 | } 93 | 94 | 95 | export async function scheduleBatchedIterator({ ctx, type, where, include, task, interval, batchSize, accumulate, commit }){ 96 | let queue = [] 97 | let flush = async () => { 98 | let batch = queue.splice(0, batchSize) 99 | 100 | try{ 101 | await commit(batch) 102 | }catch(error){ 103 | log.warn(`scheduled task "${task}" failed for batch:\n`, error.stack || error.message || error) 104 | } 105 | 106 | let time = unixNow() 107 | 108 | for(let { items } of batch){ 109 | for(let item of items){ 110 | ctx.db.core.operations.createOne({ 111 | data: { 112 | subjectType: type, 113 | subjectId: item.id, 114 | task, 115 | time 116 | } 117 | }) 118 | } 119 | } 120 | } 121 | 122 | let { table, ids } = collectItemIds({ ctx, type, where }) 123 | let now = unixNow() 124 | 125 | log.debug(`${task}:`, ids.length, `items[${table}] to iterate`) 126 | 127 | for(let id of ids){ 128 | let item = ctx.db.core[table].readOne({ 129 | where: { id }, 130 | include 131 | }) 132 | 133 | let previousOperation = ctx.db.core.operations.readOne({ 134 | where: { 135 | subjectType: type, 136 | subjectId: item.id, 137 | task, 138 | time: { 139 | greaterThan: now - interval 140 | } 141 | } 142 | }) 143 | 144 | await wait(1) 145 | 146 | if(previousOperation) 147 | continue 148 | 149 | queue = accumulate(queue, item) 150 | 151 | if(queue.length >= batchSize) 152 | await flush() 153 | } 154 | 155 | if(queue.length > 0) 156 | await flush() 157 | 158 | await wait(1) 159 | } 160 | 161 | function collectItemIds({ ctx, type, where }){ 162 | if(type === 'issuer'){ 163 | return { 164 | table: 'accounts', 165 | ids: ctx.db.core.tokens.readMany({ 166 | select: { issuer: true }, 167 | distinct: ['issuer'], 168 | where 169 | }) 170 | .map(row => row.issuer?.id) 171 | .filter(Boolean) 172 | } 173 | }else{ 174 | return { 175 | table: 'tokens', 176 | ids: ctx.db.core.tokens.readMany({ 177 | select: { id: true }, 178 | where 179 | }) 180 | .map(row => row.id) 181 | } 182 | } 183 | } -------------------------------------------------------------------------------- /src/db/codecs/address.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID, decodeAccountID } from 'ripple-address-codec' 2 | 3 | export default { 4 | acceptsFormat: 'xrpl/address', 5 | acceptsNull: true, 6 | returnsType: 'blob', 7 | returnsNull: true, 8 | 9 | encode(data){ 10 | return data ? decodeAccountID(data) : data 11 | }, 12 | 13 | decode(data){ 14 | return data ? encodeAccountID(data) : data 15 | } 16 | } -------------------------------------------------------------------------------- /src/db/codecs/index.js: -------------------------------------------------------------------------------- 1 | import xfl from './xfl.js' 2 | import address from './address.js' 3 | 4 | export default [ 5 | xfl, 6 | address 7 | ] -------------------------------------------------------------------------------- /src/db/codecs/xfl.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { XFL, toSortSafeBigInt } from '@xrplkit/xfl' 3 | 4 | export default { 5 | acceptsFormat: 'xrpl/xfl', 6 | acceptsNull: true, 7 | returnsType: 'bigint', 8 | returnsNull: true, 9 | 10 | encode(data){ 11 | try{ 12 | return data !== null && data !== undefined 13 | ? toSortSafeBigInt(data) 14 | : data 15 | }catch(error){ 16 | log.error(`failed to encode XFL: ${data}`) 17 | log.error(error) 18 | throw error 19 | } 20 | }, 21 | 22 | decode(data){ 23 | return data !== null && data !== undefined 24 | ? XFL.fromSortSafeBigInt(BigInt(data)) 25 | : data 26 | } 27 | } -------------------------------------------------------------------------------- /src/db/helpers/balances.js: -------------------------------------------------------------------------------- 1 | import { eq } from '@xrplkit/xfl' 2 | import { readPoint, writePoint } from './common.js' 3 | 4 | 5 | export function readBalance({ ctx, account, token, ledgerSequence }){ 6 | return readPoint({ 7 | table: ctx.db.core.accountBalances, 8 | selector: { 9 | account, 10 | token 11 | }, 12 | ledgerSequence 13 | }) 14 | ?.balance 15 | } 16 | 17 | export function writeBalance({ ctx, account, token, ledgerSequence, balance }){ 18 | return writePoint({ 19 | table: ctx.db.core.accountBalances, 20 | selector: { 21 | account, 22 | token 23 | }, 24 | ledgerSequence, 25 | backwards: ctx.backwards, 26 | data: !eq(balance, 0) 27 | ? { balance } 28 | : null 29 | }) 30 | } -------------------------------------------------------------------------------- /src/db/helpers/common.js: -------------------------------------------------------------------------------- 1 | const minLedgerSequence = 0 2 | const maxLedgerSequence = 1_000_000_000_000 3 | 4 | 5 | export function readPoint({ table, selector, ledgerSequence, expirable }){ 6 | if(ledgerSequence === undefined){ 7 | return table.readOne({ 8 | where: selector, 9 | orderBy: { 10 | ledgerSequence: 'desc' 11 | } 12 | }) 13 | }else if(expirable){ 14 | return table.readOne({ 15 | where: { 16 | ...selector, 17 | ledgerSequence: { 18 | lessOrEqual: ledgerSequence 19 | }, 20 | lastLedgerSequence: { 21 | greaterOrEqual: ledgerSequence 22 | } 23 | } 24 | }) 25 | }else{ 26 | return table.readOne({ 27 | where: { 28 | ...selector, 29 | ledgerSequence: { 30 | lessOrEqual: ledgerSequence 31 | } 32 | }, 33 | orderBy: { 34 | ledgerSequence: 'desc' 35 | } 36 | }) 37 | } 38 | } 39 | 40 | export function writePoint({ table, selector, ledgerSequence, backwards, data, expirable }){ 41 | let headSequenceKey = 'ledgerSequence' 42 | let tailSequenceKey = 'ledgerSequence' 43 | 44 | let expirySequence = backwards 45 | ? ledgerSequence + 1 46 | : ledgerSequence - 1 47 | 48 | let offboundSequence = backwards 49 | ? minLedgerSequence 50 | : maxLedgerSequence 51 | 52 | if(expirable){ 53 | if(backwards) 54 | headSequenceKey = 'lastLedgerSequence' 55 | else 56 | tailSequenceKey = 'lastLedgerSequence' 57 | } 58 | 59 | let point = readPoint({ 60 | table, 61 | selector, 62 | ledgerSequence, 63 | expirable 64 | }) 65 | 66 | if(point){ 67 | let override = point[headSequenceKey] === ledgerSequence 68 | 69 | if(data){ 70 | let changes = {} 71 | 72 | for(let [key, value] of Object.entries(data)){ 73 | let a = value != null ? value.toString() : value 74 | let b = point[key] != null ? point[key].toString() : point[key] 75 | 76 | if(a != b){ 77 | changes[key] = value 78 | } 79 | } 80 | 81 | if(Object.keys(changes).length === 0) 82 | return 83 | 84 | if(override){ 85 | return table.updateOne({ 86 | data: changes, 87 | where: { 88 | id: point.id 89 | } 90 | }) 91 | } 92 | }else{ 93 | if(override){ 94 | return table.deleteOne({ 95 | where: { 96 | id: point.id 97 | } 98 | }) 99 | } 100 | } 101 | 102 | if(expirable || backwards){ 103 | table.createOne({ 104 | data: { 105 | ...point, 106 | id: undefined, 107 | [tailSequenceKey]: expirySequence 108 | } 109 | }) 110 | } 111 | } 112 | 113 | if(!data && expirable) 114 | return 115 | 116 | if(!data && !expirable && !point) 117 | return 118 | 119 | return table.createOne({ 120 | data: { 121 | ...selector, 122 | ...( 123 | expirable || backwards 124 | ? { 125 | [headSequenceKey]: ledgerSequence, 126 | [tailSequenceKey]: point 127 | ? point[tailSequenceKey] 128 | : offboundSequence 129 | } 130 | : { 131 | [headSequenceKey]: ledgerSequence 132 | } 133 | ), 134 | ...data 135 | } 136 | }) 137 | } 138 | 139 | export function getAccountId({ ctx, account }){ 140 | if(account.id) 141 | return account.id 142 | 143 | return ctx.db.core.accounts.readOne({ 144 | where: account, 145 | select: { 146 | id: true 147 | } 148 | }).id 149 | } 150 | 151 | export function getTokenId({ ctx, token }){ 152 | if(token.id) 153 | return token.id 154 | 155 | return ctx.db.core.tokens.readOne({ 156 | where: token, 157 | select: { 158 | id: true 159 | } 160 | }).id 161 | } -------------------------------------------------------------------------------- /src/db/helpers/heads.js: -------------------------------------------------------------------------------- 1 | const relevantTables = [ 2 | 'accountBalances', 3 | 'tokenExchanges', 4 | 'tokenSupply', 5 | 'tokenOffers' 6 | ] 7 | 8 | 9 | export function readTableHeads({ ctx }){ 10 | return relevantTables.reduce( 11 | (heads, table) => ({ 12 | ...heads, 13 | [table]: ctx.db.core[table].readOne({ 14 | orderBy: { 15 | id: 'desc' 16 | } 17 | })?.id || 0 18 | }), 19 | {} 20 | ) 21 | } 22 | 23 | export function pullNewItems({ ctx, previousHeads }){ 24 | return relevantTables.reduce( 25 | (heads, table) => ({ 26 | ...heads, 27 | [table]: ctx.db.core[table].readMany({ 28 | where: { 29 | id: { 30 | greaterThan: previousHeads[table] 31 | } 32 | } 33 | }) 34 | }), 35 | {} 36 | ) 37 | } -------------------------------------------------------------------------------- /src/db/helpers/ledgers.js: -------------------------------------------------------------------------------- 1 | export function getAvailableRange({ ctx }){ 2 | let start = ctx.db.core.ledgers.readOne({ 3 | orderBy: { 4 | sequence: 'asc' 5 | } 6 | }) 7 | 8 | let end = ctx.db.core.ledgers.readOne({ 9 | orderBy: { 10 | sequence: 'desc' 11 | } 12 | }) 13 | 14 | return { 15 | sequence: { 16 | start: start.sequence, 17 | end: end.sequence 18 | }, 19 | time: { 20 | start: start.closeTime, 21 | end: end.closeTime 22 | } 23 | } 24 | } 25 | 26 | 27 | export function readMostRecentLedger({ ctx }){ 28 | return ctx.db.core.ledgers.readOne({ 29 | orderBy: { 30 | sequence: 'desc' 31 | } 32 | }) 33 | } 34 | 35 | 36 | export function readLedgerAt({ ctx, sequence, time, clamp, include }){ 37 | let key = sequence !== undefined 38 | ? 'sequence' 39 | : 'closeTime' 40 | 41 | let point = sequence !== undefined 42 | ? sequence 43 | : time 44 | 45 | let ledger = ctx.db.core.ledgers.readOne({ 46 | where: { 47 | [key]: { 48 | lessOrEqual: point 49 | } 50 | }, 51 | orderBy: { 52 | [key]: 'desc' 53 | }, 54 | include 55 | }) 56 | 57 | if(!ledger && clamp){ 58 | ledger = ctx.db.core.ledgers.readOne({ 59 | where: { 60 | [key]: { 61 | greaterThan: point 62 | } 63 | }, 64 | orderBy: { 65 | [key]: 'asc' 66 | }, 67 | include 68 | }) 69 | } 70 | 71 | return ledger 72 | } 73 | -------------------------------------------------------------------------------- /src/db/helpers/nftoffers.js: -------------------------------------------------------------------------------- 1 | import { writePoint } from './common.js' 2 | 3 | 4 | export function writeNFTokenOffer({ ctx, offerId, ledgerSequence, ...data }){ 5 | return writePoint({ 6 | table: ctx.db.core.nftOffers, 7 | selector: { 8 | offerId, 9 | }, 10 | ledgerSequence, 11 | backwards: ctx.backwards, 12 | data, 13 | expirable: true 14 | }) 15 | } 16 | 17 | export function expireNFTokenOffer({ ctx, offerId, ledgerSequence }){ 18 | return writePoint({ 19 | table: ctx.db.core.nftOffers, 20 | selector: { 21 | offerId, 22 | }, 23 | ledgerSequence, 24 | backwards: ctx.backwards, 25 | data: null, 26 | expirable: true 27 | }) 28 | } -------------------------------------------------------------------------------- /src/db/helpers/props.js: -------------------------------------------------------------------------------- 1 | import { isSameToken } from '@xrplkit/tokens' 2 | import { readTokenMetrics } from './tokenmetrics.js' 3 | import { 4 | markCacheDirtyForAccountIcons, 5 | markCacheDirtyForAccountProps, 6 | markCacheDirtyForTokenIcons, 7 | markCacheDirtyForTokenProps 8 | } from '../../cache/todo.js' 9 | 10 | 11 | 12 | export function diffMultiTokenProps({ ctx, tokens, source }){ 13 | let propIds = [] 14 | 15 | for(let { currency, issuer, props } of tokens){ 16 | writeTokenProps({ 17 | ctx, 18 | token: { 19 | currency, 20 | issuer 21 | }, 22 | props, 23 | source 24 | }) 25 | 26 | for(let key of Object.keys(props)){ 27 | let prop = ctx.db.core.tokenProps.readOne({ 28 | where: { 29 | token: { 30 | currency, 31 | issuer 32 | }, 33 | key, 34 | source 35 | } 36 | }) 37 | 38 | if(prop) 39 | propIds.push(prop.id) 40 | } 41 | } 42 | 43 | let staleProps = ctx.db.core.tokenProps.readMany({ 44 | where: { 45 | NOT: { 46 | id: { 47 | in: propIds 48 | } 49 | }, 50 | source 51 | }, 52 | include: { 53 | token: true 54 | } 55 | }) 56 | 57 | ctx.db.core.tokenProps.deleteMany({ 58 | where: { 59 | id: { 60 | in: staleProps.map( 61 | ({ id }) => id 62 | ) 63 | } 64 | } 65 | }) 66 | 67 | let deletionAffectedTokens = staleProps 68 | .map(({ token }) => token) 69 | .filter( 70 | (token, index, tokens) => index === tokens.findIndex( 71 | ({ currency, issuer }) => isSameToken(token, { currency, issuer }) 72 | ) 73 | ) 74 | 75 | for(let token of deletionAffectedTokens){ 76 | markCacheDirtyForTokenProps({ ctx, token }) 77 | } 78 | } 79 | 80 | export function diffMultiAccountProps({ ctx, accounts, source }){ 81 | let propIds = [] 82 | 83 | for(let { address, props } of accounts){ 84 | writeAccountProps({ 85 | ctx, 86 | account: { 87 | address 88 | }, 89 | props, 90 | source 91 | }) 92 | 93 | for(let key of Object.keys(props)){ 94 | let prop = ctx.db.core.accountProps.readOne({ 95 | where: { 96 | account: { 97 | address 98 | }, 99 | key, 100 | source 101 | } 102 | }) 103 | 104 | if(prop) 105 | propIds.push(prop.id) 106 | } 107 | } 108 | 109 | let staleProps = ctx.db.core.accountProps.readMany({ 110 | where: { 111 | NOT: { 112 | id: { 113 | in: propIds 114 | } 115 | }, 116 | source 117 | }, 118 | include: { 119 | account: true 120 | } 121 | }) 122 | 123 | ctx.db.core.accountProps.deleteMany({ 124 | where: { 125 | id: { 126 | in: staleProps.map( 127 | ({ id }) => id 128 | ) 129 | } 130 | } 131 | }) 132 | 133 | let deletionAffectedAccounts = staleProps 134 | .map(({ account }) => account) 135 | .filter( 136 | (account, index, accounts) => index === accounts.findIndex( 137 | ({ address }) => address === account.address 138 | ) 139 | ) 140 | 141 | for(let account of deletionAffectedAccounts){ 142 | markCacheDirtyForAccountProps({ ctx, account }) 143 | } 144 | } 145 | 146 | 147 | export function readTokenProps({ ctx, token }){ 148 | let props = ctx.db.core.tokenProps.readMany({ 149 | where: { 150 | token 151 | } 152 | }) 153 | 154 | let issuerGivenTrustLevelProps = [] 155 | let issuerProps = readAccountProps({ 156 | ctx, 157 | account: token.issuer 158 | ? token.issuer 159 | : ctx.db.core.tokens.readOne({ where: token }).issuer 160 | }) 161 | 162 | for(let { key, value, source } of issuerProps){ 163 | if(key !== 'trust_level') 164 | continue 165 | 166 | let existingTrustProp = props.find( 167 | prop => prop.key === 'trust_level' && prop.source === source 168 | ) 169 | 170 | if(existingTrustProp){ 171 | existingTrustProp.value = Math.max(existingTrustProp.value, 1) 172 | }else{ 173 | issuerGivenTrustLevelProps.push({ 174 | key: 'trust_level', 175 | value, 176 | source 177 | }) 178 | } 179 | } 180 | 181 | if(issuerGivenTrustLevelProps.length > 0){ 182 | let { holders } = readTokenMetrics({ 183 | ctx, 184 | token, 185 | metrics: { 186 | holders: true 187 | } 188 | }) 189 | 190 | if(holders > 0){ 191 | props.push(...issuerGivenTrustLevelProps) 192 | } 193 | } 194 | 195 | return props.map(({ key, value, source }) => ({ key, value, source })) 196 | } 197 | 198 | export function writeTokenProps({ ctx, token, props, source }){ 199 | if(Object.keys(props).length === 0) 200 | return 201 | 202 | ctx.db.core.tx(() => { 203 | for(let [key, value] of Object.entries(props)){ 204 | if(value == null){ 205 | ctx.db.core.tokenProps.deleteOne({ 206 | where: { 207 | token, 208 | key, 209 | source 210 | } 211 | }) 212 | }else{ 213 | ctx.db.core.tokenProps.createOne({ 214 | data: { 215 | token, 216 | key, 217 | value, 218 | source 219 | } 220 | }) 221 | } 222 | } 223 | }) 224 | 225 | markCacheDirtyForTokenProps({ ctx, token }) 226 | 227 | if(props.hasOwnProperty('icon')) 228 | markCacheDirtyForTokenIcons({ ctx, token }) 229 | } 230 | 231 | 232 | export function readAccountProps({ ctx, account }){ 233 | let props = ctx.db.core.accountProps.readMany({ 234 | where: { 235 | account 236 | } 237 | }) 238 | 239 | let kycProps = props.filter( 240 | prop => prop.key === 'kyc' && prop.value === true 241 | ) 242 | 243 | for(let { source } of kycProps){ 244 | let trustProp = props.find( 245 | prop => prop.key === 'trust_level' && prop.source === source 246 | ) 247 | 248 | if(trustProp){ 249 | trustProp.value = Math.max(trustProp.value, 1) 250 | }else{ 251 | props.push({ 252 | key: 'trust_level', 253 | value: 1, 254 | source 255 | }) 256 | } 257 | } 258 | 259 | let { domain } = ctx.db.core.accounts.readOne({ 260 | where: account, 261 | select: { 262 | domain: true 263 | } 264 | }) 265 | 266 | if(domain) 267 | props.push({ 268 | key: 'domain', 269 | value: domain, 270 | source: 'ledger' 271 | }) 272 | 273 | 274 | return props.map(({ key, value, source }) => ({ key, value, source })) 275 | } 276 | 277 | export function writeAccountProps({ ctx, account, props, source }){ 278 | ctx.db.core.tx(() => { 279 | for(let [key, value] of Object.entries(props)){ 280 | if(value == null){ 281 | ctx.db.core.accountProps.deleteOne({ 282 | where: { 283 | account, 284 | key, 285 | source 286 | } 287 | }) 288 | }else{ 289 | ctx.db.core.accountProps.createOne({ 290 | data: { 291 | account, 292 | key, 293 | value, 294 | source 295 | } 296 | }) 297 | } 298 | } 299 | }) 300 | 301 | markCacheDirtyForAccountProps({ ctx, account }) 302 | 303 | if(props.hasOwnProperty('icon')) 304 | markCacheDirtyForAccountIcons({ ctx, account }) 305 | } 306 | 307 | 308 | export function clearTokenProps({ ctx, token, source }){ 309 | let deletedNum = ctx.db.core.tokenProps.deleteMany({ 310 | where: { 311 | token, 312 | source 313 | } 314 | }) 315 | 316 | if(deletedNum > 0){ 317 | markCacheDirtyForTokenProps({ ctx, token }) 318 | markCacheDirtyForTokenIcons({ ctx, token }) 319 | } 320 | } 321 | 322 | export function clearAccountProps({ ctx, account, source }){ 323 | let deletedNum = ctx.db.core.accountProps.deleteMany({ 324 | where: { 325 | account, 326 | source 327 | } 328 | }) 329 | 330 | if(deletedNum > 0){ 331 | markCacheDirtyForAccountProps({ ctx, account }) 332 | markCacheDirtyForAccountIcons({ ctx, account }) 333 | } 334 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenexchanges.js: -------------------------------------------------------------------------------- 1 | import { XFL, sum, div, gt } from '@xrplkit/xfl' 2 | 3 | const dustValueXRP = '0.0001' 4 | 5 | export function readTokenExchangesAligned({ 6 | ctx, 7 | base, 8 | quote, 9 | sequenceStart, 10 | sequenceEnd, 11 | limit, 12 | newestFirst, 13 | include, 14 | skipDust 15 | }){ 16 | return ctx.db.core.tokenExchanges.readMany({ 17 | where: { 18 | ...composeBaseQuoteWhere({ base, quote, skipDust }), 19 | AND: [ 20 | { 21 | ledgerSequence: { 22 | greaterOrEqual: sequenceStart 23 | } 24 | }, 25 | { 26 | ledgerSequence: { 27 | lessOrEqual: sequenceEnd 28 | } 29 | } 30 | ] 31 | }, 32 | orderBy: { 33 | ledgerSequence: newestFirst ? 'desc' : 'asc' 34 | }, 35 | include: { 36 | ...include, 37 | takerPaidToken: { 38 | issuer: true 39 | }, 40 | takerGotToken: { 41 | issuer: true 42 | } 43 | }, 44 | take: limit 45 | }) 46 | .map(exchange => alignTokenExchange({ exchange, base, quote })) 47 | } 48 | 49 | export function readTokenExchangeAligned({ ctx, base, quote, ledgerSequence, skipDust }){ 50 | let exchange = ctx.db.core.tokenExchanges.readOne({ 51 | where: { 52 | ...composeBaseQuoteWhere({ base, quote, skipDust }), 53 | ledgerSequence: { 54 | lessOrEqual: ledgerSequence 55 | } 56 | }, 57 | orderBy: { 58 | ledgerSequence: 'desc' 59 | }, 60 | include: { 61 | takerPaidToken: { 62 | issuer: true 63 | }, 64 | takerGotToken: { 65 | issuer: true 66 | } 67 | } 68 | }) 69 | 70 | if(!exchange) 71 | return 72 | 73 | return alignTokenExchange({ exchange, base, quote }) 74 | } 75 | 76 | export function readTokenVolume({ ctx, base, quote, sequenceStart, sequenceEnd }){ 77 | let volume = XFL(0) 78 | 79 | for(let counter of [false, true]){ 80 | let sumKey = counter 81 | ? 'takerPaidValue' 82 | : 'takerGotValue' 83 | 84 | let aggregate = ctx.db.core.tokenExchanges.readOne({ 85 | select: { 86 | [sumKey]: { 87 | function: 'XFL_SUM' 88 | }, 89 | id: { 90 | function: 'COUNT' 91 | } 92 | }, 93 | where: { 94 | AND: [ 95 | { 96 | takerPaidToken: counter ? quote : base, 97 | takerGotToken: counter ? base : quote 98 | }, 99 | { 100 | ledgerSequence: { 101 | greaterOrEqual: sequenceStart 102 | } 103 | }, 104 | { 105 | ledgerSequence: { 106 | lessOrEqual: sequenceEnd 107 | } 108 | } 109 | ] 110 | } 111 | }) 112 | 113 | volume = sum(volume, aggregate[sumKey]) 114 | } 115 | 116 | return volume 117 | } 118 | 119 | export function readTokenExchangeCount({ ctx, base, quote, sequenceStart, sequenceEnd }){ 120 | return ctx.db.core.tokenExchanges.count({ 121 | where: { 122 | OR: [ 123 | { 124 | takerPaidToken: base, 125 | takerGotToken: quote 126 | }, 127 | { 128 | takerPaidToken: quote, 129 | takerGotToken: base 130 | } 131 | ], 132 | AND: [ 133 | { 134 | ledgerSequence: { 135 | greaterOrEqual: sequenceStart 136 | } 137 | }, 138 | { 139 | ledgerSequence: { 140 | lessOrEqual: sequenceEnd 141 | } 142 | } 143 | ] 144 | } 145 | }) 146 | } 147 | 148 | export function readTokenExchangeUniqueTakerCount({ ctx, base, quote, sequenceStart, sequenceEnd }){ 149 | return ctx.db.core.tokenExchanges.count({ 150 | distinct: ['taker'], 151 | where: { 152 | OR: [ 153 | { 154 | takerPaidToken: base, 155 | takerGotToken: quote 156 | }, 157 | { 158 | takerPaidToken: quote, 159 | takerGotToken: base 160 | } 161 | ], 162 | AND: [ 163 | { 164 | ledgerSequence: { 165 | greaterOrEqual: sequenceStart 166 | } 167 | }, 168 | { 169 | ledgerSequence: { 170 | lessOrEqual: sequenceEnd 171 | } 172 | } 173 | ] 174 | } 175 | }) 176 | } 177 | 178 | export function alignTokenExchange({ exchange, base, quote }){ 179 | let { takerPaidToken, takerGotToken, takerPaidValue, takerGotValue, ...props } = exchange 180 | let takerPaidIsBase = false 181 | let takerGotIsBase = false 182 | let takerPaidIsQuote = false 183 | let takerGotIsQuote = false 184 | 185 | if(base?.currency === 'XRP') 186 | base.id = 1 187 | 188 | if(quote?.currency === 'XRP') 189 | quote.id = 1 190 | 191 | if(base){ 192 | takerPaidIsBase = ( 193 | takerPaidToken.id === base.id 194 | || ( 195 | takerPaidToken.currency === base.currency 196 | && takerPaidToken.issuer?.address == base.issuer?.address 197 | ) 198 | ) 199 | 200 | takerGotIsBase = ( 201 | takerGotToken.id === base.id 202 | || ( 203 | takerGotToken.currency === base.currency 204 | && takerGotToken.issuer?.address == base.issuer?.address 205 | ) 206 | ) 207 | } 208 | 209 | if(quote){ 210 | takerPaidIsQuote = ( 211 | takerPaidToken.id === quote.id 212 | || ( 213 | takerPaidToken.currency === quote.currency 214 | && takerPaidToken.issuer?.address == quote.issuer?.address 215 | ) 216 | ) 217 | 218 | takerGotIsQuote = ( 219 | takerGotToken.id === quote.id 220 | || ( 221 | takerGotToken.currency === quote.currency 222 | && takerGotToken.issuer?.address == quote.issuer?.address 223 | ) 224 | ) 225 | } 226 | 227 | if(takerPaidIsBase || takerGotIsQuote){ 228 | return { 229 | ...props, 230 | base: exchange.takerPaidToken, 231 | quote: exchange.takerGotToken, 232 | price: gt(takerPaidValue, 0) 233 | ? div(takerGotValue, takerPaidValue) 234 | : XFL(0), 235 | volume: takerGotValue 236 | } 237 | } 238 | else if(takerPaidIsQuote || takerGotIsBase) 239 | { 240 | return { 241 | ...props, 242 | base: exchange.takerGotToken, 243 | quote: exchange.takerPaidToken, 244 | price: gt(takerGotValue, 0) 245 | ? div(takerPaidValue, takerGotValue) 246 | : XFL(0), 247 | volume: takerPaidValue 248 | } 249 | } 250 | else 251 | { 252 | throw new Error(`cannot align exchange: base/quote does not match`) 253 | } 254 | } 255 | 256 | 257 | 258 | export function readTokenExchangeIntervalSeries({ ctx, base, quote, sequence, time }){ 259 | if(time){ 260 | var exchanges = ctx.db.core.tokenExchanges.readManyRaw({ 261 | query: 262 | `SELECT MAX(Ledger.closeTime) as time, takerPaidToken, takerGotToken, takerPaidValue, takerGotValue 263 | FROM TokenExchange 264 | LEFT JOIN Ledger ON (Ledger.sequence = ledgerSequence) 265 | WHERE ( 266 | (takerPaidToken = ? AND takerGotToken = ?) 267 | OR 268 | (takerGotToken = ? AND takerPaidToken = ?) 269 | ) 270 | AND 271 | ( 272 | (Ledger.closeTime >= ? AND Ledger.closeTime <= ?) 273 | OR 274 | ( 275 | ledgerSequence = ( 276 | SELECT ledgerSequence 277 | FROM TokenExchange 278 | WHERE ( 279 | (takerPaidToken = ? AND takerGotToken = ?) 280 | OR 281 | (takerGotToken = ? AND takerPaidToken = ?) 282 | ) 283 | AND ledgerSequence < ? 284 | ORDER BY ledgerSequence DESC 285 | LIMIT 1 286 | ) 287 | ) 288 | ) 289 | GROUP BY Ledger.closeTime / CAST(? as INTEGER) 290 | ORDER BY Ledger.closeTime ASC`, 291 | params: [ 292 | base.id, 293 | quote.id, 294 | quote.id, 295 | base.id, 296 | time.start, 297 | time.end, 298 | base.id, 299 | quote.id, 300 | quote.id, 301 | base.id, 302 | sequence.start, 303 | time.interval, 304 | ] 305 | }) 306 | }else{ 307 | var exchanges = ctx.db.core.tokenExchanges.readManyRaw({ 308 | query: 309 | `SELECT MAX(ledgerSequence) as sequence, takerPaidToken, takerGotToken, takerPaidValue, takerGotValue 310 | FROM TokenExchange 311 | WHERE ( 312 | (takerPaidToken = ? AND takerGotToken = ?) 313 | OR 314 | (takerGotToken = ? AND takerPaidToken = ?) 315 | ) 316 | AND ( 317 | (ledgerSequence >= ? AND ledgerSequence <= ?) 318 | OR 319 | ( 320 | ledgerSequence = ( 321 | SELECT ledgerSequence 322 | FROM TokenExchange 323 | WHERE ( 324 | (takerPaidToken = ? AND takerGotToken = ?) 325 | OR 326 | (takerGotToken = ? AND takerPaidToken = ?) 327 | ) 328 | AND ledgerSequence < ? 329 | ORDER BY ledgerSequence DESC 330 | LIMIT 1 331 | ) 332 | ) 333 | ) 334 | GROUP BY ledgerSequence / CAST(? as INTEGER) 335 | ORDER BY ledgerSequence ASC`, 336 | params: [ 337 | base.id, 338 | quote.id, 339 | quote.id, 340 | base.id, 341 | sequence.start, 342 | sequence.end, 343 | base.id, 344 | quote.id, 345 | quote.id, 346 | base.id, 347 | sequence.start, 348 | sequence.interval, 349 | ] 350 | }) 351 | } 352 | 353 | return exchanges.map( 354 | ({ takerPaidToken, takerGotToken, takerPaidValue, takerGotValue, ...props }) => { 355 | if(takerPaidToken === base.id){ 356 | return { 357 | ...props, 358 | price: div(takerGotValue, takerPaidValue), 359 | volume: takerPaidValue 360 | } 361 | }else{ 362 | return { 363 | ...props, 364 | price: div(takerPaidValue, takerGotValue), 365 | volume: takerGotValue 366 | } 367 | } 368 | } 369 | ) 370 | } 371 | 372 | function composeBaseQuoteWhere({ base, quote, skipDust }){ 373 | let takerGotBaseCondition = { 374 | takerPaidToken: quote, 375 | takerGotToken: base 376 | } 377 | 378 | let takerGotQuoteCondition = { 379 | takerPaidToken: base, 380 | takerGotToken: quote 381 | } 382 | 383 | if(skipDust){ 384 | if(base.currency === 'XRP'){ 385 | takerGotBaseCondition.takerGotValue = { 386 | greaterOrEqual: dustValueXRP 387 | } 388 | 389 | takerGotQuoteCondition.takerPaidValue = { 390 | greaterOrEqual: dustValueXRP 391 | } 392 | }else if(quote.currency === 'XRP'){ 393 | takerGotBaseCondition.takerPaidValue = { 394 | greaterOrEqual: dustValueXRP 395 | } 396 | 397 | takerGotQuoteCondition.takerGotValue = { 398 | greaterOrEqual: dustValueXRP 399 | } 400 | } 401 | } 402 | 403 | return { 404 | OR: [takerGotBaseCondition, takerGotQuoteCondition] 405 | } 406 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenmetrics.js: -------------------------------------------------------------------------------- 1 | import { readPoint, writePoint } from './common.js' 2 | import { markCacheDirtyForTokenMetrics } from '../../cache/todo.js' 3 | 4 | 5 | const metricTables = { 6 | trustlines: 'tokenTrustlines', 7 | holders: 'tokenHolders', 8 | supply: 'tokenSupply', 9 | marketcap: 'tokenMarketcap' 10 | } 11 | 12 | 13 | export function writeTokenMetrics({ ctx, token, ledgerSequence, metrics, updateCache = true }){ 14 | for(let [key, value] of Object.entries(metrics)){ 15 | writePoint({ 16 | table: ctx.db.core[metricTables[key]], 17 | selector: { 18 | token 19 | }, 20 | ledgerSequence, 21 | backwards: ctx.backwards, 22 | data: value.toString() !== '0' 23 | ? { value } 24 | : null 25 | }) 26 | } 27 | 28 | if(updateCache) 29 | markCacheDirtyForTokenMetrics({ ctx, token, metrics }) 30 | } 31 | 32 | 33 | export function readTokenMetrics({ ctx, token, ledgerSequence, metrics }){ 34 | let point = {} 35 | 36 | for(let key of Object.keys(metrics)){ 37 | let entry = readPoint({ 38 | table: ctx.db.core[metricTables[key]], 39 | selector: { 40 | token 41 | }, 42 | ledgerSequence 43 | }) 44 | 45 | if(entry){ 46 | point[key] = entry.value 47 | } 48 | } 49 | 50 | return point 51 | } 52 | 53 | 54 | 55 | export function readTokenMetricSeries({ ctx, token, metric, sequenceStart, sequenceEnd }){ 56 | return ctx.db.core[metricTables[metric]].readMany({ 57 | where: { 58 | token, 59 | ledgerSequence: { 60 | greaterOrEqual: sequenceStart 61 | }, 62 | ...( 63 | sequenceEnd 64 | ? { 65 | ledgerSequence: { 66 | lessOrEqual: sequenceEnd 67 | } 68 | } 69 | : {} 70 | ) 71 | } 72 | }) 73 | } 74 | 75 | 76 | 77 | export function readTokenMetricIntervalSeries({ ctx, token, metric, sequence, time }){ 78 | let table = metricTables[metric] 79 | 80 | if(time){ 81 | return ctx.db.core[table].readManyRaw({ 82 | query: 83 | `SELECT MAX(Ledger.closeTime) as time, value 84 | FROM ${table} 85 | LEFT JOIN Ledger ON (Ledger.sequence = ledgerSequence) 86 | WHERE token = ? 87 | AND ( 88 | (Ledger.closeTime >= ? AND Ledger.closeTime <= ?) 89 | OR 90 | ( 91 | ledgerSequence = ( 92 | SELECT ledgerSequence 93 | FROM ${table} 94 | WHERE token = ? 95 | AND ledgerSequence < ? 96 | ORDER BY ledgerSequence DESC 97 | LIMIT 1 98 | ) 99 | ) 100 | ) 101 | GROUP BY Ledger.closeTime / CAST(? as INTEGER) 102 | ORDER BY Ledger.closeTime ASC`, 103 | params: [ 104 | token.id, 105 | time.start, 106 | time.end, 107 | token.id, 108 | sequence.start, 109 | time.interval, 110 | ] 111 | }) 112 | }else{ 113 | return ctx.db.core[table].readManyRaw({ 114 | query: 115 | `SELECT MAX(ledgerSequence) as sequence, value 116 | FROM ${table} 117 | WHERE token = ? 118 | AND ( 119 | (ledgerSequence >= ? AND ledgerSequence <= ?) 120 | OR 121 | ( 122 | ledgerSequence = ( 123 | SELECT ledgerSequence 124 | FROM ${table} 125 | WHERE token = ? 126 | AND ledgerSequence < ? 127 | ORDER BY ledgerSequence DESC 128 | LIMIT 1 129 | ) 130 | ) 131 | ) 132 | GROUP BY ledgerSequence / CAST(? as INTEGER) 133 | ORDER BY ledgerSequence ASC`, 134 | params: [ 135 | token.id, 136 | sequence.start, 137 | sequence.end, 138 | token.id, 139 | sequence.start, 140 | sequence.interval, 141 | ] 142 | }) 143 | } 144 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenoffers.js: -------------------------------------------------------------------------------- 1 | import { writePoint } from './common.js' 2 | 3 | 4 | export function writeTokenOffer({ ctx, account, accountSequence, ledgerSequence, book, quality, size }){ 5 | return writePoint({ 6 | table: ctx.db.core.tokenOffers, 7 | selector: { 8 | account, 9 | accountSequence, 10 | book, 11 | }, 12 | ledgerSequence, 13 | backwards: ctx.backwards, 14 | data: { 15 | quality, 16 | size 17 | }, 18 | expirable: true 19 | }) 20 | } 21 | 22 | export function expireTokenOffer({ ctx, account, accountSequence, ledgerSequence }){ 23 | return writePoint({ 24 | table: ctx.db.core.tokenOffers, 25 | selector: { 26 | account, 27 | accountSequence 28 | }, 29 | ledgerSequence, 30 | backwards: ctx.backwards, 31 | data: null, 32 | expirable: true 33 | }) 34 | } 35 | 36 | export function readOffersBy({ ctx, account, book, ledgerSequence }){ 37 | return ctx.db.core.tokenOffers.readMany({ 38 | where: { 39 | account, 40 | book, 41 | ledgerSequence: { 42 | lessOrEqual: ledgerSequence 43 | }, 44 | lastLedgerSequence: { 45 | greaterOrEqual: ledgerSequence 46 | } 47 | }, 48 | include: { 49 | book: true 50 | } 51 | }) 52 | } -------------------------------------------------------------------------------- /src/db/index.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import { fileURLToPath } from 'url' 4 | import createStructDB from '@structdb/sqlite' 5 | import codecs from './codecs/index.js' 6 | 7 | 8 | const __filename = fileURLToPath(import.meta.url) 9 | const __dirname = path.dirname(__filename) 10 | 11 | 12 | export async function openDB({ ctx, coreReadOnly=false, inMemory=false }){ 13 | return { 14 | core: await openCoreDB({ 15 | ctx, 16 | readOnly: coreReadOnly, 17 | inMemory 18 | }), 19 | cache: await openCacheDB({ 20 | ctx, 21 | inMemory 22 | }) 23 | } 24 | } 25 | 26 | export async function openCoreDB({ ctx, readOnly=false, inMemory=false }){ 27 | let db = await createStructDB({ 28 | file: inMemory 29 | ? ':memory:' 30 | : `${ctx.config.node.dataDir}/core.db`, 31 | schema: JSON.parse( 32 | fs.readFileSync( 33 | path.join(__dirname, 'schemas/core.json') 34 | ) 35 | ), 36 | journalMode: 'WAL', 37 | timeout: 600000, 38 | debug: ctx.config.debug?.queries, 39 | codecs, 40 | readOnly 41 | }) 42 | 43 | db.loadExtension( 44 | path.join( 45 | __dirname, 46 | '..', 47 | '..', 48 | 'deps', 49 | 'build', 50 | 'Release', 51 | 'sqlite-xfl.node' 52 | ) 53 | ) 54 | 55 | db.tokens.createOne({ 56 | data: { 57 | currency: 'XRP', 58 | issuer: null 59 | } 60 | }) 61 | 62 | return db 63 | } 64 | 65 | export async function openCacheDB({ ctx, inMemory=false }){ 66 | return await createStructDB({ 67 | file: inMemory 68 | ? ':memory:' 69 | : `${ctx.config.node.dataDir}/cache.db`, 70 | schema: JSON.parse( 71 | fs.readFileSync( 72 | path.join(__dirname, 'schemas/cache.json') 73 | ) 74 | ), 75 | journalMode: 'WAL', 76 | debug: ctx.config.debug?.queries, 77 | codecs 78 | }) 79 | } -------------------------------------------------------------------------------- /src/db/schemas/cache.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "XRPL Meta Cache Database Structure", 3 | "type": "object", 4 | "properties": { 5 | "tokens": { 6 | "type": "array", 7 | "items": { 8 | "$ref": "#/definitions/Token" 9 | } 10 | }, 11 | "icons": { 12 | "type": "array", 13 | "items": { 14 | "$ref": "#/definitions/Icon" 15 | } 16 | }, 17 | "iconUsers": { 18 | "type": "array", 19 | "items": { 20 | "$ref": "#/definitions/IconUser" 21 | } 22 | }, 23 | "todos": { 24 | "type": "array", 25 | "items": { 26 | "$ref": "#/definitions/Todo" 27 | } 28 | } 29 | }, 30 | "definitions": { 31 | "Token": { 32 | "type": "object", 33 | "properties": { 34 | "id": { 35 | "type": "integer", 36 | "id": true 37 | }, 38 | "token": { 39 | "type": "integer" 40 | }, 41 | "tokenCurrencyHex": { 42 | "type": "string" 43 | }, 44 | "tokenCurrencyUtf8": { 45 | "type": "string" 46 | }, 47 | "tokenName": { 48 | "type": "string" 49 | }, 50 | "tokenProps": { 51 | "type": "any" 52 | }, 53 | "issuerAddress": { 54 | "type": "string" 55 | }, 56 | "issuerName": { 57 | "type": "string" 58 | }, 59 | "issuerProps": { 60 | "type": "any" 61 | }, 62 | "cachedIcons": { 63 | "type": "any" 64 | }, 65 | "trustLevel": { 66 | "type": "integer", 67 | "default": 0 68 | }, 69 | "trustlines": { 70 | "type": "integer", 71 | "default": 0 72 | }, 73 | "trustlinesDelta24H": { 74 | "type": "integer", 75 | "default": 0 76 | }, 77 | "trustlinesPercent24H": { 78 | "type": "number", 79 | "default": 0 80 | }, 81 | "trustlinesDelta7D": { 82 | "type": "integer", 83 | "default": 0 84 | }, 85 | "trustlinesPercent7D": { 86 | "type": "number", 87 | "default": 0 88 | }, 89 | "holders": { 90 | "type": "integer", 91 | "default": 0 92 | }, 93 | "holdersDelta24H": { 94 | "type": "integer", 95 | "default": 0 96 | }, 97 | "holdersPercent24H": { 98 | "type": "number", 99 | "default": 0 100 | }, 101 | "holdersDelta7D": { 102 | "type": "integer", 103 | "default": 0 104 | }, 105 | "holdersPercent7D": { 106 | "type": "number", 107 | "default": 0 108 | }, 109 | "supply": { 110 | "type": "string", 111 | "format": "xrpl/xfl", 112 | "default": "0" 113 | }, 114 | "supplyDelta24H": { 115 | "type": "string", 116 | "format": "xrpl/xfl", 117 | "default": "0" 118 | }, 119 | "supplyPercent24H": { 120 | "type": "number", 121 | "default": 0 122 | }, 123 | "supplyDelta7D": { 124 | "type": "string", 125 | "format": "xrpl/xfl", 126 | "default": "0" 127 | }, 128 | "supplyPercent7D": { 129 | "type": "number", 130 | "default": 0 131 | }, 132 | "marketcap": { 133 | "type": "string", 134 | "format": "xrpl/xfl", 135 | "default": "0" 136 | }, 137 | "marketcapDelta24H": { 138 | "type": "string", 139 | "format": "xrpl/xfl", 140 | "default": "0" 141 | }, 142 | "marketcapPercent24H": { 143 | "type": "number", 144 | "default": 0 145 | }, 146 | "marketcapDelta7D": { 147 | "type": "string", 148 | "format": "xrpl/xfl", 149 | "default": "0" 150 | }, 151 | "marketcapPercent7D": { 152 | "type": "number", 153 | "default": 0 154 | }, 155 | "price": { 156 | "type": "string", 157 | "format": "xrpl/xfl", 158 | "default": "0" 159 | }, 160 | "pricePercent24H": { 161 | "type": "number", 162 | "default": 0 163 | }, 164 | "pricePercent7D": { 165 | "type": "number", 166 | "default": 0 167 | }, 168 | "volume24H": { 169 | "type": "string", 170 | "format": "xrpl/xfl", 171 | "default": "0" 172 | }, 173 | "volume7D": { 174 | "type": "string", 175 | "format": "xrpl/xfl", 176 | "default": "0" 177 | }, 178 | "exchanges24H": { 179 | "type": "integer", 180 | "default": 0 181 | }, 182 | "exchanges7D": { 183 | "type": "integer", 184 | "default": 0 185 | }, 186 | "takers24H": { 187 | "type": "integer", 188 | "default": 0 189 | }, 190 | "takers7D": { 191 | "type": "integer", 192 | "default": 0 193 | } 194 | }, 195 | "required": [ 196 | "token", 197 | "tokenCurrencyHex", 198 | "tokenCurrencyUtf8", 199 | "issuerAddress" 200 | ], 201 | "unique": [ 202 | "token" 203 | ], 204 | "index": [ 205 | "tokenCurrencyHex", 206 | "tokenName", 207 | "issuerAddress", 208 | "issuerName", 209 | "trustLevel", 210 | "trustlines", 211 | "trustlinesDelta24H", 212 | "trustlinesPercent24H", 213 | "trustlinesDelta7D", 214 | "trustlinesPercent7D", 215 | "holders", 216 | "holdersDelta24H", 217 | "holdersPercent24H", 218 | "holdersDelta7D", 219 | "holdersPercent7D", 220 | "supply", 221 | "supplyDelta24H", 222 | "supplyPercent24H", 223 | "supplyDelta7D", 224 | "supplyPercent7D", 225 | "marketcap", 226 | "marketcapDelta24H", 227 | "marketcapPercent24H", 228 | "marketcapDelta7D", 229 | "marketcapPercent7D", 230 | "price", 231 | "pricePercent24H", 232 | "pricePercent7D", 233 | "volume24H", 234 | "volume7D", 235 | "exchanges24H", 236 | "exchanges7D", 237 | "takers24H", 238 | "takers7D" 239 | ] 240 | }, 241 | "Icon": { 242 | "type": "object", 243 | "properties": { 244 | "id": { 245 | "type": "integer", 246 | "id": true 247 | }, 248 | "url": { 249 | "type": "string" 250 | }, 251 | "hash": { 252 | "type": "string" 253 | }, 254 | "fileType": { 255 | "type": "string" 256 | }, 257 | "timeUpdated": { 258 | "type": "integer" 259 | }, 260 | "error": { 261 | "type": "string" 262 | }, 263 | "users": { 264 | "type": "array", 265 | "items": { 266 | "$ref": "#/definitions/IconUser" 267 | } 268 | } 269 | }, 270 | "required": [ 271 | "url" 272 | ], 273 | "unique": [ 274 | "url" 275 | ], 276 | "index": [ 277 | "timeUpdated" 278 | ] 279 | }, 280 | "IconUser": { 281 | "type": "object", 282 | "properties": { 283 | "id": { 284 | "type": "integer", 285 | "id": true 286 | }, 287 | "icon": { 288 | "$ref": "#/definitions/Icon" 289 | }, 290 | "userType": { 291 | "type": "string", 292 | "enum": [ 293 | "account", 294 | "token" 295 | ] 296 | }, 297 | "userId": { 298 | "type": "integer" 299 | } 300 | }, 301 | "required": [ 302 | "icon", 303 | "userType", 304 | "userId" 305 | ], 306 | "unique": [ 307 | ["icon", "userType", "userId"] 308 | ], 309 | "index": [ 310 | ["userType", "userId"] 311 | ] 312 | }, 313 | "Todo": { 314 | "type": "object", 315 | "properties": { 316 | "id": { 317 | "type": "integer", 318 | "id": true 319 | }, 320 | "task": { 321 | "type": "string", 322 | "enum": [ 323 | "account.props", 324 | "account.icons", 325 | "token.props", 326 | "token.exchanges", 327 | "token.metrics.trustlines", 328 | "token.metrics.holders", 329 | "token.metrics.supply", 330 | "token.metrics.marketcap", 331 | "token.icons" 332 | ] 333 | }, 334 | "subject": { 335 | "type": "integer" 336 | } 337 | }, 338 | "required": [ 339 | "task", 340 | "subject" 341 | ], 342 | "unique": [ 343 | ["task", "subject"] 344 | ], 345 | "index": [ 346 | "task" 347 | ] 348 | } 349 | } 350 | } -------------------------------------------------------------------------------- /src/etl/backfill.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { extractEvents } from './events/index.js' 4 | import { applyTransactions } from './state/index.js' 5 | import { createDerivatives } from './derivatives/index.js' 6 | import { pullNewItems, readTableHeads } from '../db/helpers/heads.js' 7 | import { wait } from '@xrplkit/time' 8 | 9 | 10 | export async function startBackfill({ ctx }){ 11 | let { sequence: firstSequence } = ctx.db.core.ledgers.readOne({ 12 | orderBy: { 13 | sequence: 'asc' 14 | }, 15 | take: 1 16 | }) 17 | 18 | let stream = await spawn( 19 | '../xrpl/stream.js:createBackwardStream', 20 | { 21 | ctx, 22 | startSequence: firstSequence - 1 23 | } 24 | ) 25 | 26 | while(true){ 27 | let { ledger } = await stream.next() 28 | 29 | ctx.db.core.tx(() => { 30 | ctx = { 31 | ...ctx, 32 | currentLedger: ledger, 33 | ledgerSequence: ledger.sequence, 34 | backwards: true 35 | } 36 | 37 | try{ 38 | let heads = readTableHeads({ ctx }) 39 | 40 | extractEvents({ ctx, ledger }) 41 | applyTransactions({ ctx, ledger }) 42 | createDerivatives({ 43 | ctx, 44 | newItems: pullNewItems({ 45 | ctx, 46 | previousHeads: heads 47 | }) 48 | }) 49 | }catch(error){ 50 | log.error(`fatal error while backfilling ledger #${ledger.sequence}:`) 51 | log.error(error.stack) 52 | 53 | throw error 54 | } 55 | }) 56 | 57 | log.accumulate.info({ 58 | text: [ 59 | `at ledger #${ledger.sequence} ${ 60 | new Date(ledger.closeTime * 1000) 61 | .toISOString() 62 | .slice(0, -5) 63 | .replace('T', ' ') 64 | } (+%backfilledLedgers in %time)` 65 | ], 66 | data: { 67 | backfilledLedgers: 1 68 | } 69 | }) 70 | 71 | await wait(10) 72 | } 73 | } -------------------------------------------------------------------------------- /src/etl/derivatives/index.js: -------------------------------------------------------------------------------- 1 | import { updateMarketcapFromExchange, updateMarketcapFromSupply } from './marketcaps.js' 2 | 3 | 4 | export function createDerivatives({ ctx, newItems }){ 5 | for(let exchange of newItems.tokenExchanges){ 6 | updateMarketcapFromExchange({ ctx, exchange }) 7 | } 8 | 9 | for(let supply of newItems.tokenSupply){ 10 | updateMarketcapFromSupply({ ctx, supply }) 11 | } 12 | } 13 | 14 | export function createAllDerivatives({ ctx }){ 15 | let exchanges = ctx.db.core.tokenExchanges.iter() 16 | 17 | for(let exchange of exchanges){ 18 | updateMarketcapFromExchange({ ctx, exchange }) 19 | } 20 | } -------------------------------------------------------------------------------- /src/etl/derivatives/marketcaps.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { mul } from '@xrplkit/xfl' 3 | import { readTokenMetricSeries, readTokenMetrics, writeTokenMetrics } from '../../db/helpers/tokenmetrics.js' 4 | import { readTokenExchangeAligned, alignTokenExchange } from '../../db/helpers/tokenexchanges.js' 5 | 6 | 7 | export function updateMarketcapFromExchange({ ctx, exchange }){ 8 | try{ 9 | exchange = alignTokenExchange({ 10 | exchange, 11 | quote: { currency: 'XRP' } 12 | }) 13 | }catch(error){ 14 | if(exchange.takerGotToken.id === 1 || exchange.takerPaidToken.id === 1){ 15 | log.warn(`market cap update failed: ${error.message}`) 16 | } 17 | return 18 | } 19 | 20 | if(ctx.backwards){ 21 | let firstMarketcap = ctx.db.core.tokenMarketcap.readOne({ 22 | where: { 23 | token: exchange.base, 24 | ledgerSequence: { 25 | greaterOrEqual: ctx.ledgerSequence 26 | } 27 | }, 28 | orderBy: { 29 | ledgerSequence: 'asc' 30 | } 31 | }) 32 | 33 | let series = readTokenMetricSeries({ 34 | ctx, 35 | token: exchange.base, 36 | metric: 'supply', 37 | sequenceStart: ctx.ledgerSequence, 38 | sequenceEnd: firstMarketcap?.ledgerSequence 39 | }) 40 | 41 | for(let { ledgerSequence: sequence, value: supply } of series){ 42 | writeTokenMetrics({ 43 | ctx, 44 | token: exchange.base, 45 | ledgerSequence: sequence, 46 | metrics: { 47 | marketcap: supply 48 | ? mul(supply, exchange.price) 49 | : '0' 50 | } 51 | }) 52 | } 53 | }else{ 54 | let { supply } = readTokenMetrics({ 55 | ctx, 56 | token: exchange.base, 57 | ledgerSequence: ctx.ledgerSequence, 58 | metrics: { 59 | supply: true 60 | } 61 | }) 62 | 63 | writeTokenMetrics({ 64 | ctx, 65 | token: exchange.base, 66 | ledgerSequence: ctx.ledgerSequence, 67 | metrics: { 68 | marketcap: supply 69 | ? mul(supply, exchange.price) 70 | : '0' 71 | } 72 | }) 73 | } 74 | } 75 | 76 | export function updateMarketcapFromSupply({ ctx, supply }){ 77 | let exchange = readTokenExchangeAligned({ 78 | ctx, 79 | base: supply.token, 80 | quote: { 81 | currency: 'XRP' 82 | }, 83 | ledgerSequence: ctx.ledgerSequence, 84 | skipDust: true 85 | }) 86 | 87 | if(ctx.backwards && !exchange) 88 | return 89 | 90 | writeTokenMetrics({ 91 | ctx, 92 | token: supply.token, 93 | ledgerSequence: ctx.ledgerSequence, 94 | metrics: { 95 | marketcap: exchange 96 | ? mul(supply.value, exchange.price) 97 | : '0' 98 | } 99 | }) 100 | } -------------------------------------------------------------------------------- /src/etl/events/index.js: -------------------------------------------------------------------------------- 1 | import { extractLedgerStats } from './ledgerstats.js' 2 | import { extractTokenExchanges } from './tokenexchanges.js' 3 | import { extractNFTokenExchanges } from './nftexchanges.js' 4 | 5 | 6 | export function extractEvents({ ctx, ledger }){ 7 | extractLedgerStats({ ctx, ledger }) 8 | extractTokenExchanges({ ctx, ledger }) 9 | extractNFTokenExchanges({ ctx, ledger }) 10 | } -------------------------------------------------------------------------------- /src/etl/events/ledgerstats.js: -------------------------------------------------------------------------------- 1 | import { div, max, min, sum, floor } from "@xrplkit/xfl" 2 | 3 | const pseudoTransactionTypes = [ 4 | 'EnableAmendment', 5 | 'SetFee', 6 | 'UNLModify' 7 | ] 8 | 9 | 10 | export function extractLedgerStats({ ctx, ledger }){ 11 | let baseData = { 12 | sequence: ledger.sequence, 13 | hash: ledger.hash, 14 | closeTime: ledger.closeTime, 15 | txCount: ledger.transactions.length, 16 | } 17 | 18 | if(ledger.transactions.length === 0){ 19 | ctx.db.core.ledgers.createOne({ 20 | data: baseData 21 | }) 22 | }else{ 23 | let types = {} 24 | let fees = [] 25 | 26 | for(let transaction of ledger.transactions){ 27 | if(pseudoTransactionTypes.includes(transaction.TransactionType)) 28 | continue 29 | 30 | if(!types[transaction.TransactionType]) 31 | types[transaction.TransactionType] = 0 32 | 33 | types[transaction.TransactionType]++ 34 | fees.push(parseInt(transaction.Fee)) 35 | } 36 | 37 | ctx.db.core.ledgers.createOne({ 38 | data: { 39 | ...baseData, 40 | txTypeCounts: Object.entries(types) 41 | .map(([type, count]) => ({ type, count })), 42 | minFee: Math.min(...fees), 43 | maxFee: Math.max(...fees), 44 | avgFee: Math.floor( 45 | fees.reduce((total, fee) => total + fee, 0) / fees.length 46 | ) 47 | } 48 | }) 49 | } 50 | } -------------------------------------------------------------------------------- /src/etl/events/nftexchanges.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseOffer } from '../state/nftoffers.js' 3 | 4 | 5 | export function extractNFTokenExchanges({ ctx, ledger }){ 6 | for(let transaction of ledger.transactions){ 7 | if(transaction.TransactionType !== 'NFTokenAcceptOffer') 8 | continue 9 | 10 | if(transaction.metaData.TransactionResult !== 'tesSUCCESS') 11 | continue 12 | 13 | let offer 14 | 15 | for(let { DeletedNode } of transaction.metaData.AffectedNodes){ 16 | if(!DeletedNode) 17 | continue 18 | 19 | if(DeletedNode.LedgerEntryType !== 'NFTokenOffer') 20 | continue 21 | 22 | if(DeletedNode.LedgerIndex === transaction.NFTokenBuyOffer 23 | || DeletedNode.LedgerIndex === transaction.NFTokenSellOffer){ 24 | offer = { 25 | ...parseOffer({ 26 | index: DeletedNode.LedgerIndex, 27 | entry: DeletedNode.FinalFields 28 | }), 29 | ledgerSequence: DeletedNode.FinalFields.PreviousTxnLgrSeq, 30 | lastLedgerSequence: ledger.sequence - 1 31 | } 32 | } 33 | } 34 | 35 | if(!offer){ 36 | log.warn(`unable to determine accepted nft offer of ${transaction.hash}`) 37 | continue 38 | } 39 | 40 | ctx.db.core.nftExchanges.createOne({ 41 | data: { 42 | txHash: transaction.hash, 43 | account: { 44 | address: transaction.Account 45 | }, 46 | offer, 47 | nft: offer.nft, 48 | ledgerSequence: ledger.sequence 49 | } 50 | }) 51 | } 52 | } -------------------------------------------------------------------------------- /src/etl/events/tokenexchanges.js: -------------------------------------------------------------------------------- 1 | import { extractExchanges } from '@xrplkit/txmeta' 2 | import { markCacheDirtyForTokenExchanges } from '../../cache/todo.js' 3 | 4 | 5 | export function extractTokenExchanges({ ctx, ledger }){ 6 | let exchanges = [] 7 | 8 | for(let transaction of ledger.transactions){ 9 | exchanges.push(...extractExchanges(transaction)) 10 | } 11 | 12 | if(exchanges.length === 0) 13 | return 14 | 15 | for(let { hash, sequence, maker, taker, takerPaid, takerGot } of exchanges){ 16 | let takerPaidToken = { 17 | currency: takerPaid.currency, 18 | issuer: takerPaid.issuer 19 | ? { address: takerPaid.issuer } 20 | : undefined 21 | } 22 | 23 | let takerGotToken = { 24 | currency: takerGot.currency, 25 | issuer: takerGot.issuer 26 | ? { address: takerGot.issuer } 27 | : undefined 28 | } 29 | 30 | ctx.db.core.tokenExchanges.createOne({ 31 | data: { 32 | txHash: hash, 33 | ledgerSequence: ledger.sequence, 34 | taker: { 35 | address: taker 36 | }, 37 | maker: { 38 | address: maker 39 | }, 40 | sequence, 41 | takerPaidToken, 42 | takerGotToken, 43 | takerPaidValue: takerPaid.value, 44 | takerGotValue: takerGot.value, 45 | } 46 | }) 47 | 48 | markCacheDirtyForTokenExchanges({ ctx, token: takerPaidToken }) 49 | markCacheDirtyForTokenExchanges({ ctx, token: takerGotToken }) 50 | } 51 | } -------------------------------------------------------------------------------- /src/etl/snapshot.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { unixNow } from '@xrplkit/time' 3 | import { spawn } from '@mwni/workers' 4 | import { fetch as fetchLedger } from '../xrpl/ledger.js' 5 | import { applyObjects } from './state/index.js' 6 | import { extractEvents } from './events/index.js' 7 | import { createAllDerivatives } from './derivatives/index.js' 8 | 9 | 10 | export async function createSnapshot({ ctx }){ 11 | ctx = { 12 | ...ctx, 13 | snapshotState: ctx.db.core.snapshots.readLast(), 14 | ledgerSequence: 0 15 | } 16 | 17 | if(!ctx.snapshotState){ 18 | await createSnapshotEntry({ ctx }) 19 | log.info(`creating snapshot of ledger #${ctx.snapshotState.ledgerSequence} - this may take a long time`) 20 | } 21 | 22 | if(ctx.snapshotState.entriesCount === 0 || ctx.snapshotState.marker){ 23 | try{ 24 | await copyFromFeed({ 25 | ctx, 26 | feed: await createFeed({ 27 | ctx, 28 | ledgerSequence: ctx.snapshotState.ledgerSequence, 29 | marker: ctx.snapshotState.marker, 30 | node: ctx.snapshotState.originNode 31 | }) 32 | }) 33 | }catch(error){ 34 | log.error(`fatal error while copying from ledger feed:`) 35 | log.error(error.stack) 36 | 37 | throw error.stack 38 | } 39 | } 40 | 41 | if(!ctx.snapshotState.completionTime){ 42 | log.time.info(`snapshot.derivatives`, `creating derivative data ...`) 43 | createAllDerivatives({ ctx }) 44 | log.time.info(`snapshot.derivatives`, `created derivative data in %`) 45 | 46 | ctx.db.core.snapshots.updateOne({ 47 | data: { 48 | completionTime: unixNow(), 49 | marker: null 50 | }, 51 | where: { 52 | id: ctx.snapshotState.id 53 | } 54 | }) 55 | 56 | log.info(`ledger snapshot complete`) 57 | } 58 | } 59 | 60 | async function createSnapshotEntry({ ctx }){ 61 | let ledger = await fetchLedger({ 62 | ctx, 63 | sequence: 'validated' 64 | }) 65 | 66 | extractEvents({ ctx, ledger }) 67 | 68 | ctx.currentLedger = ledger 69 | ctx.snapshotState = ctx.db.core.snapshots.createOne({ 70 | data: { 71 | ledgerSequence: ledger.sequence, 72 | creationTime: unixNow() 73 | } 74 | }) 75 | } 76 | 77 | async function createFeed({ ctx, ledgerSequence, marker, node }){ 78 | return await spawn( 79 | '../xrpl/snapshot.js:start', 80 | { 81 | ctx, 82 | ledgerSequence, 83 | marker, 84 | node 85 | } 86 | ) 87 | } 88 | 89 | 90 | async function copyFromFeed({ ctx, feed }){ 91 | while(true){ 92 | let chunk = await feed.next() 93 | 94 | if(!chunk) 95 | break 96 | 97 | ctx.db.core.tx(() => { 98 | applyObjects({ 99 | ctx, 100 | objects: chunk.objects 101 | }) 102 | 103 | ctx.snapshotState = ctx.db.core.snapshots.updateOne({ 104 | data: { 105 | originNode: feed.node, 106 | marker: chunk.marker, 107 | entriesCount: ctx.snapshotState.entriesCount + chunk.objects.length 108 | }, 109 | where: { 110 | id: ctx.snapshotState.id 111 | } 112 | }) 113 | }) 114 | 115 | log.accumulate.info({ 116 | text: [ 117 | `processed`, 118 | ctx.snapshotState.entriesCount, 119 | `ledger objects (+%objects in %time)` 120 | ], 121 | data: { 122 | objects: chunk.objects.length 123 | } 124 | }) 125 | } 126 | 127 | log.flush() 128 | log.info(`reached end of ledger data`) 129 | } -------------------------------------------------------------------------------- /src/etl/state/accounts.js: -------------------------------------------------------------------------------- 1 | import { div } from '@xrplkit/xfl' 2 | import { isBlackholed } from '../../xrpl/blackhole.js' 3 | import { writeBalance } from '../../db/helpers/balances.js' 4 | import { markCacheDirtyForAccountProps } from '../../cache/todo.js' 5 | 6 | 7 | export function parse({ entry }){ 8 | return { 9 | address: entry.Account, 10 | emailHash: entry.EmailHash, 11 | balance: div(entry.Balance, '1000000'), 12 | transferRate: entry.TransferRate, 13 | blackholed: isBlackholed(entry), 14 | domain: entry.Domain 15 | ? Buffer.from(entry.Domain, 'hex').toString() 16 | : undefined, 17 | } 18 | } 19 | 20 | export function diff({ ctx, previous, final }){ 21 | let address = final?.address || previous?.address 22 | 23 | if(final){ 24 | let { balance, ...meta } = final 25 | var { id } = ctx.db.core.accounts.createOne({ 26 | data: ctx.backwards 27 | ? { address } 28 | : meta 29 | }) 30 | 31 | if(final?.Domain != previous?.Domain) 32 | markCacheDirtyForAccountProps({ ctx, account: final }) 33 | }else{ 34 | var { id } = ctx.db.core.accounts.createOne({ 35 | data: { 36 | address 37 | } 38 | }) 39 | } 40 | 41 | writeBalance({ 42 | ctx, 43 | account: { 44 | id 45 | }, 46 | token: { 47 | currency: 'XRP', 48 | issuer: null 49 | }, 50 | ledgerSequence: ctx.ledgerSequence, 51 | balance: final 52 | ? final.balance 53 | : '0' 54 | }) 55 | } -------------------------------------------------------------------------------- /src/etl/state/index.js: -------------------------------------------------------------------------------- 1 | import * as accounts from './accounts.js' 2 | import * as tokens from './tokens.js' 3 | import * as tokenOffers from './tokenoffers.js' 4 | import * as nfts from './nfts.js' 5 | import * as nftOffers from './nftoffers.js' 6 | 7 | 8 | const ledgerEntryModules = { 9 | AccountRoot: accounts, 10 | RippleState: tokens, 11 | Offer: tokenOffers, 12 | NFTokenPage: nfts, 13 | NFTokenOffer: nftOffers, 14 | } 15 | 16 | 17 | export function applyObjects({ ctx, objects }){ 18 | return applyDeltas({ 19 | ctx, 20 | deltas: objects.map(entry => ({ 21 | type: entry.LedgerEntryType, 22 | index: entry.index, 23 | final: entry 24 | })) 25 | }) 26 | } 27 | 28 | export function applyTransactions({ ctx, ledger }){ 29 | let deltas = [] 30 | 31 | for(let transaction of ledger.transactions){ 32 | let meta = transaction.meta || transaction.metaData 33 | 34 | for(let { CreatedNode, ModifiedNode, DeletedNode } of meta.AffectedNodes){ 35 | if(CreatedNode && CreatedNode.NewFields){ 36 | deltas.push({ 37 | type: CreatedNode.LedgerEntryType, 38 | index: CreatedNode.LedgerIndex, 39 | final: { 40 | ...CreatedNode.NewFields, 41 | PreviousTxnLgrSeq: ledger.sequence 42 | } 43 | }) 44 | }else if(ModifiedNode && ModifiedNode.FinalFields){ 45 | deltas.push({ 46 | type: ModifiedNode.LedgerEntryType, 47 | index: ModifiedNode.LedgerIndex, 48 | previous: { 49 | ...ModifiedNode.FinalFields, 50 | ...ModifiedNode.PreviousFields, 51 | PreviousTxnLgrSeq: ModifiedNode.PreviousTxnLgrSeq 52 | }, 53 | final: { 54 | ...ModifiedNode.FinalFields, 55 | PreviousTxnLgrSeq: ledger.sequence 56 | } 57 | }) 58 | }else if(DeletedNode){ 59 | deltas.push({ 60 | type: DeletedNode.LedgerEntryType, 61 | index: DeletedNode.LedgerIndex, 62 | previous: { 63 | ...DeletedNode.FinalFields, 64 | ...DeletedNode.PreviousFields, 65 | PreviousTxnLgrSeq: ledger.sequence 66 | } 67 | }) 68 | } 69 | } 70 | } 71 | 72 | if(ctx.backwards){ 73 | return applyDeltas({ 74 | ctx: { 75 | ...ctx, 76 | ledgerSequence: ledger.sequence - 1 77 | }, 78 | deltas: deltas 79 | .map(({ type, index, previous, final }) => ({ type, index, previous: final, final: previous })) 80 | .reverse(), 81 | }) 82 | }else{ 83 | return applyDeltas({ 84 | ctx: { 85 | ...ctx, 86 | ledgerSequence: ledger.sequence, 87 | }, 88 | deltas 89 | }) 90 | } 91 | } 92 | 93 | function applyDeltas({ ctx, deltas }){ 94 | let groups = {} 95 | let solos = [] 96 | 97 | for(let { type, index, previous, final } of deltas){ 98 | let module = ledgerEntryModules[type] 99 | 100 | if(!module) 101 | continue 102 | 103 | let parsedPrevious = previous 104 | ? module.parse({ index, entry: previous }) 105 | : undefined 106 | 107 | let parsedFinal = final 108 | ? module.parse({ index, entry: final }) 109 | : undefined 110 | 111 | if(!parsedPrevious && !parsedFinal) 112 | continue 113 | 114 | if(module.group){ 115 | let grouped = module.group({ 116 | previous: parsedPrevious, 117 | final: parsedFinal 118 | }) 119 | 120 | for(let { group, previous, final } of grouped){ 121 | if(!groups[group.key]) 122 | groups[group.key] = { 123 | ...group, 124 | type, 125 | deltas: [] 126 | } 127 | 128 | groups[group.key].deltas.push({ 129 | previous, 130 | final 131 | }) 132 | } 133 | }else{ 134 | solos.push({ 135 | type, 136 | previous: parsedPrevious, 137 | final: parsedFinal 138 | }) 139 | } 140 | } 141 | 142 | for(let { type, key, ...group } of Object.values(groups)){ 143 | ledgerEntryModules[type].diff({ ctx, ...group }) 144 | } 145 | 146 | for(let { type, ...delta } of solos){ 147 | ledgerEntryModules[type].diff({ ctx, ...delta }) 148 | } 149 | } -------------------------------------------------------------------------------- /src/etl/state/nftoffers.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID } from 'ripple-address-codec' 2 | import { amountFromRippled } from '@xrplkit/tokens' 3 | import { rippleToUnix } from '@xrplkit/time' 4 | import { expireNFTokenOffer, writeNFTokenOffer } from '../../db/helpers/nftoffers.js' 5 | 6 | 7 | export function parse({ index, entry }){ 8 | let amountToken 9 | let amountValue 10 | let issuer = encodeAccountID(Buffer.from(entry.NFTokenID.slice(8, 48), 'hex')) 11 | let isSellOffer = entry.Flags & 0x00000001 12 | let expirationTime = entry.Expiration 13 | ? rippleToUnix(entry.Expiration) 14 | : null 15 | 16 | 17 | if(entry.Amount){ 18 | let { currency, issuer, value } = amountFromRippled(entry.Amount) 19 | 20 | amountValue = value 21 | amountToken = currency === 'XRP' 22 | ? { id: 1 } 23 | : { 24 | currency, 25 | issuer: { 26 | address: issuer 27 | } 28 | } 29 | } 30 | 31 | return { 32 | account: { 33 | address: entry.Owner 34 | }, 35 | offerId: index, 36 | nft: { 37 | tokenId: entry.NFTokenID, 38 | issuer: { 39 | address: issuer 40 | } 41 | }, 42 | destination: entry.Destination 43 | ? { address: entry.Destination } 44 | : null, 45 | amountToken, 46 | amountValue, 47 | isSellOffer, 48 | expirationTime 49 | } 50 | } 51 | 52 | 53 | 54 | export function diff({ ctx, previous, final }){ 55 | if(previous){ 56 | expireNFTokenOffer({ 57 | ...previous, 58 | ctx, 59 | ledgerSequence: ctx.ledgerSequence, 60 | }) 61 | } 62 | 63 | if(final){ 64 | writeNFTokenOffer({ 65 | ...final, 66 | ctx, 67 | ledgerSequence: ctx.ledgerSequence, 68 | }) 69 | } 70 | } -------------------------------------------------------------------------------- /src/etl/state/nfts.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID } from 'ripple-address-codec' 2 | 3 | 4 | export function parse({ index, entry }){ 5 | let address = encodeAccountID(Buffer.from(index.slice(0, 40), 'hex')) 6 | let page = { 7 | account: { address }, 8 | nfts: [] 9 | } 10 | 11 | for(let { NFToken } of entry.NFTokens){ 12 | let issuer = encodeAccountID(Buffer.from(NFToken.NFTokenID.slice(8, 48), 'hex')) 13 | let uri = NFToken.URI 14 | ? Buffer.from(NFToken.URI, 'hex') 15 | : null 16 | 17 | page.nfts.push({ 18 | owner: { address }, 19 | issuer: { address: issuer }, 20 | tokenId: NFToken.NFTokenID, 21 | uri, 22 | }) 23 | } 24 | 25 | return page 26 | } 27 | 28 | 29 | 30 | export function diff({ ctx, previous, final }){ 31 | if(previous){ 32 | for(let { owner, ...pNft } of previous.nfts){ 33 | if(final && final.nfts.some(fNft => fNft.tokenId === pNft.tokenId)) 34 | continue 35 | 36 | ctx.db.core.nfts.createOne({ 37 | data: ctx.backwards 38 | ? pNft 39 | : { ...pNft, owner: null } 40 | }) 41 | } 42 | } 43 | 44 | if(final){ 45 | for(let { owner, ...fNft } of final.nfts){ 46 | if(previous && previous.nfts.some(pNft => pNft.tokenId === fNft.tokenId)) 47 | continue 48 | 49 | ctx.db.core.nfts.createOne({ 50 | data: ctx.backwards 51 | ? fNft 52 | : { ...fNft, owner } 53 | }) 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /src/etl/state/tokenoffers.js: -------------------------------------------------------------------------------- 1 | import { XFL } from '@xrplkit/xfl' 2 | import { amountFromRippled } from '@xrplkit/tokens' 3 | import { rippleToUnix } from '@xrplkit/time' 4 | import { writeTokenOffer, expireTokenOffer } from '../../db/helpers/tokenoffers.js' 5 | 6 | 7 | export function parse({ entry }){ 8 | let takerPays = amountFromRippled(entry.TakerPays) 9 | let takerGets = amountFromRippled(entry.TakerGets) 10 | let size = takerGets.value 11 | let qualityHex = entry.BookDirectory.slice(-16) 12 | 13 | try{ 14 | let qualityMantissa = Buffer.from(`00${qualityHex.slice(2)}`, 'hex') 15 | .readBigInt64BE(0) 16 | 17 | let qualityExponent = Buffer.from(qualityHex.slice(0, 2), 'hex') 18 | .readInt8(0) 19 | - 100 20 | + (takerPays.currency === 'XRP' ? -6 : 0) 21 | - (takerGets.currency === 'XRP' ? -6 : 0) 22 | 23 | var quality = XFL(`${qualityMantissa}e${qualityExponent}`) 24 | }catch{ 25 | return 26 | } 27 | 28 | return { 29 | account: { address: entry.Account }, 30 | accountSequence: entry.Sequence, 31 | book: { 32 | takerPays: { 33 | currency: takerPays.currency, 34 | issuer: takerPays.issuer 35 | ? { address: takerPays.issuer } 36 | : undefined 37 | }, 38 | takerGets: { 39 | currency: takerGets.currency, 40 | issuer: takerGets.issuer 41 | ? { address: takerGets.issuer } 42 | : undefined 43 | }, 44 | }, 45 | quality, 46 | size, 47 | expirationTime: entry.Expiration 48 | ? rippleToUnix(entry.Expiration) 49 | : null, 50 | previousSequence: entry.PreviousTxnLgrSeq 51 | } 52 | } 53 | 54 | export function diff({ ctx, previous, final }){ 55 | if(previous){ 56 | expireTokenOffer({ 57 | ctx, 58 | account: previous.account, 59 | accountSequence: previous.accountSequence, 60 | ledgerSequence: ctx.ledgerSequence, 61 | book: previous.book 62 | }) 63 | } 64 | 65 | if(final){ 66 | writeTokenOffer({ 67 | ctx, 68 | account: final.account, 69 | accountSequence: final.accountSequence, 70 | ledgerSequence: ctx.ledgerSequence, 71 | book: final.book, 72 | quality: final.quality, 73 | size: final.size, 74 | expirationTime: final.expirationTime 75 | }) 76 | } 77 | } -------------------------------------------------------------------------------- /src/etl/state/tokens.js: -------------------------------------------------------------------------------- 1 | import { sum, sub, eq, lt, gt, neg, max } from '@xrplkit/xfl' 2 | import { writeBalance } from '../../db/helpers/balances.js' 3 | import { writeTokenMetrics, readTokenMetrics } from '../../db/helpers/tokenmetrics.js' 4 | 5 | 6 | export function parse({ entry }){ 7 | let lowIssuer = entry.HighLimit.value !== '0' || lt(entry.Balance.value, '0') 8 | let highIssuer = entry.LowLimit.value !== '0' || gt(entry.Balance.value, '0') 9 | let transformed = {} 10 | 11 | if(lowIssuer){ 12 | transformed.low = { 13 | account: { 14 | address: entry.HighLimit.issuer 15 | }, 16 | token: { 17 | currency: entry.Balance.currency, 18 | issuer: { 19 | address: entry.LowLimit.issuer 20 | } 21 | }, 22 | balance: max(0, neg(entry.Balance.value)), 23 | previousSequence: entry.PreviousTxnLgrSeq 24 | } 25 | } 26 | 27 | if(highIssuer){ 28 | transformed.high = { 29 | account: { 30 | address: entry.LowLimit.issuer 31 | }, 32 | token: { 33 | currency: entry.Balance.currency, 34 | issuer: { 35 | address: entry.HighLimit.issuer 36 | } 37 | }, 38 | balance: max(0, entry.Balance.value), 39 | previousSequence: entry.PreviousTxnLgrSeq 40 | } 41 | } 42 | 43 | return transformed 44 | } 45 | 46 | 47 | export function group({ previous, final }){ 48 | let groups = [] 49 | 50 | for(let side of ['low', 'high']){ 51 | let entry = final 52 | ? final[side] 53 | : previous[side] 54 | 55 | if(!entry) 56 | continue 57 | 58 | groups.push({ 59 | group: { 60 | token: entry.token, 61 | key: `${entry.token.currency}:${entry.token.issuer.address}`, 62 | }, 63 | previous: previous ? previous[side] : undefined, 64 | final: final ? final[side] : undefined 65 | }) 66 | } 67 | 68 | return groups 69 | } 70 | 71 | 72 | export function diff({ ctx, token, deltas }){ 73 | token = ctx.db.core.tokens.createOne({ 74 | data: token 75 | }) 76 | 77 | let { trustlines, holders, supply } = readTokenMetrics({ 78 | ctx, 79 | token, 80 | metrics: { 81 | trustlines: true, 82 | holders: true, 83 | supply: true 84 | }, 85 | ledgerSequence: ctx.ledgerSequence 86 | }) 87 | 88 | let metrics = { 89 | trustlines: trustlines || 0, 90 | holders: holders || 0, 91 | supply: supply || 0, 92 | } 93 | 94 | for(let { previous, final } of deltas){ 95 | if(previous && final){ 96 | metrics.supply = sum( 97 | metrics.supply, 98 | sub(final.balance, previous.balance) 99 | ) 100 | 101 | if(eq(previous.balance, 0) && gt(final.balance, 0)){ 102 | metrics.holders++ 103 | }else if(eq(final.balance, 0) && gt(previous.balance, 0)){ 104 | metrics.holders-- 105 | } 106 | }else if(final){ 107 | metrics.trustlines++ 108 | 109 | if(gt(final.balance, 0)){ 110 | metrics.supply = sum(metrics.supply, final.balance) 111 | metrics.holders++ 112 | } 113 | }else{ 114 | metrics.trustlines-- 115 | 116 | if(gt(previous.balance, 0)){ 117 | metrics.supply = sub(metrics.supply, previous.balance) 118 | metrics.holders-- 119 | } 120 | } 121 | 122 | writeBalance({ 123 | ctx, 124 | account: final?.account || previous?.account, 125 | token, 126 | balance: final 127 | ? final.balance 128 | : '0', 129 | ledgerSequence: ctx.ledgerSequence 130 | }) 131 | } 132 | 133 | writeTokenMetrics({ 134 | ctx, 135 | token, 136 | metrics, 137 | ledgerSequence: ctx.ledgerSequence 138 | }) 139 | } -------------------------------------------------------------------------------- /src/etl/sync.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { extractEvents } from './events/index.js' 4 | import { applyTransactions } from './state/index.js' 5 | import { createDerivatives } from './derivatives/index.js' 6 | import { pullNewItems, readTableHeads } from '../db/helpers/heads.js' 7 | 8 | 9 | export async function startSync({ ctx }){ 10 | let onceInSyncTrigger 11 | 12 | let { sequence: lastSequence } = ctx.db.core.ledgers.readOne({ 13 | orderBy: { 14 | sequence: 'desc' 15 | }, 16 | take: 1 17 | }) 18 | 19 | let stream = await spawn( 20 | '../xrpl/stream.js:createForwardStream', 21 | { 22 | ctx, 23 | startSequence: lastSequence + 1 24 | } 25 | ) 26 | 27 | log.info(`catching up from ledger #${lastSequence} -> #${(await stream.status()).targetSequence}`) 28 | 29 | ;(async () => { 30 | while(true){ 31 | log.time.debug(`sync.cycle`) 32 | 33 | let { ledger, ledgersBehind } = await stream.next() 34 | 35 | ctx.db.core.tx(() => { 36 | ctx = { 37 | ...ctx, 38 | currentLedger: ledger, 39 | ledgerSequence: ledger.sequence, 40 | } 41 | 42 | try{ 43 | let heads = readTableHeads({ ctx }) 44 | 45 | extractEvents({ ctx, ledger }) 46 | applyTransactions({ ctx, ledger }) 47 | createDerivatives({ 48 | ctx, 49 | newItems: pullNewItems({ 50 | ctx, 51 | previousHeads: heads 52 | }) 53 | }) 54 | }catch(error){ 55 | log.error(`fatal error while syncing ledger #${ledger.sequence}:`) 56 | log.error(error.stack) 57 | 58 | throw error 59 | } 60 | }) 61 | 62 | 63 | if(ledgersBehind > 0){ 64 | log.accumulate.info({ 65 | text: [ 66 | ledgersBehind, 67 | `ledgers behind (+%advancedLedgers in %time)` 68 | ], 69 | data: { 70 | advancedLedgers: 1 71 | } 72 | }) 73 | }else{ 74 | log.flush() 75 | 76 | if(onceInSyncTrigger){ 77 | onceInSyncTrigger() 78 | onceInSyncTrigger = undefined 79 | log.info(`catched up with live`) 80 | } 81 | 82 | log.info(`in sync with ledger #${ledger.sequence} ${ 83 | new Date(ledger.closeTime * 1000) 84 | .toISOString() 85 | .slice(0, -5) 86 | .replace('T', ' ') 87 | }`) 88 | } 89 | 90 | log.time.debug(`sync.cycle`, `sync cycle took % for`, ledger.transactions.length, `tx`) 91 | } 92 | })() 93 | 94 | return { 95 | onceInSync(){ 96 | return new Promise(resolve => { 97 | onceInSyncTrigger = resolve 98 | }) 99 | } 100 | } 101 | } -------------------------------------------------------------------------------- /src/lib/config.js: -------------------------------------------------------------------------------- 1 | import os from 'os' 2 | import fs from 'fs' 3 | import path from 'path' 4 | import { fileURLToPath } from 'url' 5 | import log from '@mwni/log' 6 | import { parse as parseToml } from '@xrplkit/toml' 7 | 8 | 9 | const __filename = fileURLToPath(import.meta.url) 10 | const __dirname = path.dirname(__filename) 11 | 12 | export function find(){ 13 | let preferredPath = path.join(os.homedir(), '.xrplmeta', 'config.toml') 14 | let paths = ['config.toml', preferredPath] 15 | 16 | for(let path of paths){ 17 | if(fs.existsSync(path)) 18 | return path 19 | } 20 | 21 | return preferredPath 22 | } 23 | 24 | 25 | export function load(file, createIfMissing){ 26 | if(!fs.existsSync(file)){ 27 | log.warn(`no config at "${file}" - creating new from template`) 28 | 29 | if(createIfMissing) 30 | create(file) 31 | } 32 | 33 | let content = fs.readFileSync(file, 'utf-8') 34 | let config = parseToml(content, 'camelCase') 35 | 36 | // schema checks here 37 | 38 | return config 39 | } 40 | 41 | export function create(file){ 42 | let dir = path.dirname(file) 43 | let root = path.dirname(process.argv[1]) 44 | let templatePath = path.join(__dirname, '../../config.template.toml') 45 | let template = fs.readFileSync(templatePath, 'utf-8') 46 | let customizedTemplate = template 47 | .replace( 48 | 'data_dir = ""', 49 | `data_dir = "${dir.replace(/\\/g, '\\\\')}"` 50 | ) 51 | 52 | if(!fs.existsSync(dir)) 53 | fs.mkdirSync(dir) 54 | 55 | fs.writeFileSync(file, customizedTemplate) 56 | } 57 | 58 | export function override(config, ...overrides){ 59 | if (!overrides.length) 60 | return config 61 | 62 | let source = overrides.shift() 63 | 64 | if(isObject(config) && isObject(source)){ 65 | for (const key in source){ 66 | if(isObject(source[key])){ 67 | if(!config[key]) 68 | Object.assign(config, { [key]: {} }) 69 | 70 | override(config[key], source[key]) 71 | }else{ 72 | Object.assign(config, { [key]: source[key] }) 73 | } 74 | } 75 | } 76 | 77 | return override(config, ...overrides) 78 | } 79 | 80 | function isObject(item) { 81 | return item && typeof item === 'object' && !Array.isArray(item) 82 | } -------------------------------------------------------------------------------- /src/lib/fetch.js: -------------------------------------------------------------------------------- 1 | import { RateLimiter } from 'limiter' 2 | import { sanitize } from './url.js' 3 | import { AbortController } from 'node-abort-controller' 4 | import fetch from 'node-fetch' 5 | 6 | 7 | export function createFetch({ baseUrl, headers, ratelimit, timeout = 20 } = {}){ 8 | let limiter = ratelimit 9 | ? new RateLimiter({ 10 | tokensPerInterval: ratelimit, 11 | interval: 'minute' 12 | }) 13 | : null 14 | 15 | return async (url = '', options = {}) => { 16 | if(limiter) 17 | await limiter.removeTokens(1) 18 | 19 | let res 20 | let data 21 | let controller = new AbortController() 22 | let timeoutTimer = setTimeout(() => controller.abort(), timeout * 1000) 23 | let sanitizedUrl = sanitize(baseUrl ? `${baseUrl}/${url}` : url) 24 | 25 | try{ 26 | res = await fetch( 27 | sanitizedUrl, 28 | { 29 | signal: controller.signal, 30 | headers: { 31 | 'user-agent': 'XRPL-Meta-Node (https://xrplmeta.org)', 32 | ...headers, 33 | ...options.headers 34 | } 35 | } 36 | ) 37 | }catch(error){ 38 | res?.blob()?.catch(() => null) 39 | throw error 40 | }finally{ 41 | clearTimeout(timeoutTimer) 42 | } 43 | 44 | if(options.raw){ 45 | return res 46 | } 47 | 48 | try{ 49 | if(res.headers.get('content-type')?.includes('application/json')){ 50 | data = await res.json() 51 | }else if(res.headers.get('content-type')?.match(/(image\/|video\/|application\/octet-stream)/)){ 52 | data = Buffer.from(await res.arrayBuffer()) 53 | }else{ 54 | data = await res.text() 55 | } 56 | }catch{ 57 | data = null 58 | } 59 | 60 | return { 61 | status: res.status, 62 | headers: res.headers, 63 | data 64 | } 65 | } 66 | } -------------------------------------------------------------------------------- /src/lib/ipc.js: -------------------------------------------------------------------------------- 1 | export default function(){ 2 | let callbacks = [] 3 | 4 | return { 5 | emit(payload){ 6 | for(let callback of callbacks){ 7 | try{ 8 | callback(payload) 9 | }catch{ 10 | // *shrug* 11 | } 12 | } 13 | }, 14 | subscribe(callback){ 15 | callbacks.push(callback) 16 | } 17 | } 18 | } -------------------------------------------------------------------------------- /src/lib/url.js: -------------------------------------------------------------------------------- 1 | import { parse } from 'url' 2 | 3 | export function sanitize(url){ 4 | return url.slice(0, 8) + url.slice(8) 5 | .replace(/\/\//g,'/') 6 | .replace(/\/\.$/, '') 7 | .replace(/\/$/, '') 8 | .replace(/\?$/, '') 9 | } 10 | 11 | export function validate(url){ 12 | let { protocol, hostname } = parse(url) 13 | 14 | if(protocol !== 'http:' && protocol !== 'https:') 15 | return false 16 | 17 | if(hostname === 'localhost') 18 | return false 19 | 20 | if(hostname.includes(':')) 21 | return false 22 | 23 | if(!/[a-zA-Z]/.test(hostname)) 24 | return false 25 | 26 | return true 27 | } -------------------------------------------------------------------------------- /src/lib/version.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import { fileURLToPath } from 'url' 4 | 5 | 6 | const __filename = fileURLToPath(import.meta.url) 7 | const __dirname = path.dirname(__filename) 8 | const pkgPath = path.resolve(__dirname, '..', '..', 'package.json') 9 | const { version } = JSON.parse(fs.readFileSync(pkgPath)) 10 | 11 | 12 | export default version -------------------------------------------------------------------------------- /src/run.js: -------------------------------------------------------------------------------- 1 | import minimist from 'minimist' 2 | import log from '@mwni/log' 3 | import { find as findConfig } from './lib/config.js' 4 | import { load as loadConfig } from './lib/config.js' 5 | import { override as overrideConfig } from './lib/config.js' 6 | import startApp from './app/main.js' 7 | import rebuildCache from './cmd/rebuild-cache.js' 8 | import backup from './cmd/backup.js' 9 | import version from './lib/version.js' 10 | 11 | 12 | const args = minimist(process.argv.slice(2)) 13 | const configPath = args.config 14 | ? args.config 15 | : findConfig() 16 | 17 | 18 | log.config({ level: args.log || 'info', root: '.' }) 19 | .info(`*** XRPLMETA NODE ${version} ***`) 20 | .info(`using config at "${configPath}"`) 21 | 22 | 23 | const baseConfig = loadConfig(configPath, true) 24 | const config = overrideConfig(baseConfig, args) 25 | 26 | if(args._[0] === 'rebuild-cache'){ 27 | log.info(`rebuilding cache at "${config.node.dataDir}"`) 28 | await rebuildCache({ config, args }) 29 | }else if(args._[0] === 'backup'){ 30 | let destinationFile = args._[1] 31 | 32 | if(!destinationFile){ 33 | log.error(`backup destination file path is missing`) 34 | process.exit(1) 35 | } 36 | 37 | log.info(`writing backup to "${destinationFile}"`) 38 | await backup({ config, destinationFile }) 39 | }else if(args._.length === 0 || args._[0] === 'run'){ 40 | log.info(`data directory is at "${config.node.dataDir}"`) 41 | log.info(`will start app now`) 42 | 43 | const app = await startApp({ config, args }) 44 | 45 | process.on('SIGINT', async () => { 46 | await app.terminate() 47 | process.exit(0) 48 | }) 49 | }else{ 50 | log.error(`unknown command "${args._[0]}"`) 51 | process.exit(1) 52 | } -------------------------------------------------------------------------------- /src/srv/api.js: -------------------------------------------------------------------------------- 1 | import { sanitizeRange, sanitizePoint, sanitizeLimitOffset, sanitizeSourcePreferences } from './sanitizers/common.js' 2 | import { sanitizeToken, sanitizeTokenListSortBy, sanitizeNameLike, sanitizeTrustLevels } from './sanitizers/token.js' 3 | import { serveServerInfo } from './procedures/server.js' 4 | import { serveTokenSummary, serveTokenSeries, serveTokenPoint, serveTokenList, subscribeTokenList, unsubscribeTokenList, serveTokenExchanges } from './procedures/token.js' 5 | import { serveLedger } from './procedures/ledger.js' 6 | 7 | 8 | export const server_info = compose([ 9 | serveServerInfo() 10 | ]) 11 | 12 | export const ledger = compose([ 13 | sanitizePoint({ clamp: false }), 14 | serveLedger() 15 | ]) 16 | 17 | export const tokens = compose([ 18 | sanitizeLimitOffset({ defaultLimit: 100, maxLimit: 100000 }), 19 | sanitizeNameLike(), 20 | sanitizeTrustLevels(), 21 | sanitizeTokenListSortBy(), 22 | sanitizeSourcePreferences(), 23 | serveTokenList() 24 | ]) 25 | 26 | export const tokens_subscribe = compose([ 27 | sanitizeToken({ key: 'tokens', array: true }), 28 | sanitizeSourcePreferences(), 29 | subscribeTokenList(), 30 | tag({ mustRunMainThread: true }) 31 | ]) 32 | 33 | export const tokens_unsubscribe = compose([ 34 | sanitizeToken({ key: 'tokens', array: true }), 35 | unsubscribeTokenList(), 36 | tag({ mustRunMainThread: true }) 37 | ]) 38 | 39 | export const token = compose([ 40 | sanitizeToken({ key: 'token' }), 41 | sanitizeSourcePreferences(), 42 | serveTokenSummary() 43 | ]) 44 | 45 | export const token_metric = compose([ 46 | sanitizeToken({ key: 'token' }), 47 | sanitizePoint({ clamp: false }), 48 | serveTokenPoint() 49 | ]) 50 | 51 | export const token_series = compose([ 52 | sanitizeToken({ key: 'token' }), 53 | sanitizeRange({ withInterval: true }), 54 | serveTokenSeries() 55 | ]) 56 | 57 | export const token_exchanges = compose([ 58 | sanitizeToken({ key: 'base', allowXRP: true }), 59 | sanitizeToken({ key: 'quote', allowXRP: true }), 60 | sanitizeRange({ defaultToFullRange: true }), 61 | sanitizeLimitOffset({ defaultLimit: 100, maxLimit: 1000 }), 62 | serveTokenExchanges() 63 | ]) 64 | 65 | 66 | function compose(functions){ 67 | return args => functions.reduce( 68 | (v, f) => f(v), 69 | args 70 | ) 71 | } 72 | 73 | function tag(properties){ 74 | return f => Object.assign(f, properties) 75 | } -------------------------------------------------------------------------------- /src/srv/http.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import Router from '@koa/router' 4 | import sendFile from 'koa-send' 5 | import log from '@mwni/log' 6 | import * as procedures from './api.js' 7 | import { getCachedIconPath, iconSizes } from '../cache/icons.js' 8 | import { executeProcedure } from './worker.js' 9 | 10 | 11 | export function createRouter({ ctx }){ 12 | let router = new Router() 13 | 14 | router.get( 15 | ['/', '/info', '/server'], 16 | async svc => { 17 | await handle({ 18 | ctx, 19 | svc, 20 | procedure: 'server_info' 21 | }) 22 | } 23 | ) 24 | 25 | router.get( 26 | '/ledger', 27 | async svc => { 28 | await handle({ 29 | ctx, 30 | svc, 31 | procedure: 'ledger', 32 | params: { 33 | ...parsePoint(svc.query) 34 | } 35 | }) 36 | } 37 | ) 38 | 39 | router.get( 40 | '/tokens', 41 | async svc => { 42 | await handle({ 43 | ctx, 44 | svc, 45 | procedure: 'tokens', 46 | params: { 47 | ...svc.query, 48 | expand_meta: svc.query.expand_meta !== undefined, 49 | include_sources: svc.query.include_sources !== undefined, 50 | include_changes: svc.query.include_changes !== undefined, 51 | decode_currency: svc.query.decode_currency !== undefined, 52 | original_icons: svc.query.original_icons !== undefined, 53 | name_like: svc.query.name_like, 54 | trust_levels: svc.query.trust_levels 55 | ? svc.query.trust_levels.split(',') 56 | : undefined, 57 | prefer_sources: svc.query.prefer_sources 58 | ? svc.query.prefer_sources.split(',') 59 | : undefined 60 | } 61 | }) 62 | } 63 | ) 64 | 65 | router.get( 66 | '/tokens/exchanges/:base/:quote', 67 | async svc => { 68 | await handle({ 69 | ctx, 70 | svc, 71 | procedure: 'token_exchanges', 72 | params: { 73 | base: parseTokenURI(svc.params.base), 74 | quote: parseTokenURI(svc.params.quote), 75 | newestFirst: svc.query.newest_first !== undefined, 76 | ...parseRange(svc.query) 77 | } 78 | }) 79 | } 80 | ) 81 | 82 | router.get( 83 | '/token/:token', 84 | async svc => { 85 | await handle({ 86 | ctx, 87 | svc, 88 | procedure: 'token', 89 | params: { 90 | token: parseTokenURI(svc.params.token), 91 | expand_meta: svc.query.expand_meta !== undefined, 92 | include_sources: svc.query.include_sources !== undefined, 93 | include_changes: svc.query.include_changes !== undefined, 94 | decode_currency: svc.query.decode_currency !== undefined, 95 | original_icons: svc.query.original_icons !== undefined, 96 | prefer_sources: svc.query.prefer_sources 97 | ? svc.query.prefer_sources.split(',') 98 | : undefined 99 | } 100 | }) 101 | } 102 | ) 103 | 104 | router.get( 105 | '/token/:token/series/:metric', 106 | async svc => { 107 | await handle({ 108 | ctx, 109 | svc, 110 | procedure: 'token_series', 111 | params: { 112 | token: parseTokenURI(svc.params.token), 113 | metric: svc.params.metric, 114 | ...parseRange(svc.query) 115 | } 116 | }) 117 | } 118 | ) 119 | 120 | router.get( 121 | '/icon/:file', 122 | async svc => { 123 | try{ 124 | var [hash, fileType] = svc.params.file.split('.') 125 | 126 | if(!hash || !fileType) 127 | throw 'bad' 128 | }catch{ 129 | svc.status = 400 130 | svc.body = 'Invalid icon URL. The URL should consists of a hash and file extension, such as C0FFE.png' 131 | return 132 | } 133 | 134 | let size 135 | let suffix 136 | 137 | if(svc.query.size){ 138 | size = parseInt(svc.query.size) 139 | 140 | if(!iconSizes.includes(size)){ 141 | svc.status = 400 142 | svc.body = `The specified icon "${svc.query.size}" size is not available. Available sizes are: ${iconSizes}` 143 | return 144 | } 145 | 146 | suffix = `@${size}` 147 | } 148 | 149 | let iconPath = getCachedIconPath({ ctx, hash, suffix, fileType }) 150 | 151 | if(!fs.existsSync(iconPath)){ 152 | svc.status = 404 153 | svc.body = 'This icon does not exist. Make sure to only use icon URLs from the live token manifest.' 154 | return 155 | } 156 | 157 | await sendFile( 158 | svc, 159 | path.basename(iconPath), 160 | { 161 | root: path.dirname(iconPath) 162 | } 163 | ) 164 | } 165 | ) 166 | 167 | return router 168 | } 169 | 170 | 171 | async function handle({ ctx, svc, procedure, params = {} }){ 172 | if(!procedures[procedure]){ 173 | svc.throw(404) 174 | return 175 | } 176 | 177 | try{ 178 | svc.type = 'json' 179 | svc.body = await executeProcedure({ 180 | ctx, 181 | procedure, 182 | params 183 | }) 184 | }catch(e){ 185 | if(e.expose){ 186 | delete e.expose 187 | 188 | svc.status = 400 189 | svc.body = e 190 | }else{ 191 | svc.status = 500 192 | svc.body = { 193 | message: `Internal error while handling your request.` 194 | } 195 | log.warn(`internal error while handling procedure "${procedure}":\n${e.stack}\nparams:`, params) 196 | } 197 | } 198 | } 199 | 200 | 201 | function parseTokenURI(uri){ 202 | let [currency, issuer] = uri.split(':') 203 | 204 | return { 205 | currency, 206 | issuer 207 | } 208 | } 209 | 210 | function parseRange({ sequence_start, sequence_end, sequence_interval, time_start, time_end, time_interval }){ 211 | let range = {} 212 | 213 | if(sequence_start !== undefined){ 214 | range.sequence = { 215 | start: parseInt(sequence_start), 216 | end: sequence_end 217 | ? parseInt(sequence_end) 218 | : undefined 219 | } 220 | 221 | if(sequence_interval) 222 | range.sequence.interval = parseInt(sequence_interval) 223 | }else if(time_start !== undefined){ 224 | range.time = { 225 | start: parseInt(time_start), 226 | end: time_end 227 | ? parseInt(time_end) 228 | : undefined 229 | } 230 | 231 | if(time_interval) 232 | range.time.interval = parseInt(time_interval) 233 | } 234 | 235 | return range 236 | } 237 | 238 | function parsePoint({ sequence, time }){ 239 | if(sequence !== undefined){ 240 | return { sequence: parseInt(sequence) } 241 | }else if(time !== undefined){ 242 | return { time: parseInt(time) } 243 | } 244 | } -------------------------------------------------------------------------------- /src/srv/procedures/ledger.js: -------------------------------------------------------------------------------- 1 | import { readLedgerAt } from '../../db/helpers/ledgers.js' 2 | 3 | 4 | export function serveLedger(){ 5 | return ({ ctx, sequence, time }) => { 6 | let ledger = readLedgerAt({ 7 | ctx, 8 | sequence, 9 | time 10 | }) 11 | 12 | if(!ledger){ 13 | throw { 14 | type: `notFound`, 15 | message: `This server has no record of such a ledger. Check the available range using "server_info".`, 16 | expose: true 17 | } 18 | } 19 | 20 | return { 21 | sequence: ledger.sequence, 22 | hash: ledger.hash, 23 | close_time: ledger.closeTime, 24 | tx_count: ledger.txCount, 25 | fee_min: ledger.minFee, 26 | fee_max: ledger.maxFee, 27 | fee_avg: ledger.avgFee 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /src/srv/procedures/server.js: -------------------------------------------------------------------------------- 1 | import version from '../../lib/version.js' 2 | import { getAvailableRange } from '../../db/helpers/ledgers.js' 3 | 4 | 5 | export function serveServerInfo(){ 6 | return ({ ctx }) => { 7 | return { 8 | server_version: version, 9 | available_range: getAvailableRange({ ctx }), 10 | tokenlists: ctx.config.source.tokenlists 11 | ? ctx.config.source.tokenlists.map( 12 | list => ({ 13 | id: list.id, 14 | url: list.url, 15 | trust_level: list.trustLevel 16 | }) 17 | ) 18 | : [], 19 | total_tokens: Number(ctx.db.core.tokens.count()), 20 | total_nfts: 0 21 | } 22 | } 23 | } -------------------------------------------------------------------------------- /src/srv/sanitizers/common.js: -------------------------------------------------------------------------------- 1 | import { getAvailableRange, readLedgerAt } from '../../db/helpers/ledgers.js' 2 | 3 | export function sanitizePoint({ clamp = false }){ 4 | return ({ ctx, ...args }) => { 5 | let sequence 6 | let time 7 | 8 | if(clamp){ 9 | let available = getAvailableRange({ ctx }) 10 | 11 | if(args.hasOwnProperty('sequence')){ 12 | sequence = Math.min( 13 | Math.max( 14 | args.sequence, 15 | available.sequence.start 16 | ), 17 | available.sequence.end 18 | ) 19 | }else if(args.hasOwnProperty('time')){ 20 | time = Math.min( 21 | Math.max( 22 | args.time, 23 | available.time.start 24 | ), 25 | available.time.end 26 | ) 27 | 28 | sequence = readLedgerAt({ ctx, time }).sequence 29 | }else{ 30 | throw { 31 | type: `missingParam`, 32 | message: `This request is missing a ledger sequence or a timestamp.`, 33 | expose: true 34 | } 35 | } 36 | }else{ 37 | if(args.hasOwnProperty('sequence')){ 38 | sequence = args.sequence 39 | }else if(args.hasOwnProperty('time')){ 40 | time = args.time 41 | }else{ 42 | throw { 43 | type: `missingParam`, 44 | message: `This request is missing a ledger sequence or a timestamp.`, 45 | expose: true 46 | } 47 | } 48 | } 49 | 50 | return { 51 | ...args, 52 | ctx, 53 | sequence, 54 | time 55 | } 56 | } 57 | } 58 | 59 | export function sanitizeRange({ withInterval = false, defaultToFullRange = false } = {}){ 60 | return ({ ctx, ...args }) => { 61 | let available = getAvailableRange({ ctx }) 62 | let sequence 63 | let time 64 | let interval 65 | 66 | if(args.hasOwnProperty('sequence')){ 67 | sequence = minMaxRange({ 68 | requested: args.sequence, 69 | available: available.sequence 70 | }) 71 | 72 | if(withInterval){ 73 | if(args.sequence.hasOwnProperty('interval')){ 74 | sequence.interval = parseInt(args.sequence.interval) 75 | }else{ 76 | throw { 77 | type: `missingParam`, 78 | message: `This request is missing sequence interval specification.`, 79 | expose: true 80 | } 81 | } 82 | } 83 | }else if(args.hasOwnProperty('time')){ 84 | time = minMaxRange({ 85 | requested: args.time, 86 | available: available.time 87 | }) 88 | 89 | if(withInterval){ 90 | if(args.time.hasOwnProperty('interval')){ 91 | time.interval = parseInt(args.time.interval) 92 | }else{ 93 | throw { 94 | type: `missingParam`, 95 | message: `This request is missing time interval specification.`, 96 | expose: true 97 | } 98 | } 99 | } 100 | 101 | sequence = { 102 | start: readLedgerAt({ ctx, time: time.start }).sequence, 103 | end: readLedgerAt({ ctx, time: time.end }).sequence, 104 | } 105 | }else if(defaultToFullRange){ 106 | sequence = available.sequence 107 | time = available.time 108 | }else{ 109 | throw { 110 | type: `missingParam`, 111 | message: `This request is missing a sequence or time range.`, 112 | expose: true 113 | } 114 | } 115 | 116 | if(withInterval){ 117 | if((sequence?.interval || time?.interval) <= 0){ 118 | throw { 119 | type: `invalidParam`, 120 | message: `The interval has to be greater than zero.`, 121 | expose: true 122 | } 123 | } 124 | } 125 | 126 | return { 127 | ...args, 128 | ctx, 129 | sequence, 130 | time, 131 | interval 132 | } 133 | } 134 | } 135 | 136 | export function sanitizeLimitOffset({ defaultLimit, maxLimit }){ 137 | return ({ ctx, limit, offset, ...args }) => { 138 | return { 139 | ...args, 140 | ctx, 141 | limit: limit 142 | ? Math.min(parseInt(limit), maxLimit) 143 | : defaultLimit, 144 | offset: offset 145 | ? parseInt(offset) 146 | : undefined 147 | } 148 | } 149 | } 150 | 151 | export function sanitizeSourcePreferences(){ 152 | return ({ ctx, prefer_sources, ...args }) => { 153 | if(prefer_sources){ 154 | if(!Array.isArray(prefer_sources)){ 155 | throw { 156 | type: `invalidParam`, 157 | message: `The preferred sources need to be specified as an array.`, 158 | expose: true 159 | } 160 | } 161 | 162 | for(let source of prefer_sources){ 163 | if([ 164 | 'ledger', 165 | 'xrplmeta', 166 | 'xumm', 167 | 'domain', 168 | 'bithomp', 169 | 'xrpscan', 170 | 'twitter', 171 | 'gravatar' 172 | ].includes(source)) 173 | continue 174 | 175 | if(ctx.config.source.tokenlists){ 176 | if( 177 | ctx.config.source.tokenlists.some( 178 | list => list.id === source 179 | ) 180 | ) 181 | continue 182 | } 183 | 184 | throw { 185 | type: `invalidParam`, 186 | message: `The preferred source "${source}" does not exist.`, 187 | expose: true 188 | } 189 | } 190 | 191 | } 192 | 193 | return { 194 | ...args, 195 | ctx, 196 | prefer_sources 197 | } 198 | } 199 | } 200 | 201 | 202 | function minMaxRange({ requested, available }){ 203 | let start 204 | let end 205 | 206 | if(requested.start !== undefined){ 207 | if(requested.start < 0) 208 | start = Math.min(requested.start + available.start, available.end) 209 | else 210 | start = Math.min(Math.max(requested.start, available.start), available.end) 211 | }else{ 212 | start = available.start 213 | } 214 | 215 | if(requested.end !== undefined){ 216 | if(requested.end < 0) 217 | end = Math.max(requested.end + available.end, available.start) 218 | else 219 | end = Math.min(Math.max(requested.end, available.end), available.end) 220 | }else{ 221 | end = available.end 222 | } 223 | 224 | return { start, end } 225 | } -------------------------------------------------------------------------------- /src/srv/sanitizers/token.js: -------------------------------------------------------------------------------- 1 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 2 | import { isValidClassicAddress } from 'ripple-address-codec' 3 | 4 | 5 | const sortKeymap = { 6 | trustlines_delta_24h: 'trustlinesDelta24H', 7 | trustlines_percent_24h: 'trustlinesPercent24H', 8 | trustlines_delta_7d: 'trustlinesDelta7D', 9 | trustlines_percent_7d: 'trustlinesPercent7D', 10 | holders: 'holders', 11 | holders_delta_24h: 'holdersDelta24H', 12 | holders_percent_24h: 'holdersPercent24H', 13 | holders_delta_7d: 'holdersDelta7D', 14 | holders_percent_7d: 'holdersPercent7D', 15 | supply: 'supply', 16 | supply_delta_24h: 'supplyDelta24H', 17 | supply_percent_24h: 'supplyPercent24H', 18 | supply_delta_7d: 'supplyDelta7D', 19 | supply_percent_7d: 'supplyPercent7D', 20 | marketcap: 'marketcap', 21 | marketcap_delta_24h: 'marketcapDelta24H', 22 | marketcap_percent_24h: 'marketcapPercent24H', 23 | marketcap_delta_7d: 'marketcapDelta7D', 24 | marketcap_percent_7d: 'marketcapPercent7D', 25 | price_percent_24h: 'pricePercent24H', 26 | price_percent_7d: 'pricePercent7D', 27 | volume_24h: 'volume24H', 28 | volume_7d: 'volume7D', 29 | exchanges_24h: 'exchanges24H', 30 | exchanges_7d: 'exchanges7D', 31 | takers_24h: 'takers24H', 32 | takers_7d: 'takers7D', 33 | trustlines: 'trustlines', 34 | } 35 | 36 | 37 | export function sanitizeToken({ key, array = false, allowXRP = false }){ 38 | function parse(ctx, { currency, issuer }){ 39 | if(currency === 'XRP'){ 40 | if(allowXRP) 41 | return { 42 | id: 1, 43 | currency: 'XRP' 44 | } 45 | else 46 | throw { 47 | type: `invalidParam`, 48 | message: `XRP is not allowed as parameter.`, 49 | expose: true 50 | } 51 | }else{ 52 | if(!isValidClassicAddress(issuer)) 53 | throw { 54 | type: `invalidParam`, 55 | message: `The issuing address "${key}.issuer" is malformed.`, 56 | expose: true 57 | } 58 | } 59 | 60 | let token = ctx.db.core.tokens.readOne({ 61 | where: { 62 | currency: currencyUTF8ToHex(currency), 63 | issuer: { 64 | address: issuer 65 | } 66 | }, 67 | include: { 68 | issuer: true 69 | } 70 | }) 71 | 72 | if(!token){ 73 | throw { 74 | type: `entryNotFound`, 75 | message: `The token '${currency}' issued by '${issuer}' does not exist.`, 76 | expose: true 77 | } 78 | } 79 | 80 | return token 81 | } 82 | 83 | return ({ ctx, ...args }) => { 84 | if(!args.hasOwnProperty(key)) 85 | throw { 86 | type: `missingParam`, 87 | message: `No token specified.`, 88 | expose: true 89 | } 90 | 91 | if(array){ 92 | return { 93 | ...args, 94 | ctx, 95 | [key]: args[key].map(token => parse(ctx, token)), 96 | } 97 | }else{ 98 | return { 99 | ...args, 100 | ctx, 101 | [key]: parse(ctx, args[key]), 102 | } 103 | } 104 | } 105 | } 106 | 107 | export function sanitizeNameLike(){ 108 | return ({ ctx, name_by, ...args }) => { 109 | if(name_by){ 110 | if(typeof name_by !== 'string'){ 111 | throw { 112 | type: `invalidParam`, 113 | message: `The "name_by" term has to be a string.`, 114 | expose: true 115 | } 116 | } 117 | 118 | if(name_by.length === 0){ 119 | throw { 120 | type: `invalidParam`, 121 | message: `The "name_by" term has to be at least one character long.`, 122 | expose: true 123 | } 124 | } 125 | } 126 | 127 | return { 128 | ...args, 129 | ctx, 130 | name_by 131 | } 132 | } 133 | } 134 | 135 | export function sanitizeTrustLevels(){ 136 | return ({ ctx, trust_level, trust_levels, ...args }) => { 137 | trust_levels = trust_level || trust_levels 138 | 139 | if(trust_levels){ 140 | if(!Array.isArray(trust_levels)){ 141 | throw { 142 | type: `invalidParam`, 143 | message: `The trust levels need to be specified as an array.`, 144 | expose: true 145 | } 146 | } 147 | 148 | trust_levels = trust_levels.map(level => parseInt(level)) 149 | 150 | if(trust_levels.some(level => level < 0 || level > 3)){ 151 | throw { 152 | type: `invalidParam`, 153 | message: `The trust levels need to be between 0 and 3.`, 154 | expose: true 155 | } 156 | } 157 | } 158 | 159 | return { 160 | ...args, 161 | ctx, 162 | trust_levels 163 | } 164 | } 165 | } 166 | 167 | export function sanitizeTokenListSortBy(){ 168 | return ({ ctx, sort_by, ...args }) => { 169 | if(sort_by){ 170 | sort_by = sortKeymap[sort_by] 171 | 172 | if(!sort_by){ 173 | throw { 174 | type: `invalidParam`, 175 | message: `This sorting mode is not allowed. Possible values are: ${ 176 | Object.keys(sortKeymap) 177 | .map(key => `${key}`) 178 | .join(', ') 179 | }'`, 180 | expose: true 181 | } 182 | } 183 | } 184 | 185 | return { 186 | ...args, 187 | ctx, 188 | sort_by 189 | } 190 | } 191 | } -------------------------------------------------------------------------------- /src/srv/server.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import Koa from 'koa' 3 | import websocket from 'koa-easy-ws' 4 | import json from 'koa-json' 5 | import { createRouter } from './http.js' 6 | import { createManager } from './ws.js' 7 | import { spawnWorkers } from './worker.js' 8 | 9 | 10 | export async function startServer({ ctx }){ 11 | if(!ctx.config.api.publicUrl){ 12 | let fallbackUrl = `http://localhost:${ctx.config.api.port}` 13 | 14 | log.warn(`public URL not set in config - using fallback: ${fallbackUrl}\n >> consider setting "public_url" in the [API] stanza of your config.toml`) 15 | 16 | ctx = { 17 | ...ctx, 18 | config: { 19 | ...ctx.config, 20 | api: { 21 | ...ctx.config.api, 22 | publicUrl: fallbackUrl 23 | } 24 | } 25 | } 26 | } 27 | 28 | ctx = { 29 | ...ctx, 30 | workers: await spawnWorkers({ ctx }) 31 | } 32 | 33 | let koa = new Koa() 34 | let router = createRouter({ ctx }) 35 | let ws = createManager({ ctx }) 36 | 37 | koa.use(websocket()) 38 | koa.use(async (ctx, next) => { 39 | ctx.req.on('error', error => { 40 | log.debug(`client error: ${error.message}`) 41 | }) 42 | 43 | if(ctx.ws){ 44 | ctx.req.socket.ignoreTimeout = true 45 | ws.registerSocket(await ctx.ws()) 46 | }else{ 47 | return await next(ctx) 48 | } 49 | }) 50 | 51 | koa.use(json({ pretty: true })) 52 | koa.use(router.routes(), router.allowedMethods()) 53 | 54 | koa.listen(ctx.config.api.port) 55 | .on('clientError', (error, socket) => { 56 | if(error.code === 'ERR_HTTP_REQUEST_TIMEOUT' && socket.ignoreTimeout) 57 | return 58 | 59 | log.debug(`client error:`, error) 60 | socket.destroy() 61 | }) 62 | .on('error', error => { 63 | log.warn(`server error: ${error.message}`) 64 | }) 65 | 66 | 67 | log.info(`listening on port ${ctx.config.api.port}`) 68 | 69 | await new Promise(resolve => { 70 | koa.on('close', resolve) 71 | }) 72 | } 73 | 74 | console.errorOrg = console.error 75 | console.error = text => /.*Error: (write|read) ECONN.*/g.test(text) 76 | ? undefined 77 | : console.errorOrg(text) -------------------------------------------------------------------------------- /src/srv/worker.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import * as procedures from './api.js' 5 | 6 | 7 | export async function spawnWorkers({ ctx }){ 8 | let num = ctx.config.api.workers || 3 9 | let { db, ...workerCtx } = ctx 10 | 11 | log.info(`spawning ${num} workers`) 12 | 13 | return Promise.all( 14 | Array(num).fill(0).map( 15 | async () => await spawn(':runWorker', { ctx: workerCtx }) 16 | ) 17 | ) 18 | } 19 | 20 | export async function executeProcedure({ ctx, procedure, params, requestId }){ 21 | let func = procedures[procedure] 22 | 23 | if(func.mustRunMainThread){ 24 | return json(await func({ ...params, ctx }), requestId) 25 | } 26 | 27 | let now = Date.now() 28 | let worker = ctx.workers 29 | .map(worker => ({ worker, score: now - (worker.lastRequestTime || 0) - worker.busy * 1000000000 })) 30 | .sort((a, b) => b.score - a.score) 31 | .at(0) 32 | .worker 33 | 34 | worker.busy = true 35 | worker.lastRequestTime = now 36 | 37 | try{ 38 | return await worker.execute({ procedure, params, requestId }) 39 | }catch(error){ 40 | throw error 41 | }finally{ 42 | worker.busy = false 43 | } 44 | } 45 | 46 | export async function runWorker({ ctx }){ 47 | if(ctx.log) 48 | log.pipe(ctx.log) 49 | 50 | ctx = { 51 | ...ctx, 52 | db: await openDB({ ctx }) 53 | } 54 | 55 | return { 56 | async execute({ procedure, params, requestId }){ 57 | return json(await procedures[procedure]({ ...params, ctx }), requestId) 58 | } 59 | } 60 | } 61 | 62 | function json(data, requestId){ 63 | if(requestId) 64 | data = { result: data, id: requestId } 65 | 66 | return JSON.stringify(data, null, 2) 67 | } -------------------------------------------------------------------------------- /src/srv/ws.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import * as procedures from './api.js' 3 | import { formatTokenCache } from './procedures/token.js' 4 | import { executeProcedure } from './worker.js' 5 | 6 | 7 | const checkAliveInterval = 10000 8 | 9 | 10 | export function createManager({ ctx }){ 11 | let clients = [] 12 | let counter = 0 13 | 14 | setInterval( 15 | () => { 16 | for(let client of clients){ 17 | if(!client.alive){ 18 | client.socket.close() 19 | log.debug(`client #${client.id} inactivity kick`) 20 | continue 21 | } 22 | 23 | client.alive = false 24 | client.socket.ping() 25 | } 26 | }, 27 | checkAliveInterval 28 | ) 29 | 30 | ctx.ipc.subscribe( 31 | async payload => { 32 | if(payload.tokenUpdate){ 33 | let token = payload.tokenUpdate.token 34 | let key = `${token.id}` 35 | let recipients = [] 36 | 37 | for(let client of clients){ 38 | let subscription = client.tokenSubscriptions[key] 39 | 40 | if(subscription){ 41 | recipients.push({ 42 | client, 43 | subscription 44 | }) 45 | } 46 | } 47 | 48 | if(recipients.length > 0){ 49 | pushTokenUpdate({ 50 | ctx, 51 | token, 52 | recipients 53 | }) 54 | } 55 | } 56 | } 57 | ) 58 | 59 | function logCount(change){ 60 | log.accumulate.info({ 61 | text: [ 62 | clients.length, 63 | `client(s) connected (%wsConnectionChange in %time)` 64 | ], 65 | data: { 66 | wsConnectionChange: change 67 | } 68 | }) 69 | } 70 | 71 | return { 72 | registerSocket(socket){ 73 | let client = { 74 | id: ++counter, 75 | socket, 76 | tokenSubscriptions: {}, 77 | alive: true 78 | } 79 | 80 | socket.on('message', async message => { 81 | try{ 82 | var { id, command, ...params } = JSON.parse(message) 83 | }catch{ 84 | log.debug(`client #${client.id} sent malformed request - dropping them`) 85 | socket.close() 86 | } 87 | 88 | try{ 89 | if(!procedures[command]){ 90 | throw { 91 | message: 'unknown command', 92 | expose: true 93 | } 94 | } 95 | 96 | socket.send( 97 | await executeProcedure({ 98 | ctx: { 99 | ...ctx, 100 | client 101 | }, 102 | procedure, 103 | params, 104 | requestId: id 105 | }) 106 | ) 107 | }catch(error){ 108 | let response = null 109 | 110 | if(typeof error === 'object'){ 111 | if(error.expose){ 112 | response = error 113 | delete response.expose 114 | } 115 | } 116 | 117 | if(!response){ 118 | log.debug(`internal server error while serving client #${client.id}:`, error.message) 119 | response = {message: 'internal server error'} 120 | } 121 | 122 | response.request = { 123 | ...params, 124 | command, 125 | } 126 | 127 | socket.send( 128 | JSON.stringify({ 129 | id, 130 | error: response 131 | }) 132 | ) 133 | } 134 | }) 135 | 136 | socket.on('pong', () => { 137 | client.alive = true 138 | }) 139 | 140 | socket.on('close', () => { 141 | clients.splice(clients.indexOf(client)) 142 | log.debug(`client #${client.id} disconnected`) 143 | logCount(-1) 144 | }) 145 | 146 | socket.on('error', error => { 147 | log.info(`client #${client.id} websocket error: ${error.message}`) 148 | }) 149 | 150 | clients.push(client) 151 | 152 | log.debug(`new connection (#${client.id} ${socket._socket.remoteAddress})`) 153 | logCount(1) 154 | } 155 | } 156 | } 157 | 158 | function pushTokenUpdate({ ctx, token, recipients }){ 159 | if(!token.id) 160 | throw new Error(`token.id required`) 161 | 162 | let cache = ctx.db.cache.tokens.readOne({ 163 | where: { 164 | token: token.id 165 | }, 166 | include: { 167 | token: { 168 | issuer: true 169 | } 170 | } 171 | }) 172 | 173 | for(let { client, subscription } of recipients){ 174 | client.socket.send( 175 | JSON.stringify({ 176 | type: 'tokenUpdate', 177 | token: formatTokenCache({ 178 | ctx, 179 | cache, 180 | decodeCurrency: subscription.decode_currency, 181 | preferSources: subscription.prefer_sources, 182 | expandMeta: subscription.include_sources, 183 | includeChanges: subscription.include_changes, 184 | }) 185 | }) 186 | ) 187 | } 188 | } -------------------------------------------------------------------------------- /src/xrpl/blackhole.js: -------------------------------------------------------------------------------- 1 | const blackholeAccounts = [ 2 | 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 3 | 'rrrrrrrrrrrrrrrrrrrrBZbvji', 4 | 'rrrrrrrrrrrrrrrrrNAMEtxvNvQ', 5 | 'rrrrrrrrrrrrrrrrrrrn5RM1rHd' 6 | ] 7 | 8 | export function isBlackholed(ledgerEntry){ 9 | if(!blackholeAccounts.includes(ledgerEntry.RegularKey)) 10 | return false 11 | 12 | // master key disabled 13 | if(ledgerEntry.Flags & 0x00100000 == 0) 14 | return false 15 | 16 | return true 17 | } -------------------------------------------------------------------------------- /src/xrpl/ledger.js: -------------------------------------------------------------------------------- 1 | import { rippleToUnix } from '@xrplkit/time' 2 | 3 | export async function fetch({ ctx, sequence }){ 4 | let { result } = await ctx.xrpl.request({ 5 | command: 'ledger', 6 | ledger_index: sequence, 7 | transactions: true, 8 | expand: true 9 | }) 10 | 11 | return format(result.ledger) 12 | } 13 | 14 | export function format(ledger){ 15 | return { 16 | sequence: parseInt(ledger.ledger_index), 17 | hash: ledger.ledger_hash, 18 | closeTime: rippleToUnix(ledger.close_time || ledger.ledger_time), 19 | transactions: ledger.transactions 20 | .map( 21 | tx => tx.transaction 22 | ? { ...tx.transaction, metaData: tx.meta } 23 | : tx 24 | ) 25 | } 26 | } -------------------------------------------------------------------------------- /src/xrpl/node.js: -------------------------------------------------------------------------------- 1 | import EventEmitter from 'events' 2 | import createSocket from '@xrplkit/socket' 3 | import log from '@mwni/log' 4 | 5 | 6 | export default class Node extends EventEmitter{ 7 | constructor(config){ 8 | super() 9 | 10 | this.name = config.url 11 | .replace(/^wss?:\/\//, '') 12 | .replace(/:[0-9]+/, '') 13 | 14 | this.tasks = [] 15 | this.socket = createSocket({ url: config.url }) 16 | this.availableLedgers = [] 17 | 18 | this.socket.on('transaction', tx => { 19 | this.emit('event', {hash: tx.transaction.hash, tx}) 20 | }) 21 | 22 | this.socket.on('ledgerClosed', ledger => { 23 | this.emit('event', {hash: ledger.ledger_hash, ledger}) 24 | this.hasReportedClosedLedger = true 25 | 26 | if(ledger.validated_ledgers){ 27 | this.availableLedgers = ledger.validated_ledgers 28 | .split(',') 29 | .map(range => range 30 | .split('-') 31 | .map(i => parseInt(i)) 32 | ) 33 | } 34 | }) 35 | 36 | this.socket.on('open', async () => { 37 | this.hasReportedClosedLedger = false 38 | this.emit('connected') 39 | 40 | try{ 41 | await this.socket.request({ 42 | command: 'subscribe', 43 | streams: ['ledger', 'transactions'] 44 | }) 45 | }catch(error){ 46 | log.warn(`failed to subscribe to node "${this.name}":`) 47 | log.warn(error) 48 | } 49 | }) 50 | 51 | this.socket.on('close', async event => { 52 | this.error = event.reason 53 | ? event.reason 54 | : `code ${event.code}` 55 | 56 | this.emit('disconnected') 57 | }) 58 | 59 | this.socket.on('error', error => { 60 | this.error = error.message 61 | ? error.message 62 | : `unknown connection failure` 63 | 64 | this.emit('error') 65 | }) 66 | } 67 | 68 | get status(){ 69 | return this.socket.status() 70 | } 71 | 72 | bid(payload){ 73 | if(this.busy || !this.status.connected || !this.hasReportedClosedLedger) 74 | return 0 75 | 76 | if(payload.command){ 77 | if(payload.ticket){ 78 | if(this.tasks.some(task => task.ticket === payload.ticket)) 79 | return Infinity 80 | else 81 | return 0 82 | } 83 | 84 | if(payload.ledger_index && this.availableLedgers.length > 0){ 85 | let hasLedger = payload.ledger_index === 'validated' || this.availableLedgers.some( 86 | ([start, end]) => payload.ledger_index >= start && payload.ledger_index <= end 87 | ) 88 | 89 | if(hasLedger) 90 | return 2 91 | else 92 | return 0 93 | } 94 | 95 | return 1 96 | }else if(payload.type === 'reserveTicket'){ 97 | if(payload.node){ 98 | if(payload.node !== this.name) 99 | return 0 100 | } 101 | 102 | return 1 103 | } 104 | } 105 | 106 | async do(payload){ 107 | this.busy = true 108 | 109 | try{ 110 | if(payload.command){ 111 | return await this.socket.request(payload) 112 | }else if(payload.type === 'reserveTicket'){ 113 | let ticket = Math.random() 114 | .toString(16) 115 | .toUpperCase() 116 | .slice(2, 10) 117 | 118 | this.tasks.push({ 119 | type: payload.task, 120 | ticket, 121 | node: this.name 122 | }) 123 | 124 | return {ticket} 125 | } 126 | }catch(error){ 127 | throw error 128 | }finally{ 129 | this.busy = false 130 | } 131 | } 132 | 133 | disconnect(){ 134 | this.socket.close() 135 | } 136 | } -------------------------------------------------------------------------------- /src/xrpl/nodepool.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { createEmitter } from '@mwni/events' 3 | import { wait } from '@xrplkit/time' 4 | import { format as formatLedger } from './ledger.js' 5 | import Node from './node.js' 6 | 7 | 8 | export function createPool(sources){ 9 | let events = new createEmitter() 10 | let seenHashes = [] 11 | let queue = [] 12 | let nodes = [] 13 | let latestLedger 14 | let closed = false 15 | 16 | async function workQueue(){ 17 | while(!closed){ 18 | for(let i=0; i ({node, bid: node.bid(request.payload)})) 22 | .sort((a, b) => b.bid - a.bid) 23 | 24 | if(bestBid.bid <= 0) 25 | continue 26 | 27 | request.accepted() 28 | 29 | bestBid.node.do(request.payload) 30 | .then(result => request.resolve({result, node: bestBid.node.name})) 31 | .catch(error => request.reject({ 32 | error: error.message || error.stack || error.error_message, 33 | node: bestBid.node.name 34 | })) 35 | 36 | queue.splice(i--, 1) 37 | } 38 | 39 | await wait(100) 40 | } 41 | } 42 | 43 | function sawHash(hash){ 44 | if(seenHashes.includes(hash)) 45 | return true 46 | 47 | seenHashes.push(hash) 48 | 49 | if(seenHashes.length > 10000) 50 | seenHashes.shift() 51 | } 52 | 53 | function warnAllLost(){ 54 | if(nodes.some(node => node.status.connected)) 55 | return 56 | 57 | log.warn(`lost connection to all nodes`) 58 | } 59 | 60 | 61 | log.info(`using nodes:`) 62 | 63 | for(let spec of sources){ 64 | let connections = spec.connections || 1 65 | 66 | for(let i=0; i { 71 | log.info( 72 | firstConnect 73 | ? `connected to ${spec.url}` 74 | : `reconnected to ${spec.url}` 75 | ) 76 | 77 | firstConnect = false 78 | }) 79 | 80 | node.on('disconnected', () => { 81 | log.info(`lost connection to ${spec.url}:`, node.error) 82 | warnAllLost() 83 | }) 84 | 85 | node.on('error', () => { 86 | log.debug(`failed to connect to ${spec.url}:`, node.error) 87 | }) 88 | 89 | node.on('event', ({ hash, tx, ledger }) => { 90 | if(sawHash(hash)) 91 | return 92 | 93 | if(ledger){ 94 | latestLedger = { ...ledger, transactions: [] } 95 | } 96 | 97 | if(latestLedger){ 98 | if(tx){ 99 | latestLedger.transactions.push(tx) 100 | } 101 | 102 | if(latestLedger.transactions.length === latestLedger.txn_count){ 103 | events.emit('ledger', formatLedger(latestLedger)) 104 | } 105 | } 106 | }) 107 | 108 | nodes.push(node) 109 | } 110 | 111 | log.info(` -> ${spec.url}`) 112 | } 113 | 114 | workQueue() 115 | 116 | return Object.assign( 117 | events, 118 | { 119 | request(payload){ 120 | return new Promise((resolve, reject) => { 121 | let timeout = setTimeout(() => reject('noNodeAcceptedRequest'), 30000) 122 | let accepted = () => clearTimeout(timeout) 123 | 124 | queue.push({ 125 | payload, 126 | resolve, 127 | reject, 128 | accepted 129 | }) 130 | }) 131 | }, 132 | close(){ 133 | closed = true 134 | 135 | for(let node of nodes){ 136 | node.disconnect() 137 | } 138 | }, 139 | get connectionsCount(){ 140 | return nodes.length 141 | } 142 | } 143 | ) 144 | } -------------------------------------------------------------------------------- /src/xrpl/snapshot.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | 4 | export async function start({ ctx, ledgerSequence, marker, node }){ 5 | if(ctx.log) 6 | log.pipe(ctx.log) 7 | 8 | let chunkSize = ctx.config.node.snapshotChunkSize || 10000 9 | let queue = [] 10 | 11 | let { result, node: assignedNode } = await ctx.xrpl.request({ 12 | type: 'reserveTicket', 13 | task: 'snapshot', 14 | ledgerSequence, 15 | node 16 | }) 17 | 18 | let ticket = result.ticket 19 | let fetching = true 20 | let resolveNext 21 | 22 | log.info(`reserved snapshot ticket with node`, assignedNode) 23 | 24 | let promise = (async() => { 25 | while(true){ 26 | while(queue.length >= 10) 27 | await wait(100) 28 | 29 | try{ 30 | let { result } = await ctx.xrpl.request({ 31 | command: 'ledger_data', 32 | ledger_index: ledgerSequence, 33 | limit: chunkSize, 34 | marker, 35 | ticket 36 | }) 37 | 38 | queue.push({ 39 | objects: result.state, 40 | marker: result.marker 41 | }) 42 | 43 | marker = result.marker 44 | 45 | if(resolveNext) 46 | resolveNext() 47 | 48 | }catch(e){ 49 | log.info(`could not fetch ledger chunk:`, e.error ? e.error : e) 50 | await wait(2500) 51 | continue 52 | } 53 | 54 | if(!marker){ 55 | fetching = false 56 | break 57 | } 58 | } 59 | })() 60 | 61 | return { 62 | ledgerSequence, 63 | node: assignedNode, 64 | async next(){ 65 | if(queue.length > 0) 66 | return queue.shift() 67 | 68 | if(!fetching) 69 | return 70 | 71 | await new Promise(resolve => resolveNext = resolve) 72 | 73 | return queue.shift() 74 | } 75 | } 76 | } -------------------------------------------------------------------------------- /src/xrpl/stream.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | import { fetch as fetchLedger } from './ledger.js' 4 | 5 | 6 | 7 | export async function createForwardStream({ ctx, startSequence }){ 8 | if(ctx.log) 9 | log.pipe(ctx.log) 10 | 11 | let latestLedger 12 | 13 | while(!latestLedger){ 14 | try{ 15 | latestLedger = await fetchLedger({ 16 | ctx, 17 | sequence: 'validated' 18 | }) 19 | }catch(error){ 20 | log.warn(`cannot start forward stream, unable get latest ledger: \n${error}`) 21 | await wait(1000) 22 | } 23 | } 24 | 25 | let stream = createRegistry({ 26 | name: 'live', 27 | startSequence, 28 | targetSequence: latestLedger.sequence, 29 | maxSize: ctx.config.node.streamQueueSize || 100 30 | }) 31 | 32 | ctx.xrpl.on('ledger', ledger => { 33 | stream.extend(ledger) 34 | }) 35 | 36 | createFiller({ ctx, stream, stride: 1 }) 37 | 38 | return stream 39 | } 40 | 41 | export async function createBackwardStream({ ctx, startSequence }){ 42 | if(ctx.log) 43 | log.pipe(ctx.log) 44 | 45 | let stream = createRegistry({ 46 | name: 'backfill', 47 | startSequence, 48 | targetSequence: ctx.config.source.backfillToLedger || 0, 49 | maxSize: ctx.config.source.streamQueueSize || 100 50 | }) 51 | 52 | createFiller({ ctx, stream, stride: -1 }) 53 | 54 | return stream 55 | } 56 | 57 | 58 | function createRegistry({ name, startSequence, targetSequence, maxSize }){ 59 | let currentSequence = startSequence 60 | let ledgers = {} 61 | let resolveNext = () => 0 62 | 63 | return { 64 | get currentSequence(){ 65 | return currentSequence 66 | }, 67 | 68 | get targetSequence(){ 69 | return targetSequence 70 | }, 71 | 72 | get queueSize(){ 73 | return Object.keys(ledgers).length 74 | }, 75 | 76 | has(sequence){ 77 | return !!ledgers[sequence] 78 | }, 79 | 80 | accepts(sequence){ 81 | return Math.abs(sequence - currentSequence) <= maxSize 82 | }, 83 | 84 | extend(ledger){ 85 | targetSequence = Math.max(targetSequence, ledger.sequence) 86 | 87 | if(this.accepts(ledger.sequence)) 88 | this.put(ledger) 89 | }, 90 | 91 | put(ledger){ 92 | ledgers[ledger.sequence] = ledger 93 | resolveNext() 94 | 95 | if(this.queueSize > 1){ 96 | log.accumulate.debug({ 97 | text: [ 98 | `${name} queue has`, 99 | this.queueSize, 100 | `ledgers`, 101 | `(+%${name}QueueAdd in %time)` 102 | ], 103 | data: { 104 | [`${name}QueueAdd`]: 1 105 | } 106 | }) 107 | } 108 | }, 109 | 110 | status(){ 111 | return { 112 | currentSequence, 113 | targetSequence 114 | } 115 | }, 116 | 117 | async next(){ 118 | while(!ledgers[currentSequence]){ 119 | await new Promise(resolve => resolveNext = resolve) 120 | } 121 | 122 | let ledger = ledgers[currentSequence] 123 | 124 | delete ledgers[currentSequence] 125 | 126 | currentSequence += targetSequence >= currentSequence ? 1 : -1 127 | 128 | return { 129 | ledger, 130 | ledgersBehind: targetSequence - currentSequence 131 | } 132 | } 133 | } 134 | } 135 | 136 | function createFiller({ ctx, stream, stride }){ 137 | let reservations = {} 138 | 139 | for(let n=0; n { 141 | let sequence = stream.currentSequence 142 | 143 | while(true){ 144 | let stepsToTarget = (stream.targetSequence - sequence) * stride 145 | let stepsBehindCurrent = (stream.currentSequence - sequence) * stride 146 | 147 | if(stepsToTarget < 0){ 148 | await wait(100) 149 | continue 150 | } 151 | 152 | if(!stream.accepts(sequence)){ 153 | await wait(1000) 154 | continue 155 | } 156 | 157 | if(stepsBehindCurrent > 0 || reservations[sequence] || stream.has(sequence)){ 158 | sequence += stride 159 | continue 160 | } 161 | 162 | reservations[sequence] = true 163 | 164 | try{ 165 | stream.put( 166 | await fetchLedger({ 167 | ctx, 168 | sequence 169 | }) 170 | ) 171 | }catch(error){ 172 | log.warn(`failed to fetch ledger #${sequence}:`, error) 173 | await wait(1000) 174 | }finally{ 175 | delete reservations[sequence] 176 | } 177 | } 178 | })() 179 | } 180 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.bithomp.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/bithomp.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.domains.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/domains.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.gravatar.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/gravatar.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.tokenlists.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/tokenlists.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.twitter.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/twitter.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.xrpscan.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/xrpscan.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.xumm.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/xumm.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/icon.cache.js: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import log from '@mwni/log' 3 | import { createContext } from '../../unit/env.js' 4 | import { writeTokenProps } from '../../../src/db/helpers/props.js' 5 | import { updateIconCacheFor } from '../../../src/cache/icons.js' 6 | 7 | 8 | 9 | export default async ({ config, args }) => { 10 | let ctx = await createContext() 11 | let iconUrl = args._[1] 12 | 13 | if(!iconUrl) 14 | throw new Error(`no icon url provided. use: npm livetest icon.cache [url]`) 15 | 16 | let token = { 17 | currency: '000', 18 | issuer: { 19 | address: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 20 | }, 21 | props: { 22 | icon: iconUrl 23 | } 24 | } 25 | 26 | writeTokenProps({ 27 | ctx, 28 | token: { 29 | currency: token.currency, 30 | issuer: token.issuer 31 | }, 32 | props: token.props, 33 | source: 'test' 34 | }) 35 | 36 | log.config({ level: 'debug' }) 37 | log.info(`downloading and caching ${iconUrl}...`) 38 | 39 | await updateIconCacheFor({ 40 | ctx, 41 | token: { 42 | currency: token.currency, 43 | issuer: token.issuer 44 | } 45 | }) 46 | 47 | log.info(`icon cache registry:`, ctx.db.cache.icons.readMany()[0]) 48 | log.info(`generated token meta:`, ctx.db.cache.tokens.readOne({ 49 | where: { 50 | token: 2 51 | } 52 | }).cachedIcons) 53 | log.info(`icon file and variants cached at ${path.join(ctx.config.node.dataDir, 'media', 'icons')}`) 54 | } -------------------------------------------------------------------------------- /test/live/cases/toml.read.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { createFetch } from '../../../src/lib/fetch.js' 4 | import { fetchToml } from '../../../src/crawl/crawlers/domains.js' 5 | 6 | 7 | export default async ({ config, args }) => { 8 | let domain = args._[1] 9 | let fetch = createFetch() 10 | 11 | if(!domain) 12 | throw new Error(`no domain provided. use: npm livetest toml.read [domain]`) 13 | 14 | let xls26 = await fetchToml({ domain, fetch }) 15 | 16 | log.info(`parsed xls26:\n`, xls26) 17 | } -------------------------------------------------------------------------------- /test/live/run.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import minimist from 'minimist' 4 | import { fileURLToPath } from 'url' 5 | import log from '@mwni/log' 6 | import { find as findConfig } from '../../src/lib/config.js' 7 | import { load as loadConfig } from '../../src/lib/config.js' 8 | import { override as overrideConfig } from '../../src/lib/config.js' 9 | 10 | 11 | const __filename = fileURLToPath(import.meta.url) 12 | const __dirname = path.dirname(__filename) 13 | 14 | 15 | const args = minimist(process.argv.slice(2)) 16 | const component = args._[0] 17 | const configPath = args.config 18 | ? args.config 19 | : findConfig() 20 | 21 | 22 | const cases = fs.readdirSync(path.join(__dirname, 'cases')) 23 | .map(file => file.slice(0, -3)) 24 | 25 | if(!cases.includes(component)){ 26 | log.warn(`no test case selected!`) 27 | log.info(`available cases are:`) 28 | 29 | for(let key of cases){ 30 | log.info(` - ${key}`) 31 | } 32 | 33 | process.exit(1) 34 | } 35 | 36 | 37 | log.config({ 38 | level: args.log || 'debug', 39 | dir: path.resolve( 40 | path.join(__dirname, '..', '..') 41 | ) 42 | }) 43 | log.info(`*** XRPLMETA NODE LIVE COMPONENT TEST ***`) 44 | log.info(`testing component "${component}"`) 45 | log.info(`using config at "${configPath}"`) 46 | 47 | 48 | const baseConfig = loadConfig(configPath, true) 49 | const config = overrideConfig(baseConfig, args) 50 | 51 | if(args.testdb){ 52 | const testDataDir = path.join(__dirname, 'data') 53 | 54 | log.info(`overriding data dir to "${testDataDir}"`) 55 | 56 | Object.assign(config.node, { 57 | dataDir: testDataDir 58 | }) 59 | 60 | if(!fs.existsSync(testDataDir)){ 61 | log.info(`data dir "${testDataDir}" does not exist - creating it`) 62 | fs.mkdirSync(testDataDir, { 63 | recursive: true 64 | }) 65 | } 66 | } 67 | 68 | let { default: run } = await import(`./cases/${component}.js`) 69 | 70 | await run({ args, config }) 71 | 72 | log.info(`live test exited with code 0`) -------------------------------------------------------------------------------- /test/unit/db.codecs.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import codecs from '../../src/db/codecs/index.js' 3 | import { XFL } from '@xrplkit/xfl' 4 | 5 | 6 | const testValues = { 7 | 'xrpl/xfl': XFL('123.456'), 8 | 'xrpl/address': 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 9 | } 10 | 11 | 12 | describe( 13 | 'Database Codecs', 14 | () => { 15 | for(let { acceptsFormat, returnsType, encode, decode } of codecs){ 16 | it( 17 | `should return same for ${acceptsFormat} -> ${returnsType} -> ${acceptsFormat}`, 18 | () => { 19 | let testValue = testValues[acceptsFormat] 20 | let decodedValue = decode(encode(testValue)) 21 | 22 | expect(testValue.toString()).to.be.equal(decodedValue.toString()) 23 | } 24 | ) 25 | } 26 | } 27 | ) -------------------------------------------------------------------------------- /test/unit/db.helpers.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { XFL } from '@xrplkit/xfl' 3 | import { createContext } from './env.js' 4 | import { readBalance, writeBalance } from '../../src/db/helpers/balances.js' 5 | import { readTokenMetricSeries, writeTokenMetrics } from '../../src/db/helpers/tokenmetrics.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | 11 | describe( 12 | 'Database Helpers', 13 | () => { 14 | it( 15 | 'write and read token balance of account', 16 | () => { 17 | let account = { 18 | address: 'rMwNibdiFaEzsTaFCG1NnmAM3Rv3vHUy5L' 19 | } 20 | 21 | let token = { 22 | currency: 'PSC', 23 | issuer: { 24 | address: 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 25 | } 26 | } 27 | 28 | writeBalance({ 29 | ctx, 30 | account, 31 | token, 32 | ledgerSequence: 100000000, 33 | balance: '1000000' 34 | }) 35 | 36 | let balance = readBalance({ 37 | ctx, 38 | account, 39 | token, 40 | ledgerSequence: 100000000 41 | }) 42 | 43 | expect(balance.toString()).to.be.equal('1000000') 44 | } 45 | ) 46 | 47 | it( 48 | 'write and read token metric series', 49 | () => { 50 | let token = { 51 | currency: 'PSC', 52 | issuer: { 53 | address: 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 54 | } 55 | } 56 | 57 | for(let i=0; i<3; i++){ 58 | writeTokenMetrics({ 59 | ctx, 60 | token, 61 | ledgerSequence: 1000000 + i * 1000, 62 | metrics: { 63 | trustlines: 1 + i, 64 | supply: XFL(100 + i * 100) 65 | }, 66 | updateCache: false 67 | }) 68 | } 69 | 70 | let trustlineSeries = readTokenMetricSeries({ 71 | ctx, 72 | token, 73 | sequenceStart: 0, 74 | metric: 'trustlines' 75 | }) 76 | 77 | let supplySeries = readTokenMetricSeries({ 78 | ctx, 79 | token, 80 | sequenceStart: 999999, 81 | metric: 'supply' 82 | }) 83 | 84 | expect(trustlineSeries.map(e => e.value)).to.be.deep.equal([1, 2, 3]) 85 | expect(supplySeries.map(e => e.value.toString())).to.be.deep.equal(['100', '200', '300']) 86 | } 87 | ) 88 | } 89 | ) -------------------------------------------------------------------------------- /test/unit/env.js: -------------------------------------------------------------------------------- 1 | import os from 'os' 2 | import fs from 'fs' 3 | import path from 'path' 4 | import log from '@mwni/log' 5 | import { openDB } from '../../src/db/index.js' 6 | 7 | export async function createContext({ debugQueries=false }={}){ 8 | let dataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'xrplmeta-test-')) 9 | 10 | let ctx = { 11 | config: { 12 | node: { 13 | dataDir 14 | }, 15 | debug: { 16 | queries: debugQueries 17 | } 18 | } 19 | } 20 | 21 | log.config({ level: 'error' }) 22 | 23 | console.log(`using data dir: ${dataDir}`) 24 | 25 | return { 26 | ...ctx, 27 | db: await openDB({ 28 | inMemory: true, 29 | ctx 30 | }) 31 | } 32 | } -------------------------------------------------------------------------------- /test/unit/fetch.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createFetch } from '../../src/lib/fetch.js' 3 | 4 | 5 | 6 | describe( 7 | 'Fetching via HTTP', 8 | () => { 9 | it( 10 | 'should successfully read text from https://static.xrplmeta.org/test.txt', 11 | async () => { 12 | let fetch = createFetch() 13 | let { data } = await fetch('https://static.xrplmeta.org/test.txt') 14 | 15 | expect(data).to.be.equal('it works') 16 | } 17 | ) 18 | 19 | it( 20 | 'should successfully read JSON from https://static.xrplmeta.org/test.json', 21 | async () => { 22 | let fetch = createFetch() 23 | let { data } = await fetch('https://static.xrplmeta.org/test.json') 24 | 25 | expect(data).to.be.deep.equal({ it: 'works' }) 26 | } 27 | ) 28 | } 29 | ) -------------------------------------------------------------------------------- /test/unit/icon-cache.test.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { expect } from 'chai' 3 | import { createContext } from './env.js' 4 | import { writeAccountProps, writeTokenProps } from '../../src/db/helpers/props.js' 5 | import { updateIconCacheFor } from '../../src/cache/icons.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | const accounts = [ 11 | { 12 | address: 'rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq', 13 | props: { 14 | name: 'GateHub', 15 | icon: 'https://static.xrplmeta.org/icons/gatehub.png', 16 | trust_level: 3 17 | } 18 | }, 19 | { 20 | address: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B', 21 | props: { 22 | name: 'Bitstamp', 23 | icon: 'https://static.xrplmeta.org/icons/bitstamp.png', 24 | trust_level: 3 25 | } 26 | } 27 | ] 28 | 29 | const tokens = [ 30 | { 31 | currency: 'USD', 32 | issuer: { 33 | address: accounts[0].address 34 | }, 35 | props: { 36 | name: 'US Dollar', 37 | icon: 'https://static.xrplmeta.org/icons/USD.png', 38 | asset_class: 'fiat' 39 | } 40 | }, 41 | { 42 | currency: 'USD', 43 | issuer: { 44 | address: accounts[1].address 45 | }, 46 | props: { 47 | name: 'US Dollar', 48 | icon: 'https://static.xrplmeta.org/icons/USD.png', 49 | asset_class: 'fiat' 50 | } 51 | } 52 | ] 53 | 54 | for(let { address, props } of accounts){ 55 | writeAccountProps({ 56 | ctx, 57 | account: { 58 | address 59 | }, 60 | props, 61 | source: 'manual' 62 | }) 63 | } 64 | 65 | for(let { currency, issuer, props } of tokens){ 66 | writeTokenProps({ 67 | ctx, 68 | token: { 69 | currency, 70 | issuer 71 | }, 72 | props, 73 | source: 'manual' 74 | }) 75 | } 76 | 77 | describe( 78 | 'Icon Cache', 79 | () => { 80 | it( 81 | 'should download token icons according to icon prop', 82 | async () => { 83 | for(let token of tokens){ 84 | await updateIconCacheFor({ 85 | ctx, 86 | token: { 87 | currency: token.currency, 88 | issuer: token.issuer 89 | } 90 | }) 91 | } 92 | 93 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05.png`)).to.be.true 94 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@512.png`)).to.be.true 95 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@256.png`)).to.be.true 96 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@128.png`)).to.be.true 97 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@64.png`)).to.be.true 98 | } 99 | ) 100 | 101 | it( 102 | 'should link the cached icon to the token cache', 103 | async () => { 104 | let tokenCache1 = ctx.db.cache.tokens.readOne({ 105 | where: { 106 | token: 2 107 | } 108 | }) 109 | 110 | let tokenCache2 = ctx.db.cache.tokens.readOne({ 111 | where: { 112 | token: 3 113 | } 114 | }) 115 | 116 | expect(tokenCache1.cachedIcons).to.be.deep.equal({ 117 | [tokens[0].props.icon]: 'C676A0DE05.png' 118 | }) 119 | 120 | expect(tokenCache2.cachedIcons).to.be.deep.equal({ 121 | [tokens[1].props.icon]: 'C676A0DE05.png' 122 | }) 123 | } 124 | ) 125 | 126 | it( 127 | 'should unlink the cached icon if no longer in token props', 128 | async () => { 129 | writeTokenProps({ 130 | ctx, 131 | token: { 132 | currency: tokens[0].currency, 133 | issuer: tokens[0].issuer 134 | }, 135 | props: { 136 | ...tokens[0].props, 137 | icon: undefined 138 | }, 139 | source: 'manual' 140 | }) 141 | 142 | await updateIconCacheFor({ 143 | ctx, 144 | token: { 145 | currency: tokens[0].currency, 146 | issuer: tokens[0].issuer 147 | } 148 | }) 149 | 150 | let tokenCache = ctx.db.cache.tokens.readOne({ 151 | where: { 152 | token: 2 153 | } 154 | }) 155 | 156 | expect(tokenCache.cachedIcons).to.be.deep.equal({}) 157 | } 158 | ) 159 | 160 | it( 161 | 'delete the icon if it has no more users', 162 | async () => { 163 | writeTokenProps({ 164 | ctx, 165 | token: { 166 | currency: tokens[1].currency, 167 | issuer: tokens[1].issuer 168 | }, 169 | props: { 170 | ...tokens[1].props, 171 | icon: undefined 172 | }, 173 | source: 'manual' 174 | }) 175 | 176 | await updateIconCacheFor({ 177 | ctx, 178 | token: { 179 | currency: tokens[1].currency, 180 | issuer: tokens[1].issuer 181 | } 182 | }) 183 | 184 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05.png`)).to.be.false 185 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@512.png`)).to.be.false 186 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@256.png`)).to.be.false 187 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@128.png`)).to.be.false 188 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@64.png`)).to.be.false 189 | } 190 | ) 191 | 192 | it( 193 | 'should do the same for issuer icons', 194 | async () => { 195 | for(let account of accounts){ 196 | await updateIconCacheFor({ 197 | ctx, 198 | account: { 199 | address: account.address 200 | } 201 | }) 202 | } 203 | 204 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269.png`)).to.be.true 205 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@512.png`)).to.be.true 206 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@256.png`)).to.be.true 207 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@128.png`)).to.be.true 208 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@64.png`)).to.be.true 209 | 210 | let tokenCache = ctx.db.cache.tokens.readOne({ 211 | where: { 212 | token: 2 213 | } 214 | }) 215 | 216 | expect(tokenCache.cachedIcons).to.be.deep.equal({ 217 | [accounts[0].props.icon]: '0D821A3269.png' 218 | }) 219 | } 220 | ) 221 | } 222 | ) -------------------------------------------------------------------------------- /test/unit/prop-diff.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createContext } from './env.js' 3 | import { diffMultiAccountProps, diffMultiTokenProps } from '../../src/db/helpers/props.js' 4 | import { reduceProps } from '../../src/srv/procedures/token.js' 5 | import { updateCacheForTokenProps } from '../../src/cache/tokens.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | const accounts = [ 11 | { 12 | address: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 13 | props: { 14 | name: 'Account Zero', 15 | trust_level: 3 16 | } 17 | }, 18 | { 19 | address: 'rrrrrrrrrrrrrrrrrrrrBZbvji', 20 | props: { 21 | name: 'Account One', 22 | trust_level: 3 23 | } 24 | }, 25 | { 26 | address: 'rrrrrrrrrrrrrrrrrrrn5RM1rHd', 27 | props: { 28 | name: 'NaN Address', 29 | trust_level: 1 30 | } 31 | } 32 | ] 33 | 34 | const tokens = [ 35 | { 36 | currency: 'XAU', 37 | issuer: { 38 | address: accounts[0].address 39 | }, 40 | props: { 41 | name: 'Gold', 42 | asset_class: 'commodity' 43 | } 44 | }, 45 | { 46 | currency: 'XAG', 47 | issuer: { 48 | address: accounts[1].address 49 | }, 50 | props: { 51 | name: 'Silver', 52 | asset_class: 'commodity' 53 | } 54 | }, 55 | { 56 | currency: 'USD', 57 | issuer: { 58 | address: accounts[2].address 59 | }, 60 | props: { 61 | name: 'US Dollar', 62 | asset_class: 'fiat' 63 | } 64 | } 65 | ] 66 | 67 | describe( 68 | 'Diffing account props', 69 | () => { 70 | it( 71 | 'execute with new data', 72 | () => { 73 | diffMultiAccountProps({ 74 | ctx, 75 | accounts, 76 | source: 'test' 77 | }) 78 | } 79 | ) 80 | 81 | it( 82 | 'should insert all props', 83 | () => { 84 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(6) 85 | } 86 | ) 87 | 88 | it( 89 | 'execute with one account removed', 90 | () => { 91 | diffMultiAccountProps({ 92 | ctx, 93 | accounts: accounts.slice(0, 2), 94 | source: 'test' 95 | }) 96 | } 97 | ) 98 | 99 | it( 100 | 'should have removed the removed account\'s props', 101 | () => { 102 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(4) 103 | } 104 | ) 105 | 106 | it( 107 | 'should also remove specific removed props', 108 | () => { 109 | let accountsChanged = structuredClone(accounts) 110 | 111 | delete accountsChanged[0].props.name 112 | 113 | diffMultiAccountProps({ 114 | ctx, 115 | accounts: accountsChanged, 116 | source: 'test' 117 | }) 118 | 119 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(5) 120 | } 121 | ) 122 | } 123 | ) 124 | 125 | 126 | describe( 127 | 'Diffing token props', 128 | () => { 129 | it( 130 | 'execute with new data', 131 | () => { 132 | diffMultiTokenProps({ 133 | ctx, 134 | tokens, 135 | source: 'test' 136 | }) 137 | } 138 | ) 139 | 140 | it( 141 | 'should insert all props', 142 | () => { 143 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(6) 144 | } 145 | ) 146 | 147 | it( 148 | 'execute with one token removed', 149 | () => { 150 | diffMultiTokenProps({ 151 | ctx, 152 | tokens: tokens.slice(0, 2), 153 | source: 'test' 154 | }) 155 | } 156 | ) 157 | 158 | it( 159 | 'should have removed the removed token\'s props', 160 | () => { 161 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(4) 162 | } 163 | ) 164 | 165 | it( 166 | 'should also remove specific removed props', 167 | () => { 168 | let tokensChanged = structuredClone(tokens) 169 | 170 | delete tokensChanged[0].props.name 171 | 172 | diffMultiTokenProps({ 173 | ctx, 174 | tokens: tokensChanged, 175 | source: 'test' 176 | }) 177 | 178 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(5) 179 | } 180 | ) 181 | 182 | it( 183 | 'should have the correct token prop cache', 184 | () => { 185 | for(let { currency, issuer } of tokens){ 186 | updateCacheForTokenProps({ 187 | ctx, 188 | token: { currency, issuer } 189 | }) 190 | } 191 | 192 | let props = ctx.db.cache.tokens.readMany() 193 | .map(cache => reduceProps({ props: cache.tokenProps })) 194 | 195 | let expectedProps = tokens 196 | .map(({ props }) => props) 197 | .slice(0, 3) 198 | 199 | delete expectedProps[0].name 200 | 201 | expect(props).to.be.deep.equal(expectedProps) 202 | } 203 | ) 204 | } 205 | ) 206 | -------------------------------------------------------------------------------- /test/unit/prop-rank.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { reduceProps } from '../../src/srv/procedures/token.js' 3 | 4 | 5 | const props = [ 6 | { 7 | key: 'name', 8 | value: 'US Dollar', 9 | source: 'xumm/curated' 10 | }, 11 | { 12 | key: 'name', 13 | value: 'U.S. Dollar', 14 | source: 'tokenlist' 15 | }, 16 | { 17 | key: 'name', 18 | value: 'Dollar', 19 | source: 'xrpscan/well-known' 20 | }, 21 | { 22 | key: 'name', 23 | value: 'USD', 24 | source: 'bithomp' 25 | }, 26 | ] 27 | 28 | 29 | describe( 30 | 'Ranking props by source', 31 | () => { 32 | it( 33 | 'should pick the first when no ranking given', 34 | () => { 35 | expect(reduceProps({ props }).name).to.be.equal(props[0].value) 36 | } 37 | ) 38 | 39 | it( 40 | 'should pick identical sources', 41 | () => { 42 | expect( 43 | reduceProps({ 44 | props, 45 | sourceRanking: [ 46 | 'tokenlist', 47 | 'xumm', 48 | 'bithomp' 49 | ] 50 | }).name 51 | ).to.be.equal(props[1].value) 52 | 53 | expect( 54 | reduceProps({ 55 | props, 56 | sourceRanking: [ 57 | 'xumm/curated', 58 | 'tokenlist', 59 | 'bithomp' 60 | ] 61 | }).name 62 | ).to.be.equal(props[0].value) 63 | } 64 | ) 65 | 66 | it( 67 | 'should pick wildcarded sources', 68 | () => { 69 | expect( 70 | reduceProps({ 71 | props, 72 | sourceRanking: [ 73 | 'xrpscan', 74 | 'tokenlist', 75 | 'xumm' 76 | ] 77 | }).name 78 | ).to.be.equal(props[2].value) 79 | } 80 | ) 81 | } 82 | ) -------------------------------------------------------------------------------- /test/unit/xrpl.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createPool } from '../../src/xrpl/nodepool.js' 3 | import log from '@mwni/log' 4 | 5 | log.config({ level: 'error' }) 6 | 7 | 8 | describe( 9 | 'Fetching from XRPL', 10 | () => { 11 | it( 12 | 'should successfully retrieve ledger 80,000,000', 13 | async () => { 14 | let pool = createPool([{ url: 'wss://xrplcluster.com' }]) 15 | let { result } = await pool.request({ 16 | command: 'ledger', 17 | ledger_index: 80000000 18 | }) 19 | 20 | expect(result.ledger.ledger_hash).to.be.equal('DB978F031BB14734213998060E077D5F813358222DAB07CA8148588D852A55DF') 21 | 22 | pool.close() 23 | } 24 | ).timeout(10000) 25 | 26 | it( 27 | 'should retrieve a historical ledger from a node that has it', 28 | async () => { 29 | let pool = createPool([ 30 | { url: 'wss://s1.ripple.com' }, 31 | { url: 'wss://s2.ripple.com' }, 32 | ]) 33 | 34 | let { result } = await pool.request({ 35 | command: 'ledger', 36 | ledger_index: 32570 37 | }) 38 | 39 | expect(result.ledger.ledger_hash).to.be.equal('4109C6F2045FC7EFF4CDE8F9905D19C28820D86304080FF886B299F0206E42B5') 40 | 41 | pool.close() 42 | } 43 | ).timeout(10000) 44 | } 45 | ) --------------------------------------------------------------------------------