├── .gitignore ├── src ├── db │ ├── codecs │ │ ├── index.js │ │ ├── address.js │ │ └── xfl.js │ ├── helpers │ │ ├── nftoffers.js │ │ ├── balances.js │ │ ├── heads.js │ │ ├── tokenholders.js │ │ ├── tokenoffers.js │ │ ├── ledgers.js │ │ ├── common.js │ │ ├── tokenmetrics.js │ │ └── props.js │ ├── index.js │ └── schemas │ │ └── cache.json ├── lib │ ├── ipc.js │ ├── version.js │ ├── url.js │ ├── fetch.js │ └── config.js ├── ledger │ ├── events │ │ ├── index.js │ │ ├── ledgers.js │ │ ├── tokens.js │ │ └── nfts.js │ ├── derived │ │ ├── index.js │ │ └── marketcap.js │ ├── state │ │ ├── nfts.js │ │ ├── nftoffers.js │ │ ├── accounts.js │ │ ├── tokenoffers.js │ │ ├── tokens.js │ │ └── index.js │ ├── backfill.js │ ├── sync.js │ └── snapshot.js ├── xrpl │ ├── blackhole.js │ ├── ledger.js │ ├── snapshot.js │ ├── node.js │ ├── nodepool.js │ └── stream.js ├── app │ ├── crawl.js │ ├── server.js │ ├── cache.js │ ├── ledger.js │ └── main.js ├── cmd │ ├── backup.js │ └── rebuild-cache.js ├── crawl │ ├── crawlers │ │ ├── index.js │ │ ├── xrpscan.js │ │ ├── gravatar.js │ │ ├── bithomp.js │ │ ├── trustlists.js │ │ ├── domains.js │ │ ├── x.js │ │ └── xaman.js │ ├── init.js │ └── schedule.js ├── srv │ ├── procedures │ │ ├── server.js │ │ └── ledger.js │ ├── worker.js │ ├── server.js │ ├── api.js │ ├── ws.js │ ├── sanitizers │ │ ├── token.js │ │ └── common.js │ └── http.js ├── run.js └── cache │ ├── todo.js │ ├── worker.js │ ├── icons.js │ └── tokens.js ├── deps ├── binding.gyp └── sqlite-extensions │ └── xfl.c ├── test ├── live │ ├── cases │ │ ├── crawl.twitter.js │ │ ├── crawl.xumm.js │ │ ├── crawl.bithomp.js │ │ ├── crawl.domains.js │ │ ├── crawl.xrpscan.js │ │ ├── crawl.gravatar.js │ │ ├── crawl.trustlists.js │ │ ├── toml.read.js │ │ └── icon.cache.js │ └── run.js └── unit │ ├── env.js │ ├── db.codecs.test.js │ ├── fetch.test.js │ ├── xrpl.test.js │ ├── prop-rank.test.js │ ├── db.helpers.test.js │ ├── prop-diff.test.js │ ├── icon-cache.test.js │ └── db.points.test.js ├── package.json ├── readme.md └── config.template.toml /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | deps/build 4 | config.toml -------------------------------------------------------------------------------- /src/db/codecs/index.js: -------------------------------------------------------------------------------- 1 | import xfl from './xfl.js' 2 | import address from './address.js' 3 | 4 | export default [ 5 | xfl, 6 | address 7 | ] -------------------------------------------------------------------------------- /deps/binding.gyp: -------------------------------------------------------------------------------- 1 | { 2 | 'targets': [ 3 | { 4 | 'target_name': 'sqlite-xfl', 5 | 'dependencies': ['../node_modules/better-sqlite3/deps/sqlite3.gyp:sqlite3'], 6 | 'sources': ['sqlite-extensions/xfl.c'], 7 | } 8 | ] 9 | } -------------------------------------------------------------------------------- /src/lib/ipc.js: -------------------------------------------------------------------------------- 1 | export default function(){ 2 | let callbacks = [] 3 | 4 | return { 5 | emit(payload){ 6 | for(let callback of callbacks){ 7 | try{ 8 | callback(payload) 9 | }catch{ 10 | // *shrug* 11 | } 12 | } 13 | }, 14 | subscribe(callback){ 15 | callbacks.push(callback) 16 | } 17 | } 18 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.twitter.js: -------------------------------------------------------------------------------- 1 | 
import run from '../../../src/crawl/crawlers/x.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.xumm.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/xaman.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.bithomp.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/bithomp.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.domains.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/domains.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.xrpscan.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/xrpscan.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.gravatar.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/gravatar.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /test/live/cases/crawl.trustlists.js: -------------------------------------------------------------------------------- 1 | import run from '../../../src/crawl/crawlers/trustlists.js' 2 | import { openDB } from '../../../src/db/index.js' 3 | 4 | 5 | export default async ({ config }) => { 6 | let ctx = { config } 7 | 8 | Object.assign(ctx, { 9 | db: await openDB({ ctx }) 10 | }) 11 | 12 | await run({ ctx }) 13 | } -------------------------------------------------------------------------------- /src/lib/version.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import { fileURLToPath } from 'url' 4 | 5 | 6 | const __filename = 
fileURLToPath(import.meta.url) 7 | const __dirname = path.dirname(__filename) 8 | const pkgPath = path.resolve(__dirname, '..', '..', 'package.json') 9 | const { version } = JSON.parse(fs.readFileSync(pkgPath)) 10 | 11 | 12 | export default version -------------------------------------------------------------------------------- /src/db/codecs/address.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID, decodeAccountID } from 'ripple-address-codec' 2 | 3 | export default { 4 | acceptsFormat: 'xrpl/address', 5 | acceptsNull: true, 6 | returnsType: 'blob', 7 | returnsNull: true, 8 | 9 | encode(data){ 10 | return data ? decodeAccountID(data) : data 11 | }, 12 | 13 | decode(data){ 14 | return data ? encodeAccountID(data) : data 15 | } 16 | } -------------------------------------------------------------------------------- /src/ledger/events/index.js: -------------------------------------------------------------------------------- 1 | import { applyLedgerStats } from './ledgers.js' 2 | import { applyTokenExchanges } from './tokens.js' 3 | import { applyNFTokenExchanges, applyNFTokenModifications } from './nfts.js' 4 | 5 | 6 | export function applyLedgerEvents({ ctx, ledger }){ 7 | applyLedgerStats({ ctx, ledger }) 8 | applyTokenExchanges({ ctx, ledger }) 9 | applyNFTokenExchanges({ ctx, ledger }) 10 | applyNFTokenModifications({ ctx, ledger }) 11 | } -------------------------------------------------------------------------------- /src/xrpl/blackhole.js: -------------------------------------------------------------------------------- 1 | const blackholeAccounts = [ 2 | 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 3 | 'rrrrrrrrrrrrrrrrrrrrBZbvji', 4 | 'rrrrrrrrrrrrrrrrrNAMEtxvNvQ', 5 | 'rrrrrrrrrrrrrrrrrrrn5RM1rHd' 6 | ] 7 | 8 | export function isBlackholed(ledgerEntry){ 9 | if(!blackholeAccounts.includes(ledgerEntry.RegularKey)) 10 | return false 11 | 12 | // master key disabled 13 | if((ledgerEntry.Flags & 0x00100000) === 0) 14 | return false 15 | 16 | return true 17 | } -------------------------------------------------------------------------------- /src/app/crawl.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startCrawlers } from '../crawl/init.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | await spawn(':runCrawl', { ctx }) 9 | } 10 | 11 | 12 | export async function runCrawl({ ctx }){ 13 | if(ctx.log) 14 | log.pipe(ctx.log) 15 | 16 | log.info('starting crawlers') 17 | 18 | return await startCrawlers({ 19 | ctx: { 20 | ...ctx, 21 | db: await openDB({ ctx }) 22 | } 23 | }) 24 | } -------------------------------------------------------------------------------- /src/cmd/backup.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { openDB } from '../db/index.js' 3 | 4 | 5 | export default async function({ config, destinationFile }){ 6 | let { core } = await openDB({ ctx: { config } }) 7 | 8 | try{ 9 | await core.backup({ 10 | lockDatabase: true, 11 | destinationFile, 12 | progress: v => log.info(`backup progress: ${Math.round(v * 10000)/100} %`) 13 | }) 14 | }catch(error){ 15 | log.error(`backup failed:\n`, error) 16 | return 17 | } 18 | 19 | log.info(`backup finished successfully`) 20 | } -------------------------------------------------------------------------------- /test/live/cases/toml.read.js: 
-------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { createFetch } from '../../../src/lib/fetch.js' 4 | import { fetchToml } from '../../../src/crawl/crawlers/domains.js' 5 | 6 | 7 | export default async ({ config, args }) => { 8 | let domain = args._[1] 9 | let fetch = createFetch() 10 | 11 | if(!domain) 12 | throw new Error(`no domain provided. use: npm run livetest toml.read [domain]`) 13 | 14 | let xls26 = await fetchToml({ domain, fetch }) 15 | 16 | log.info(`parsed xls26:\n`, xls26) 17 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/index.js: -------------------------------------------------------------------------------- 1 | import domains from './domains.js' 2 | import trustlists from './trustlists.js' 3 | import xaman from './xaman.js' 4 | import bithomp from './bithomp.js' 5 | import xrpscan from './xrpscan.js' 6 | import gravatar from './gravatar.js' 7 | import x from './x.js' 8 | 9 | export default [ 10 | { name: 'domains', start: domains }, 11 | { name: 'trustlists', start: trustlists }, 12 | { name: 'xaman', start: xaman }, 13 | { name: 'bithomp', start: bithomp }, 14 | { name: 'xrpscan', start: xrpscan }, 15 | { name: 'gravatar', start: gravatar }, 16 | { name: 'x', start: x }, 17 | ] -------------------------------------------------------------------------------- /src/ledger/derived/index.js: -------------------------------------------------------------------------------- 1 | import { updateMarketcapFromExchange, updateMarketcapFromSupply } from './marketcap.js' 2 | 3 | 4 | export function updateDerived({ ctx, newItems }){ 5 | for(let exchange of newItems.tokenExchanges){ 6 | updateMarketcapFromExchange({ ctx, exchange }) 7 | } 8 | 9 | for(let supply of newItems.tokenSupply){ 10 | updateMarketcapFromSupply({ ctx, supply }) 11 | } 12 | } 13 | 14 | export function updateAllDerived({ ctx }){ 15 | let exchanges = ctx.db.core.tokenExchanges.iter() 16 | 17 | for(let exchange of exchanges){ 18 | updateMarketcapFromExchange({ ctx, exchange }) 19 | } 20 | } -------------------------------------------------------------------------------- /src/lib/url.js: -------------------------------------------------------------------------------- 1 | import { parse } from 'url' 2 | 3 | export function sanitize(url){ 4 | return url.slice(0, 8) + url.slice(8) 5 | .replace(/\/\//g,'/') 6 | .replace(/\/\.$/, '') 7 | .replace(/\/$/, '') 8 | .replace(/\?$/, '') 9 | } 10 | 11 | export function validate(url){ 12 | let { protocol, hostname } = parse(url) 13 | 14 | if(protocol !== 'http:' && protocol !== 'https:') 15 | return false 16 | 17 | if(hostname === 'localhost') 18 | return false 19 | 20 | if(hostname.includes(':')) 21 | return false 22 | 23 | if(!/[a-zA-Z]/.test(hostname)) 24 | return false 25 | 26 | return true 27 | } -------------------------------------------------------------------------------- /src/srv/procedures/server.js: -------------------------------------------------------------------------------- 1 | import version from '../../lib/version.js' 2 | import { getAvailableRange } from '../../db/helpers/ledgers.js' 3 | 4 | 5 | export function serveServerInfo(){ 6 | return ({ ctx }) => { 7 | return { 8 | server_version: version, 9 | available_range: getAvailableRange({ ctx }), 10 | trustlists: ctx.config.trustlist 11 | ? 
ctx.config.trustlist.map( 12 | list => ({ 13 | id: list.id, 14 | url: list.url, 15 | trust_level: list.trustLevel 16 | }) 17 | ) 18 | : [], 19 | total_tokens: Number(ctx.db.core.tokens.count()), 20 | total_nfts: 0 21 | } 22 | } 23 | } -------------------------------------------------------------------------------- /src/db/codecs/xfl.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { XFL, toSortSafeBigInt } from '@xrplkit/xfl' 3 | 4 | export default { 5 | acceptsFormat: 'xrpl/xfl', 6 | acceptsNull: true, 7 | returnsType: 'bigint', 8 | returnsNull: true, 9 | 10 | encode(data){ 11 | try{ 12 | return data !== null && data !== undefined 13 | ? toSortSafeBigInt(data) 14 | : data 15 | }catch(error){ 16 | log.error(`failed to encode XFL: ${data}`) 17 | log.error(error) 18 | throw error 19 | } 20 | }, 21 | 22 | decode(data){ 23 | return data !== null && data !== undefined 24 | ? XFL.fromSortSafeBigInt(BigInt(data)) 25 | : data 26 | } 27 | } -------------------------------------------------------------------------------- /src/app/server.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startServer } from '../srv/server.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | if(!ctx.config.server){ 9 | log.warn(`config is missing [SERVER] stanza: disabling server`) 10 | return 11 | } 12 | 13 | await spawn(':runServer', { ctx }) 14 | } 15 | 16 | 17 | export async function runServer({ ctx }){ 18 | if(ctx.log) 19 | log.pipe(ctx.log) 20 | 21 | log.info('starting server') 22 | 23 | return await startServer({ 24 | ctx: { 25 | ...ctx, 26 | db: await openDB({ ctx }) 27 | } 28 | }) 29 | } -------------------------------------------------------------------------------- /src/db/helpers/nftoffers.js: -------------------------------------------------------------------------------- 1 | import { writePoint } from './common.js' 2 | 3 | 4 | export function writeNFTokenOffer({ ctx, offerId, ledgerSequence, ...data }){ 5 | return writePoint({ 6 | table: ctx.db.core.nftOffers, 7 | selector: { 8 | offerId, 9 | }, 10 | ledgerSequence, 11 | backwards: ctx.backwards, 12 | data, 13 | expirable: true 14 | }) 15 | } 16 | 17 | export function expireNFTokenOffer({ ctx, offerId, ledgerSequence }){ 18 | return writePoint({ 19 | table: ctx.db.core.nftOffers, 20 | selector: { 21 | offerId, 22 | }, 23 | ledgerSequence, 24 | backwards: ctx.backwards, 25 | data: null, 26 | expirable: true 27 | }) 28 | } -------------------------------------------------------------------------------- /src/db/helpers/balances.js: -------------------------------------------------------------------------------- 1 | import { eq } from '@xrplkit/xfl' 2 | import { readPoint, writePoint } from './common.js' 3 | 4 | 5 | export function readBalance({ ctx, account, token, ledgerSequence }){ 6 | return readPoint({ 7 | table: ctx.db.core.accountBalances, 8 | selector: { 9 | token, 10 | account 11 | }, 12 | ledgerSequence 13 | }) 14 | ?.balance 15 | } 16 | 17 | export function writeBalance({ ctx, account, token, ledgerSequence, balance }){ 18 | return writePoint({ 19 | table: ctx.db.core.accountBalances, 20 | selector: { 21 | token, 22 | account 23 | }, 24 | ledgerSequence, 25 | backwards: ctx.backwards, 26 | data: { balance } 27 | }) 28 | } -------------------------------------------------------------------------------- 
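// Usage sketch for the balance point helpers above (editorial example, not a repo file).
// It assumes a `ctx` shaped like the one the apps build via openDB(), with `ctx.backwards`
// unset for a forward write; the token, holder account and ledger sequence are made-up
// placeholders (the addresses are reused from the blackhole list in src/xrpl/blackhole.js).
import { XFL } from '@xrplkit/xfl'
import { readBalance, writeBalance } from './src/db/helpers/balances.js'

function balanceExample(ctx){
	let token = { currency: 'USD', issuer: { address: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp' } }
	let account = { address: 'rrrrrrrrrrrrrrrrrrrrBZbvji' }

	// record a balance data point that becomes valid at ledger 80000000
	writeBalance({ ctx, account, token, ledgerSequence: 80000000, balance: XFL('123.456') })

	// read the point back at or after the sequence it was written at
	let balance = readBalance({ ctx, account, token, ledgerSequence: 80000000 })

	console.log(balance?.toString()) // '123.456'
}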
/src/xrpl/ledger.js: -------------------------------------------------------------------------------- 1 | import { rippleToUnix } from '@xrplkit/time' 2 | 3 | export async function fetch({ ctx, sequence }){ 4 | let { result } = await ctx.xrpl.request({ 5 | command: 'ledger', 6 | ledger_index: sequence, 7 | transactions: true, 8 | expand: true 9 | }) 10 | 11 | return format(result.ledger) 12 | } 13 | 14 | export function format(ledger){ 15 | return { 16 | sequence: parseInt(ledger.ledger_index), 17 | hash: ledger.ledger_hash, 18 | closeTime: rippleToUnix(ledger.close_time || ledger.ledger_time), 19 | transactions: ledger.transactions 20 | .map( 21 | tx => tx.transaction 22 | ? { ...tx.transaction, metaData: tx.meta } 23 | : tx 24 | ) 25 | } 26 | } -------------------------------------------------------------------------------- /test/unit/env.js: -------------------------------------------------------------------------------- 1 | import os from 'os' 2 | import fs from 'fs' 3 | import path from 'path' 4 | import log from '@mwni/log' 5 | import { openDB } from '../../src/db/index.js' 6 | 7 | export async function createContext({ debugQueries=false }={}){ 8 | let dataDir = fs.mkdtempSync(path.join(os.tmpdir(), 'xrplmeta-test-')) 9 | 10 | let ctx = { 11 | config: { 12 | node: { 13 | dataDir 14 | }, 15 | debug: { 16 | queries: debugQueries 17 | } 18 | } 19 | } 20 | 21 | log.config({ level: 'error' }) 22 | 23 | console.log(`using data dir: ${dataDir}`) 24 | 25 | return { 26 | ...ctx, 27 | db: await openDB({ 28 | inMemory: true, 29 | ctx 30 | }) 31 | } 32 | } -------------------------------------------------------------------------------- /src/srv/procedures/ledger.js: -------------------------------------------------------------------------------- 1 | import { readLedgerAt } from '../../db/helpers/ledgers.js' 2 | 3 | 4 | export function serveLedger(){ 5 | return ({ ctx, sequence, time }) => { 6 | let ledger = readLedgerAt({ 7 | ctx, 8 | sequence, 9 | time 10 | }) 11 | 12 | if(!ledger){ 13 | throw { 14 | type: `notFound`, 15 | message: `This server has no record of such a ledger. 
Check the available range using "server_info".`, 16 | expose: true 17 | } 18 | } 19 | 20 | return { 21 | sequence: ledger.sequence, 22 | hash: ledger.hash, 23 | close_time: ledger.closeTime, 24 | tx_count: ledger.txCount, 25 | fee_min: ledger.minFee, 26 | fee_max: ledger.maxFee, 27 | fee_avg: ledger.avgFee 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /test/unit/db.codecs.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import codecs from '../../src/db/codecs/index.js' 3 | import { XFL } from '@xrplkit/xfl' 4 | 5 | 6 | const testValues = { 7 | 'xrpl/xfl': XFL('123.456'), 8 | 'xrpl/address': 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 9 | } 10 | 11 | 12 | describe( 13 | 'Database Codecs', 14 | () => { 15 | for(let { acceptsFormat, returnsType, encode, decode } of codecs){ 16 | it( 17 | `should return same for ${acceptsFormat} -> ${returnsType} -> ${acceptsFormat}`, 18 | () => { 19 | let testValue = testValues[acceptsFormat] 20 | let decodedValue = decode(encode(testValue)) 21 | 22 | expect(testValue.toString()).to.be.equal(decodedValue.toString()) 23 | } 24 | ) 25 | } 26 | } 27 | ) -------------------------------------------------------------------------------- /src/db/helpers/heads.js: -------------------------------------------------------------------------------- 1 | const relevantTables = [ 2 | 'accountBalances', 3 | 'tokenExchanges', 4 | 'tokenSupply', 5 | 'tokenOffers' 6 | ] 7 | 8 | 9 | export function readTableHeads({ ctx }){ 10 | return relevantTables.reduce( 11 | (heads, table) => ({ 12 | ...heads, 13 | [table]: ctx.db.core[table].readOne({ 14 | orderBy: { 15 | id: 'desc' 16 | } 17 | })?.id || 0 18 | }), 19 | {} 20 | ) 21 | } 22 | 23 | export function pullNewItems({ ctx, previousHeads }){ 24 | return relevantTables.reduce( 25 | (heads, table) => ({ 26 | ...heads, 27 | [table]: ctx.db.core[table].readMany({ 28 | where: { 29 | id: { 30 | greaterThan: previousHeads[table] 31 | } 32 | } 33 | }) 34 | }), 35 | {} 36 | ) 37 | } -------------------------------------------------------------------------------- /test/unit/fetch.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createFetch } from '../../src/lib/fetch.js' 3 | 4 | 5 | 6 | describe( 7 | 'Fetching via HTTP', 8 | () => { 9 | it( 10 | 'should successfully read text from https://static.xrplmeta.org/test.txt', 11 | async () => { 12 | let fetch = createFetch() 13 | let { data } = await fetch('https://static.xrplmeta.org/test.txt') 14 | 15 | expect(data).to.be.equal('it works') 16 | } 17 | ) 18 | 19 | it( 20 | 'should successfully read JSON from https://static.xrplmeta.org/test.json', 21 | async () => { 22 | let fetch = createFetch() 23 | let { data } = await fetch('https://static.xrplmeta.org/test.json') 24 | 25 | expect(data).to.be.deep.equal({ it: 'works' }) 26 | } 27 | ) 28 | } 29 | ) -------------------------------------------------------------------------------- /src/app/cache.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import { startMetaCacheWorker, startIconCacheWorker } from '../cache/worker.js' 5 | 6 | 7 | export async function run({ ctx }){ 8 | log.info('starting cache worker') 9 | 10 | await spawn(':runMetaCacheWorker', { ctx }) 11 | await 
spawn(':runIconCacheWorker', { ctx }) 12 | } 13 | 14 | export async function runMetaCacheWorker({ ctx }){ 15 | if(ctx.log) 16 | log.pipe(ctx.log) 17 | 18 | return await startMetaCacheWorker({ 19 | ctx: { 20 | ...ctx, 21 | db: await openDB({ 22 | ctx, 23 | coreReadOnly: true 24 | }) 25 | } 26 | }) 27 | } 28 | 29 | export async function runIconCacheWorker({ ctx }){ 30 | if(ctx.log) 31 | log.pipe(ctx.log) 32 | 33 | return await startIconCacheWorker({ 34 | ctx: { 35 | ...ctx, 36 | db: await openDB({ 37 | ctx, 38 | coreReadOnly: true 39 | }) 40 | } 41 | }) 42 | } -------------------------------------------------------------------------------- /src/crawl/init.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import crawlers from './crawlers/index.js' 5 | 6 | 7 | export async function startCrawlers({ ctx }){ 8 | if(ctx.config.crawl?.disabled){ 9 | log.warn(`skipping all crawlers (disabled by config)`) 10 | return 11 | } 12 | 13 | for(let { name } of crawlers){ 14 | spawn(':spawnCrawler', { ctx, name }) 15 | } 16 | } 17 | 18 | export async function spawnCrawler({ ctx, name }){ 19 | let { start } = crawlers.find(crawler => crawler.name === name) 20 | let crashed = false 21 | 22 | log.pipe(ctx.log) 23 | 24 | ctx = { 25 | ...ctx, 26 | db: await openDB({ ctx }) 27 | } 28 | 29 | start({ ctx }) 30 | .catch(error => { 31 | log.warn(`skipping crawler [${name}]:`, error.message) 32 | crashed = true 33 | }) 34 | 35 | await new Promise(resolve => setTimeout(resolve, 100)) 36 | 37 | if(!crashed){ 38 | log.info(`started crawler [${name}]`) 39 | }else{ 40 | process.exit() 41 | } 42 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenholders.js: -------------------------------------------------------------------------------- 1 | export function readTokenHolders({ ctx, token, ledgerSequence, offset = 0, limit = 100 }){ 2 | return ctx.db.core.accountBalances.readManyRaw({ 3 | query: 4 | `SELECT Account.id, Account.address, AccountBalance.balance 5 | FROM AccountBalance 6 | JOIN Account ON (Account.id = AccountBalance.account) 7 | JOIN ( 8 | SELECT account, MAX(ledgerSequence) as maxSequence 9 | FROM AccountBalance 10 | WHERE token = ? 11 | AND ledgerSequence <= ? 12 | GROUP BY account 13 | ) latest 14 | ON AccountBalance.account = latest.account 15 | AND AccountBalance.ledgerSequence = latest.maxSequence 16 | WHERE token = ? 
17 | ORDER BY AccountBalance.balance DESC 18 | LIMIT ?, ?`, 19 | params: [ 20 | token.id, 21 | ledgerSequence, 22 | token.id, 23 | offset, 24 | limit 25 | ] 26 | }) 27 | .map(({ id, address, balance }) => ({ 28 | account: { 29 | id, 30 | address: ctx.db.core.accounts.struct.decodeField('address', address) 31 | }, 32 | balance 33 | })) 34 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenoffers.js: -------------------------------------------------------------------------------- 1 | import { writePoint } from './common.js' 2 | 3 | 4 | export function writeTokenOffer({ ctx, account, accountSequence, ledgerSequence, book, quality, size }){ 5 | return writePoint({ 6 | table: ctx.db.core.tokenOffers, 7 | selector: { 8 | account, 9 | accountSequence, 10 | book, 11 | }, 12 | ledgerSequence, 13 | backwards: ctx.backwards, 14 | data: { 15 | quality, 16 | size 17 | }, 18 | expirable: true 19 | }) 20 | } 21 | 22 | export function expireTokenOffer({ ctx, account, accountSequence, ledgerSequence }){ 23 | return writePoint({ 24 | table: ctx.db.core.tokenOffers, 25 | selector: { 26 | account, 27 | accountSequence 28 | }, 29 | ledgerSequence, 30 | backwards: ctx.backwards, 31 | data: null, 32 | expirable: true 33 | }) 34 | } 35 | 36 | export function readOffersBy({ ctx, account, book, ledgerSequence }){ 37 | return ctx.db.core.tokenOffers.readMany({ 38 | where: { 39 | account, 40 | book, 41 | ledgerSequence: { 42 | lessOrEqual: ledgerSequence 43 | }, 44 | lastLedgerSequence: { 45 | greaterOrEqual: ledgerSequence 46 | } 47 | }, 48 | include: { 49 | book: true 50 | } 51 | }) 52 | } -------------------------------------------------------------------------------- /src/ledger/events/ledgers.js: -------------------------------------------------------------------------------- 1 | const pseudoTransactionTypes = [ 2 | 'EnableAmendment', 3 | 'SetFee', 4 | 'UNLModify' 5 | ] 6 | 7 | 8 | export function applyLedgerStats({ ctx, ledger }){ 9 | let baseData = { 10 | sequence: ledger.sequence, 11 | hash: ledger.hash, 12 | closeTime: ledger.closeTime, 13 | txCount: ledger.transactions.length, 14 | } 15 | 16 | if(ledger.transactions.length === 0){ 17 | ctx.db.core.ledgers.createOne({ 18 | data: baseData 19 | }) 20 | }else{ 21 | let types = {} 22 | let fees = [] 23 | 24 | for(let transaction of ledger.transactions){ 25 | if(pseudoTransactionTypes.includes(transaction.TransactionType)) 26 | continue 27 | 28 | if(!types[transaction.TransactionType]) 29 | types[transaction.TransactionType] = 0 30 | 31 | types[transaction.TransactionType]++ 32 | fees.push(parseInt(transaction.Fee)) 33 | } 34 | 35 | ctx.db.core.ledgers.createOne({ 36 | data: { 37 | ...baseData, 38 | txTypeCounts: Object.entries(types) 39 | .map(([type, count]) => ({ type, count })), 40 | minFee: fees.length ? Math.min(...fees) : 0, 41 | maxFee: fees.length ? Math.max(...fees) : 0, 42 | avgFee: fees.length ? Math.floor( 43 | fees.reduce((total, fee) => total + fee, 0) / fees.length 44 | ) : 0 45 | } 46 | }) 47 | } 48 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "xrplmeta", 3 | "version": "2.22.0-alpha", 4 | "type": "module", 5 | "bin": { 6 | "xrplmeta": "./src/run.js" 7 | }, 8 | "scripts": { 9 | "start": "node src/run.js", 10 | "postinstall": "npm run build-deps", 11 | "build-deps": "node-gyp rebuild --directory=deps --release", 12 | "test": "mocha test/unit/*.test.js", 13 | "livetest": "node test/live/run.js" 
14 | }, 15 | "dependencies": { 16 | "@mwni/events": "3.0.0", 17 | "@mwni/log": "3.2.0", 18 | "@mwni/workers": "1.1.0", 19 | "@structdb/sqlite": "1.7.2-alpha", 20 | "@xrplkit/socket": "2.1.0", 21 | "@xrplkit/time": "1.0.0", 22 | "@xrplkit/tokens": "1.0.1", 23 | "@xrplkit/toml": "1.0.0", 24 | "@xrplkit/txmeta": "1.4.1", 25 | "@xrplkit/xfl": "2.1.1", 26 | "@xrplkit/xls26": "2.5.0", 27 | "@koa/router": "13.1.0", 28 | "koa": "3.0.1", 29 | "koa-easy-ws": "2.1.0", 30 | "koa-json": "2.0.2", 31 | "koa-send": "5.0.1", 32 | "better-sqlite3": "12.4.1", 33 | "limiter": "2.0.1", 34 | "node-abort-controller": "3.1.1", 35 | "node-fetch": "3.3.2", 36 | "minimist": "1.2.8", 37 | "ripple-address-codec": "5.0.0", 38 | "sharp": "0.34.4", 39 | "ws": "8.18.3" 40 | }, 41 | "devDependencies": { 42 | "chai": "6.2.0", 43 | "mocha": "11.7.3" 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /test/unit/xrpl.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createPool } from '../../src/xrpl/nodepool.js' 3 | import log from '@mwni/log' 4 | 5 | log.config({ level: 'error' }) 6 | 7 | 8 | describe( 9 | 'Fetching from XRPL', 10 | () => { 11 | it( 12 | 'should successfully retrieve ledger 80,000,000', 13 | async () => { 14 | let pool = createPool([{ url: 'wss://xrplcluster.com' }]) 15 | let { result } = await pool.request({ 16 | command: 'ledger', 17 | ledger_index: 80000000 18 | }) 19 | 20 | expect(result.ledger.ledger_hash).to.be.equal('DB978F031BB14734213998060E077D5F813358222DAB07CA8148588D852A55DF') 21 | 22 | pool.close() 23 | } 24 | ).timeout(10000) 25 | 26 | it( 27 | 'should retrieve a historical ledger from a node that has it', 28 | async () => { 29 | let pool = createPool([ 30 | { url: 'wss://s1.ripple.com' }, 31 | { url: 'wss://s2.ripple.com' }, 32 | ]) 33 | 34 | let { result } = await pool.request({ 35 | command: 'ledger', 36 | ledger_index: 32570 37 | }) 38 | 39 | expect(result.ledger.ledger_hash).to.be.equal('4109C6F2045FC7EFF4CDE8F9905D19C28820D86304080FF886B299F0206E42B5') 40 | 41 | pool.close() 42 | } 43 | ).timeout(10000) 44 | } 45 | ) -------------------------------------------------------------------------------- /src/db/helpers/ledgers.js: -------------------------------------------------------------------------------- 1 | export function getAvailableRange({ ctx }){ 2 | let start = ctx.db.core.ledgers.readOne({ 3 | orderBy: { 4 | sequence: 'asc' 5 | } 6 | }) 7 | 8 | let end = ctx.db.core.ledgers.readOne({ 9 | orderBy: { 10 | sequence: 'desc' 11 | } 12 | }) 13 | 14 | return { 15 | sequence: { 16 | start: start.sequence, 17 | end: end.sequence 18 | }, 19 | time: { 20 | start: start.closeTime, 21 | end: end.closeTime 22 | } 23 | } 24 | } 25 | 26 | 27 | export function readMostRecentLedger({ ctx }){ 28 | return ctx.db.core.ledgers.readOne({ 29 | orderBy: { 30 | sequence: 'desc' 31 | } 32 | }) 33 | } 34 | 35 | 36 | export function readLedgerAt({ ctx, sequence, time, clamp, include }){ 37 | let key = sequence !== undefined 38 | ? 'sequence' 39 | : 'closeTime' 40 | 41 | let point = sequence !== undefined 42 | ? 
sequence 43 | : time 44 | 45 | let ledger = ctx.db.core.ledgers.readOne({ 46 | where: { 47 | [key]: { 48 | lessOrEqual: point 49 | } 50 | }, 51 | orderBy: { 52 | [key]: 'desc' 53 | }, 54 | include 55 | }) 56 | 57 | if(!ledger && clamp){ 58 | ledger = ctx.db.core.ledgers.readOne({ 59 | where: { 60 | [key]: { 61 | greaterThan: point 62 | } 63 | }, 64 | orderBy: { 65 | [key]: 'asc' 66 | }, 67 | include 68 | }) 69 | } 70 | 71 | return ledger 72 | } 73 | -------------------------------------------------------------------------------- /src/ledger/state/nfts.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID } from 'ripple-address-codec' 2 | 3 | 4 | export function parse({ index, entry }){ 5 | let address = encodeAccountID(Buffer.from(index.slice(0, 40), 'hex')) 6 | let page = { 7 | account: { address }, 8 | nfts: [] 9 | } 10 | 11 | for(let { NFToken } of entry.NFTokens){ 12 | let issuer = encodeAccountID(Buffer.from(NFToken.NFTokenID.slice(8, 48), 'hex')) 13 | let uri = NFToken.URI 14 | ? Buffer.from(NFToken.URI, 'hex') 15 | : null 16 | 17 | page.nfts.push({ 18 | owner: { address }, 19 | issuer: { address: issuer }, 20 | tokenId: NFToken.NFTokenID, 21 | uri, 22 | }) 23 | } 24 | 25 | return page 26 | } 27 | 28 | 29 | 30 | export function diff({ ctx, previous, final }){ 31 | if(previous){ 32 | for(let { owner, ...pNft } of previous.nfts){ 33 | if(final && final.nfts.some(fNft => fNft.tokenId === pNft.tokenId)) 34 | continue 35 | 36 | ctx.db.core.nfts.createOne({ 37 | data: ctx.backwards 38 | ? pNft 39 | : { ...pNft, owner: null } 40 | }) 41 | } 42 | } 43 | 44 | if(final){ 45 | for(let { owner, ...fNft } of final.nfts){ 46 | if(previous && previous.nfts.some(pNft => pNft.tokenId === fNft.tokenId)) 47 | continue 48 | 49 | ctx.db.core.nfts.createOne({ 50 | data: ctx.backwards 51 | ? fNft 52 | : { ...fNft, owner } 53 | }) 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /src/ledger/events/tokens.js: -------------------------------------------------------------------------------- 1 | import { extractExchanges } from '@xrplkit/txmeta' 2 | import { markCacheDirtyForTokenExchanges } from '../../cache/todo.js' 3 | 4 | 5 | export function applyTokenExchanges({ ctx, ledger }){ 6 | let exchanges = [] 7 | 8 | for(let transaction of ledger.transactions){ 9 | exchanges.push(...extractExchanges(transaction)) 10 | } 11 | 12 | if(exchanges.length === 0) 13 | return 14 | 15 | for(let { hash, sequence, maker, taker, takerPaid, takerGot } of exchanges){ 16 | let takerPaidToken = { 17 | currency: takerPaid.currency, 18 | issuer: takerPaid.issuer 19 | ? { address: takerPaid.issuer } 20 | : undefined 21 | } 22 | 23 | let takerGotToken = { 24 | currency: takerGot.currency, 25 | issuer: takerGot.issuer 26 | ? 
{ address: takerGot.issuer } 27 | : undefined 28 | } 29 | 30 | ctx.db.core.tokenExchanges.createOne({ 31 | data: { 32 | txHash: hash, 33 | ledgerSequence: ledger.sequence, 34 | taker: { 35 | address: taker 36 | }, 37 | maker: { 38 | address: maker 39 | }, 40 | sequence, 41 | takerPaidToken, 42 | takerGotToken, 43 | takerPaidValue: takerPaid.value, 44 | takerGotValue: takerGot.value, 45 | } 46 | }) 47 | 48 | markCacheDirtyForTokenExchanges({ ctx, token: takerPaidToken }) 49 | markCacheDirtyForTokenExchanges({ ctx, token: takerGotToken }) 50 | } 51 | } -------------------------------------------------------------------------------- /src/app/ledger.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { createPool } from '../xrpl/nodepool.js' 4 | import { openDB } from '../db/index.js' 5 | import { createSnapshot } from '../ledger/snapshot.js' 6 | import { startSync } from '../ledger/sync.js' 7 | import { startBackfill } from '../ledger/backfill.js' 8 | 9 | 10 | export async function run({ ctx }){ 11 | ctx = { 12 | ...ctx, 13 | xrpl: createPool(ctx.config.ledger.source), 14 | } 15 | 16 | await spawn(':runSnapshot', { ctx }) 17 | 18 | spawn(':runSync', { ctx }) 19 | .then(task => task.onceInSync()) 20 | .then(() => spawn(':runBackfill', { ctx })) 21 | } 22 | 23 | 24 | export async function runSnapshot({ ctx }){ 25 | if(ctx.log) 26 | log.pipe(ctx.log) 27 | 28 | return await createSnapshot({ 29 | ctx: { 30 | ...ctx, 31 | db: await openDB({ ctx }) 32 | } 33 | }) 34 | } 35 | 36 | export async function runSync({ ctx }){ 37 | if(ctx.log) 38 | log.pipe(ctx.log) 39 | 40 | log.info('starting sync') 41 | 42 | return await startSync({ 43 | ctx: { 44 | ...ctx, 45 | db: await openDB({ ctx }) 46 | } 47 | }) 48 | } 49 | 50 | export async function runBackfill({ ctx }){ 51 | if(ctx.log) 52 | log.pipe(ctx.log) 53 | 54 | log.info('starting backfill') 55 | 56 | return await startBackfill({ 57 | ctx: { 58 | ...ctx, 59 | db: await openDB({ ctx }) 60 | } 61 | }) 62 | } -------------------------------------------------------------------------------- /test/live/cases/icon.cache.js: -------------------------------------------------------------------------------- 1 | import path from 'path' 2 | import log from '@mwni/log' 3 | import { createContext } from '../../unit/env.js' 4 | import { writeTokenProps } from '../../../src/db/helpers/props.js' 5 | import { updateIconCacheFor } from '../../../src/cache/icons.js' 6 | 7 | 8 | 9 | export default async ({ config, args }) => { 10 | let ctx = await createContext() 11 | let iconUrl = args._[1] 12 | 13 | if(!iconUrl) 14 | throw new Error(`no icon url provided. 
use: npm run livetest icon.cache [url]`) 15 | 16 | let token = { 17 | currency: '000', 18 | issuer: { 19 | address: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 20 | }, 21 | props: { 22 | icon: iconUrl 23 | } 24 | } 25 | 26 | writeTokenProps({ 27 | ctx, 28 | token: { 29 | currency: token.currency, 30 | issuer: token.issuer 31 | }, 32 | props: token.props, 33 | source: 'test' 34 | }) 35 | 36 | log.config({ level: 'debug' }) 37 | log.info(`downloading and caching ${iconUrl}...`) 38 | 39 | await updateIconCacheFor({ 40 | ctx, 41 | token: { 42 | currency: token.currency, 43 | issuer: token.issuer 44 | } 45 | }) 46 | 47 | log.info(`icon cache registry:`, ctx.db.cache.icons.readMany()[0]) 48 | log.info(`generated token meta:`, ctx.db.cache.tokens.readOne({ 49 | where: { 50 | token: 2 51 | } 52 | }).cachedIcons) 53 | log.info(`icon file and variants cached at ${path.join(ctx.config.node.dataDir, 'media', 'icons')}`) 54 | } -------------------------------------------------------------------------------- /src/app/main.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { run as runLedgerApp } from './ledger.js' 3 | import { run as runCrawlApp } from './crawl.js' 4 | import { run as runCacheApp } from './cache.js' 5 | import { run as runServerApp } from './server.js' 6 | import createIPC from '../lib/ipc.js' 7 | 8 | 9 | export default async function({ config, args }){ 10 | const ctx = { 11 | ipc: createIPC(), 12 | config, 13 | log, 14 | } 15 | 16 | 17 | if(!args['only-server']){ 18 | await runLedgerApp({ ctx }) 19 | .catch(error => { 20 | log.error(`ledger app crashed due to fatal error:`) 21 | log.error(error) 22 | process.exit(1) 23 | }) 24 | 25 | log.info(`bootstrap complete`) 26 | 27 | runCrawlApp({ ctx }) 28 | .catch(error => { 29 | log.error(`crawl app crashed due to fatal error:`) 30 | log.error(error) 31 | log.warn(`attempting to continue without it`) 32 | }) 33 | 34 | runCacheApp({ ctx }) 35 | .catch(error => { 36 | log.error(`cache app crashed due to fatal error:`) 37 | log.error(error) 38 | log.warn(`attempting to continue without it`) 39 | }) 40 | } 41 | 42 | runServerApp({ ctx }) 43 | .catch(error => { 44 | log.error(`server app crashed:`) 45 | log.error(error) 46 | log.warn(`attempting to continue without it`) 47 | }) 48 | 49 | 50 | return { 51 | async terminate(){ 52 | log.info(`shutting down`) 53 | process.exit() 54 | } 55 | } 56 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/xrpscan.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleGlobal } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { diffMultiAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.xrpscan 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = createFetch({ 15 | baseUrl: 'https://api.xrpscan.com/api/v1' 16 | }) 17 | 18 | while(true){ 19 | await scheduleGlobal({ 20 | ctx, 21 | task: 'xrpscan.well-known', 22 | interval: config.fetchInterval, 23 | routine: async () => { 24 | log.info(`fetching well-known list...`) 25 | 26 | let accounts = [] 27 | let { data } = await fetch('names/well-known') 28 | 29 | log.info(`got`, data.length, `well known`) 30 | 31 | for(let { account, name, domain, twitter } of data){ 32 | let urls = undefined 33 | 34 | 
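// the well-known list exposes a bare x/twitter handle; expand it into the
// generic `urls` prop shape consumed by the props helpers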
if(twitter){ 35 | urls = [{ 36 | url: `https://x.com/${twitter}`, 37 | type: `social` 38 | }] 39 | } 40 | 41 | accounts.push({ 42 | address: account, 43 | props: { 44 | name, 45 | domain, 46 | urls 47 | }, 48 | }) 49 | } 50 | 51 | diffMultiAccountProps({ 52 | ctx, 53 | accounts, 54 | source: 'xrpscan/well-known' 55 | }) 56 | 57 | log.info(`updated`, accounts.length, `issuers`) 58 | } 59 | }) 60 | } 61 | } -------------------------------------------------------------------------------- /test/unit/prop-rank.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { reduceProps } from '../../src/srv/procedures/token.js' 3 | 4 | 5 | const props = [ 6 | { 7 | key: 'name', 8 | value: 'US Dollar', 9 | source: 'xaman/curated' 10 | }, 11 | { 12 | key: 'name', 13 | value: 'U.S. Dollar', 14 | source: 'trustlist' 15 | }, 16 | { 17 | key: 'name', 18 | value: 'Dollar', 19 | source: 'xrpscan/well-known' 20 | }, 21 | { 22 | key: 'name', 23 | value: 'USD', 24 | source: 'bithomp' 25 | }, 26 | ] 27 | 28 | 29 | describe( 30 | 'Ranking props by source', 31 | () => { 32 | it( 33 | 'should pick the first when no ranking given', 34 | () => { 35 | expect(reduceProps({ props }).name).to.be.equal(props[0].value) 36 | } 37 | ) 38 | 39 | it( 40 | 'should pick identical sources', 41 | () => { 42 | expect( 43 | reduceProps({ 44 | props, 45 | sourceRanking: [ 46 | 'trustlist', 47 | 'xaman', 48 | 'bithomp' 49 | ] 50 | }).name 51 | ).to.be.equal(props[1].value) 52 | 53 | expect( 54 | reduceProps({ 55 | props, 56 | sourceRanking: [ 57 | 'xaman/curated', 58 | 'trustlist', 59 | 'bithomp' 60 | ] 61 | }).name 62 | ).to.be.equal(props[0].value) 63 | } 64 | ) 65 | 66 | it( 67 | 'should pick wildcarded sources', 68 | () => { 69 | expect( 70 | reduceProps({ 71 | props, 72 | sourceRanking: [ 73 | 'xrpscan', 74 | 'trustlist', 75 | 'xaman' 76 | ] 77 | }).name 78 | ).to.be.equal(props[2].value) 79 | } 80 | ) 81 | } 82 | ) -------------------------------------------------------------------------------- /src/ledger/state/nftoffers.js: -------------------------------------------------------------------------------- 1 | import { encodeAccountID } from 'ripple-address-codec' 2 | import { amountFromRippled } from '@xrplkit/tokens' 3 | import { rippleToUnix } from '@xrplkit/time' 4 | import { expireNFTokenOffer, writeNFTokenOffer } from '../../db/helpers/nftoffers.js' 5 | 6 | 7 | export function parse({ index, entry }){ 8 | let amountToken 9 | let amountValue 10 | let issuer = encodeAccountID(Buffer.from(entry.NFTokenID.slice(8, 48), 'hex')) 11 | let isSellOffer = entry.Flags & 0x00000001 12 | let expirationTime = entry.Expiration 13 | ? rippleToUnix(entry.Expiration) 14 | : null 15 | 16 | 17 | if(entry.Amount){ 18 | let { currency, issuer, value } = amountFromRippled(entry.Amount) 19 | 20 | amountValue = value 21 | amountToken = currency === 'XRP' 22 | ? { id: 1 } 23 | : { 24 | currency, 25 | issuer: { 26 | address: issuer 27 | } 28 | } 29 | } 30 | 31 | return { 32 | account: { 33 | address: entry.Owner 34 | }, 35 | offerId: index, 36 | nft: { 37 | tokenId: entry.NFTokenID, 38 | issuer: { 39 | address: issuer 40 | } 41 | }, 42 | destination: entry.Destination 43 | ? 
{ address: entry.Destination } 44 | : null, 45 | amountToken, 46 | amountValue, 47 | isSellOffer, 48 | expirationTime, 49 | ledgerSequence: entry.LedgerSequence 50 | } 51 | } 52 | 53 | 54 | 55 | export function diff({ ctx, previous, final }){ 56 | if(previous){ 57 | expireNFTokenOffer({ 58 | ...previous, 59 | ctx, 60 | ledgerSequence: ctx.ledgerSequence, 61 | }) 62 | } 63 | 64 | if(final){ 65 | writeNFTokenOffer({ 66 | ...final, 67 | ctx 68 | }) 69 | } 70 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/gravatar.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { writeAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.gravatar 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = createFetch({ 15 | baseUrl: 'https://www.gravatar.com', 16 | ratelimit: config.maxRequestsPerMinute 17 | }) 18 | 19 | while(true){ 20 | await scheduleIterator({ 21 | ctx, 22 | type: 'issuer', 23 | task: 'gravatar', 24 | interval: config.fetchInterval, 25 | routine: async ({ id, address, emailHash }, remaining) => { 26 | let icon 27 | 28 | if(emailHash){ 29 | log.debug(`checking avatar for ${address}`) 30 | 31 | let { status } = await fetch(`avatar/${emailHash.toLowerCase()}?d=404`) 32 | 33 | if(status === 200){ 34 | icon = `https://www.gravatar.com/avatar/${emailHash.toLowerCase()}` 35 | }else if(status !== 404){ 36 | throw new Error(`HTTP ${status}`) 37 | } 38 | 39 | log.debug(`avatar for ${address}: ${icon}`) 40 | } 41 | 42 | writeAccountProps({ 43 | ctx, 44 | account: { id }, 45 | props: { 46 | icon 47 | }, 48 | source: 'gravatar/avatar' 49 | }) 50 | 51 | log.accumulate.info({ 52 | text: [`%gravatarsChecked avatars checked in %time (${remaining} remaining)`], 53 | data: { 54 | gravatarsChecked: 1 55 | } 56 | }) 57 | } 58 | }) 59 | } 60 | } -------------------------------------------------------------------------------- /src/run.js: -------------------------------------------------------------------------------- 1 | import minimist from 'minimist' 2 | import log from '@mwni/log' 3 | import { find as findConfig } from './lib/config.js' 4 | import { load as loadConfig } from './lib/config.js' 5 | import { override as overrideConfig } from './lib/config.js' 6 | import startApp from './app/main.js' 7 | import rebuildCache from './cmd/rebuild-cache.js' 8 | import backup from './cmd/backup.js' 9 | import version from './lib/version.js' 10 | 11 | 12 | const args = minimist(process.argv.slice(2)) 13 | const configPath = args.config 14 | ? args.config 15 | : findConfig() 16 | 17 | 18 | log.config({ level: args.log || 'info', root: '.' 
}) 19 | .info(`*** XRPLMETA NODE ${version} ***`) 20 | .info(`using config at "${configPath}"`) 21 | 22 | 23 | const baseConfig = loadConfig(configPath, true) 24 | const config = overrideConfig(baseConfig, args) 25 | 26 | if(args._[0] === 'rebuild-cache'){ 27 | log.info(`rebuilding cache at "${config.node.dataDir}"`) 28 | await rebuildCache({ config, args }) 29 | }else if(args._[0] === 'backup'){ 30 | let destinationFile = args._[1] 31 | 32 | if(!destinationFile){ 33 | log.error(`backup destination file path is missing`) 34 | process.exit(1) 35 | } 36 | 37 | log.info(`writing backup to "${destinationFile}"`) 38 | await backup({ config, destinationFile }) 39 | }else if(args._.length === 0 || args._[0] === 'run'){ 40 | log.info(`data directory is at "${config.node.dataDir}"`) 41 | log.info(`will start app now`) 42 | 43 | const app = await startApp({ config, args }) 44 | 45 | process.on('SIGINT', async () => { 46 | await app.terminate() 47 | process.exit(0) 48 | }) 49 | }else{ 50 | log.error(`unknown command "${args._[0]}"`) 51 | process.exit(1) 52 | } -------------------------------------------------------------------------------- /src/xrpl/snapshot.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | 4 | export async function start({ ctx, ledgerSequence, marker, node }){ 5 | if(ctx.log) 6 | log.pipe(ctx.log) 7 | 8 | let chunkSize = ctx.config.ledger.snapshotChunkSize || 10000 9 | let queue = [] 10 | 11 | let { result, node: assignedNode } = await ctx.xrpl.request({ 12 | type: 'reserveTicket', 13 | task: 'snapshot', 14 | ledgerSequence, 15 | node 16 | }) 17 | 18 | let ticket = result.ticket 19 | let fetching = true 20 | let resolveNext 21 | 22 | log.info(`reserved snapshot ticket with node`, assignedNode) 23 | 24 | let promise = (async() => { 25 | while(true){ 26 | while(queue.length >= 10) 27 | await wait(100) 28 | 29 | try{ 30 | let { result } = await ctx.xrpl.request({ 31 | command: 'ledger_data', 32 | ledger_index: ledgerSequence, 33 | limit: chunkSize, 34 | marker, 35 | ticket 36 | }) 37 | 38 | queue.push({ 39 | objects: result.state, 40 | marker: result.marker 41 | }) 42 | 43 | marker = result.marker 44 | 45 | if(resolveNext) 46 | resolveNext() 47 | 48 | }catch(e){ 49 | log.info(`could not fetch ledger chunk:`, e.error ? e.error : e) 50 | await wait(2500) 51 | continue 52 | } 53 | 54 | if(!marker){ 55 | fetching = false 56 | break 57 | } 58 | } 59 | })() 60 | 61 | return { 62 | ledgerSequence, 63 | node: assignedNode, 64 | async next(){ 65 | if(queue.length > 0) 66 | return queue.shift() 67 | 68 | if(!fetching) 69 | return 70 | 71 | await new Promise(resolve => resolveNext = resolve) 72 | 73 | return queue.shift() 74 | } 75 | } 76 | } -------------------------------------------------------------------------------- /src/lib/fetch.js: -------------------------------------------------------------------------------- 1 | import { RateLimiter } from 'limiter' 2 | import { sanitize } from './url.js' 3 | import { AbortController } from 'node-abort-controller' 4 | import fetch from 'node-fetch' 5 | 6 | 7 | export function createFetch({ baseUrl, headers, ratelimit, timeout = 20 } = {}){ 8 | let limiter = ratelimit 9 | ? 
new RateLimiter({ 10 | tokensPerInterval: ratelimit, 11 | interval: 'minute' 12 | }) 13 | : null 14 | 15 | return async (url = '', options = {}) => { 16 | if(limiter) 17 | await limiter.removeTokens(1) 18 | 19 | let res 20 | let data 21 | let controller = new AbortController() 22 | let timeoutTimer = setTimeout(() => controller.abort(), timeout * 1000) 23 | let sanitizedUrl = sanitize(baseUrl ? `${baseUrl}/${url}` : url) 24 | 25 | try{ 26 | res = await fetch( 27 | sanitizedUrl, 28 | { 29 | signal: controller.signal, 30 | headers: { 31 | 'user-agent': 'XRPL-Meta-Node (https://xrplmeta.org)', 32 | ...headers, 33 | ...options.headers 34 | } 35 | } 36 | ) 37 | }catch(error){ 38 | res?.blob()?.catch(() => null) 39 | throw error 40 | }finally{ 41 | clearTimeout(timeoutTimer) 42 | } 43 | 44 | if(options.raw){ 45 | return res 46 | } 47 | 48 | try{ 49 | if(res.headers.get('content-type')?.includes('application/json')){ 50 | data = await res.json() 51 | }else if(res.headers.get('content-type')?.match(/(image\/|video\/|application\/octet-stream)/)){ 52 | data = Buffer.from(await res.arrayBuffer()) 53 | }else{ 54 | data = await res.text() 55 | } 56 | }catch{ 57 | data = null 58 | } 59 | 60 | return { 61 | status: res.status, 62 | headers: res.headers, 63 | data 64 | } 65 | } 66 | } -------------------------------------------------------------------------------- /src/db/index.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import { fileURLToPath } from 'url' 4 | import createStructDB from '@structdb/sqlite' 5 | import codecs from './codecs/index.js' 6 | 7 | 8 | const __filename = fileURLToPath(import.meta.url) 9 | const __dirname = path.dirname(__filename) 10 | 11 | 12 | export async function openDB({ ctx, coreReadOnly=false, inMemory=false }){ 13 | return { 14 | core: await openCoreDB({ 15 | ctx, 16 | readOnly: coreReadOnly, 17 | inMemory 18 | }), 19 | cache: await openCacheDB({ 20 | ctx, 21 | inMemory 22 | }) 23 | } 24 | } 25 | 26 | export async function openCoreDB({ ctx, readOnly=false, inMemory=false }){ 27 | let db = await createStructDB({ 28 | file: inMemory 29 | ? ':memory:' 30 | : `${ctx.config.node.dataDir}/core.db`, 31 | schema: JSON.parse( 32 | fs.readFileSync( 33 | path.join(__dirname, 'schemas/core.json') 34 | ) 35 | ), 36 | journalMode: 'WAL', 37 | timeout: 600000, 38 | debug: ctx.config.debug?.queries, 39 | codecs, 40 | readOnly 41 | }) 42 | 43 | db.loadExtension( 44 | path.join( 45 | __dirname, 46 | '..', 47 | '..', 48 | 'deps', 49 | 'build', 50 | 'Release', 51 | 'sqlite-xfl.node' 52 | ) 53 | ) 54 | 55 | db.tokens.createOne({ 56 | data: { 57 | currency: 'XRP', 58 | issuer: null 59 | } 60 | }) 61 | 62 | return db 63 | } 64 | 65 | export async function openCacheDB({ ctx, inMemory=false }){ 66 | return await createStructDB({ 67 | file: inMemory 68 | ? 
':memory:' 69 | : `${ctx.config.node.dataDir}/cache.db`, 70 | schema: JSON.parse( 71 | fs.readFileSync( 72 | path.join(__dirname, 'schemas/cache.json') 73 | ) 74 | ), 75 | journalMode: 'WAL', 76 | debug: ctx.config.debug?.queries, 77 | codecs 78 | }) 79 | } -------------------------------------------------------------------------------- /src/cache/todo.js: -------------------------------------------------------------------------------- 1 | import { getAccountId, getTokenId } from '../db/helpers/common.js' 2 | 3 | export function markCacheDirtyForAccountProps({ ctx, account }){ 4 | if(ctx.backwards) 5 | return 6 | 7 | ctx.db.cache.todos.createOne({ 8 | data: { 9 | task: 'account.props', 10 | subject: getAccountId({ ctx, account }) 11 | } 12 | }) 13 | } 14 | 15 | export function markCacheDirtyForTokenProps({ ctx, token }){ 16 | if(ctx.backwards) 17 | return 18 | 19 | ctx.db.cache.todos.createOne({ 20 | data: { 21 | task: 'token.props', 22 | subject: getTokenId({ ctx, token }) 23 | } 24 | }) 25 | } 26 | 27 | export function markCacheDirtyForTokenMetrics({ ctx, token, metrics }){ 28 | if(ctx.backwards) 29 | return 30 | 31 | let subject = getTokenId({ ctx, token }) 32 | 33 | for(let metric of Object.keys(metrics)){ 34 | ctx.db.cache.todos.createOne({ 35 | data: { 36 | task: `token.metrics.${metric}`, 37 | subject 38 | } 39 | }) 40 | } 41 | } 42 | 43 | export function markCacheDirtyForTokenExchanges({ ctx, token }){ 44 | if(ctx.backwards) 45 | return 46 | 47 | if(token.currency === 'XRP') 48 | return 49 | 50 | ctx.db.cache.todos.createOne({ 51 | data: { 52 | task: 'token.exchanges', 53 | subject: getTokenId({ ctx, token }) 54 | } 55 | }) 56 | } 57 | 58 | export function markCacheDirtyForTokenIcons({ ctx, token }){ 59 | ctx.db.cache.todos.createOne({ 60 | data: { 61 | task: 'token.icons', 62 | subject: getTokenId({ ctx, token }) 63 | } 64 | }) 65 | } 66 | 67 | export function markCacheDirtyForAccountIcons({ ctx, account }){ 68 | ctx.db.cache.todos.createOne({ 69 | data: { 70 | task: 'account.icons', 71 | subject: getAccountId({ ctx, account }) 72 | } 73 | }) 74 | } -------------------------------------------------------------------------------- /src/ledger/backfill.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { applyLedgerEvents } from './events/index.js' 4 | import { applyLedgerStateFromTransactions } from './state/index.js' 5 | import { updateDerived } from './derived/index.js' 6 | import { pullNewItems, readTableHeads } from '../db/helpers/heads.js' 7 | import { wait } from '@xrplkit/time' 8 | 9 | 10 | export async function startBackfill({ ctx }){ 11 | let { sequence: firstSequence } = ctx.db.core.ledgers.readOne({ 12 | orderBy: { 13 | sequence: 'asc' 14 | }, 15 | take: 1 16 | }) 17 | 18 | let stream = await spawn( 19 | '../xrpl/stream.js:createBackwardStream', 20 | { 21 | ctx, 22 | startSequence: firstSequence - 1 23 | } 24 | ) 25 | 26 | while(true){ 27 | let { ledger } = await stream.next() 28 | 29 | ctx.db.core.tx(() => { 30 | ctx = { 31 | ...ctx, 32 | currentLedger: ledger, 33 | ledgerSequence: ledger.sequence, 34 | backwards: true 35 | } 36 | 37 | try{ 38 | let heads = readTableHeads({ ctx }) 39 | 40 | applyLedgerEvents({ ctx, ledger }) 41 | applyLedgerStateFromTransactions({ ctx, ledger }) 42 | updateDerived({ 43 | ctx, 44 | newItems: pullNewItems({ 45 | ctx, 46 | previousHeads: heads 47 | }) 48 | }) 49 | }catch(error){ 50 | log.error(`fatal error while backfilling 
ledger #${ledger.sequence}:`) 51 | log.error(error.stack) 52 | 53 | throw error 54 | } 55 | }) 56 | 57 | log.accumulate.info({ 58 | text: [ 59 | `at ledger #${ledger.sequence} ${ 60 | new Date(ledger.closeTime * 1000) 61 | .toISOString() 62 | .slice(0, -5) 63 | .replace('T', ' ') 64 | } (+%backfilledLedgers in %time)` 65 | ], 66 | data: { 67 | backfilledLedgers: 1 68 | } 69 | }) 70 | 71 | await wait(10) 72 | } 73 | } -------------------------------------------------------------------------------- /src/srv/worker.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { openDB } from '../db/index.js' 4 | import * as procedures from './api.js' 5 | 6 | 7 | export async function spawnWorkers({ ctx }){ 8 | let num = ctx.config.server.workers || 3 9 | let { db, ...workerCtx } = ctx 10 | 11 | log.info(`spawning ${num} workers`) 12 | 13 | return Promise.all( 14 | Array(num).fill(0).map( 15 | async () => await spawn(':runWorker', { ctx: workerCtx }) 16 | ) 17 | ) 18 | } 19 | 20 | export async function executeProcedure({ ctx, procedure, params, requestId }){ 21 | let func = procedures[procedure] 22 | 23 | if(func.mustRunMainThread){ 24 | return json(await func({ ...params, ctx }), requestId) 25 | } 26 | 27 | let now = Date.now() 28 | let ranking = ctx.workers 29 | .map(worker => ({ worker, score: now - (worker.lastRequestTime || 0) - (!!worker.busy) * 1000000000 })) 30 | .sort((a, b) => b.score - a.score) 31 | 32 | 33 | log.debug(`available for handling ${procedure}`, ranking.map(({ worker, score }) => ({ busy: worker.busy, score }))) 34 | 35 | let worker = ranking.at(0).worker 36 | 37 | worker.busy = true 38 | worker.lastRequestTime = now 39 | 40 | try{ 41 | return await worker.execute({ procedure, params, requestId }) 42 | }catch(error){ 43 | throw error 44 | }finally{ 45 | worker.busy = false 46 | } 47 | } 48 | 49 | export async function runWorker({ ctx }){ 50 | if(ctx.log) 51 | log.pipe(ctx.log) 52 | 53 | ctx = { 54 | ...ctx, 55 | db: await openDB({ ctx }) 56 | } 57 | 58 | return { 59 | async execute({ procedure, params, requestId }){ 60 | return json(await procedures[procedure]({ ...params, ctx }), requestId) 61 | } 62 | } 63 | } 64 | 65 | function json(data, requestId){ 66 | if(requestId) 67 | data = { result: data, id: requestId } 68 | 69 | return JSON.stringify(data, null, 2) 70 | } -------------------------------------------------------------------------------- /src/ledger/events/nfts.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseOffer } from '../state/nftoffers.js' 3 | 4 | 5 | export function applyNFTokenExchanges({ ctx, ledger }){ 6 | for(let transaction of ledger.transactions){ 7 | if(transaction.TransactionType !== 'NFTokenAcceptOffer') 8 | continue 9 | 10 | if(transaction.metaData.TransactionResult !== 'tesSUCCESS') 11 | continue 12 | 13 | let offer 14 | 15 | for(let { DeletedNode } of transaction.metaData.AffectedNodes){ 16 | if(!DeletedNode) 17 | continue 18 | 19 | if(DeletedNode.LedgerEntryType !== 'NFTokenOffer') 20 | continue 21 | 22 | if(DeletedNode.LedgerIndex === transaction.NFTokenBuyOffer 23 | || DeletedNode.LedgerIndex === transaction.NFTokenSellOffer){ 24 | offer = { 25 | ...parseOffer({ 26 | index: DeletedNode.LedgerIndex, 27 | entry: DeletedNode.FinalFields 28 | }), 29 | ledgerSequence: DeletedNode.FinalFields.PreviousTxnLgrSeq, 30 | lastLedgerSequence: 
ledger.sequence - 1 31 | } 32 | } 33 | } 34 | 35 | if(!offer){ 36 | log.warn(`unable to determine accepted nft offer of ${transaction.hash}`) 37 | continue 38 | } 39 | 40 | ctx.db.core.nftExchanges.createOne({ 41 | data: { 42 | txHash: transaction.hash, 43 | account: { 44 | address: transaction.Account 45 | }, 46 | offer, 47 | nft: offer.nft, 48 | ledgerSequence: ledger.sequence 49 | } 50 | }) 51 | } 52 | } 53 | 54 | export function applyNFTokenModifications({ ctx, ledger }){ 55 | for(let transaction of ledger.transactions){ 56 | if(transaction.TransactionType !== 'NFTokenModify') 57 | continue 58 | 59 | if(transaction.metaData.TransactionResult !== 'tesSUCCESS') 60 | continue 61 | 62 | ctx.db.core.nfts.updateOne({ 63 | data: { 64 | uri: transaction.URI 65 | ? Buffer.from(transaction.URI, 'hex') 66 | : null, 67 | }, 68 | where: { 69 | tokenId: transaction.NFTokenID 70 | } 71 | }) 72 | } 73 | } -------------------------------------------------------------------------------- /src/ledger/state/accounts.js: -------------------------------------------------------------------------------- 1 | import { div } from '@xrplkit/xfl' 2 | import { isBlackholed } from '../../xrpl/blackhole.js' 3 | import { writeBalance } from '../../db/helpers/balances.js' 4 | import { markCacheDirtyForAccountProps } from '../../cache/todo.js' 5 | 6 | 7 | export function parse({ entry }){ 8 | return { 9 | address: entry.Account, 10 | balance: div(entry.Balance, '1000000'), 11 | ledgerSequence: entry.LedgerSequence, 12 | emailHash: entry.EmailHash, 13 | transferRate: entry.TransferRate, 14 | blackholed: isBlackholed(entry), 15 | domain: entry.Domain 16 | ? Buffer.from(entry.Domain, 'hex').toString() 17 | : undefined, 18 | } 19 | } 20 | 21 | export function diff({ ctx, previous, final }){ 22 | let address = final?.address || previous?.address 23 | 24 | if(final){ 25 | let { balance, ledgerSequence, ...meta } = final 26 | var { id } = ctx.db.core.accounts.createOne({ 27 | data: ctx.backwards 28 | ? 
{ address } 29 | : meta 30 | }) 31 | 32 | if(final?.domain != previous?.domain) 33 | markCacheDirtyForAccountProps({ ctx, account: final }) 34 | }else{ 35 | var { id } = ctx.db.core.accounts.createOne({ 36 | data: { 37 | address 38 | } 39 | }) 40 | } 41 | 42 | if(ctx.backwards && !previous){ 43 | // edge case when backfilling AccountRoot deletions 44 | writeBalance({ 45 | ctx, 46 | account: { id }, 47 | token: { 48 | currency: 'XRP', 49 | issuer: null 50 | }, 51 | ledgerSequence: ctx.ledgerSequence, 52 | balance: '0', 53 | }) 54 | } 55 | 56 | if(final){ 57 | writeBalance({ 58 | ctx, 59 | account: { id }, 60 | token: { 61 | currency: 'XRP', 62 | issuer: null 63 | }, 64 | ledgerSequence: final.ledgerSequence, 65 | balance: final.balance, 66 | }) 67 | }else{ 68 | writeBalance({ 69 | ctx, 70 | account: { id }, 71 | token: { 72 | currency: 'XRP', 73 | issuer: null 74 | }, 75 | ledgerSequence: ctx.ledgerSequence, 76 | balance: '0', 77 | }) 78 | } 79 | } -------------------------------------------------------------------------------- /test/live/run.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import minimist from 'minimist' 4 | import { fileURLToPath } from 'url' 5 | import log from '@mwni/log' 6 | import { find as findConfig } from '../../src/lib/config.js' 7 | import { load as loadConfig } from '../../src/lib/config.js' 8 | import { override as overrideConfig } from '../../src/lib/config.js' 9 | 10 | 11 | const __filename = fileURLToPath(import.meta.url) 12 | const __dirname = path.dirname(__filename) 13 | 14 | 15 | const args = minimist(process.argv.slice(2)) 16 | const component = args._[0] 17 | const configPath = args.config 18 | ? args.config 19 | : findConfig() 20 | 21 | 22 | const cases = fs.readdirSync(path.join(__dirname, 'cases')) 23 | .map(file => file.slice(0, -3)) 24 | 25 | if(!cases.includes(component)){ 26 | log.warn(`no test case selected!`) 27 | log.info(`available cases are:`) 28 | 29 | for(let key of cases){ 30 | log.info(` - ${key}`) 31 | } 32 | 33 | process.exit(1) 34 | } 35 | 36 | 37 | log.config({ 38 | level: args.log || 'debug', 39 | dir: path.resolve( 40 | path.join(__dirname, '..', '..') 41 | ) 42 | }) 43 | log.info(`*** XRPLMETA NODE LIVE COMPONENT TEST ***`) 44 | log.info(`testing component "${component}"`) 45 | log.info(`using config at "${configPath}"`) 46 | 47 | 48 | const baseConfig = loadConfig(configPath, true) 49 | const config = overrideConfig(baseConfig, args) 50 | 51 | if(args.testdb){ 52 | const testDataDir = path.join(__dirname, 'data') 53 | 54 | log.info(`overriding data dir to "${testDataDir}"`) 55 | 56 | Object.assign(config.node, { 57 | dataDir: testDataDir 58 | }) 59 | 60 | if(!fs.existsSync(testDataDir)){ 61 | log.info(`data dir "${testDataDir}" does not exist - creating it`) 62 | fs.mkdirSync(testDataDir, { 63 | recursive: true 64 | }) 65 | } 66 | } 67 | 68 | let { default: run } = await import(`./cases/${component}.js`) 69 | 70 | await run({ args, config }) 71 | 72 | log.info(`live test exited with code 0`) -------------------------------------------------------------------------------- /src/srv/server.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import Koa from 'koa' 3 | import websocket from 'koa-easy-ws' 4 | import json from 'koa-json' 5 | import { createRouter } from './http.js' 6 | import { createManager } from './ws.js' 7 | import { spawnWorkers } from './worker.js' 8 | 9 | 
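// Boots the public API: ensures a public URL is configured (falling back to
// localhost), spawns the worker pool, then serves HTTP requests and WebSocket
// upgrades through Koa until the server closes.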
10 | export async function startServer({ ctx }){ 11 | if(!ctx.config.api?.publicUrl){ 12 | let fallbackUrl = `http://localhost:${ctx.config.server.port}` 13 | 14 | log.warn(`public URL not set in config - using fallback: ${fallbackUrl}\n >> consider setting "public_url" in the [API] stanza of your config.toml`) 15 | 16 | ctx = { 17 | ...ctx, 18 | config: { 19 | ...ctx.config, 20 | api: { 21 | ...ctx.config.api, 22 | publicUrl: fallbackUrl 23 | } 24 | } 25 | } 26 | } 27 | 28 | ctx = { 29 | ...ctx, 30 | workers: await spawnWorkers({ ctx }) 31 | } 32 | 33 | let koa = new Koa() 34 | let router = createRouter({ ctx }) 35 | let ws = createManager({ ctx }) 36 | 37 | koa.use(websocket()) 38 | koa.use(async (ctx, next) => { 39 | ctx.req.on('error', error => { 40 | log.debug(`client error: ${error.message}`) 41 | }) 42 | 43 | if(ctx.ws){ 44 | ctx.req.socket.ignoreTimeout = true 45 | ws.registerSocket(await ctx.ws()) 46 | }else{ 47 | return await next() 48 | } 49 | }) 50 | 51 | koa.use(json({ pretty: true })) 52 | koa.use(router.routes(), router.allowedMethods()) 53 | 54 | koa.listen(ctx.config.server.port) 55 | .on('clientError', (error, socket) => { 56 | if(error.code === 'ERR_HTTP_REQUEST_TIMEOUT' && socket.ignoreTimeout) 57 | return 58 | 59 | log.debug(`client error:`, error) 60 | socket.destroy() 61 | }) 62 | .on('error', error => { 63 | log.warn(`server error: ${error.message}`) 64 | }) 65 | 66 | 67 | log.info(`listening on port ${ctx.config.server.port}`) 68 | 69 | await new Promise(resolve => { 70 | koa.on('close', resolve) 71 | }) 72 | } 73 | 74 | console.errorOrg = console.error 75 | console.error = text => /.*Error: (write|read) ECONN.*/.test(text) 76 | ? undefined 77 | : console.errorOrg(text) -------------------------------------------------------------------------------- /src/lib/config.js: -------------------------------------------------------------------------------- 1 | import os from 'os' 2 | import fs from 'fs' 3 | import path from 'path' 4 | import { fileURLToPath } from 'url' 5 | import log from '@mwni/log' 6 | import { parse as parseToml } from '@xrplkit/toml' 7 | 8 | 9 | const __filename = fileURLToPath(import.meta.url) 10 | const __dirname = path.dirname(__filename) 11 | 12 | export function find(){ 13 | let preferredPath = path.join(os.homedir(), '.xrplmeta', 'config.toml') 14 | let paths = ['config.toml', preferredPath] 15 | 16 | for(let path of paths){ 17 | if(fs.existsSync(path)) 18 | return path 19 | } 20 | 21 | return preferredPath 22 | } 23 | 24 | 25 | export function load(file, createIfMissing){ 26 | if(!fs.existsSync(file)){ 27 | log.warn(`no config at "${file}"${createIfMissing ? ' - creating new from template' : ''}`) 28 | 29 | if(createIfMissing) 30 | create(file) 31 | } 32 | 33 | let content = fs.readFileSync(file, 'utf-8') 34 | let config = parseToml(content, 'camelCase') 35 | 36 | // schema checks here 37 | 38 | return config 39 | } 40 | 41 | export function create(file){ 42 | let dir = path.dirname(file) 43 | let root = path.dirname(process.argv[1]) 44 | let templatePath = path.join(__dirname, '../../config.template.toml') 45 | let template = fs.readFileSync(templatePath, 'utf-8') 46 | let customizedTemplate = template 47 | .replace( 48 | 'data_dir = ""', 49 | `data_dir = "${dir.replace(/\\/g, '\\\\')}"` 50 | ) 51 | 52 | if(!fs.existsSync(dir)) 53 | fs.mkdirSync(dir) 54 | 55 | fs.writeFileSync(file, customizedTemplate) 56 | } 57 | 58 | export function override(config, ...overrides){ 59 | if (!overrides.length) 60 | return config 61 | 62 | let source = overrides.shift() 63 | 64 | 
if(isObject(config) && isObject(source)){ 65 | for (const key in source){ 66 | if(isObject(source[key])){ 67 | if(!config[key]) 68 | Object.assign(config, { [key]: {} }) 69 | 70 | override(config[key], source[key]) 71 | }else{ 72 | Object.assign(config, { [key]: source[key] }) 73 | } 74 | } 75 | } 76 | 77 | return override(config, ...overrides) 78 | } 79 | 80 | function isObject(item) { 81 | return item && typeof item === 'object' && !Array.isArray(item) 82 | } -------------------------------------------------------------------------------- /src/cmd/rebuild-cache.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { openDB } from '../db/index.js' 3 | import { updateIconCacheFor } from '../cache/icons.js' 4 | import { 5 | updateCacheForTokenProps, 6 | updateCacheForAccountProps, 7 | updateCacheForTokenExchanges, 8 | updateCacheForTokenMetrics, 9 | } from '../cache/tokens.js' 10 | 11 | 12 | export default async function({ config, args }){ 13 | const ctx = { 14 | config, 15 | log, 16 | db: await openDB({ 17 | ctx: { config }, 18 | coreReadOnly: true 19 | }) 20 | } 21 | 22 | let tokens 23 | 24 | if(args.token){ 25 | let [currency, issuer] = args.token.split(':') 26 | 27 | tokens = [ctx.db.core.tokens.readOne({ 28 | where: { 29 | currency, 30 | issuer: { 31 | address: issuer 32 | } 33 | } 34 | })] 35 | 36 | if(!tokens[0]) 37 | throw new Error(`token "${args.token}" not found`) 38 | }else{ 39 | tokens = ctx.db.core.tokens.readMany().slice(1) // first is XRP 40 | 41 | if(args.clean){ 42 | log.time.info(`cache.wipe`, `wiping current cache`) 43 | ctx.db.cache.tokens.deleteMany() 44 | ctx.db.cache.icons.deleteMany() 45 | ctx.db.cache.iconUsers.deleteMany() 46 | ctx.db.cache.todos.deleteMany() 47 | log.time.info(`cache.wipe`, `wiped cache in %`) 48 | } 49 | } 50 | 51 | log.time.info(`cache.tokens`, `rebuilding for`, tokens.length, `token(s)`) 52 | 53 | for(let i=0; i { 38 | log.info(`fetching services list...`) 39 | 40 | let accounts = [] 41 | 42 | let { data } = await fetch('services') 43 | let services = data.services 44 | 45 | log.info(`got`, services.length, `services`) 46 | 47 | for(let service of services){ 48 | for(let { address } of service.addresses){ 49 | let urls = undefined 50 | 51 | if(service.socialAccounts && Object.keys(service.socialAccounts).length > 0){ 52 | urls = Object.entries(service.socialAccounts).map( 53 | ([key, handle]) => ({ 54 | url: socialMediaUrls[key].replace('%', handle), 55 | type: 'social' 56 | }) 57 | ) 58 | } 59 | 60 | accounts.push({ 61 | address, 62 | props: { 63 | name: service.name, 64 | domain: service.domain, 65 | urls, 66 | }, 67 | }) 68 | } 69 | } 70 | 71 | diffMultiAccountProps({ 72 | ctx, 73 | accounts, 74 | source: 'bithomp/services' 75 | }) 76 | 77 | log.info(`updated`, accounts.length, `issuers`) 78 | } 79 | }) 80 | } 81 | } -------------------------------------------------------------------------------- /test/unit/db.helpers.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { XFL } from '@xrplkit/xfl' 3 | import { createContext } from './env.js' 4 | import { readBalance, writeBalance } from '../../src/db/helpers/balances.js' 5 | import { readTokenMetricSeries, writeTokenMetrics } from '../../src/db/helpers/tokenmetrics.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | 11 | describe( 12 | 'Database Helpers', 13 | () => { 14 | it( 15 | 'write and read token balance of account', 16 | () => { 17 | 
let account = { 18 | address: 'rMwNibdiFaEzsTaFCG1NnmAM3Rv3vHUy5L' 19 | } 20 | 21 | let token = { 22 | currency: 'PSC', 23 | issuer: { 24 | address: 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 25 | } 26 | } 27 | 28 | writeBalance({ 29 | ctx, 30 | account, 31 | token, 32 | ledgerSequence: 100000000, 33 | balance: '1000000' 34 | }) 35 | 36 | let balance = readBalance({ 37 | ctx, 38 | account, 39 | token, 40 | ledgerSequence: 100000000 41 | }) 42 | 43 | expect(balance.toString()).to.be.equal('1000000') 44 | } 45 | ) 46 | 47 | it( 48 | 'write and read token metric series', 49 | () => { 50 | let token = { 51 | currency: 'PSC', 52 | issuer: { 53 | address: 'rwekfW4MiS5yZjXASRBDzzPPWYKuHvKP7E' 54 | } 55 | } 56 | 57 | for(let i=0; i<3; i++){ 58 | writeTokenMetrics({ 59 | ctx, 60 | token, 61 | ledgerSequence: 1000000 + i * 1000, 62 | metrics: { 63 | trustlines: 1 + i, 64 | supply: XFL(100 + i * 100) 65 | }, 66 | updateCache: false 67 | }) 68 | } 69 | 70 | let trustlineSeries = readTokenMetricSeries({ 71 | ctx, 72 | token, 73 | sequenceStart: 0, 74 | metric: 'trustlines' 75 | }) 76 | 77 | let supplySeries = readTokenMetricSeries({ 78 | ctx, 79 | token, 80 | sequenceStart: 999999, 81 | metric: 'supply' 82 | }) 83 | 84 | expect(trustlineSeries.map(e => e.value)).to.be.deep.equal([1, 2, 3]) 85 | expect(supplySeries.map(e => e.value.toString())).to.be.deep.equal(['100', '200', '300']) 86 | } 87 | ) 88 | } 89 | ) -------------------------------------------------------------------------------- /src/ledger/state/tokenoffers.js: -------------------------------------------------------------------------------- 1 | import { XFL } from '@xrplkit/xfl' 2 | import { amountFromRippled } from '@xrplkit/tokens' 3 | import { rippleToUnix } from '@xrplkit/time' 4 | import { writeTokenOffer, expireTokenOffer } from '../../db/helpers/tokenoffers.js' 5 | 6 | export function skip({ ctx }){ 7 | return !ctx.config.ledger.captureOffers 8 | } 9 | 10 | export function parse({ entry }){ 11 | let takerPays = amountFromRippled(entry.TakerPays) 12 | let takerGets = amountFromRippled(entry.TakerGets) 13 | let size = takerGets.value 14 | let qualityHex = entry.BookDirectory.slice(-16) 15 | 16 | try{ 17 | let qualityMantissa = Buffer.from(`00${qualityHex.slice(2)}`, 'hex') 18 | .readBigInt64BE(0) 19 | 20 | let qualityExponent = Buffer.from(qualityHex.slice(0, 2), 'hex') 21 | .readInt8(0) 22 | - 100 23 | + (takerPays.currency === 'XRP' ? -6 : 0) 24 | - (takerGets.currency === 'XRP' ? -6 : 0) 25 | 26 | var quality = XFL(`${qualityMantissa}e${qualityExponent}`) 27 | }catch{ 28 | return 29 | } 30 | 31 | return { 32 | account: { address: entry.Account }, 33 | accountSequence: entry.Sequence, 34 | ledgerSequence: entry.LedgerSequence, 35 | book: { 36 | takerPays: { 37 | currency: takerPays.currency, 38 | issuer: takerPays.issuer 39 | ? { address: takerPays.issuer } 40 | : undefined 41 | }, 42 | takerGets: { 43 | currency: takerGets.currency, 44 | issuer: takerGets.issuer 45 | ? { address: takerGets.issuer } 46 | : undefined 47 | }, 48 | }, 49 | quality, 50 | size, 51 | expirationTime: entry.Expiration 52 | ? 
rippleToUnix(entry.Expiration) 53 | : null 54 | } 55 | } 56 | 57 | export function diff({ ctx, previous, final }){ 58 | if(previous){ 59 | expireTokenOffer({ 60 | ctx, 61 | account: previous.account, 62 | accountSequence: previous.accountSequence, 63 | ledgerSequence: ctx.ledgerSequence, 64 | book: previous.book 65 | }) 66 | } 67 | 68 | if(final){ 69 | writeTokenOffer({ 70 | ctx, 71 | account: final.account, 72 | accountSequence: final.accountSequence, 73 | ledgerSequence: final.ledgerSequence, 74 | book: final.book, 75 | quality: final.quality, 76 | size: final.size, 77 | expirationTime: final.expirationTime 78 | }) 79 | } 80 | } -------------------------------------------------------------------------------- /src/srv/api.js: -------------------------------------------------------------------------------- 1 | import { sanitizeRange, sanitizePoint, sanitizeLimitOffset, sanitizeSourcePreferences } from './sanitizers/common.js' 2 | import { sanitizeToken, sanitizeTokenListSortBy, sanitizeNameLike, sanitizeTrustLevels } from './sanitizers/token.js' 3 | import { serveServerInfo } from './procedures/server.js' 4 | import { serveTokenSummary, serveTokenSeries, serveTokenPoint, serveTokenList, subscribeTokenList, unsubscribeTokenList, serveTokenExchanges, serveTokenHolders } from './procedures/token.js' 5 | import { serveLedger } from './procedures/ledger.js' 6 | 7 | 8 | export const server_info = compose([ 9 | serveServerInfo() 10 | ]) 11 | 12 | export const ledger = compose([ 13 | sanitizePoint(), 14 | serveLedger() 15 | ]) 16 | 17 | export const tokens = compose([ 18 | sanitizeLimitOffset({ defaultLimit: 100, maxLimit: 100000 }), 19 | sanitizeNameLike(), 20 | sanitizeTrustLevels(), 21 | sanitizeTokenListSortBy(), 22 | sanitizeSourcePreferences(), 23 | serveTokenList() 24 | ]) 25 | 26 | export const tokens_subscribe = compose([ 27 | sanitizeToken({ key: 'tokens', array: true }), 28 | sanitizeSourcePreferences(), 29 | subscribeTokenList(), 30 | tag({ mustRunMainThread: true }) 31 | ]) 32 | 33 | export const tokens_unsubscribe = compose([ 34 | sanitizeToken({ key: 'tokens', array: true }), 35 | unsubscribeTokenList(), 36 | tag({ mustRunMainThread: true }) 37 | ]) 38 | 39 | export const token = compose([ 40 | sanitizeToken({ key: 'token' }), 41 | sanitizeSourcePreferences(), 42 | serveTokenSummary() 43 | ]) 44 | 45 | export const token_metric = compose([ 46 | sanitizeToken({ key: 'token' }), 47 | sanitizePoint(), 48 | serveTokenPoint() 49 | ]) 50 | 51 | export const token_series = compose([ 52 | sanitizeToken({ key: 'token' }), 53 | sanitizeRange({ withInterval: true }), 54 | serveTokenSeries() 55 | ]) 56 | 57 | export const token_exchanges = compose([ 58 | sanitizeToken({ key: 'base', allowXRP: true }), 59 | sanitizeToken({ key: 'quote', allowXRP: true }), 60 | sanitizeRange({ defaultToFullRange: true }), 61 | sanitizeLimitOffset({ defaultLimit: 100, maxLimit: 1000 }), 62 | serveTokenExchanges() 63 | ]) 64 | 65 | export const token_holders = compose([ 66 | sanitizeToken({ key: 'token' }), 67 | sanitizePoint({ defaultToLatest: true }), 68 | sanitizeLimitOffset({ defaultLimit: 100, maxLimit: 100000 }), 69 | serveTokenHolders() 70 | ]) 71 | 72 | 73 | function compose(functions){ 74 | return args => functions.reduce( 75 | (v, f) => f(v), 76 | args 77 | ) 78 | } 79 | 80 | function tag(properties){ 81 | return f => Object.assign(f, properties) 82 | } -------------------------------------------------------------------------------- /src/ledger/derived/marketcap.js: 
-------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { mul } from '@xrplkit/xfl' 3 | import { readTokenMetricSeries, readTokenMetrics, writeTokenMetrics } from '../../db/helpers/tokenmetrics.js' 4 | import { readTokenExchangeAligned, alignTokenExchange } from '../../db/helpers/tokenexchanges.js' 5 | 6 | 7 | export function updateMarketcapFromExchange({ ctx, exchange }){ 8 | try{ 9 | exchange = alignTokenExchange({ 10 | exchange, 11 | quote: { currency: 'XRP' } 12 | }) 13 | }catch(error){ 14 | if(exchange.takerGotToken.id === 1 || exchange.takerPaidToken.id === 1){ 15 | log.warn(`market cap update failed: ${error.message}`) 16 | } 17 | return 18 | } 19 | 20 | if(ctx.backwards){ 21 | let firstMarketcap = ctx.db.core.tokenMarketcap.readOne({ 22 | where: { 23 | token: exchange.base, 24 | ledgerSequence: { 25 | greaterOrEqual: ctx.ledgerSequence 26 | } 27 | }, 28 | orderBy: { 29 | ledgerSequence: 'asc' 30 | } 31 | }) 32 | 33 | let series = readTokenMetricSeries({ 34 | ctx, 35 | token: exchange.base, 36 | metric: 'supply', 37 | sequenceStart: ctx.ledgerSequence, 38 | sequenceEnd: firstMarketcap?.ledgerSequence 39 | }) 40 | 41 | for(let { ledgerSequence: sequence, value: supply } of series){ 42 | writeTokenMetrics({ 43 | ctx, 44 | token: exchange.base, 45 | ledgerSequence: sequence, 46 | metrics: { 47 | marketcap: supply 48 | ? mul(supply, exchange.price) 49 | : '0' 50 | } 51 | }) 52 | } 53 | }else{ 54 | let { supply } = readTokenMetrics({ 55 | ctx, 56 | token: exchange.base, 57 | ledgerSequence: ctx.ledgerSequence, 58 | metrics: { 59 | supply: true 60 | } 61 | }) 62 | 63 | writeTokenMetrics({ 64 | ctx, 65 | token: exchange.base, 66 | ledgerSequence: ctx.ledgerSequence, 67 | metrics: { 68 | marketcap: supply 69 | ? mul(supply, exchange.price) 70 | : '0' 71 | } 72 | }) 73 | } 74 | } 75 | 76 | export function updateMarketcapFromSupply({ ctx, supply }){ 77 | let exchange = readTokenExchangeAligned({ 78 | ctx, 79 | base: supply.token, 80 | quote: { 81 | currency: 'XRP' 82 | }, 83 | ledgerSequence: ctx.ledgerSequence, 84 | skipDust: true 85 | }) 86 | 87 | if(ctx.backwards && !exchange) 88 | return 89 | 90 | writeTokenMetrics({ 91 | ctx, 92 | token: supply.token, 93 | ledgerSequence: ctx.ledgerSequence, 94 | metrics: { 95 | marketcap: exchange 96 | ? 
mul(supply.value, exchange.price) 97 | : '0' 98 | } 99 | }) 100 | } -------------------------------------------------------------------------------- /src/ledger/sync.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { spawn } from '@mwni/workers' 3 | import { applyLedgerEvents } from './events/index.js' 4 | import { applyLedgerStateFromTransactions } from './state/index.js' 5 | import { updateDerived } from './derived/index.js' 6 | import { pullNewItems, readTableHeads } from '../db/helpers/heads.js' 7 | 8 | 9 | export async function startSync({ ctx }){ 10 | let onceInSyncTrigger 11 | 12 | let { sequence: lastSequence } = ctx.db.core.ledgers.readOne({ 13 | orderBy: { 14 | sequence: 'desc' 15 | }, 16 | take: 1 17 | }) 18 | 19 | let stream = await spawn( 20 | '../xrpl/stream.js:createForwardStream', 21 | { 22 | ctx, 23 | startSequence: lastSequence + 1 24 | } 25 | ) 26 | 27 | log.info(`catching up from ledger #${lastSequence} -> #${(await stream.status()).targetSequence}`) 28 | 29 | ;(async () => { 30 | while(true){ 31 | log.time.debug(`sync.cycle`) 32 | 33 | let { ledger, ledgersBehind } = await stream.next() 34 | 35 | ctx.db.core.tx(() => { 36 | ctx = { 37 | ...ctx, 38 | currentLedger: ledger, 39 | ledgerSequence: ledger.sequence, 40 | } 41 | 42 | try{ 43 | let heads = readTableHeads({ ctx }) 44 | 45 | applyLedgerEvents({ ctx, ledger }) 46 | applyLedgerStateFromTransactions({ ctx, ledger }) 47 | updateDerived({ 48 | ctx, 49 | newItems: pullNewItems({ 50 | ctx, 51 | previousHeads: heads 52 | }) 53 | }) 54 | }catch(error){ 55 | log.error(`fatal error while syncing ledger #${ledger.sequence}:`) 56 | log.error(error.stack) 57 | 58 | throw error 59 | } 60 | }) 61 | 62 | 63 | if(ledgersBehind > 0){ 64 | log.accumulate.info({ 65 | text: [ 66 | ledgersBehind, 67 | `ledgers behind (+%advancedLedgers in %time)` 68 | ], 69 | data: { 70 | advancedLedgers: 1 71 | } 72 | }) 73 | }else{ 74 | log.flush() 75 | 76 | if(onceInSyncTrigger){ 77 | onceInSyncTrigger() 78 | onceInSyncTrigger = undefined 79 | log.info(`caught up with live`) 80 | } 81 | 82 | log.info(`in sync with ledger #${ledger.sequence} ${ 83 | new Date(ledger.closeTime * 1000) 84 | .toISOString() 85 | .slice(0, -5) 86 | .replace('T', ' ') 87 | }`) 88 | } 89 | 90 | log.time.debug(`sync.cycle`, `sync cycle took % for`, ledger.transactions.length, `tx`) 91 | } 92 | })() 93 | 94 | return { 95 | onceInSync(){ 96 | return new Promise(resolve => { 97 | onceInSyncTrigger = resolve 98 | }) 99 | } 100 | } 101 | } -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | 2 | # The XRPL Meta Node 3 | 4 | This is a JavaScript implementation of the [XRPL Meta](https://xrplmeta.org) project. 5 | 6 | XRPL Meta collects metadata about digital assets on the XRP Ledger. It makes the data available via a JSON REST and WebSocket API, just like [rippled](https://github.com/XRPLF/rippled). It connects to one or more rippled or [clio](https://github.com/XRPLF/clio) nodes and tracks updates in real time. Historical data is backfilled. 
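For a quick first impression, a running node can be queried like any HTTP service. The snippet below is only a sketch using the built-in `fetch` of Node.js 18+ - it assumes the API was configured to listen on port 4080 and uses an illustrative route; the exact paths and parameters are listed in the API documentation referenced further down:

    const response = await fetch('http://localhost:4080/tokens')

    console.log(await response.json())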
7 | 8 | 9 | 10 | ## Technical Overview 11 | 12 | On the first launch 13 | - The server will create its SQLite database files in the config-specified data directory 14 | - It will then create a full snapshot of the most recent ledger 15 | 16 | From there on 17 | - It will sync itself with the live transaction stream 18 | - Backfill ledger history simultaneously 19 | - Scrape additional metadata sources, such as [Bithomp](https://bithomp.com), [XRP Scan](https://xrpscan.com) and [Xaman](https://xaman.dev) 20 | 21 | 22 | 23 | ## The Config File 24 | 25 | When starting the node for the first time, it will automatically create a directory called `.xrplmeta` in the user's home directory. A copy of the [default configuration file](https://github.com/xrplmeta/node/blob/develop/config.template.toml) will be placed there and used. 26 | 27 | Alternatively, you can specify which config file to use with 28 | 29 | node src/run --config /path/to/config.toml 30 | 31 | The config file uses "stanzas" for configuring each relevant component, such as the [public server API](https://github.com/xrplmeta/node/tree/develop/src/srv) and the [crawlers](https://github.com/xrplmeta/node/tree/develop/src/crawl/crawlers). Delete or comment out the respective stanza to disable the component. 32 | 33 | Review the comments in the [default configuration file](https://github.com/xrplmeta/node/blob/develop/config.template.toml) for further explanation of the individual parameters. 34 | 35 | 36 | 37 | ## API Documentation 38 | 39 | https://xrplmeta.org/docs 40 | 41 | The node will listen for incoming HTTP connections on the port specified in the config file. These can either serve a REST query or be upgraded to a WebSocket connection. 42 | 43 | 44 | 45 | ## Install for production use 46 | 47 | Install the public NPM package: 48 | 49 | npm install -g xrplmeta 50 | 51 | This will add the `xrplmeta` command to your PATH. Simply run this command to start the server. A template configuration file will be placed in your user directory. It is recommended to adjust this config. 
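Once the server is running, the same port also accepts WebSocket connections. The following is a sketch rather than a verbatim protocol reference: it uses the `ws` package, assumes port 4080, and the procedure name mirrors `server_info` from `src/srv/api.js` - consult the API docs above for the exact wire format:

    import WebSocket from 'ws'

    const ws = new WebSocket('ws://localhost:4080')

    ws.on('open', () => {
        ws.send(JSON.stringify({ command: 'server_info', id: 1 }))
    })

    ws.on('message', message => {
        console.log(JSON.parse(message))
    })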
52 | 53 | 54 | 55 | ## Install for development 56 | 57 | Clone this repository and install the dependencies: 58 | 59 | npm install 60 | 61 | The development node can be started using: 62 | 63 | node src/run 64 | 65 | 66 | 67 | ## Requirements 68 | 69 | - Node.js version 14 or higher 70 | 71 | - An internet connection 72 | 73 | - More than 3 GB of disk storage -------------------------------------------------------------------------------- /src/db/helpers/common.js: -------------------------------------------------------------------------------- 1 | const maxLedgerSequence = 1_000_000_000_000 2 | 3 | 4 | export function readPoint({ table, selector, ledgerSequence, expirable }){ 5 | if(ledgerSequence === undefined){ 6 | return table.readOne({ 7 | where: selector, 8 | orderBy: { 9 | ledgerSequence: 'desc' 10 | } 11 | }) 12 | }else if(expirable){ 13 | return table.readOne({ 14 | where: { 15 | ...selector, 16 | ledgerSequence: { 17 | lessOrEqual: ledgerSequence 18 | }, 19 | lastLedgerSequence: { 20 | greaterOrEqual: ledgerSequence 21 | } 22 | }, 23 | orderBy: { 24 | ledgerSequence: 'desc' 25 | } 26 | }) 27 | }else{ 28 | return table.readOne({ 29 | where: { 30 | ...selector, 31 | ledgerSequence: { 32 | lessOrEqual: ledgerSequence 33 | } 34 | }, 35 | orderBy: { 36 | ledgerSequence: 'desc' 37 | } 38 | }) 39 | } 40 | } 41 | 42 | export function writePoint({ table, selector, ledgerSequence, backwards, data, expirable }){ 43 | let point = readPoint({ 44 | table, 45 | selector, 46 | ledgerSequence, 47 | expirable 48 | }) 49 | 50 | if(point){ 51 | let replace = point.ledgerSequence === ledgerSequence 52 | 53 | if(data){ 54 | let changes = {} 55 | 56 | for(let [key, value] of Object.entries(data)){ 57 | let a = value != null ? value.toString() : value 58 | let b = point[key] != null ? point[key].toString() : point[key] 59 | 60 | if(a != b){ 61 | changes[key] = value 62 | } 63 | } 64 | 65 | if(Object.keys(changes).length === 0) 66 | return 67 | 68 | if(replace){ 69 | return table.updateOne({ 70 | data: changes, 71 | where: { 72 | id: point.id 73 | } 74 | }) 75 | } 76 | }else{ 77 | if(replace){ 78 | return table.deleteOne({ 79 | where: { 80 | id: point.id 81 | } 82 | }) 83 | } 84 | } 85 | 86 | if(expirable){ 87 | table.updateOne({ 88 | data: { 89 | lastLedgerSequence: ledgerSequence - 1 90 | }, 91 | where: { 92 | id: point.id 93 | } 94 | }) 95 | } 96 | } 97 | 98 | if(!data && expirable) 99 | return 100 | 101 | if(!data && !expirable && !point) 102 | return 103 | 104 | return table.createOne({ 105 | data: { 106 | ...selector, 107 | ...( 108 | expirable 109 | ? 
{ 110 | ledgerSequence, 111 | lastLedgerSequence: maxLedgerSequence 112 | } 113 | : { 114 | ledgerSequence 115 | } 116 | ), 117 | ...data 118 | } 119 | }) 120 | } 121 | 122 | export function getAccountId({ ctx, account }){ 123 | if(account.id) 124 | return account.id 125 | 126 | return ctx.db.core.accounts.readOne({ 127 | where: account, 128 | select: { 129 | id: true 130 | } 131 | }).id 132 | } 133 | 134 | export function getTokenId({ ctx, token }){ 135 | if(token.id) 136 | return token.id 137 | 138 | return ctx.db.core.tokens.readOne({ 139 | where: token, 140 | select: { 141 | id: true 142 | } 143 | }).id 144 | } -------------------------------------------------------------------------------- /src/ledger/snapshot.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { unixNow } from '@xrplkit/time' 3 | import { spawn } from '@mwni/workers' 4 | import { fetch as fetchLedger } from '../xrpl/ledger.js' 5 | import { applyLedgerStateFromObjects } from './state/index.js' 6 | import { applyLedgerEvents } from './events/index.js' 7 | import { updateAllDerived } from './derived/index.js' 8 | 9 | 10 | export async function createSnapshot({ ctx }){ 11 | ctx = { 12 | ...ctx, 13 | snapshotState: ctx.db.core.snapshots.readLast(), 14 | ledgerSequence: 0 15 | } 16 | 17 | if(!ctx.snapshotState){ 18 | await createSnapshotState({ ctx }) 19 | log.info(`creating snapshot of ledger #${ctx.snapshotState.ledgerSequence} - this may take a long time`) 20 | } 21 | 22 | if(ctx.snapshotState.entriesCount === 0 || ctx.snapshotState.marker){ 23 | try{ 24 | await copyFromFeed({ 25 | ctx, 26 | feed: await createFeed({ 27 | ctx, 28 | ledgerSequence: ctx.snapshotState.ledgerSequence, 29 | marker: ctx.snapshotState.marker, 30 | node: ctx.snapshotState.originNode 31 | }) 32 | }) 33 | }catch(error){ 34 | log.error(`fatal error while copying from ledger feed:`) 35 | log.error(error.stack) 36 | 37 | throw error.stack 38 | } 39 | } 40 | 41 | if(!ctx.snapshotState.completionTime){ 42 | log.time.info(`snapshot.derivatives`, `creating derivative data ...`) 43 | updateAllDerived({ ctx }) 44 | log.time.info(`snapshot.derivatives`, `created derivative data in %`) 45 | 46 | ctx.db.core.snapshots.updateOne({ 47 | data: { 48 | completionTime: unixNow(), 49 | marker: null 50 | }, 51 | where: { 52 | id: ctx.snapshotState.id 53 | } 54 | }) 55 | 56 | log.info(`ledger snapshot complete`) 57 | } 58 | } 59 | 60 | async function createSnapshotState({ ctx }){ 61 | let ledger = await fetchLedger({ 62 | ctx, 63 | sequence: 'validated' 64 | }) 65 | 66 | applyLedgerEvents({ ctx, ledger }) 67 | 68 | ctx.currentLedger = ledger 69 | ctx.snapshotState = ctx.db.core.snapshots.createOne({ 70 | data: { 71 | ledgerSequence: ledger.sequence, 72 | creationTime: unixNow() 73 | } 74 | }) 75 | } 76 | 77 | async function createFeed({ ctx, ledgerSequence, marker, node }){ 78 | return await spawn( 79 | '../xrpl/snapshot.js:start', 80 | { 81 | ctx, 82 | ledgerSequence, 83 | marker, 84 | node 85 | } 86 | ) 87 | } 88 | 89 | async function copyFromFeed({ ctx, feed }){ 90 | while(true){ 91 | let chunk = await feed.next() 92 | 93 | if(!chunk) 94 | break 95 | 96 | ctx.db.core.tx(() => { 97 | applyLedgerStateFromObjects({ 98 | ctx, 99 | objects: chunk.objects 100 | }) 101 | 102 | ctx.snapshotState = ctx.db.core.snapshots.updateOne({ 103 | data: { 104 | originNode: feed.node, 105 | marker: chunk.marker, 106 | entriesCount: ctx.snapshotState.entriesCount + chunk.objects.length 107 | }, 108 | where: { 
109 | id: ctx.snapshotState.id 110 | } 111 | }) 112 | }) 113 | 114 | log.accumulate.info({ 115 | text: [ 116 | `processed`, 117 | ctx.snapshotState.entriesCount, 118 | `ledger objects (+%objects in %time)` 119 | ], 120 | data: { 121 | objects: chunk.objects.length 122 | } 123 | }) 124 | } 125 | 126 | log.flush() 127 | log.info(`reached end of ledger data`) 128 | } -------------------------------------------------------------------------------- /src/xrpl/node.js: -------------------------------------------------------------------------------- 1 | import EventEmitter from 'events' 2 | import createSocket from '@xrplkit/socket' 3 | import log from '@mwni/log' 4 | 5 | 6 | export default class Node extends EventEmitter{ 7 | constructor(config){ 8 | super() 9 | 10 | this.name = config.url 11 | .replace(/^wss?:\/\//, '') 12 | .replace(/:[0-9]+/, '') 13 | 14 | this.tasks = [] 15 | this.socket = createSocket({ url: config.url }) 16 | this.availableLedgers = [] 17 | 18 | this.socket.on('transaction', tx => { 19 | this.emit('event', {hash: tx.transaction.hash, tx}) 20 | }) 21 | 22 | this.socket.on('ledgerClosed', ledger => { 23 | this.emit('event', {hash: ledger.ledger_hash, ledger}) 24 | this.hasReportedClosedLedger = true 25 | 26 | if(ledger.validated_ledgers){ 27 | this.availableLedgers = ledger.validated_ledgers 28 | .split(',') 29 | .map(range => range 30 | .split('-') 31 | .map(i => parseInt(i)) 32 | ) 33 | } 34 | }) 35 | 36 | this.socket.on('open', async () => { 37 | this.hasReportedClosedLedger = false 38 | this.emit('connected') 39 | 40 | try{ 41 | await this.socket.request({ 42 | command: 'subscribe', 43 | streams: ['ledger', 'transactions'] 44 | }) 45 | }catch(error){ 46 | log.warn(`failed to subscribe to node "${this.name}":`) 47 | log.warn(error) 48 | } 49 | }) 50 | 51 | this.socket.on('close', async event => { 52 | this.error = event.reason 53 | ? event.reason 54 | : `code ${event.code}` 55 | 56 | this.emit('disconnected') 57 | }) 58 | 59 | this.socket.on('error', error => { 60 | this.error = error.message 61 | ? 
error.message 62 | : `unknown connection failure` 63 | 64 | this.emit('error') 65 | }) 66 | } 67 | 68 | get status(){ 69 | return this.socket.status() 70 | } 71 | 72 | bid(payload){ 73 | if(this.busy || !this.status.connected || !this.hasReportedClosedLedger) 74 | return 0 75 | 76 | if(payload.command){ 77 | if(payload.ticket){ 78 | if(this.tasks.some(task => task.ticket === payload.ticket)) 79 | return Infinity 80 | else 81 | return 0 82 | } 83 | 84 | if(payload.ledger_index && this.availableLedgers.length > 0){ 85 | let hasLedger = payload.ledger_index === 'validated' || this.availableLedgers.some( 86 | ([start, end]) => payload.ledger_index >= start && payload.ledger_index <= end 87 | ) 88 | 89 | if(hasLedger) 90 | return 2 91 | else 92 | return 0 93 | } 94 | 95 | return 1 96 | }else if(payload.type === 'reserveTicket'){ 97 | if(payload.node){ 98 | if(payload.node !== this.name) 99 | return 0 100 | } 101 | 102 | return 1 103 | } 104 | } 105 | 106 | async do(payload){ 107 | this.busy = true 108 | 109 | try{ 110 | if(payload.command){ 111 | return await this.socket.request(payload) 112 | }else if(payload.type === 'reserveTicket'){ 113 | let ticket = Math.random() 114 | .toString(16) 115 | .toUpperCase() 116 | .slice(2, 10) 117 | 118 | this.tasks.push({ 119 | type: payload.task, 120 | ticket, 121 | node: this.name 122 | }) 123 | 124 | return {ticket} 125 | } 126 | }finally{ 127 | this.busy = false 128 | } 129 | } 130 | 131 | disconnect(){ 132 | this.socket.close() 133 | } 134 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/trustlists.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { scheduleGlobal } from '../schedule.js' 4 | import { createFetch } from '../../lib/fetch.js' 5 | import { diffMultiAccountProps, diffMultiTokenProps } from '../../db/helpers/props.js' 6 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 7 | 8 | 9 | export default async function({ ctx }){ 10 | let configs = ctx.config.trustlist 11 | 12 | if(!configs || configs.length == 0){ 13 | throw new Error(`disabled by config`) 14 | } 15 | 16 | await Promise.all( 17 | configs 18 | .filter(config => !config.disabled) 19 | .map(config => crawlList({ ctx, ...config })) 20 | ) 21 | } 22 | 23 | async function crawlList({ ctx, id, url, fetchInterval = 600, trustLevel = 0, ignoreAdvisories = false }){ 24 | let fetch = createFetch({ 25 | baseUrl: url 26 | }) 27 | 28 | while(true){ 29 | await scheduleGlobal({ 30 | ctx, 31 | task: `trustlist.${id}`, 32 | interval: fetchInterval, 33 | routine: async () => { 34 | log.info(`reading ${url}`) 35 | 36 | let tokens = [] 37 | let accounts = [] 38 | 39 | let { status, data } = await fetch() 40 | 41 | if(status !== 200){ 42 | throw `${url}: HTTP ${status}` 43 | } 44 | 45 | try{ 46 | var { issuers: declaredIssuers, tokens: declaredTokens, issues, advisories } = parseXLS26(data) 47 | }catch(error){ 48 | console.log(error) 49 | throw error 50 | } 51 | 52 | if(issues.length > 0){ 53 | log.debug(`trustlist [${id}] has issues: ${ 54 | issues 55 | .map(issue => ` - ${issue}`) 56 | .join(`\n`) 57 | }`) 58 | } 59 | 60 | for(let { address, ...props } of declaredIssuers){ 61 | if(props.hasOwnProperty('trust_level')) 62 | props.trust_level = Math.min(props.trust_level, trustLevel) 63 | 64 | accounts.push({ 65 | address, 66 | props 67 | }) 68 | } 69 | 70 | for(let { 
currency, issuer, ...props } of declaredTokens){ 71 | if(props.hasOwnProperty('trust_level')) 72 | props.trust_level = Math.min(props.trust_level, trustLevel) 73 | 74 | tokens.push({ 75 | currency: currencyUTF8ToHex(currency), 76 | issuer: { 77 | address: issuer 78 | }, 79 | props 80 | }) 81 | } 82 | 83 | let advisoryUpdates = 0 84 | 85 | if(!ignoreAdvisories && trustLevel > 0){ 86 | let groupedAdvisories = {} 87 | 88 | for(let { address, ...props } of advisories){ 89 | if(!groupedAdvisories[address]) 90 | groupedAdvisories[address] = [] 91 | 92 | groupedAdvisories[address].push(props) 93 | } 94 | 95 | for(let [address, advisories] of Object.entries(groupedAdvisories)){ 96 | advisoryUpdates++ 97 | accounts.push({ 98 | address, 99 | props: { 100 | advisories 101 | } 102 | }) 103 | } 104 | } 105 | 106 | diffMultiAccountProps({ 107 | ctx, 108 | accounts, 109 | source: `trustlist/${id}` 110 | }) 111 | 112 | diffMultiTokenProps({ 113 | ctx, 114 | tokens, 115 | source: `trustlist/${id}` 116 | }) 117 | 118 | log.info(`trustlist [${id}] synced (issuers: ${declaredIssuers.length} tokens: ${tokens.length} advisories: ${advisoryUpdates})`) 119 | } 120 | }) 121 | } 122 | } -------------------------------------------------------------------------------- /src/xrpl/nodepool.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { createEmitter } from '@mwni/events' 3 | import { wait } from '@xrplkit/time' 4 | import { format as formatLedger } from './ledger.js' 5 | import Node from './node.js' 6 | 7 | 8 | export function createPool(sources){ 9 | let events = createEmitter() 10 | let seenHashes = [] 11 | let queue = [] 12 | let nodes = [] 13 | let latestLedger 14 | let closed = false 15 | 16 | async function workQueue(){ 17 | while(!closed){ 18 | for(let i=0; i<queue.length; i++){ 19 | let request = queue[i] 20 | let [bestBid] = nodes 21 | .map(node => ({node, bid: node.bid(request.payload)})) 22 | .sort((a, b) => b.bid - a.bid) 23 | 24 | if(bestBid.bid <= 0) 25 | continue 26 | 27 | request.accepted() 28 | 29 | bestBid.node.do(request.payload) 30 | .then(result => request.resolve({result, node: bestBid.node.name})) 31 | .catch(error => request.reject({ 32 | error: error.message || error.stack || error.error_message, 33 | node: bestBid.node.name 34 | })) 35 | 36 | queue.splice(i--, 1) 37 | } 38 | 39 | await wait(100) 40 | } 41 | } 42 | 43 | function sawHash(hash){ 44 | if(seenHashes.includes(hash)) 45 | return true 46 | 47 | seenHashes.push(hash) 48 | 49 | if(seenHashes.length > 10000) 50 | seenHashes.shift() 51 | } 52 | 53 | function warnAllLost(){ 54 | if(nodes.some(node => node.status.connected)) 55 | return 56 | 57 | log.warn(`lost connection to all nodes`) 58 | } 59 | 60 | 61 | log.info(`using nodes:`) 62 | 63 | for(let spec of sources){ 64 | let connections = spec.connections || 1 65 | 66 | for(let i=0; i<connections; i++){ 67 | let node = new Node(spec) 68 | let firstConnect = true 69 | 70 | node.on('connected', () => { 71 | log.info( 72 | firstConnect 73 | ? 
`connected to ${spec.url}` 74 | : `reconnected to ${spec.url}` 75 | ) 76 | 77 | firstConnect = false 78 | }) 79 | 80 | node.on('disconnected', () => { 81 | log.info(`lost connection to ${spec.url}:`, node.error) 82 | warnAllLost() 83 | }) 84 | 85 | node.on('error', () => { 86 | log.debug(`failed to connect to ${spec.url}:`, node.error) 87 | }) 88 | 89 | node.on('event', ({ hash, tx, ledger }) => { 90 | if(sawHash(hash)) 91 | return 92 | 93 | if(ledger){ 94 | latestLedger = { ...ledger, transactions: [] } 95 | } 96 | 97 | if(latestLedger){ 98 | if(tx){ 99 | latestLedger.transactions.push(tx) 100 | } 101 | 102 | if(latestLedger.transactions.length === latestLedger.txn_count){ 103 | events.emit('ledger', formatLedger(latestLedger)) 104 | } 105 | } 106 | }) 107 | 108 | nodes.push(node) 109 | } 110 | 111 | log.info(` -> ${spec.url}`) 112 | } 113 | 114 | workQueue() 115 | 116 | return Object.assign( 117 | events, 118 | { 119 | request(payload){ 120 | return new Promise((resolve, reject) => { 121 | let timeout = setTimeout(() => reject('noNodeAcceptedRequest'), 30000) 122 | let accepted = () => clearTimeout(timeout) 123 | 124 | queue.push({ 125 | payload, 126 | resolve, 127 | reject, 128 | accepted 129 | }) 130 | }) 131 | }, 132 | close(){ 133 | closed = true 134 | 135 | for(let node of nodes){ 136 | node.disconnect() 137 | } 138 | }, 139 | get connectionsCount(){ 140 | return nodes.length 141 | } 142 | } 143 | ) 144 | } -------------------------------------------------------------------------------- /src/db/helpers/tokenmetrics.js: -------------------------------------------------------------------------------- 1 | import { readPoint, writePoint } from './common.js' 2 | import { markCacheDirtyForTokenMetrics } from '../../cache/todo.js' 3 | 4 | 5 | const metricTables = { 6 | trustlines: 'tokenTrustlines', 7 | holders: 'tokenHolders', 8 | supply: 'tokenSupply', 9 | marketcap: 'tokenMarketcap' 10 | } 11 | 12 | 13 | export function writeTokenMetrics({ ctx, token, ledgerSequence, metrics, updateCache = true }){ 14 | for(let [key, value] of Object.entries(metrics)){ 15 | writePoint({ 16 | table: ctx.db.core[metricTables[key]], 17 | selector: { 18 | token 19 | }, 20 | ledgerSequence, 21 | backwards: ctx.backwards, 22 | data: value.toString() !== '0' 23 | ? { value } 24 | : null 25 | }) 26 | } 27 | 28 | if(updateCache) 29 | markCacheDirtyForTokenMetrics({ ctx, token, metrics }) 30 | } 31 | 32 | 33 | export function readTokenMetrics({ ctx, token, ledgerSequence, metrics }){ 34 | let point = {} 35 | 36 | for(let key of Object.keys(metrics)){ 37 | let entry = readPoint({ 38 | table: ctx.db.core[metricTables[key]], 39 | selector: { 40 | token 41 | }, 42 | ledgerSequence 43 | }) 44 | 45 | if(entry){ 46 | point[key] = entry.value 47 | } 48 | } 49 | 50 | return point 51 | } 52 | 53 | 54 | 55 | export function readTokenMetricSeries({ ctx, token, metric, sequenceStart, sequenceEnd }){ 56 | return ctx.db.core[metricTables[metric]].readMany({ 57 | where: { 58 | token, 59 | ledgerSequence: { 60 | greaterOrEqual: sequenceStart, 61 | ...( 62 | sequenceEnd 63 | ? 
{ lessOrEqual: sequenceEnd } 64 | : {} 65 | ) 66 | } 67 | }, 68 | orderBy: { 69 | ledgerSequence: 'asc' 70 | } 71 | }) 72 | } 73 | 74 | 75 | 76 | export function readTokenMetricIntervalSeries({ ctx, token, metric, sequence, time }){ 77 | let table = metricTables[metric] 78 | 79 | if(time){ 80 | return ctx.db.core[table].readManyRaw({ 81 | query: 82 | `SELECT MAX(Ledger.closeTime) as time, value 83 | FROM ${table} 84 | LEFT JOIN Ledger ON (Ledger.sequence = ledgerSequence) 85 | WHERE token = ? 86 | AND ( 87 | (Ledger.closeTime >= ? AND Ledger.closeTime <= ?) 88 | OR 89 | ( 90 | ledgerSequence = ( 91 | SELECT ledgerSequence 92 | FROM ${table} 93 | WHERE token = ? 94 | AND ledgerSequence < ? 95 | ORDER BY ledgerSequence DESC 96 | LIMIT 1 97 | ) 98 | ) 99 | ) 100 | GROUP BY Ledger.closeTime / CAST(? as INTEGER) 101 | ORDER BY Ledger.closeTime ASC`, 102 | params: [ 103 | token.id, 104 | time.start, 105 | time.end, 106 | token.id, 107 | sequence.start, 108 | time.interval, 109 | ] 110 | }) 111 | }else{ 112 | return ctx.db.core[table].readManyRaw({ 113 | query: 114 | `SELECT MAX(ledgerSequence) as sequence, value 115 | FROM ${table} 116 | WHERE token = ? 117 | AND ( 118 | (ledgerSequence >= ? AND ledgerSequence <= ?) 119 | OR 120 | ( 121 | ledgerSequence = ( 122 | SELECT ledgerSequence 123 | FROM ${table} 124 | WHERE token = ? 125 | AND ledgerSequence < ? 126 | ORDER BY ledgerSequence DESC 127 | LIMIT 1 128 | ) 129 | ) 130 | ) 131 | GROUP BY ledgerSequence / CAST(? as INTEGER) 132 | ORDER BY ledgerSequence ASC`, 133 | params: [ 134 | token.id, 135 | sequence.start, 136 | sequence.end, 137 | token.id, 138 | sequence.start, 139 | sequence.interval, 140 | ] 141 | }) 142 | } 143 | } -------------------------------------------------------------------------------- /src/ledger/state/tokens.js: -------------------------------------------------------------------------------- 1 | import { sum, sub, eq, lt, gt, neg, max } from '@xrplkit/xfl' 2 | import { writeBalance } from '../../db/helpers/balances.js' 3 | import { writeTokenMetrics, readTokenMetrics } from '../../db/helpers/tokenmetrics.js' 4 | 5 | 6 | export function parse({ entry }){ 7 | let lowIssuer = entry.HighLimit.value !== '0' || lt(entry.Balance.value, '0') 8 | let highIssuer = entry.LowLimit.value !== '0' || gt(entry.Balance.value, '0') 9 | let transformed = {} 10 | 11 | if(lowIssuer){ 12 | transformed.low = { 13 | account: { 14 | address: entry.HighLimit.issuer 15 | }, 16 | token: { 17 | currency: entry.Balance.currency, 18 | issuer: { 19 | address: entry.LowLimit.issuer 20 | } 21 | }, 22 | balance: max(0, neg(entry.Balance.value)), 23 | ledgerSequence: entry.LedgerSequence 24 | } 25 | } 26 | 27 | if(highIssuer){ 28 | transformed.high = { 29 | account: { 30 | address: entry.LowLimit.issuer 31 | }, 32 | token: { 33 | currency: entry.Balance.currency, 34 | issuer: { 35 | address: entry.HighLimit.issuer 36 | } 37 | }, 38 | balance: max(0, entry.Balance.value), 39 | ledgerSequence: entry.LedgerSequence 40 | } 41 | } 42 | 43 | return transformed 44 | } 45 | 46 | 47 | export function group({ previous, final }){ 48 | let groups = [] 49 | 50 | for(let side of ['low', 'high']){ 51 | let entry = final 52 | ? final[side] 53 | : previous[side] 54 | 55 | if(!entry) 56 | continue 57 | 58 | groups.push({ 59 | group: { 60 | token: entry.token, 61 | key: `${entry.token.currency}:${entry.token.issuer.address}`, 62 | }, 63 | previous: previous ? previous[side] : undefined, 64 | final: final ? 
final[side] : undefined 65 | }) 66 | } 67 | 68 | return groups 69 | } 70 | 71 | 72 | export function diff({ ctx, token, deltas }){ 73 | token = ctx.db.core.tokens.createOne({ 74 | data: token 75 | }) 76 | 77 | let { trustlines, holders, supply } = readTokenMetrics({ 78 | ctx, 79 | token, 80 | metrics: { 81 | trustlines: true, 82 | holders: true, 83 | supply: true 84 | }, 85 | ledgerSequence: ctx.ledgerSequence 86 | }) 87 | 88 | let metrics = { 89 | trustlines: trustlines || 0, 90 | holders: holders || 0, 91 | supply: supply || 0, 92 | } 93 | 94 | for(let { previous, final } of deltas){ 95 | if(previous && final){ 96 | metrics.supply = sum( 97 | metrics.supply, 98 | sub(final.balance, previous.balance) 99 | ) 100 | 101 | if(eq(previous.balance, 0) && gt(final.balance, 0)){ 102 | metrics.holders++ 103 | }else if(eq(final.balance, 0) && gt(previous.balance, 0)){ 104 | metrics.holders-- 105 | } 106 | }else if(final){ 107 | metrics.trustlines++ 108 | 109 | if(gt(final.balance, 0)){ 110 | metrics.supply = sum(metrics.supply, final.balance) 111 | metrics.holders++ 112 | } 113 | }else{ 114 | metrics.trustlines-- 115 | 116 | if(gt(previous.balance, 0)){ 117 | metrics.supply = sub(metrics.supply, previous.balance) 118 | metrics.holders-- 119 | } 120 | } 121 | 122 | if(ctx.backwards && !previous){ 123 | // edge case when backfilling RippleState deletions 124 | writeBalance({ 125 | ctx, 126 | account: final.account, 127 | token, 128 | ledgerSequence: ctx.ledgerSequence, 129 | balance: '0', 130 | }) 131 | } 132 | 133 | if(final){ 134 | writeBalance({ 135 | ctx, 136 | account: final.account, 137 | token, 138 | ledgerSequence: final.ledgerSequence, 139 | balance: final.balance 140 | }) 141 | }else{ 142 | writeBalance({ 143 | ctx, 144 | account: previous.account, 145 | token, 146 | ledgerSequence: ctx.ledgerSequence, 147 | balance: '0', 148 | }) 149 | } 150 | } 151 | 152 | writeTokenMetrics({ 153 | ctx, 154 | token, 155 | metrics, 156 | ledgerSequence: ctx.ledgerSequence 157 | }) 158 | } -------------------------------------------------------------------------------- /src/ledger/state/index.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import * as accounts from './accounts.js' 3 | import * as tokens from './tokens.js' 4 | import * as tokenOffers from './tokenoffers.js' 5 | import * as nfts from './nfts.js' 6 | import * as nftOffers from './nftoffers.js' 7 | 8 | 9 | const ledgerEntryModules = { 10 | AccountRoot: accounts, 11 | RippleState: tokens, 12 | Offer: tokenOffers, 13 | NFTokenPage: nfts, 14 | NFTokenOffer: nftOffers, 15 | } 16 | 17 | 18 | export function applyLedgerStateFromObjects({ ctx, objects }){ 19 | return applyDeltas({ 20 | ctx, 21 | deltas: objects.map(entry => ({ 22 | type: entry.LedgerEntryType, 23 | index: entry.index, 24 | final: { 25 | ...entry, 26 | LedgerSequence: entry.PreviousTxnLgrSeq 27 | } 28 | })) 29 | }) 30 | } 31 | 32 | export function applyLedgerStateFromTransactions({ ctx, ledger }){ 33 | let deltas = [] 34 | 35 | for(let transaction of ledger.transactions){ 36 | let meta = transaction.meta || transaction.metaData 37 | 38 | for(let { CreatedNode, ModifiedNode, DeletedNode } of meta.AffectedNodes){ 39 | if(CreatedNode && CreatedNode.NewFields){ 40 | deltas.push({ 41 | type: CreatedNode.LedgerEntryType, 42 | index: CreatedNode.LedgerIndex, 43 | final: { 44 | ...CreatedNode.NewFields, 45 | LedgerSequence: ledger.sequence 46 | } 47 | }) 48 | }else if(ModifiedNode && ModifiedNode.FinalFields){ 49 | 
// directory entries are not among the tracked ledger entry types - skip them right away
if(ModifiedNode.LedgerEntryType === 'DirectoryNode') 50 | continue 51 | 52 | if(ctx.backwards && !ModifiedNode.PreviousTxnLgrSeq){ 53 | log.warn(`transaction #${transaction.hash} is missing PreviousTxnLgrSeq - skipping`) 54 | continue 55 | } 56 | 57 | deltas.push({ 58 | type: ModifiedNode.LedgerEntryType, 59 | index: ModifiedNode.LedgerIndex, 60 | previous: { 61 | ...ModifiedNode.FinalFields, 62 | ...ModifiedNode.PreviousFields, 63 | LedgerSequence: ModifiedNode.PreviousTxnLgrSeq 64 | }, 65 | final: { 66 | ...ModifiedNode.FinalFields, 67 | LedgerSequence: ledger.sequence 68 | } 69 | }) 70 | }else if(DeletedNode){ 71 | deltas.push({ 72 | type: DeletedNode.LedgerEntryType, 73 | index: DeletedNode.LedgerIndex, 74 | previous: { 75 | ...DeletedNode.FinalFields, 76 | ...DeletedNode.PreviousFields, 77 | LedgerSequence: DeletedNode.FinalFields.PreviousTxnLgrSeq 78 | } 79 | }) 80 | } 81 | } 82 | } 83 | 84 | if(ctx.backwards){ 85 | return applyDeltas({ 86 | ctx, 87 | deltas: deltas 88 | .map(({ type, index, previous, final }) => ({ type, index, previous: final, final: previous })) 89 | .reverse(), 90 | }) 91 | }else{ 92 | return applyDeltas({ 93 | ctx, 94 | deltas 95 | }) 96 | } 97 | } 98 | 99 | function applyDeltas({ ctx, deltas }){ 100 | let groups = {} 101 | let solos = [] 102 | 103 | for(let { type, index, previous, final } of deltas){ 104 | let module = ledgerEntryModules[type] 105 | 106 | if(!module) 107 | continue 108 | 109 | if(module.skip && module.skip({ ctx })) 110 | continue 111 | 112 | let parsedPrevious = previous 113 | ? module.parse({ index, entry: previous }) 114 | : undefined 115 | 116 | let parsedFinal = final 117 | ? module.parse({ index, entry: final }) 118 | : undefined 119 | 120 | if(!parsedPrevious && !parsedFinal) 121 | continue 122 | 123 | if(module.group){ 124 | let grouped = module.group({ 125 | previous: parsedPrevious, 126 | final: parsedFinal 127 | }) 128 | 129 | for(let { group, previous, final } of grouped){ 130 | if(!groups[group.key]) 131 | groups[group.key] = { 132 | ...group, 133 | type, 134 | deltas: [] 135 | } 136 | 137 | groups[group.key].deltas.push({ 138 | previous, 139 | final 140 | }) 141 | } 142 | }else{ 143 | solos.push({ 144 | type, 145 | previous: parsedPrevious, 146 | final: parsedFinal 147 | }) 148 | } 149 | } 150 | 151 | for(let { type, key, ...group } of Object.values(groups)){ 152 | ledgerEntryModules[type].diff({ ctx, ...group }) 153 | } 154 | 155 | for(let { type, ...delta } of solos){ 156 | ledgerEntryModules[type].diff({ ctx, ...delta }) 157 | } 158 | } -------------------------------------------------------------------------------- /src/xrpl/stream.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | import { fetch as fetchLedger } from './ledger.js' 4 | 5 | 6 | 7 | export async function createForwardStream({ ctx, startSequence }){ 8 | if(ctx.log) 9 | log.pipe(ctx.log) 10 | 11 | let latestLedger 12 | 13 | while(!latestLedger){ 14 | try{ 15 | latestLedger = await fetchLedger({ 16 | ctx, 17 | sequence: 'validated' 18 | }) 19 | }catch(error){ 20 | log.warn(`cannot start forward stream, unable to get latest ledger: \n${error}`) 21 | await wait(1000) 22 | } 23 | } 24 | 25 | let stream = createRegistry({ 26 | name: 'live', 27 | startSequence, 28 | targetSequence: latestLedger.sequence, 29 | maxSize: ctx.config.ledger.streamQueueSize || 100 30 | }) 31 | 32 | ctx.xrpl.on('ledger', ledger => { 33 | stream.extend(ledger) 34 | }) 35 | 36 | 
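// fill the gap between startSequence and the validated tip by fetching ledgers directly;
// newer ledgers keep arriving through the live stream subscription above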
createFiller({ ctx, stream, stride: 1 }) 37 | 38 | return stream 39 | } 40 | 41 | export async function createBackwardStream({ ctx, startSequence }){ 42 | if(ctx.log) 43 | log.pipe(ctx.log) 44 | 45 | let stream = createRegistry({ 46 | name: 'backfill', 47 | startSequence, 48 | targetSequence: ctx.config.ledger.backfillToLedger || 0, 49 | maxSize: ctx.config.ledger.streamQueueSize || 100 50 | }) 51 | 52 | createFiller({ ctx, stream, stride: -1 }) 53 | 54 | return stream 55 | } 56 | 57 | 58 | function createRegistry({ name, startSequence, targetSequence, maxSize }){ 59 | let currentSequence = startSequence 60 | let ledgers = {} 61 | let resolveNext = () => 0 62 | 63 | return { 64 | get currentSequence(){ 65 | return currentSequence 66 | }, 67 | 68 | get targetSequence(){ 69 | return targetSequence 70 | }, 71 | 72 | get queueSize(){ 73 | return Object.keys(ledgers).length 74 | }, 75 | 76 | has(sequence){ 77 | return !!ledgers[sequence] 78 | }, 79 | 80 | accepts(sequence){ 81 | return Math.abs(sequence - currentSequence) <= maxSize 82 | }, 83 | 84 | extend(ledger){ 85 | targetSequence = Math.max(targetSequence, ledger.sequence) 86 | 87 | if(this.accepts(ledger.sequence)) 88 | this.put(ledger) 89 | }, 90 | 91 | put(ledger){ 92 | ledgers[ledger.sequence] = ledger 93 | resolveNext() 94 | 95 | if(this.queueSize > 1){ 96 | log.accumulate.debug({ 97 | text: [ 98 | `${name} queue has`, 99 | this.queueSize, 100 | `ledgers`, 101 | `(+%${name}QueueAdd in %time)` 102 | ], 103 | data: { 104 | [`${name}QueueAdd`]: 1 105 | } 106 | }) 107 | } 108 | }, 109 | 110 | status(){ 111 | return { 112 | currentSequence, 113 | targetSequence 114 | } 115 | }, 116 | 117 | async next(){ 118 | while(!ledgers[currentSequence]){ 119 | await new Promise(resolve => resolveNext = resolve) 120 | } 121 | 122 | let ledger = ledgers[currentSequence] 123 | 124 | delete ledgers[currentSequence] 125 | 126 | currentSequence += targetSequence >= currentSequence ? 
1 : -1 126 | 127 | return { 128 | ledger, 129 | ledgersBehind: targetSequence - currentSequence 130 | } 131 | } 132 | } 133 | } 134 | 135 | 136 | function createFiller({ ctx, stream, stride }){ 137 | let reservations = {} 138 | 139 | for(let n=0; n<4; n++){ // note: the loop bound and the async opener on the next line were garbled in extraction; 4 parallel fillers is an assumed placeholder 140 | ;(async () => { 141 | let sequence = stream.currentSequence 142 | 143 | while(true){ 144 | let stepsToTarget = (stream.targetSequence - sequence) * stride 145 | let stepsBehindCurrent = (stream.currentSequence - sequence) * stride 146 | 147 | if(stepsToTarget < 0){ 148 | await wait(100) 149 | continue 150 | } 151 | 152 | if(!stream.accepts(sequence)){ 153 | await wait(1000) 154 | continue 155 | } 156 | 157 | if(stepsBehindCurrent > 0 || reservations[sequence] || stream.has(sequence)){ 158 | sequence += stride 159 | continue 160 | } 161 | 162 | reservations[sequence] = true 163 | 164 | try{ 165 | stream.put( 166 | await fetchLedger({ 167 | ctx, 168 | sequence 169 | }) 170 | ) 171 | }catch(error){ 172 | log.warn(`failed to fetch ledger #${sequence}:`, error) 173 | await wait(1000) 174 | }finally{ 175 | delete reservations[sequence] 176 | } 177 | } 178 | })() 179 | } 180 | } -------------------------------------------------------------------------------- /config.template.toml: -------------------------------------------------------------------------------- 1 | [NODE] 2 | # The file directory where the node can store its persistent data 3 | data_dir = "" 4 | 5 | 6 | [LEDGER] 7 | # Any ledgers before this sequence will be ignored 8 | # For full-history livenet, this value should be 32570, because the first 32569 ledgers were lost 9 | backfill_to_ledger = 32570 10 | # How many ledger objects per ledger_data request to fetch 11 | snapshot_chunk_size = 64000 12 | # How many ledgers to fetch in advance before writing them to the database 13 | stream_queue_size = 100 14 | 15 | 16 | [[LEDGER.SOURCE]] 17 | # A public websocket endpoint provided by a rippled or clio node 18 | # The node will pull all relevant ledger data from there 19 | url = "wss://xrplcluster.com" 20 | connections = 2 21 | 22 | [[LEDGER.SOURCE]] 23 | url = "wss://s1.ripple.com" 24 | 25 | 26 | 27 | [[TRUSTLIST]] 28 | # Trustlists are manually published lists containing token metadata according to XLS-26 29 | # https://github.com/XRPLF/XRPL-Standards/discussions/71 30 | # You can add as many as you want 31 | # The id field is used for ranking. Read below at "source_ranking" 32 | # The trust_level field defines the maximum possible trust level the list can set for any token 33 | id = "xrplmeta" 34 | url = "https://xrplmeta.org/trusted.toml" 35 | fetch_interval = 30 36 | trust_level = 3 37 | 38 | [[TRUSTLIST]] 39 | id = "xaman" 40 | url = "https://unhosted.exchange/tokens.toml" 41 | fetch_interval = 60 42 | trust_level = 3 43 | 44 | 45 | [TOMLS] 46 | # All issuing accounts with the Domain field set are automatically crawled for 47 | # DOMAIN/.well-known/xrp-ledger.toml. If the issuer published metadata about their token 48 | # according to XLS-26, the data will be copied and presented by the server. 49 | concurrency = 3 50 | connection_timeout = 10 51 | fetch_interval = 600 52 | 53 | 54 | [XRPSCAN] 55 | # xrpscan.com provides usernames, social media links and verifications of XRPL accounts. 56 | # https://docs.xrpscan.com/api-doc.html#get-names 57 | fetch_interval = 600 58 | 59 | 60 | [GRAVATAR] 61 | # gravatar.com provides avatar images for XRPL accounts that are linked via the on-chain Account "EmailHash" field.
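# Note: interval values throughout this file are in seconds - the 43200 below
# therefore re-checks Gravatar every 12 hours.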
62 | fetch_interval = 43200 63 | max_requests_per_minute = 60 64 | 65 | 66 | # [XAMAN] 67 | # xaman.app provides a list of curated assets, KYC status and avatar images. 68 | # It is required to obtain an API key to use this service. 69 | # https://apps.xaman.dev 70 | # 71 | # api_key = "paste_here" 72 | # api_secret = "paste_here" 73 | # fetch_interval_assets = 60 74 | # fetch_interval_kyc = 43200 75 | # fetch_interval_avatar = 43200 76 | # max_requests_per_minute = 30 77 | 78 | 79 | # [BITHOMP] 80 | # bithomp.com provides icons, usernames, website and social media links for XRPL accounts. 81 | # It is required to obtain an API key for this service. 82 | # https://bithomp.com/developer 83 | # 84 | # refresh_interval = 600 85 | # api_key = "" 86 | 87 | 88 | # [X] 89 | # x.com can provide icons, display names, website links and descriptions for both XRPL accounts and tokens. 90 | # It is required to obtain an API key for this service. 91 | # https://docs.x.com/x-api/getting-started/getting-access 92 | # 93 | # bearer_token = "paste_here" 94 | # fetch_interval = 3600 95 | # max_requests_per_minute = 60 96 | 97 | 98 | 99 | [SERVER] 100 | # The server listens on the specified port for incoming HTTP (REST + WebSocket) connections. 101 | port = 4080 102 | 103 | # Set this to the URL under which your node is reachable. 104 | # This is required for the API to correctly return the URLs of locally cached media files, such as token icons. 105 | # public_url = "https://example.com" 106 | 107 | # Multiple sources can give conflicting data for the same field. This array defines whose data has precedence, 108 | # from highest to lowest. Example for the values below: 109 | # if Xaman, Bithomp and X all define an icon for a token, the one from Xaman will be used. 110 | source_ranking = [ 111 | 'ledger', # on-chain values, such as the "Domain" field 112 | 'trustlist', # .toml files published by trusted entities 113 | 'issuer/domain', # .toml files published by the token issuer, linked via the "Domain" field 114 | 'xaman/curated', # names and icons, manually set by Xaman 115 | 'xaman/avatar', # user-defined icons from Xaman 116 | 'gravatar', 117 | 'bithomp', 118 | 'xrpscan', 119 | 'x' 120 | ] -------------------------------------------------------------------------------- /src/srv/ws.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import * as procedures from './api.js' 3 | import { formatTokenCache } from './procedures/token.js' 4 | import { executeProcedure } from './worker.js' 5 | 6 | 7 | const checkAliveInterval = 10000 8 | 9 | 10 | export function createManager({ ctx }){ 11 | let clients = [] 12 | let counter = 0 13 | 14 | setInterval( 15 | () => { 16 | for(let client of clients){ 17 | if(!client.alive){ 18 | client.socket.close() 19 | log.debug(`client #${client.id} inactivity kick`) 20 | continue 21 | } 22 | 23 | client.alive = false 24 | client.socket.ping() 25 | } 26 | }, 27 | checkAliveInterval 28 | ) 29 | 30 | ctx.ipc.subscribe( 31 | async payload => { 32 | if(payload.tokenUpdate){ 33 | let token = payload.tokenUpdate.token 34 | let key = `${token.id}` 35 | let recipients = [] 36 | 37 | for(let client of clients){ 38 | let subscription = client.tokenSubscriptions[key] 39 | 40 | if(subscription){ 41 | recipients.push({ 42 | client, 43 | subscription 44 | }) 45 | } 46 | } 47 | 48 | if(recipients.length > 0){ 49 | pushTokenUpdate({ 50 | ctx, 51 | token, 52 | recipients 53 | }) 54 | } 55 | } 56 | } 57 | ) 58 | 59 | function
logCount(change){ 60 | log.accumulate.info({ 61 | text: [ 62 | clients.length, 63 | `client(s) connected (%wsConnectionChange in %time)` 64 | ], 65 | data: { 66 | wsConnectionChange: change 67 | } 68 | }) 69 | } 70 | 71 | return { 72 | registerSocket(socket){ 73 | let client = { 74 | id: ++counter, 75 | socket, 76 | tokenSubscriptions: {}, 77 | alive: true 78 | } 79 | 80 | socket.on('message', async message => { 81 | try{ 82 | var { id, command, ...params } = JSON.parse(message) 83 | }catch{ 84 | log.debug(`client #${client.id} sent malformed request - dropping them`) 85 | socket.close(); return // bail out here: `command` is undefined for malformed requests, so falling through would attempt to respond on a closed socket 86 | } 87 | 88 | try{ 89 | if(!procedures[command]){ 90 | throw { 91 | message: 'unknown command', 92 | expose: true 93 | } 94 | } 95 | 96 | socket.send( 97 | await executeProcedure({ 98 | ctx: { 99 | ...ctx, 100 | client 101 | }, 102 | procedure: command, 103 | params, 104 | requestId: id 105 | }) 106 | ) 107 | }catch(error){ 108 | let response = null 109 | 110 | if(typeof error === 'object'){ 111 | if(error.expose){ 112 | response = error 113 | delete response.expose 114 | } 115 | } 116 | 117 | if(!response){ 118 | log.debug(`internal server error while serving client #${client.id}:`, error.message) 119 | response = {message: 'internal server error'} 120 | } 121 | 122 | response.request = { 123 | ...params, 124 | command, 125 | } 126 | 127 | socket.send( 128 | JSON.stringify({ 129 | id, 130 | error: response 131 | }) 132 | ) 133 | } 134 | }) 135 | 136 | socket.on('pong', () => { 137 | client.alive = true 138 | }) 139 | 140 | socket.on('close', () => { 141 | clients.splice(clients.indexOf(client), 1) // the deleteCount of 1 is required - without it, splice removes every client from this index onwards 142 | log.debug(`client #${client.id} disconnected`) 143 | logCount(-1) 144 | }) 145 | 146 | socket.on('error', error => { 147 | log.info(`client #${client.id} websocket error: ${error.message}`) 148 | }) 149 | 150 | clients.push(client) 151 | 152 | log.debug(`new connection (#${client.id} ${socket._socket.remoteAddress})`) 153 | logCount(1) 154 | } 155 | } 156 | } 157 | 158 | function pushTokenUpdate({ ctx, token, recipients }){ 159 | if(!token.id) 160 | throw new Error(`token.id required`) 161 | 162 | let cache = ctx.db.cache.tokens.readOne({ 163 | where: { 164 | token: token.id 165 | }, 166 | include: { 167 | token: { 168 | issuer: true 169 | } 170 | } 171 | }) 172 | 173 | for(let { client, subscription } of recipients){ 174 | client.socket.send( 175 | JSON.stringify({ 176 | type: 'tokenUpdate', 177 | token: formatTokenCache({ 178 | ctx, 179 | cache, 180 | decodeCurrency: subscription.decode_currency, 181 | preferSources: subscription.prefer_sources, 182 | expandMeta: subscription.include_sources, 183 | includeChanges: subscription.include_changes, 184 | }) 185 | }) 186 | ) 187 | } 188 | } -------------------------------------------------------------------------------- /src/crawl/schedule.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { unixNow, wait } from '@xrplkit/time' 3 | 4 | 5 | export async function scheduleGlobal({ ctx, task, interval, routine }){ 6 | let duration = 0 7 | let previousOperation = ctx.db.core.operations.readOne({ 8 | where: { 9 | subjectType: 'global', 10 | subjectId: 0, 11 | task 12 | } 13 | }) 14 | 15 | if(previousOperation) 16 | duration = interval - unixNow() + previousOperation.time 17 | 18 | if(duration > 0) 19 | log.debug(`${task}:`, `waiting ${duration} seconds for next operation`) 20 | 21 | await wait(duration * 1000 + 1) 22 | 23 | try{ 24 | await routine() 25 | 26 |
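// the operation record is only written once the routine has resolved, so a
// failed run leaves no record behind and is retried on the next pass instead
// of waiting out the full interval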
ctx.db.core.operations.createOne({ 27 | data: { 28 | subjectType: 'global', 29 | subjectId: 0, 30 | task, 31 | time: unixNow() 32 | } 33 | }) 34 | }catch(error){ 35 | log.warn(`scheduled task "${task}" failed:\n`, error.stack || error.message || error) 36 | await wait(4000) 37 | } 38 | } 39 | 40 | export async function scheduleIterator({ ctx, type, where, include, task, interval, concurrency = 1, routine }){ 41 | let { table, ids } = collectItemIds({ ctx, type, where }) 42 | 43 | log.debug(`${task}:`, ids.length, `items[${table}] to iterate`) 44 | 45 | await Promise.all( 46 | Array(concurrency) 47 | .fill(0) 48 | .map(async () => { 49 | while(ids.length > 0){ 50 | let id = ids.shift() 51 | let item = ctx.db.core[table].readOne({ 52 | where: { 53 | id 54 | }, 55 | include 56 | }) 57 | 58 | let previousOperation = ctx.db.core.operations.readOne({ 59 | where: { 60 | subjectType: type, 61 | subjectId: item.id, 62 | task, 63 | time: { 64 | greaterThan: unixNow() - interval 65 | } 66 | } 67 | }) 68 | 69 | if(previousOperation) 70 | continue 71 | 72 | try{ 73 | await routine(item, ids.length) 74 | }catch(error){ 75 | log.warn(`scheduled task "${task}" failed for item:\n`, error.stack || error.message || error) 76 | await wait(3000) 77 | } 78 | 79 | ctx.db.core.operations.createOne({ 80 | data: { 81 | subjectType: type, 82 | subjectId: item.id, 83 | task, 84 | time: unixNow() 85 | } 86 | }) 87 | } 88 | }) 89 | ) 90 | 91 | await wait(1) 92 | } 93 | 94 | 95 | export async function scheduleBatchedIterator({ ctx, type, where, include, task, interval, batchSize, accumulate, commit }){ 96 | let queue = [] 97 | let flush = async () => { 98 | let batch = queue.splice(0, batchSize) 99 | 100 | try{ 101 | await commit(batch) 102 | }catch(error){ 103 | log.warn(`scheduled task "${task}" failed for batch:\n`, error.stack || error.message || error) 104 | } 105 | 106 | let time = unixNow() 107 | 108 | for(let { items } of batch){ 109 | for(let item of items){ 110 | ctx.db.core.operations.createOne({ 111 | data: { 112 | subjectType: type, 113 | subjectId: item.id, 114 | task, 115 | time 116 | } 117 | }) 118 | } 119 | } 120 | } 121 | 122 | let { table, ids } = collectItemIds({ ctx, type, where }) 123 | let now = unixNow() 124 | 125 | log.debug(`${task}:`, ids.length, `items[${table}] to iterate`) 126 | 127 | for(let id of ids){ 128 | let item = ctx.db.core[table].readOne({ 129 | where: { id }, 130 | include 131 | }) 132 | 133 | let previousOperation = ctx.db.core.operations.readOne({ 134 | where: { 135 | subjectType: type, 136 | subjectId: item.id, 137 | task, 138 | time: { 139 | greaterThan: now - interval 140 | } 141 | } 142 | }) 143 | 144 | await wait(1) 145 | 146 | if(previousOperation) 147 | continue 148 | 149 | queue = accumulate(queue, item) 150 | 151 | if(queue.length >= batchSize) 152 | await flush() 153 | } 154 | 155 | if(queue.length > 0) 156 | await flush() 157 | 158 | await wait(1) 159 | } 160 | 161 | function collectItemIds({ ctx, type, where }){ 162 | if(type === 'issuer'){ 163 | return { 164 | table: 'accounts', 165 | ids: ctx.db.core.tokens.readMany({ 166 | select: { issuer: true }, 167 | distinct: ['issuer'], 168 | where 169 | }) 170 | .map(row => row.issuer?.id) 171 | .filter(Boolean) 172 | .reverse() 173 | } 174 | }else{ 175 | return { 176 | table: 'tokens', 177 | ids: ctx.db.core.tokens.readMany({ 178 | select: { id: true }, 179 | where 180 | }) 181 | .map(row => row.id) 182 | .reverse() 183 | } 184 | } 185 | } -------------------------------------------------------------------------------- 
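The scheduler above is the backbone of every crawler in src/crawl/crawlers: a crawler is an endless loop that hands its fetch routine to scheduleGlobal or scheduleIterator, and the operations table decides whether the configured interval has already elapsed. A minimal sketch of that pattern, where the examplelist config section, task name and URL are made up for illustration:

import { scheduleGlobal } from '../schedule.js'
import { createFetch } from '../../lib/fetch.js'

// hypothetical crawler, following the same shape as xaman.js or xrpscan.js
export default async function({ ctx }){
	let config = ctx.config.examplelist // assumed config section

	if(!config || config.disabled)
		throw new Error(`disabled by config`)

	let fetch = createFetch({
		baseUrl: 'https://example.com/api/' // placeholder endpoint
	})

	while(true){
		await scheduleGlobal({
			ctx,
			task: 'examplelist', // key in the operations table
			interval: config.fetchInterval, // seconds between successful runs
			routine: async () => {
				let { status, data } = await fetch('tokens')

				if(status !== 200)
					throw new Error(`HTTP ${status}`)

				// ...write the fetched data via the helpers in src/db/helpers/props.js
			}
		})
	}
}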
/test/unit/prop-diff.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createContext } from './env.js' 3 | import { diffMultiAccountProps, diffMultiTokenProps } from '../../src/db/helpers/props.js' 4 | import { reduceProps } from '../../src/srv/procedures/token.js' 5 | import { updateCacheForTokenProps } from '../../src/cache/tokens.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | const accounts = [ 11 | { 12 | address: 'rrrrrrrrrrrrrrrrrrrrrhoLvTp', 13 | props: { 14 | name: 'Account Zero', 15 | trust_level: 3 16 | } 17 | }, 18 | { 19 | address: 'rrrrrrrrrrrrrrrrrrrrBZbvji', 20 | props: { 21 | name: 'Account One', 22 | trust_level: 3 23 | } 24 | }, 25 | { 26 | address: 'rrrrrrrrrrrrrrrrrrrn5RM1rHd', 27 | props: { 28 | name: 'NaN Address', 29 | trust_level: 1 30 | } 31 | } 32 | ] 33 | 34 | const tokens = [ 35 | { 36 | currency: 'XAU', 37 | issuer: { 38 | address: accounts[0].address 39 | }, 40 | props: { 41 | name: 'Gold', 42 | asset_class: 'commodity' 43 | } 44 | }, 45 | { 46 | currency: 'XAG', 47 | issuer: { 48 | address: accounts[1].address 49 | }, 50 | props: { 51 | name: 'Silver', 52 | asset_class: 'commodity' 53 | } 54 | }, 55 | { 56 | currency: 'USD', 57 | issuer: { 58 | address: accounts[2].address 59 | }, 60 | props: { 61 | name: 'US Dollar', 62 | asset_class: 'fiat' 63 | } 64 | } 65 | ] 66 | 67 | describe( 68 | 'Diffing account props', 69 | () => { 70 | it( 71 | 'execute with new data', 72 | () => { 73 | diffMultiAccountProps({ 74 | ctx, 75 | accounts, 76 | source: 'test' 77 | }) 78 | } 79 | ) 80 | 81 | it( 82 | 'should insert all props', 83 | () => { 84 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(6) 85 | } 86 | ) 87 | 88 | it( 89 | 'execute with one account removed', 90 | () => { 91 | diffMultiAccountProps({ 92 | ctx, 93 | accounts: accounts.slice(0, 2), 94 | source: 'test' 95 | }) 96 | } 97 | ) 98 | 99 | it( 100 | 'should have removed the removed account\'s props', 101 | () => { 102 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(4) 103 | } 104 | ) 105 | 106 | it( 107 | 'should also remove specific removed props', 108 | () => { 109 | let accountsChanged = structuredClone(accounts) 110 | 111 | delete accountsChanged[0].props.name 112 | 113 | diffMultiAccountProps({ 114 | ctx, 115 | accounts: accountsChanged, 116 | source: 'test' 117 | }) 118 | 119 | expect(ctx.db.core.accountProps.readMany().length).to.be.equal(5) 120 | } 121 | ) 122 | } 123 | ) 124 | 125 | 126 | describe( 127 | 'Diffing token props', 128 | () => { 129 | it( 130 | 'execute with new data', 131 | () => { 132 | diffMultiTokenProps({ 133 | ctx, 134 | tokens, 135 | source: 'test' 136 | }) 137 | } 138 | ) 139 | 140 | it( 141 | 'should insert all props', 142 | () => { 143 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(6) 144 | } 145 | ) 146 | 147 | it( 148 | 'execute with one token removed', 149 | () => { 150 | diffMultiTokenProps({ 151 | ctx, 152 | tokens: tokens.slice(0, 2), 153 | source: 'test' 154 | }) 155 | } 156 | ) 157 | 158 | it( 159 | 'should have removed the removed token\'s props', 160 | () => { 161 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(4) 162 | } 163 | ) 164 | 165 | it( 166 | 'should also remove specific removed props', 167 | () => { 168 | let tokensChanged = structuredClone(tokens) 169 | 170 | delete tokensChanged[0].props.name 171 | 172 | diffMultiTokenProps({ 173 | ctx, 174 | tokens: tokensChanged, 175 | source: 'test' 176 | }) 177 
| 178 | expect(ctx.db.core.tokenProps.readMany().length).to.be.equal(5) 179 | } 180 | ) 181 | 182 | it( 183 | 'should have the correct token prop cache', 184 | () => { 185 | for(let { currency, issuer } of tokens){ 186 | updateCacheForTokenProps({ 187 | ctx, 188 | token: { currency, issuer } 189 | }) 190 | } 191 | 192 | let props = ctx.db.cache.tokens.readMany() 193 | .map(cache => reduceProps({ props: cache.tokenProps })) 194 | 195 | let expectedProps = tokens 196 | .map(({ props }) => props) 197 | .slice(0, 3) 198 | 199 | delete expectedProps[0].name 200 | 201 | expect(props).to.be.deep.equal(expectedProps) 202 | } 203 | ) 204 | } 205 | ) 206 | -------------------------------------------------------------------------------- /src/srv/sanitizers/token.js: -------------------------------------------------------------------------------- 1 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 2 | import { isValidClassicAddress } from 'ripple-address-codec' 3 | 4 | 5 | const sortKeymap = { 6 | trustlines_delta_24h: 'trustlinesDelta24H', 7 | trustlines_percent_24h: 'trustlinesPercent24H', 8 | trustlines_delta_7d: 'trustlinesDelta7D', 9 | trustlines_percent_7d: 'trustlinesPercent7D', 10 | holders: 'holders', 11 | holders_delta_24h: 'holdersDelta24H', 12 | holders_percent_24h: 'holdersPercent24H', 13 | holders_delta_7d: 'holdersDelta7D', 14 | holders_percent_7d: 'holdersPercent7D', 15 | supply: 'supply', 16 | supply_delta_24h: 'supplyDelta24H', 17 | supply_percent_24h: 'supplyPercent24H', 18 | supply_delta_7d: 'supplyDelta7D', 19 | supply_percent_7d: 'supplyPercent7D', 20 | marketcap: 'marketcap', 21 | marketcap_delta_24h: 'marketcapDelta24H', 22 | marketcap_percent_24h: 'marketcapPercent24H', 23 | marketcap_delta_7d: 'marketcapDelta7D', 24 | marketcap_percent_7d: 'marketcapPercent7D', 25 | price_percent_24h: 'pricePercent24H', 26 | price_percent_7d: 'pricePercent7D', 27 | volume_24h: 'volume24H', 28 | volume_7d: 'volume7D', 29 | exchanges_24h: 'exchanges24H', 30 | exchanges_7d: 'exchanges7D', 31 | takers_24h: 'takers24H', 32 | takers_7d: 'takers7D', 33 | trustlines: 'trustlines', 34 | } 35 | 36 | 37 | export function sanitizeToken({ key, array = false, allowXRP = false }){ 38 | function parse(ctx, { currency, issuer }){ 39 | if(currency === 'XRP'){ 40 | if(allowXRP) 41 | return { 42 | id: 1, 43 | currency: 'XRP' 44 | } 45 | else 46 | throw { 47 | type: `invalidParam`, 48 | message: `XRP is not allowed as parameter.`, 49 | expose: true 50 | } 51 | }else{ 52 | if(!isValidClassicAddress(issuer)) 53 | throw { 54 | type: `invalidParam`, 55 | message: `The issuing address "${key}.issuer" is malformed.`, 56 | expose: true 57 | } 58 | } 59 | 60 | let token = ctx.db.core.tokens.readOne({ 61 | where: { 62 | currency: currencyUTF8ToHex(currency), 63 | issuer: { 64 | address: issuer 65 | } 66 | }, 67 | include: { 68 | issuer: true 69 | } 70 | }) 71 | 72 | if(!token){ 73 | throw { 74 | type: `entryNotFound`, 75 | message: `The token '${currency}' issued by '${issuer}' does not exist.`, 76 | expose: true 77 | } 78 | } 79 | 80 | return token 81 | } 82 | 83 | return ({ ctx, ...args }) => { 84 | if(!args.hasOwnProperty(key)) 85 | throw { 86 | type: `missingParam`, 87 | message: `No token specified.`, 88 | expose: true 89 | } 90 | 91 | if(array){ 92 | return { 93 | ...args, 94 | ctx, 95 | [key]: args[key].map(token => parse(ctx, token)), 96 | } 97 | }else{ 98 | return { 99 | ...args, 100 | ctx, 101 | [key]: parse(ctx, args[key]), 102 | } 103 | } 104 | } 105 | } 106 | 107 | export function sanitizeNameLike(){ 108 
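// like every sanitize* factory in this file, this returns a function that
// takes the full request args, validates one parameter and passes the rest
// through untouched, so the API layer can chain several sanitizers before a
// procedure runs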
| return ({ ctx, name_by, ...args }) => { 109 | if(name_by){ 110 | if(typeof name_by !== 'string'){ 111 | throw { 112 | type: `invalidParam`, 113 | message: `The "name_by" term has to be a string.`, 114 | expose: true 115 | } 116 | } 117 | 118 | if(name_by.length === 0){ 119 | throw { 120 | type: `invalidParam`, 121 | message: `The "name_by" term has to be at least one character long.`, 122 | expose: true 123 | } 124 | } 125 | } 126 | 127 | return { 128 | ...args, 129 | ctx, 130 | name_by 131 | } 132 | } 133 | } 134 | 135 | export function sanitizeTrustLevels(){ 136 | return ({ ctx, trust_level, trust_levels, ...args }) => { 137 | trust_levels = trust_level || trust_levels 138 | 139 | if(trust_levels){ 140 | if(!Array.isArray(trust_levels)){ 141 | throw { 142 | type: `invalidParam`, 143 | message: `The trust levels need to be specified as an array.`, 144 | expose: true 145 | } 146 | } 147 | 148 | trust_levels = trust_levels.map(level => parseInt(level)) 149 | 150 | if(trust_levels.some(level => level < 0 || level > 3)){ 151 | throw { 152 | type: `invalidParam`, 153 | message: `The trust levels need to be between 0 and 3.`, 154 | expose: true 155 | } 156 | } 157 | } 158 | 159 | return { 160 | ...args, 161 | ctx, 162 | trust_levels 163 | } 164 | } 165 | } 166 | 167 | export function sanitizeTokenListSortBy(){ 168 | return ({ ctx, sort_by, ...args }) => { 169 | if(sort_by){ 170 | sort_by = sortKeymap[sort_by] 171 | 172 | if(!sort_by){ 173 | throw { 174 | type: `invalidParam`, 175 | message: `This sorting mode is not allowed. Possible values are: ${ 176 | Object.keys(sortKeymap) 177 | .map(key => `${key}`) 178 | .join(', ') 179 | }'`, 180 | expose: true 181 | } 182 | } 183 | } 184 | 185 | return { 186 | ...args, 187 | ctx, 188 | sort_by 189 | } 190 | } 191 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/domains.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { parse as parseXLS26 } from '@xrplkit/xls26' 3 | import { parse as parseURL } from 'url' 4 | import { sanitize as sanitizeURL } from '../../lib/url.js' 5 | import { scheduleIterator } from '../schedule.js' 6 | import { createFetch } from '../../lib/fetch.js' 7 | import { clearAccountProps, clearTokenProps, readAccountProps, writeAccountProps, writeTokenProps } from '../../db/helpers/props.js' 8 | import { currencyUTF8ToHex } from '@xrplkit/tokens' 9 | import { reduceProps } from '../../srv/procedures/token.js' 10 | 11 | 12 | const tomlStandardPath = '.well-known/xrp-ledger.toml' 13 | 14 | 15 | export default async function({ ctx }){ 16 | let config = ctx.config.tomls 17 | 18 | if(!config || config.disabled){ 19 | throw new Error(`disabled by config`) 20 | } 21 | 22 | let fetch = createFetch({ 23 | timeout: config.connectionTimeout || 20 24 | }) 25 | 26 | while(true){ 27 | await scheduleIterator({ 28 | ctx, 29 | type: 'issuer', 30 | task: 'domains', 31 | interval: config.fetchInterval, 32 | concurrency: 3, 33 | routine: async ({ id, address }, remaining) => { 34 | let { domain } = reduceProps({ 35 | props: readAccountProps({ 36 | ctx, 37 | account: { id } 38 | }), 39 | sourceRanking: [ 40 | 'trustlist', 41 | 'ledger', 42 | 'issuer/domain', 43 | 'xaman', 44 | 'bithomp', 45 | 'xrpscan', 46 | 'x' 47 | ] 48 | }) 49 | 50 | if(domain){ 51 | try{ 52 | var xls26 = await fetchToml({ domain, fetch }) 53 | }catch(error){ 54 | log.debug(`issuer (${address}): ${error.message}`) 55 | return 56 | }finally{ 57 | 
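// counted in the finally block, so failed lookups show up in the accumulated
// total alongside successful ones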
log.accumulate.info({ 58 | text: [`%xrplTomlLookups xrp-ledger.toml lookups in %time (${remaining} remaining)`], 59 | data: { 60 | xrplTomlLookups: 1 61 | } 62 | }) 63 | } 64 | 65 | let publishedIssuers = 0 66 | let publishedTokens = 0 67 | 68 | for(let { address: issuer, ...props } of xls26.issuers){ 69 | if(issuer !== address) 70 | continue 71 | 72 | delete props.trust_level 73 | 74 | writeAccountProps({ 75 | ctx, 76 | account: { 77 | address: issuer 78 | }, 79 | props, 80 | source: `issuer/domain/${address}` 81 | }) 82 | 83 | publishedIssuers++ 84 | } 85 | 86 | for(let { currency, issuer, ...props } of xls26.tokens){ 87 | if(issuer !== address) 88 | continue 89 | 90 | delete props.trust_level 91 | 92 | writeTokenProps({ 93 | ctx, 94 | token: { 95 | currency: currencyUTF8ToHex(currency), 96 | issuer: { 97 | address: issuer 98 | } 99 | }, 100 | props, 101 | source: `issuer/domain/${address}` 102 | }) 103 | 104 | publishedTokens++ 105 | } 106 | 107 | log.debug(`issuer (${address}) valid xls26:`, xls26) 108 | 109 | if(publishedIssuers || publishedTokens){ 110 | log.accumulate.info({ 111 | text: [`%domainIssuersUpdated issuers and %domainTokensUpdated tokens updated in %time`], 112 | data: { 113 | domainIssuersUpdated: publishedIssuers, 114 | domainTokensUpdated: publishedTokens, 115 | } 116 | }) 117 | } 118 | }else{ 119 | clearAccountProps({ 120 | ctx, 121 | account: { id }, 122 | source: `issuer/domain/${address}` 123 | }) 124 | 125 | for(let token of ctx.db.core.tokens.readMany({ 126 | where: { 127 | issuer: { id } 128 | } 129 | })){ 130 | clearTokenProps({ 131 | ctx, 132 | token, 133 | source: `issuer/domain/${address}` 134 | }) 135 | } 136 | } 137 | } 138 | }) 139 | } 140 | } 141 | 142 | export async function fetchToml({ domain, fetch }){ 143 | let { protocol, host, pathname } = parseURL(domain) 144 | 145 | if(protocol && protocol !== 'https:' && protocol !== 'http:') 146 | throw new Error(`unsupported protocol: ${domain}`) 147 | 148 | if(!host) 149 | host = '' 150 | 151 | if(!pathname) 152 | pathname = '' 153 | 154 | let tomlUrls = (protocol ? [protocol] : ['https:', 'http:']) 155 | .map(protocol => `${protocol}//${host}${pathname}/${tomlStandardPath}`) 156 | .map(sanitizeURL) 157 | 158 | for(let tomlUrl of tomlUrls){ 159 | log.debug(`fetching ${tomlUrl}`) 160 | 161 | try{ 162 | let { status, data } = await fetch(tomlUrl) 163 | 164 | if(status !== 200) 165 | throw new Error(`HTTP ${status}`) 166 | 167 | return parseXLS26(data) 168 | }catch(error){ 169 | log.debug(`failed ${tomlUrl}: ${error.message}`) 170 | 171 | if(error.message === 'HTTP 404' || tomlUrl === tomlUrls.at(-1)) 172 | throw new Error( 173 | error.message.includes(tomlUrl) 174 | ? 
error.message 175 | : `${tomlUrl} -> ${error.message}` 176 | ) 177 | } 178 | } 179 | } -------------------------------------------------------------------------------- /deps/sqlite-extensions/xfl.c: -------------------------------------------------------------------------------- 1 | #include <sqlite3ext.h> /* note: the include targets were stripped during extraction; sqlite3ext.h, stdint.h and stdbool.h are assumed from the usage below */ 2 | #include <stdint.h> 3 | #include <stdbool.h> 4 | 5 | SQLITE_EXTENSION_INIT1 6 | 7 | 8 | static int64_t const minMantissa = 1000000000000000ull; 9 | static int64_t const maxMantissa = 9999999999999999ull; 10 | static int32_t const minExponent = -96; 11 | static int32_t const maxExponent = 80; 12 | 13 | static int64_t const mantissaMask = 0b111111111111111111111111111111111111111111111111111111; 14 | static int64_t const exponentMask = 0b11111111; 15 | 16 | static int64_t const INVALID_FLOAT = -1ll; 17 | 18 | 19 | static bool xfl_is_negative(int64_t float1) 20 | { 21 | return ((float1 >> 62U) & 1ULL) == 0; 22 | } 23 | 24 | static int32_t xfl_get_exponent(int64_t float1) 25 | { 26 | if (float1 < 0) 27 | return INVALID_FLOAT; 28 | 29 | if (float1 == 0) 30 | return 0; 31 | 32 | if (float1 < 0) 33 | return INVALID_FLOAT; 34 | 35 | uint64_t exponent = (uint64_t)float1; 36 | 37 | exponent >>= 54U; 38 | exponent &= 0xFFU; 39 | 40 | if(xfl_is_negative(float1)){ 41 | exponent ^= exponentMask; 42 | } 43 | 44 | return (int32_t)exponent - 97; 45 | } 46 | 47 | static int64_t xfl_get_mantissa(int64_t float1) 48 | { 49 | if (float1 < 0) 50 | return INVALID_FLOAT; 51 | 52 | if (float1 == 0) 53 | return 0; 54 | 55 | if (float1 < 0) 56 | return INVALID_FLOAT; 57 | 58 | int64_t mantissa = float1 - ((((int64_t)float1) >> 54U) << 54U); 59 | 60 | if(xfl_is_negative(float1)){ 61 | mantissa = -(mantissa ^ mantissaMask); 62 | } 63 | 64 | return mantissa; 65 | } 66 | 67 | 68 | static int64_t xfl_make(int64_t mantissa, int32_t exponent) 69 | { 70 | if (mantissa == 0) 71 | return 0; 72 | 73 | bool neg = mantissa < 0; 74 | 75 | if (neg) 76 | mantissa *= -1; 77 | 78 | while (mantissa < minMantissa) 79 | { 80 | mantissa *= 10; 81 | exponent--; 82 | 83 | if (exponent < minExponent) 84 | return INVALID_FLOAT; 85 | } 86 | 87 | while (mantissa > maxMantissa) 88 | { 89 | mantissa /= 10; 90 | exponent++; 91 | 92 | if (exponent > maxExponent) 93 | return INVALID_FLOAT; 94 | } 95 | 96 | exponent = exponent - minExponent + 1; 97 | 98 | int64_t out = 0; 99 | 100 | if(neg){ 101 | exponent = exponent ^ exponentMask; 102 | mantissa = mantissa ^ mantissaMask; 103 | }else{ 104 | out = 1; 105 | } 106 | 107 | out <<= 8; 108 | out |= exponent; 109 | out <<= 54; 110 | out |= mantissa; 111 | 112 | return out; 113 | } 114 | 115 | static int64_t xfl_invert_sign(int64_t float1) 116 | { 117 | return xfl_make( 118 | -xfl_get_mantissa(float1), 119 | xfl_get_exponent(float1) 120 | ); 121 | } 122 | 123 | static int64_t xfl_sum(int64_t a, int64_t b) 124 | { 125 | int64_t am = xfl_get_mantissa(a); 126 | int32_t ae = xfl_get_exponent(a); 127 | int64_t bm = xfl_get_mantissa(b); 128 | int32_t be = xfl_get_exponent(b); 129 | 130 | if(am == 0) 131 | return b; 132 | 133 | if(bm == 0) 134 | return a; 135 | 136 | while (ae < be){ 137 | am /= 10; 138 | ae++; 139 | } 140 | 141 | while (be < ae){ 142 | bm /= 10; 143 | be++; 144 | } 145 | 146 | am += bm; 147 | 148 | return xfl_make(am, ae); 149 | } 150 | 151 | static int64_t xfl_sub(int64_t a, int64_t b) 152 | { 153 | return xfl_sum(a, xfl_invert_sign(b)); 154 | } 155 | 156 | 157 | 158 | 159 | static void sum_step(sqlite3_context* ctx, int argc, sqlite3_value* argv[]) 160 | { 161 | if(sqlite3_value_type(argv[0]) != SQLITE_INTEGER){ 162 |
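/* xfl_sum() is registered further below as a window aggregate: sum_step folds
   each incoming value into the running total, sum_inverse backs one out when
   the window slides, and sum_value reports the current state. Inputs must
   already be in the 64-bit XFL integer encoding, so anything else is rejected
   here. */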
sqlite3_result_error(ctx, "xfl_sum() only works with integers", -1); 163 | return; 164 | } 165 | 166 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context( 167 | ctx, 168 | sizeof(sqlite3_int64) 169 | ); 170 | 171 | if(current_sum){ 172 | *current_sum = xfl_sum( 173 | *current_sum, 174 | sqlite3_value_int64(argv[0]) 175 | ); 176 | } 177 | } 178 | 179 | static void sum_inverse(sqlite3_context* ctx, int argc, sqlite3_value* argv[]) 180 | { 181 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context( 182 | ctx, 183 | sizeof(sqlite3_int64) 184 | ); 185 | 186 | *current_sum = xfl_sub( 187 | *current_sum, 188 | sqlite3_value_int64(argv[0]) 189 | ); 190 | } 191 | 192 | static void sum_value(sqlite3_context* ctx) 193 | { 194 | sqlite3_int64* current_sum = (sqlite3_int64*)sqlite3_aggregate_context(ctx, 0); 195 | sqlite3_int64 value = 0; 196 | 197 | if(current_sum){ 198 | value = *current_sum; 199 | } 200 | 201 | sqlite3_result_int64(ctx, value); 202 | } 203 | 204 | 205 | #ifdef _WIN32 206 | __declspec(dllexport) 207 | #endif 208 | 209 | int sqlite3_extension_init(sqlite3* db, char** err_msg, const sqlite3_api_routines* api) { 210 | SQLITE_EXTENSION_INIT2(api) 211 | 212 | if (err_msg != 0) 213 | *err_msg = 0; 214 | 215 | sqlite3_create_window_function( 216 | db, 217 | "xfl_sum", 218 | 1, 219 | SQLITE_UTF8, 220 | 0, 221 | sum_step, 222 | sum_value, 223 | sum_value, 224 | sum_inverse, 225 | 0 226 | ); 227 | 228 | return SQLITE_OK; 229 | } 230 | -------------------------------------------------------------------------------- /src/crawl/crawlers/x.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleBatchedIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { writeAccountProps, writeTokenProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.x 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetch = new createFetch({ 15 | baseUrl: 'https://api.x.com/2', 16 | headers: { 17 | authorization: `Bearer ${config.bearerToken}` 18 | }, 19 | ratelimit: config.maxRequestsPerMinute 20 | }) 21 | 22 | while(true){ 23 | await scheduleBatchedIterator({ 24 | ctx, 25 | type: 'token', 26 | task: 'x', 27 | interval: config.fetchInterval, 28 | batchSize: 100, 29 | where: { 30 | OR: [ 31 | { 32 | props: { 33 | key: 'urls' 34 | } 35 | }, 36 | { 37 | issuer: { 38 | props: { 39 | key: 'urls' 40 | } 41 | } 42 | } 43 | ] 44 | }, 45 | include: { 46 | issuer: true 47 | }, 48 | accumulate: (tasks, token) => { 49 | if(!token.issuer) 50 | return 51 | 52 | let issuerUrls = ctx.db.core.accountProps.readMany({ 53 | where: { 54 | account: token.issuer, 55 | key: 'urls' 56 | } 57 | }) 58 | 59 | let tokenUrls = ctx.db.core.tokenProps.readMany({ 60 | where: { 61 | token, 62 | key: 'urls' 63 | } 64 | }) 65 | 66 | for(let prop of [...issuerUrls, ...tokenUrls]){ 67 | let link = prop.value 68 | .filter(link => link.type !== 'support') 69 | .find(link => link.url.includes('x.com')) 70 | 71 | if(!link) 72 | continue 73 | 74 | let handle = link.url.split('/')[3] 75 | 76 | if(!handle) 77 | continue 78 | 79 | if(!/^[A-Za-z0-9_]{1,15}$/.test(handle)) 80 | continue 81 | 82 | let task = tasks.find(task => task.handle === handle) 83 | 84 | if(!task){ 85 | tasks.push(task = { 86 | handle, 87 | items: [], 88 | issuers: [], 89 | tokens: [] 90 | }) 91 | } 92 | 93 | 
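// several tokens and issuers can share one X handle, so work is grouped per
// handle here and later fetched in batches of up to 100 handles with a single
// users/by request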
task.items.push(token) 94 | 95 | if(prop.token){ 96 | task.tokens.push(prop.token) 97 | }else{ 98 | task.issuers.push(prop.account) 99 | } 100 | } 101 | 102 | return tasks 103 | }, 104 | commit: async tasks => { 105 | log.info(`got batch of`, tasks.length, `x profiles to fetch`) 106 | 107 | let usernamesQuery = tasks 108 | .map(({ handle }) => handle) 109 | .join(',') 110 | 111 | let { status, data: {data, errors} } = await fetch( 112 | 'users/by?user.fields=name,profile_image_url,description,entities,public_metrics' 113 | + `&usernames=${encodeURIComponent(usernamesQuery)}` 114 | ) 115 | 116 | if(status !== 200) 117 | throw `HTTP ${status}` 118 | 119 | if(!data){ 120 | throw errors[0] 121 | } 122 | 123 | log.info(`fetched`, data.length, `profiles`) 124 | 125 | let updatedTokens = 0 126 | let updatedAccounts = 0 127 | 128 | for(let { handle, tokens, issuers } of tasks){ 129 | let profile = data.find(entry => entry.username.toLowerCase() === handle.toLowerCase()) 130 | let props = { 131 | followers: undefined, 132 | name: undefined, 133 | icon: undefined, 134 | description: undefined, 135 | domain: undefined 136 | } 137 | 138 | 139 | if(profile){ 140 | props.followers = profile.public_metrics.followers_count 141 | props.name = profile.name 142 | props.description = profile.description 143 | props.icon = profile.profile_image_url 144 | ? profile.profile_image_url.replace('_normal', '') 145 | : undefined 146 | 147 | if(profile.entities?.url?.urls){ 148 | props.domain = profile.entities.url.urls[0].expanded_url 149 | .replace(/^https?:\/\//, '') 150 | .replace(/\/$/, '') 151 | } 152 | 153 | if(profile.entities?.description?.urls){ 154 | let offset = 0 155 | 156 | for(let { start, end, expanded_url } of profile.entities.description.urls){ 157 | props.description = props.description.slice(0, start + offset) + expanded_url + props.description.slice(end + offset) 158 | offset += expanded_url.length - (end - start) 159 | } 160 | } 161 | } 162 | 163 | for(let token of tokens){ 164 | writeTokenProps({ 165 | ctx, 166 | token, 167 | props, 168 | source: 'x/profile' 169 | }) 170 | 171 | updatedTokens++ 172 | } 173 | 174 | for(let account of issuers){ 175 | writeAccountProps({ 176 | ctx, 177 | account, 178 | props, 179 | source: 'x/profile' 180 | }) 181 | 182 | updatedAccounts++ 183 | } 184 | } 185 | 186 | log.info(`updated`, updatedAccounts, `issuers and`, updatedTokens, `tokens`) 187 | } 188 | }) 189 | } 190 | } -------------------------------------------------------------------------------- /src/cache/worker.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { wait } from '@xrplkit/time' 3 | import { 4 | updateCacheForAccountProps, 5 | updateCacheForTokenExchanges, 6 | updateCacheForTokenMetrics, 7 | updateCacheForTokenProps 8 | } from './tokens.js' 9 | import { updateIconCacheFor } from './icons.js' 10 | 11 | 12 | export async function startMetaCacheWorker({ ctx }){ 13 | let running = true 14 | 15 | ;(async () => { 16 | while(running){ 17 | let todo = ctx.db.cache.todos.readOne({ 18 | where: { 19 | NOT: { 20 | task: { 21 | in: [ 22 | 'account.icons', 23 | 'token.icons', 24 | ] 25 | } 26 | } 27 | } 28 | }) 29 | 30 | if(!todo){ 31 | await wait(25) 32 | continue 33 | } 34 | 35 | try{ 36 | switch(todo.task){ 37 | case 'account.props': { 38 | updateCacheForAccountProps({ 39 | ctx, 40 | account: { 41 | id: todo.subject 42 | } 43 | }) 44 | break 45 | } 46 | case 'token.props': { 47 | updateCacheForTokenProps({ 48 | ctx, 49 | token: 
{ 50 | id: todo.subject 51 | } 52 | }) 53 | break 54 | } 55 | case 'token.exchanges': { 56 | updateCacheForTokenExchanges({ 57 | ctx, 58 | token: { 59 | id: todo.subject 60 | } 61 | }) 62 | break 63 | } 64 | case 'token.metrics.trustlines': { 65 | updateCacheForTokenMetrics({ 66 | ctx, 67 | token: { 68 | id: todo.subject 69 | }, 70 | metrics: { 71 | trustlines: true 72 | } 73 | }) 74 | break 75 | } 76 | case 'token.metrics.holders': { 77 | updateCacheForTokenMetrics({ 78 | ctx, 79 | token: { 80 | id: todo.subject 81 | }, 82 | metrics: { 83 | holders: true 84 | } 85 | }) 86 | break 87 | } 88 | case 'token.metrics.supply': { 89 | updateCacheForTokenMetrics({ 90 | ctx, 91 | token: { 92 | id: todo.subject 93 | }, 94 | metrics: { 95 | supply: true 96 | } 97 | }) 98 | break 99 | } 100 | case 'token.metrics.marketcap': { 101 | updateCacheForTokenMetrics({ 102 | ctx, 103 | token: { 104 | id: todo.subject 105 | }, 106 | metrics: { 107 | marketcap: true 108 | } 109 | }) 110 | break 111 | } 112 | } 113 | }catch(error){ 114 | log.warn(`cache update for token ${todo.subject} failed: ${error.stack || error.message || error}`) 115 | } 116 | 117 | ctx.db.cache.todos.deleteOne({ 118 | where: { 119 | id: todo.id 120 | } 121 | }) 122 | 123 | let remainingCount = ctx.db.cache.todos.count({ 124 | where: { 125 | NOT: { 126 | task: { 127 | in: [ 128 | 'account.icons', 129 | 'token.icons', 130 | ] 131 | } 132 | } 133 | } 134 | }) 135 | 136 | log.accumulate.info({ 137 | text: [`processed %cacheTasksProcessed cache updates in %time (${remainingCount} remaining)`], 138 | data: { cacheTasksProcessed: 1 } 139 | }) 140 | 141 | await wait(1) 142 | } 143 | })() 144 | 145 | return { 146 | stop(){ 147 | running = false 148 | } 149 | } 150 | } 151 | 152 | export async function startIconCacheWorker({ ctx }){ 153 | let running = true 154 | 155 | ;(async () => { 156 | while(running){ 157 | let todo = ctx.db.cache.todos.readOne({ 158 | where: { 159 | task: { 160 | in: [ 161 | 'account.icons', 162 | 'token.icons', 163 | ] 164 | } 165 | } 166 | }) 167 | 168 | if(!todo){ 169 | await wait(1000) 170 | continue 171 | } 172 | 173 | switch(todo.task){ 174 | case 'account.icons': { 175 | updateIconCacheFor({ 176 | ctx, 177 | account: { 178 | id: todo.subject 179 | } 180 | }) 181 | break 182 | } 183 | case 'token.icons': { 184 | updateIconCacheFor({ 185 | ctx, 186 | token: { 187 | id: todo.subject 188 | } 189 | }) 190 | break 191 | } 192 | } 193 | 194 | ctx.db.cache.todos.deleteOne({ 195 | where: { 196 | id: todo.id 197 | } 198 | }) 199 | 200 | let remainingCount = ctx.db.cache.todos.count({ 201 | where: { 202 | task: { 203 | in: [ 204 | 'account.icons', 205 | 'token.icons', 206 | ] 207 | } 208 | } 209 | }) 210 | 211 | log.accumulate.info({ 212 | text: [`processed %iconCacheTasksProcessed icon cache updates in %time (${remainingCount} remaining)`], 213 | data: { iconCacheTasksProcessed: 1 } 214 | }) 215 | 216 | await wait(1) 217 | } 218 | })() 219 | 220 | return { 221 | stop(){ 222 | running = false 223 | } 224 | } 225 | } -------------------------------------------------------------------------------- /src/srv/sanitizers/common.js: -------------------------------------------------------------------------------- 1 | import { getAvailableRange, readLedgerAt } from '../../db/helpers/ledgers.js' 2 | 3 | export function sanitizePoint({ defaultToLatest = false } = {}){ 4 | return ({ ctx, ...args }) => { 5 | let available = getAvailableRange({ ctx }) 6 | let sequence 7 | let time 8 | 9 | if(args.hasOwnProperty('sequence')){ 10 | sequence = 
Math.min( 11 | Math.max( 12 | args.sequence, 13 | available.sequence.start 14 | ), 15 | available.sequence.end 16 | ) 17 | }else if(args.hasOwnProperty('time')){ 18 | time = Math.min( 19 | Math.max( 20 | args.time, 21 | available.time.start 22 | ), 23 | available.time.end 24 | ) 25 | 26 | sequence = readLedgerAt({ ctx, time }).sequence 27 | }else{ 28 | if(defaultToLatest){ 29 | sequence = available.sequence.end 30 | time = available.time.end 31 | }else throw { 32 | type: `missingParam`, 33 | message: `This request is missing a ledger sequence or a timestamp.`, 34 | expose: true 35 | } 36 | } 37 | 38 | if(!sequence) 39 | sequence = readLedgerAt({ ctx, time }).sequence 40 | 41 | return { 42 | ...args, 43 | ctx, 44 | sequence, 45 | time 46 | } 47 | } 48 | } 49 | 50 | export function sanitizeRange({ withInterval = false, defaultToFullRange = false } = {}){ 51 | return ({ ctx, ...args }) => { 52 | let available = getAvailableRange({ ctx }) 53 | let sequence 54 | let time 55 | let interval 56 | 57 | if(args.hasOwnProperty('sequence')){ 58 | sequence = minMaxRange({ 59 | requested: args.sequence, 60 | available: available.sequence 61 | }) 62 | 63 | if(withInterval){ 64 | if(args.sequence.hasOwnProperty('interval')){ 65 | sequence.interval = parseInt(args.sequence.interval) 66 | }else{ 67 | throw { 68 | type: `missingParam`, 69 | message: `This request is missing sequence interval specification.`, 70 | expose: true 71 | } 72 | } 73 | } 74 | }else if(args.hasOwnProperty('time')){ 75 | time = minMaxRange({ 76 | requested: args.time, 77 | available: available.time 78 | }) 79 | 80 | if(withInterval){ 81 | if(args.time.hasOwnProperty('interval')){ 82 | time.interval = parseInt(args.time.interval) 83 | }else{ 84 | throw { 85 | type: `missingParam`, 86 | message: `This request is missing time interval specification.`, 87 | expose: true 88 | } 89 | } 90 | } 91 | 92 | sequence = { 93 | start: readLedgerAt({ ctx, time: time.start }).sequence, 94 | end: readLedgerAt({ ctx, time: time.end }).sequence, 95 | } 96 | }else if(defaultToFullRange){ 97 | sequence = available.sequence 98 | time = available.time 99 | }else{ 100 | throw { 101 | type: `missingParam`, 102 | message: `This request is missing a sequence or time range.`, 103 | expose: true 104 | } 105 | } 106 | 107 | if(withInterval){ 108 | if((sequence?.interval || time?.interval) <= 0){ 109 | throw { 110 | type: `invalidParam`, 111 | message: `The interval has to be greater than zero.`, 112 | expose: true 113 | } 114 | } 115 | } 116 | 117 | return { 118 | ...args, 119 | ctx, 120 | sequence, 121 | time, 122 | interval 123 | } 124 | } 125 | } 126 | 127 | export function sanitizeLimitOffset({ defaultLimit, maxLimit }){ 128 | return ({ ctx, limit, offset, ...args }) => { 129 | return { 130 | ...args, 131 | ctx, 132 | limit: limit 133 | ? Math.min(parseInt(limit), maxLimit) 134 | : defaultLimit, 135 | offset: offset 136 | ? 
parseInt(offset) 137 | : undefined 138 | } 139 | } 140 | } 141 | 142 | export function sanitizeSourcePreferences(){ 143 | return ({ ctx, prefer_sources, ...args }) => { 144 | if(prefer_sources){ 145 | if(!Array.isArray(prefer_sources)){ 146 | throw { 147 | type: `invalidParam`, 148 | message: `The preferred sources need to be specified as an array.`, 149 | expose: true 150 | } 151 | } 152 | 153 | for(let source of prefer_sources){ 154 | if([ 155 | 'ledger', 156 | 'xaman', 157 | 'domain', 158 | 'bithomp', 159 | 'xrpscan', 160 | 'x', 161 | 'gravatar' 162 | ].includes(source)) 163 | continue 164 | 165 | if(ctx.config.trustlist){ 166 | if( 167 | ctx.config.trustlist.some( 168 | list => list.id === source 169 | ) 170 | ) 171 | continue 172 | } 173 | 174 | throw { 175 | type: `invalidParam`, 176 | message: `The preferred source "${source}" does not exist.`, 177 | expose: true 178 | } 179 | } 180 | 181 | } 182 | 183 | return { 184 | ...args, 185 | ctx, 186 | prefer_sources 187 | } 188 | } 189 | } 190 | 191 | 192 | function minMaxRange({ requested, available }){ 193 | let start 194 | let end 195 | 196 | if(requested.start !== undefined){ 197 | if(requested.start < 0) 198 | start = Math.min(requested.start + available.start, available.end) 199 | else 200 | start = Math.min(Math.max(requested.start, available.start), available.end) 201 | }else{ 202 | start = available.start 203 | } 204 | 205 | if(requested.end !== undefined){ 206 | if(requested.end < 0) 207 | end = Math.max(requested.end + available.end, available.start) 208 | else 209 | end = Math.min(Math.max(requested.end, available.start), available.end) 210 | }else{ 211 | end = available.end 212 | } 213 | 214 | return { start, end } 215 | } -------------------------------------------------------------------------------- /src/crawl/crawlers/xaman.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { scheduleGlobal, scheduleIterator } from '../schedule.js' 3 | import { createFetch } from '../../lib/fetch.js' 4 | import { diffMultiAccountProps, diffMultiTokenProps, readAccountProps, writeAccountProps } from '../../db/helpers/props.js' 5 | 6 | 7 | export default async function({ ctx }){ 8 | let config = ctx.config.xaman 9 | 10 | if(!config || config.disabled){ 11 | throw new Error(`disabled by config`) 12 | } 13 | 14 | let fetchApi = createFetch({ 15 | baseUrl: 'https://xaman.app/api/v1/platform/', 16 | headers: { 17 | 'x-api-key': config.apiKey, 18 | 'x-api-secret': config.apiSecret 19 | }, 20 | ratelimit: config.maxRequestsPerMinute 21 | }) 22 | 23 | let fetchAvatar = createFetch({ 24 | baseUrl: 'https://xaman.app/avatar/', 25 | ratelimit: config.maxRequestsPerMinute 26 | }) 27 | 28 | await Promise.all([ 29 | crawlAssets({ 30 | ctx, 31 | fetch: fetchApi, 32 | interval: config.fetchIntervalAssets 33 | }), 34 | crawlKyc({ 35 | ctx, 36 | fetch: fetchApi, 37 | interval: config.fetchIntervalKyc, 38 | concurrency: config.concurrency || 3 39 | }), 40 | crawlAvatar({ 41 | ctx, 42 | fetch: fetchAvatar, 43 | interval: config.fetchIntervalAvatar 44 | }) 45 | ]) 46 | } 47 | 48 | async function crawlAssets({ ctx, fetch, interval }){ 49 | while(true){ 50 | await scheduleGlobal({ 51 | ctx, 52 | task: 'xaman.curated', 53 | interval, 54 | routine: async () => { 55 | log.info(`fetching curated asset list...`) 56 | 57 | let tokens = [] 58 | let accounts = [] 59 | 60 | let { data } = await fetch('curated-assets') 61 | 62 | if(!data?.details){ 63 | log.warn(`got malformed XAMAN curated asset
list:`, data) 64 | throw new Error(`malformed response`) 65 | } 66 | 67 | log.info(`got ${Object.values(data.details).length} curated assets`) 68 | 69 | for(let issuer of Object.values(data.details)){ 70 | if(issuer.info_source.type !== 'native') 71 | continue 72 | 73 | for(let currency of Object.values(issuer.currencies)){ 74 | accounts.push({ 75 | address: currency.issuer, 76 | props: { 77 | name: issuer.name.length > 0 78 | ? issuer.name 79 | : undefined, 80 | domain: issuer.domain, 81 | icon: issuer.avatar, 82 | trust_level: issuer.shortlist ? 3 : 2 83 | } 84 | }) 85 | 86 | tokens.push({ 87 | currency: currency.currency, 88 | issuer: { 89 | address: currency.issuer 90 | }, 91 | props: { 92 | name: currency.name.length > 0 93 | ? currency.name 94 | : undefined, 95 | icon: currency.avatar, 96 | trust_level: ( 97 | currency.info_source.type === 'native' 98 | ? (currency.shortlist ? 3 : 2) 99 | : 1 100 | ) 101 | } 102 | }) 103 | } 104 | } 105 | 106 | diffMultiAccountProps({ 107 | ctx, 108 | accounts, 109 | source: 'xaman/curated' 110 | }) 111 | 112 | diffMultiTokenProps({ 113 | ctx, 114 | tokens, 115 | source: 'xaman/curated' 116 | }) 117 | 118 | log.info(`updated`, tokens.length, `tokens and`, accounts.length, `issuers`) 119 | } 120 | }) 121 | } 122 | } 123 | 124 | 125 | async function crawlKyc({ ctx, fetch, interval, concurrency }){ 126 | while(true){ 127 | await scheduleIterator({ 128 | ctx, 129 | type: 'issuer', 130 | task: 'xaman.kyc', 131 | interval, 132 | concurrency, 133 | routine: async ({ id, address }, remaining) => { 134 | let currentKyc = ctx.db.core.accountProps.readOne({ 135 | where: { 136 | account: { id }, 137 | key: 'kyc', 138 | source: 'xaman/kyc' 139 | } 140 | }) 141 | 142 | if(currentKyc?.value === true){ 143 | log.debug(`skipping KYC check for ${address}: already approved`) 144 | return 145 | } 146 | 147 | log.debug(`checking KYC for ${address}`) 148 | 149 | let { data } = await fetch(`kyc-status/${address}`) 150 | 151 | writeAccountProps({ 152 | ctx, 153 | account: { id }, 154 | props: { 155 | kyc: data.kycApproved 156 | }, 157 | source: 'xaman/kyc' 158 | }) 159 | 160 | log.debug(`KYC for ${address}: ${data.kycApproved}`) 161 | 162 | log.accumulate.info({ 163 | text: [`%kycChecked KYC checked in %time (${remaining} remaining)`], 164 | data: { 165 | kycChecked: 1 166 | } 167 | }) 168 | } 169 | }) 170 | } 171 | } 172 | 173 | async function crawlAvatar({ ctx, fetch, interval }){ 174 | while(true){ 175 | await scheduleIterator({ 176 | ctx, 177 | type: 'issuer', 178 | task: 'xaman.avatar', 179 | interval, 180 | routine: async ({ id, address }, remaining) => { 181 | log.debug(`checking avatar for ${address}`) 182 | 183 | let { headers } = await fetch( 184 | `${address}.png`, 185 | { redirect: 'manual' } 186 | ) 187 | 188 | let avatar = headers.get('location') 189 | ?
headers.get('location').split('?')[0] 190 | : undefined 191 | 192 | writeAccountProps({ 193 | ctx, 194 | account: { id }, 195 | props: { 196 | icon: avatar 197 | }, 198 | source: 'xaman/avatar' 199 | }) 200 | 201 | log.debug(`avatar for ${address}: ${avatar}`) 202 | 203 | log.accumulate.info({ 204 | text: [`%avatarsChecked avatars checked in %time (${remaining} remaining)`], 205 | data: { 206 | avatarsChecked: 1 207 | } 208 | }) 209 | } 210 | }) 211 | } 212 | } -------------------------------------------------------------------------------- /test/unit/icon-cache.test.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import { expect } from 'chai' 3 | import { createContext } from './env.js' 4 | import { writeAccountProps, writeTokenProps } from '../../src/db/helpers/props.js' 5 | import { updateIconCacheFor } from '../../src/cache/icons.js' 6 | 7 | 8 | const ctx = await createContext() 9 | 10 | const accounts = [ 11 | { 12 | address: 'rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq', 13 | props: { 14 | name: 'GateHub', 15 | icon: 'https://static.xrplmeta.org/icons/gatehub.png', 16 | trust_level: 3 17 | } 18 | }, 19 | { 20 | address: 'rvYAfWj5gh67oV6fW32ZzP3Aw4Eubs59B', 21 | props: { 22 | name: 'Bitstamp', 23 | icon: 'https://static.xrplmeta.org/icons/bitstamp.png', 24 | trust_level: 3 25 | } 26 | } 27 | ] 28 | 29 | const tokens = [ 30 | { 31 | currency: 'USD', 32 | issuer: { 33 | address: accounts[0].address 34 | }, 35 | props: { 36 | name: 'US Dollar', 37 | icon: 'https://static.xrplmeta.org/icons/USD.png', 38 | asset_class: 'fiat' 39 | } 40 | }, 41 | { 42 | currency: 'USD', 43 | issuer: { 44 | address: accounts[1].address 45 | }, 46 | props: { 47 | name: 'US Dollar', 48 | icon: 'https://static.xrplmeta.org/icons/USD.png', 49 | asset_class: 'fiat' 50 | } 51 | } 52 | ] 53 | 54 | for(let { address, props } of accounts){ 55 | writeAccountProps({ 56 | ctx, 57 | account: { 58 | address 59 | }, 60 | props, 61 | source: 'manual' 62 | }) 63 | } 64 | 65 | for(let { currency, issuer, props } of tokens){ 66 | writeTokenProps({ 67 | ctx, 68 | token: { 69 | currency, 70 | issuer 71 | }, 72 | props, 73 | source: 'manual' 74 | }) 75 | } 76 | 77 | describe( 78 | 'Icon Cache', 79 | () => { 80 | it( 81 | 'should download token icons according to icon prop', 82 | async () => { 83 | for(let token of tokens){ 84 | await updateIconCacheFor({ 85 | ctx, 86 | token: { 87 | currency: token.currency, 88 | issuer: token.issuer 89 | } 90 | }) 91 | } 92 | 93 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05.png`)).to.be.true 94 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@512.png`)).to.be.true 95 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@256.png`)).to.be.true 96 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@128.png`)).to.be.true 97 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@64.png`)).to.be.true 98 | } 99 | ) 100 | 101 | it( 102 | 'should link the cached icon to the token cache', 103 | async () => { 104 | let tokenCache1 = ctx.db.cache.tokens.readOne({ 105 | where: { 106 | token: 2 107 | } 108 | }) 109 | 110 | let tokenCache2 = ctx.db.cache.tokens.readOne({ 111 | where: { 112 | token: 3 113 | } 114 | }) 115 | 116 | expect(tokenCache1.cachedIcons).to.be.deep.equal({ 117 | [tokens[0].props.icon]: 'C676A0DE05.png' 118 | }) 119 | 120 | expect(tokenCache2.cachedIcons).to.be.deep.equal({ 121 | [tokens[1].props.icon]: 'C676A0DE05.png' 
122 | }) 123 | } 124 | ) 125 | 126 | it( 127 | 'should unlink the cached icon if no longer in token props', 128 | async () => { 129 | writeTokenProps({ 130 | ctx, 131 | token: { 132 | currency: tokens[0].currency, 133 | issuer: tokens[0].issuer 134 | }, 135 | props: { 136 | ...tokens[0].props, 137 | icon: undefined 138 | }, 139 | source: 'manual' 140 | }) 141 | 142 | await updateIconCacheFor({ 143 | ctx, 144 | token: { 145 | currency: tokens[0].currency, 146 | issuer: tokens[0].issuer 147 | } 148 | }) 149 | 150 | let tokenCache = ctx.db.cache.tokens.readOne({ 151 | where: { 152 | token: 2 153 | } 154 | }) 155 | 156 | expect(tokenCache.cachedIcons).to.be.deep.equal({}) 157 | } 158 | ) 159 | 160 | it( 161 | 'delete the icon if it has no more users', 162 | async () => { 163 | writeTokenProps({ 164 | ctx, 165 | token: { 166 | currency: tokens[1].currency, 167 | issuer: tokens[1].issuer 168 | }, 169 | props: { 170 | ...tokens[1].props, 171 | icon: undefined 172 | }, 173 | source: 'manual' 174 | }) 175 | 176 | await updateIconCacheFor({ 177 | ctx, 178 | token: { 179 | currency: tokens[1].currency, 180 | issuer: tokens[1].issuer 181 | } 182 | }) 183 | 184 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05.png`)).to.be.false 185 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@512.png`)).to.be.false 186 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@256.png`)).to.be.false 187 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@128.png`)).to.be.false 188 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/C676A0DE05@64.png`)).to.be.false 189 | } 190 | ) 191 | 192 | it( 193 | 'should do the same for issuer icons', 194 | async () => { 195 | for(let account of accounts){ 196 | await updateIconCacheFor({ 197 | ctx, 198 | account: { 199 | address: account.address 200 | } 201 | }) 202 | } 203 | 204 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269.png`)).to.be.true 205 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@512.png`)).to.be.true 206 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@256.png`)).to.be.true 207 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@128.png`)).to.be.true 208 | expect(fs.existsSync(`${ctx.config.node.dataDir}/media/icons/0D821A3269@64.png`)).to.be.true 209 | 210 | let tokenCache = ctx.db.cache.tokens.readOne({ 211 | where: { 212 | token: 2 213 | } 214 | }) 215 | 216 | expect(tokenCache.cachedIcons).to.be.deep.equal({ 217 | [accounts[0].props.icon]: '0D821A3269.png' 218 | }) 219 | } 220 | ) 221 | } 222 | ) -------------------------------------------------------------------------------- /src/srv/http.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import Router from '@koa/router' 4 | import sendFile from 'koa-send' 5 | import log from '@mwni/log' 6 | import * as procedures from './api.js' 7 | import { getCachedIconPath, iconSizes } from '../cache/icons.js' 8 | import { executeProcedure } from './worker.js' 9 | 10 | 11 | export function createRouter({ ctx }){ 12 | let router = new Router() 13 | 14 | router.get( 15 | ['/', '/info', '/server'], 16 | async svc => { 17 | await handle({ 18 | ctx, 19 | svc, 20 | procedure: 'server_info' 21 | }) 22 | } 23 | ) 24 | 25 | router.get( 26 | '/ledger', 27 | async svc => { 28 | await handle({ 29 | ctx, 30 | svc, 31 | procedure: 'ledger', 32 | 
params: { 33 | ...parsePoint(svc.query) 34 | } 35 | }) 36 | } 37 | ) 38 | 39 | router.get( 40 | '/tokens', 41 | async svc => { 42 | await handle({ 43 | ctx, 44 | svc, 45 | procedure: 'tokens', 46 | params: { 47 | ...svc.query, 48 | expand_meta: svc.query.expand_meta !== undefined, 49 | include_sources: svc.query.include_sources !== undefined, 50 | include_changes: svc.query.include_changes !== undefined, 51 | decode_currency: svc.query.decode_currency !== undefined, 52 | original_icons: svc.query.original_icons !== undefined, 53 | name_like: svc.query.name_like, 54 | trust_levels: svc.query.trust_levels 55 | ? svc.query.trust_levels.split(',') 56 | : undefined, 57 | prefer_sources: svc.query.prefer_sources 58 | ? svc.query.prefer_sources.split(',') 59 | : undefined 60 | } 61 | }) 62 | } 63 | ) 64 | 65 | router.get( 66 | '/tokens/exchanges/:base/:quote', 67 | async svc => { 68 | await handle({ 69 | ctx, 70 | svc, 71 | procedure: 'token_exchanges', 72 | params: { 73 | base: parseTokenURI(svc.params.base), 74 | quote: parseTokenURI(svc.params.quote), 75 | newestFirst: svc.query.newest_first !== undefined, 76 | ...parseRange(svc.query) 77 | } 78 | }) 79 | } 80 | ) 81 | 82 | router.get( 83 | '/token/:token', 84 | async svc => { 85 | await handle({ 86 | ctx, 87 | svc, 88 | procedure: 'token', 89 | params: { 90 | token: parseTokenURI(svc.params.token), 91 | expand_meta: svc.query.expand_meta !== undefined, 92 | include_sources: svc.query.include_sources !== undefined, 93 | include_changes: svc.query.include_changes !== undefined, 94 | decode_currency: svc.query.decode_currency !== undefined, 95 | original_icons: svc.query.original_icons !== undefined, 96 | prefer_sources: svc.query.prefer_sources 97 | ? svc.query.prefer_sources.split(',') 98 | : undefined 99 | } 100 | }) 101 | } 102 | ) 103 | 104 | router.get( 105 | '/token/:token/series/:metric', 106 | async svc => { 107 | await handle({ 108 | ctx, 109 | svc, 110 | procedure: 'token_series', 111 | params: { 112 | token: parseTokenURI(svc.params.token), 113 | metric: svc.params.metric, 114 | ...parseRange(svc.query) 115 | } 116 | }) 117 | } 118 | ) 119 | 120 | router.get( 121 | '/token/:token/holders', 122 | async svc => { 123 | await handle({ 124 | ctx, 125 | svc, 126 | procedure: 'token_holders', 127 | params: { 128 | ...svc.query, 129 | token: parseTokenURI(svc.params.token), 130 | ...parsePoint(svc.query) 131 | } 132 | }) 133 | } 134 | ) 135 | 136 | router.get( 137 | '/icon/:file', 138 | async svc => { 139 | try{ 140 | var [hash, fileType] = svc.params.file.split('.') 141 | 142 | if(!hash || !fileType) 143 | throw 'bad' 144 | }catch{ 145 | svc.status = 400 146 | svc.body = 'Invalid icon URL. The URL should consist of a hash and file extension, such as C0FFE.png' 147 | return 148 | } 149 | 150 | let size 151 | let suffix 152 | 153 | if(svc.query.size){ 154 | size = parseInt(svc.query.size) 155 | 156 | if(!iconSizes.includes(size)){ 157 | svc.status = 400 158 | svc.body = `The specified icon size "${svc.query.size}" is not available. Available sizes are: ${iconSizes}` 159 | return 160 | } 161 | 162 | suffix = `@${size}` 163 | } 164 | 165 | let iconPath = getCachedIconPath({ ctx, hash, suffix, fileType }) 166 | 167 | if(!fs.existsSync(iconPath)){ 168 | svc.status = 404 169 | svc.body = 'This icon does not exist. Make sure to only use icon URLs from the live token manifest.'
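// a miss here means the hash was never cached, or the icon was garbage-collected after its last user (token/account prop) disappeared – see deleteIcon() in src/cache/icons.js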
170 | return 171 | } 172 | 173 | await sendFile( 174 | svc, 175 | path.basename(iconPath), 176 | { 177 | root: path.dirname(iconPath) 178 | } 179 | ) 180 | } 181 | ) 182 | 183 | return router 184 | } 185 | 186 | 187 | async function handle({ ctx, svc, procedure, params = {} }){ 188 | if(!procedures[procedure]){ 189 | svc.throw(404) 190 | return 191 | } 192 | 193 | try{ 194 | svc.type = 'json' 195 | svc.body = await executeProcedure({ 196 | ctx, 197 | procedure, 198 | params 199 | }) 200 | }catch(e){ 201 | if(e.expose){ 202 | delete e.expose 203 | 204 | svc.status = 400 205 | svc.body = e 206 | }else{ 207 | svc.status = 500 208 | svc.body = { 209 | message: `Internal error while handling your request.` 210 | } 211 | log.warn(`internal error while handling procedure "${procedure}":\n${e.stack}\nparams:`, params) 212 | } 213 | } 214 | } 215 | 216 | 217 | function parseTokenURI(uri){ 218 | let [currency, issuer] = uri.split(':') 219 | 220 | return { 221 | currency, 222 | issuer 223 | } 224 | } 225 | 226 | function parseRange({ sequence_start, sequence_end, sequence_interval, time_start, time_end, time_interval }){ 227 | let range = {} 228 | 229 | if(sequence_start !== undefined){ 230 | range.sequence = { 231 | start: parseInt(sequence_start), 232 | end: sequence_end 233 | ? parseInt(sequence_end) 234 | : undefined 235 | } 236 | 237 | if(sequence_interval) 238 | range.sequence.interval = parseInt(sequence_interval) 239 | }else if(time_start !== undefined){ 240 | range.time = { 241 | start: parseInt(time_start), 242 | end: time_end 243 | ? parseInt(time_end) 244 | : undefined 245 | } 246 | 247 | if(time_interval) 248 | range.time.interval = parseInt(time_interval) 249 | } 250 | 251 | return range 252 | } 253 | 254 | function parsePoint({ sequence, time }){ 255 | if(sequence !== undefined){ 256 | return { sequence: parseInt(sequence) } 257 | }else if(time !== undefined){ 258 | return { time: parseInt(time) } 259 | } 260 | } -------------------------------------------------------------------------------- /src/db/helpers/props.js: -------------------------------------------------------------------------------- 1 | import { isSameToken } from '@xrplkit/tokens' 2 | import { readTokenMetrics } from './tokenmetrics.js' 3 | import { 4 | markCacheDirtyForAccountIcons, 5 | markCacheDirtyForAccountProps, 6 | markCacheDirtyForTokenIcons, 7 | markCacheDirtyForTokenProps 8 | } from '../../cache/todo.js' 9 | 10 | 11 | 12 | export function diffMultiTokenProps({ ctx, tokens, source }){ 13 | let propIds = [] 14 | 15 | for(let { currency, issuer, props } of tokens){ 16 | writeTokenProps({ 17 | ctx, 18 | token: { 19 | currency, 20 | issuer 21 | }, 22 | props, 23 | source 24 | }) 25 | 26 | for(let key of Object.keys(props)){ 27 | let prop = ctx.db.core.tokenProps.readOne({ 28 | where: { 29 | token: { 30 | currency, 31 | issuer 32 | }, 33 | key, 34 | source 35 | } 36 | }) 37 | 38 | if(prop) 39 | propIds.push(prop.id) 40 | } 41 | } 42 | 43 | let staleProps = ctx.db.core.tokenProps.readMany({ 44 | where: { 45 | NOT: { 46 | id: { 47 | in: propIds 48 | } 49 | }, 50 | source 51 | }, 52 | include: { 53 | token: true 54 | } 55 | }) 56 | 57 | ctx.db.core.tokenProps.deleteMany({ 58 | where: { 59 | id: { 60 | in: staleProps.map( 61 | ({ id }) => id 62 | ) 63 | } 64 | } 65 | }) 66 | 67 | let deletionAffectedTokens = staleProps 68 | .map(({ token }) => token) 69 | .filter( 70 | (token, index, tokens) => index === tokens.findIndex( 71 | ({ currency, issuer }) => isSameToken(token, { currency, issuer }) 72 | ) 73 | ) 74 | 75 | 
for(let token of deletionAffectedTokens){ 76 | markCacheDirtyForTokenProps({ ctx, token }) 77 | } 78 | } 79 | 80 | export function diffMultiAccountProps({ ctx, accounts, source }){ 81 | let propIds = [] 82 | 83 | for(let { address, props } of accounts){ 84 | writeAccountProps({ 85 | ctx, 86 | account: { 87 | address 88 | }, 89 | props, 90 | source 91 | }) 92 | 93 | for(let key of Object.keys(props)){ 94 | let prop = ctx.db.core.accountProps.readOne({ 95 | where: { 96 | account: { 97 | address 98 | }, 99 | key, 100 | source 101 | } 102 | }) 103 | 104 | if(prop) 105 | propIds.push(prop.id) 106 | } 107 | } 108 | 109 | let staleProps = ctx.db.core.accountProps.readMany({ 110 | where: { 111 | NOT: { 112 | id: { 113 | in: propIds 114 | } 115 | }, 116 | source 117 | }, 118 | include: { 119 | account: true 120 | } 121 | }) 122 | 123 | ctx.db.core.accountProps.deleteMany({ 124 | where: { 125 | id: { 126 | in: staleProps.map( 127 | ({ id }) => id 128 | ) 129 | } 130 | } 131 | }) 132 | 133 | let deletionAffectedAccounts = staleProps 134 | .map(({ account }) => account) 135 | .filter( 136 | (account, index, accounts) => index === accounts.findIndex( 137 | ({ address }) => address === account.address 138 | ) 139 | ) 140 | 141 | for(let account of deletionAffectedAccounts){ 142 | markCacheDirtyForAccountProps({ ctx, account }) 143 | } 144 | } 145 | 146 | 147 | export function readTokenProps({ ctx, token }){ 148 | let props = ctx.db.core.tokenProps.readMany({ 149 | where: { 150 | token 151 | } 152 | }) 153 | 154 | let issuerGivenTrustLevelProps = [] 155 | let issuerProps = readAccountProps({ 156 | ctx, 157 | account: token.issuer 158 | ? token.issuer 159 | : ctx.db.core.tokens.readOne({ where: token }).issuer 160 | }) 161 | 162 | for(let { key, value, source } of issuerProps){ 163 | if(key !== 'trust_level') 164 | continue 165 | 166 | let existingTrustProp = props.find( 167 | prop => prop.key === 'trust_level' && prop.source === source 168 | ) 169 | 170 | if(existingTrustProp){ 171 | existingTrustProp.value = Math.max(existingTrustProp.value, 1) 172 | }else{ 173 | issuerGivenTrustLevelProps.push({ 174 | key: 'trust_level', 175 | value, 176 | source 177 | }) 178 | } 179 | } 180 | 181 | if(issuerGivenTrustLevelProps.length > 0){ 182 | let { holders } = readTokenMetrics({ 183 | ctx, 184 | token, 185 | metrics: { 186 | holders: true 187 | } 188 | }) 189 | 190 | if(holders > 0){ 191 | props.push(...issuerGivenTrustLevelProps) 192 | } 193 | } 194 | 195 | return props.map(({ key, value, source }) => ({ key, value, source })) 196 | } 197 | 198 | export function writeTokenProps({ ctx, token, props, source }){ 199 | if(Object.keys(props).length === 0) 200 | return 201 | 202 | ctx.db.core.tx(() => { 203 | for(let [key, value] of Object.entries(props)){ 204 | if(value == null){ 205 | ctx.db.core.tokenProps.deleteOne({ 206 | where: { 207 | token, 208 | key, 209 | source 210 | } 211 | }) 212 | }else{ 213 | ctx.db.core.tokenProps.createOne({ 214 | data: { 215 | token, 216 | key, 217 | value, 218 | source 219 | } 220 | }) 221 | } 222 | } 223 | }) 224 | 225 | markCacheDirtyForTokenProps({ ctx, token }) 226 | 227 | if(props.hasOwnProperty('icon')) 228 | markCacheDirtyForTokenIcons({ ctx, token }) 229 | } 230 | 231 | 232 | export function readAccountProps({ ctx, account }){ 233 | let props = ctx.db.core.accountProps.readMany({ 234 | where: { 235 | account 236 | } 237 | }) 238 | 239 | let kycProps = props.filter( 240 | prop => prop.key === 'kyc' && prop.value === true 241 | ) 242 | 243 | for(let { source } of kycProps){ 244 
| let trustProp = props.find( 245 | prop => prop.key === 'trust_level' && prop.source === source 246 | ) 247 | 248 | if(trustProp){ 249 | trustProp.value = Math.max(trustProp.value, 1) 250 | }else{ 251 | props.push({ 252 | key: 'trust_level', 253 | value: 1, 254 | source 255 | }) 256 | } 257 | } 258 | 259 | let { domain } = ctx.db.core.accounts.readOne({ 260 | where: account, 261 | select: { 262 | domain: true 263 | } 264 | }) 265 | 266 | if(domain) 267 | props.push({ 268 | key: 'domain', 269 | value: domain, 270 | source: 'ledger' 271 | }) 272 | 273 | 274 | return props.map(({ key, value, source }) => ({ key, value, source })) 275 | } 276 | 277 | export function writeAccountProps({ ctx, account, props, source }){ 278 | ctx.db.core.tx(() => { 279 | for(let [key, value] of Object.entries(props)){ 280 | if(value == null){ 281 | ctx.db.core.accountProps.deleteOne({ 282 | where: { 283 | account, 284 | key, 285 | source 286 | } 287 | }) 288 | }else{ 289 | ctx.db.core.accountProps.createOne({ 290 | data: { 291 | account, 292 | key, 293 | value, 294 | source 295 | } 296 | }) 297 | } 298 | } 299 | }) 300 | 301 | markCacheDirtyForAccountProps({ ctx, account }) 302 | 303 | if(props.hasOwnProperty('icon')) 304 | markCacheDirtyForAccountIcons({ ctx, account }) 305 | } 306 | 307 | 308 | export function clearTokenProps({ ctx, token, source }){ 309 | let deletedNum = ctx.db.core.tokenProps.deleteMany({ 310 | where: { 311 | token, 312 | source 313 | } 314 | }) 315 | 316 | if(deletedNum > 0){ 317 | markCacheDirtyForTokenProps({ ctx, token }) 318 | markCacheDirtyForTokenIcons({ ctx, token }) 319 | } 320 | } 321 | 322 | export function clearAccountProps({ ctx, account, source }){ 323 | let deletedNum = ctx.db.core.accountProps.deleteMany({ 324 | where: { 325 | account, 326 | source 327 | } 328 | }) 329 | 330 | if(deletedNum > 0){ 331 | markCacheDirtyForAccountProps({ ctx, account }) 332 | markCacheDirtyForAccountIcons({ ctx, account }) 333 | } 334 | } -------------------------------------------------------------------------------- /src/cache/icons.js: -------------------------------------------------------------------------------- 1 | import fs from 'fs' 2 | import path from 'path' 3 | import sharp from 'sharp' 4 | import log from '@mwni/log' 5 | import { createHash } from 'crypto' 6 | import { unixNow } from '@xrplkit/time' 7 | import { readAccountProps, readTokenProps } from '../db/helpers/props.js' 8 | import { validate as validateURL } from '../lib/url.js' 9 | import { createFetch } from '../lib/fetch.js' 10 | import { getAccountId, getTokenId } from '../db/helpers/common.js' 11 | import { getCommonTokenCacheFields } from './tokens.js' 12 | 13 | 14 | const mimeTypes = { 15 | 'image/jpeg': 'jpg', 16 | 'image/png': 'png', 17 | 'image/gif': 'gif', 18 | 'image/webp': 'webp', 19 | 'image/svg+xml': 'svg' 20 | } 21 | 22 | export const iconSizes = [ 23 | 512, 24 | 256, 25 | 128, 26 | 64 27 | ] 28 | 29 | export async function updateIconCacheFor({ ctx, token, account }){ 30 | let user 31 | let targetTokens 32 | 33 | if(token){ 34 | token.id = getTokenId({ ctx, token }) 35 | user = { 36 | userType: 'token', 37 | userId: token.id 38 | } 39 | targetTokens = [token] 40 | }else if(account){ 41 | account.id = getAccountId({ ctx, account }) 42 | user = { 43 | userType: 'account', 44 | userId: account.id 45 | } 46 | targetTokens = ctx.db.core.tokens.readMany({ 47 | where: { 48 | issuer: account 49 | } 50 | }) 51 | }else 52 | throw new Error(`must specify either "token" or "account"`) 53 | 54 | 55 | let using = 
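// (all icon records this token/account is currently linked to – compared against the fresh prop URLs below, so stale links can be removed and orphaned icons deleted)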
ctx.db.cache.iconUsers.readMany({ 56 | where: { 57 | ...user 58 | }, 59 | include: { 60 | icon: true 61 | } 62 | }) 63 | 64 | let props = token 65 | ? readTokenProps({ ctx, token }) 66 | : readAccountProps({ ctx, account }) 67 | 68 | let urls = props 69 | .filter(prop => prop.key === 'icon') 70 | .map(prop => prop.value) 71 | .filter((url, i, urls) => urls.indexOf(url) === i) 72 | .filter(validateURL) 73 | 74 | log.debug(`got ${urls.length} icon URL(s) for ${token ? `token ${token.id}` : `account ${account.id}`}`) 75 | 76 | for(let url of urls){ 77 | let cache = ctx.db.cache.icons.readOne({ 78 | where: { 79 | url 80 | } 81 | }) 82 | 83 | if(!cache){ 84 | cache = ctx.db.cache.icons.createOne({ 85 | data: { 86 | url 87 | } 88 | }) 89 | } 90 | 91 | ctx.db.cache.iconUsers.createOne({ 92 | data: { 93 | icon: cache, 94 | ...user 95 | } 96 | }) 97 | 98 | let lifetime = cache.error 99 | ? 60 * 10 100 | : (ctx.config.cache?.icons?.lifetime || 60 * 60) 101 | 102 | if(!cache.timeUpdated || cache.timeUpdated < unixNow() - lifetime){ 103 | try{ 104 | let { hash, fileType } = await downloadAndProcessIcon({ ctx, url }) 105 | 106 | cache = ctx.db.cache.icons.updateOne({ 107 | data: { 108 | hash, 109 | fileType, 110 | timeUpdated: unixNow() 111 | }, 112 | where: { 113 | id: cache.id 114 | } 115 | }) 116 | }catch(error){ 117 | log.debug(`failed to download ${url}: ${error.message}`) 118 | 119 | ctx.db.cache.icons.updateOne({ 120 | data: { 121 | timeUpdated: unixNow(), 122 | error: error.message 123 | }, 124 | where: { 125 | id: cache.id 126 | } 127 | }) 128 | 129 | continue 130 | } 131 | }else{ 132 | log.debug(`icon ${url} not yet due for renewal`) 133 | } 134 | 135 | for(let token of targetTokens){ 136 | linkCachedIconToTokenCache({ 137 | ctx, 138 | token, 139 | cachedIcon: cache 140 | }) 141 | } 142 | } 143 | 144 | let previouslyUsedUrls = using 145 | .map(use => use.icon.url) 146 | .filter((url, i, urls) => urls.indexOf(url) === i) 147 | 148 | let removedUsing = using 149 | .filter(use => !urls.includes(use.icon.url)) 150 | 151 | let removedUrls = previouslyUsedUrls 152 | .filter(url => !urls.includes(url)) 153 | 154 | for(let url of removedUrls){ 155 | for(let token of targetTokens){ 156 | unlinkCachedIconFromTokenCache({ ctx, token, url }) 157 | } 158 | } 159 | 160 | for(let use of removedUsing){ 161 | ctx.db.cache.iconUsers.deleteOne({ 162 | where: { 163 | id: use.id 164 | } 165 | }) 166 | } 167 | 168 | for(let url of previouslyUsedUrls){ 169 | let userCount = ctx.db.cache.iconUsers.count({ 170 | where: { 171 | icon:{ 172 | url 173 | } 174 | } 175 | }) 176 | 177 | if(Number(userCount) === 0){ 178 | log.debug(`icon "${url}" has no more users - deleting it`) 179 | deleteIcon({ ctx, url }) 180 | } 181 | } 182 | } 183 | 184 | 185 | function linkCachedIconToTokenCache({ ctx, token, cachedIcon }){ 186 | let tokenCache = ctx.db.cache.tokens.readOne({ 187 | where: { 188 | token: token.id 189 | } 190 | }) 191 | 192 | if(!tokenCache){ 193 | ctx.db.cache.tokens.createOne({ 194 | data: { 195 | ...getCommonTokenCacheFields({ ctx, token }), 196 | cachedIcons: { 197 | [cachedIcon.url]: `${cachedIcon.hash}.${cachedIcon.fileType}` 198 | } 199 | } 200 | }) 201 | }else{ 202 | ctx.db.cache.tokens.updateOne({ 203 | data: { 204 | cachedIcons: { 205 | ...tokenCache.cachedIcons, 206 | [cachedIcon.url]: `${cachedIcon.hash}.${cachedIcon.fileType}` 207 | } 208 | }, 209 | where: { 210 | id: tokenCache.id 211 | } 212 | }) 213 | } 214 | 215 | log.debug(`linked cached icon ${cachedIcon.url} -> 
${cachedIcon.hash}.${cachedIcon.fileType} to token ${token.id}`) 216 | } 217 | 218 | function unlinkCachedIconFromTokenCache({ ctx, token, url }){ 219 | let tokenCache = ctx.db.cache.tokens.readOne({ 220 | where: { 221 | token: token.id 222 | } 223 | }) 224 | 225 | ctx.db.cache.tokens.updateOne({ 226 | data: { 227 | cachedIcons: { 228 | ...tokenCache.cachedIcons, 229 | [url]: undefined 230 | } 231 | }, 232 | where: { 233 | id: tokenCache.id 234 | } 235 | }) 236 | 237 | log.debug(`unlinked cached icon ${url} from token ${token.id}`) 238 | } 239 | 240 | async function downloadAndProcessIcon({ ctx, url }){ 241 | let fetch = createFetch() 242 | let res = await fetch(url, { raw: true }) 243 | let mime = res.headers.get('content-type') 244 | let fileType = mimeTypes[mime] 245 | 246 | if(!fileType) 247 | throw new Error(`unsupported format: ${mime}`) 248 | 249 | let buffer = Buffer.from(await res.arrayBuffer()) 250 | let hash = createHash('md5') 251 | .update(buffer) 252 | .digest('hex') 253 | .slice(0, 10) 254 | .toUpperCase() 255 | 256 | let makePath = suffix => getCachedIconPath({ ctx, hash, suffix, fileType }) 257 | 258 | fs.writeFileSync( 259 | makePath(), 260 | buffer 261 | ) 262 | 263 | if(fileType !== 'svg'){ 264 | for(let size of iconSizes){ 265 | await sharp(buffer) 266 | .png() 267 | .resize(size, size, { fit: 'cover' }) 268 | .toFile(makePath(`@${size}`)) 269 | } 270 | } 271 | 272 | log.debug(`downloaded ${url} (hash ${hash})`) 273 | 274 | return { hash, fileType } 275 | } 276 | 277 | function deleteIcon({ ctx, url }){ 278 | let icon = ctx.db.cache.icons.readOne({ 279 | where: { 280 | url 281 | } 282 | }) 283 | 284 | fs.rmSync( 285 | getCachedIconPath({ ctx, ...icon }), 286 | { force: true } 287 | ) 288 | 289 | if(icon.fileType !== 'svg'){ 290 | for(let size of iconSizes){ 291 | fs.rmSync( 292 | getCachedIconPath({ ctx, ...icon, suffix: `@${size}` }), 293 | { force: true } 294 | ) 295 | } 296 | } 297 | 298 | ctx.db.cache.icons.deleteOne({ 299 | where: { 300 | id: icon.id 301 | } 302 | }) 303 | } 304 | 305 | function getIconCacheDir({ ctx }){ 306 | let dir = path.join(ctx.config.node.dataDir, 'media', 'icons') 307 | 308 | if(!fs.existsSync(dir)) 309 | fs.mkdirSync(dir, { recursive: true }) 310 | 311 | return dir 312 | } 313 | 314 | export function getCachedIconPath({ ctx, hash, suffix, fileType }){ 315 | return path.join(getIconCacheDir({ ctx }), `${hash}${suffix || ''}.${fileType}`) 316 | } -------------------------------------------------------------------------------- /src/db/schemas/cache.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "XRPL Meta Cache Database Structure", 3 | "type": "object", 4 | "properties": { 5 | "tokens": { 6 | "type": "array", 7 | "items": { 8 | "$ref": "#/definitions/Token" 9 | } 10 | }, 11 | "icons": { 12 | "type": "array", 13 | "items": { 14 | "$ref": "#/definitions/Icon" 15 | } 16 | }, 17 | "iconUsers": { 18 | "type": "array", 19 | "items": { 20 | "$ref": "#/definitions/IconUser" 21 | } 22 | }, 23 | "todos": { 24 | "type": "array", 25 | "items": { 26 | "$ref": "#/definitions/Todo" 27 | } 28 | } 29 | }, 30 | "definitions": { 31 | "Token": { 32 | "type": "object", 33 | "properties": { 34 | "id": { 35 | "type": "integer", 36 | "id": true 37 | }, 38 | "token": { 39 | "type": "integer" 40 | }, 41 | "tokenCurrencyHex": { 42 | "type": "string" 43 | }, 44 | "tokenCurrencyUtf8": { 45 | "type": "string" 46 | }, 47 | "tokenName": { 48 | "type": "string" 49 | }, 50 | "tokenProps": { 51 | "type": "any" 52 | }, 
53 | "issuerAddress": { 54 | "type": "string" 55 | }, 56 | "issuerName": { 57 | "type": "string" 58 | }, 59 | "issuerProps": { 60 | "type": "any" 61 | }, 62 | "cachedIcons": { 63 | "type": "any" 64 | }, 65 | "trustLevel": { 66 | "type": "integer", 67 | "default": 0 68 | }, 69 | "trustlines": { 70 | "type": "integer", 71 | "default": 0 72 | }, 73 | "trustlinesDelta24H": { 74 | "type": "integer", 75 | "default": 0 76 | }, 77 | "trustlinesPercent24H": { 78 | "type": "number", 79 | "default": 0 80 | }, 81 | "trustlinesDelta7D": { 82 | "type": "integer", 83 | "default": 0 84 | }, 85 | "trustlinesPercent7D": { 86 | "type": "number", 87 | "default": 0 88 | }, 89 | "holders": { 90 | "type": "integer", 91 | "default": 0 92 | }, 93 | "holdersDelta24H": { 94 | "type": "integer", 95 | "default": 0 96 | }, 97 | "holdersPercent24H": { 98 | "type": "number", 99 | "default": 0 100 | }, 101 | "holdersDelta7D": { 102 | "type": "integer", 103 | "default": 0 104 | }, 105 | "holdersPercent7D": { 106 | "type": "number", 107 | "default": 0 108 | }, 109 | "supply": { 110 | "type": "string", 111 | "format": "xrpl/xfl", 112 | "default": "0" 113 | }, 114 | "supplyDelta24H": { 115 | "type": "string", 116 | "format": "xrpl/xfl", 117 | "default": "0" 118 | }, 119 | "supplyPercent24H": { 120 | "type": "number", 121 | "default": 0 122 | }, 123 | "supplyDelta7D": { 124 | "type": "string", 125 | "format": "xrpl/xfl", 126 | "default": "0" 127 | }, 128 | "supplyPercent7D": { 129 | "type": "number", 130 | "default": 0 131 | }, 132 | "marketcap": { 133 | "type": "string", 134 | "format": "xrpl/xfl", 135 | "default": "0" 136 | }, 137 | "marketcapDelta24H": { 138 | "type": "string", 139 | "format": "xrpl/xfl", 140 | "default": "0" 141 | }, 142 | "marketcapPercent24H": { 143 | "type": "number", 144 | "default": 0 145 | }, 146 | "marketcapDelta7D": { 147 | "type": "string", 148 | "format": "xrpl/xfl", 149 | "default": "0" 150 | }, 151 | "marketcapPercent7D": { 152 | "type": "number", 153 | "default": 0 154 | }, 155 | "price": { 156 | "type": "string", 157 | "format": "xrpl/xfl", 158 | "default": "0" 159 | }, 160 | "pricePercent24H": { 161 | "type": "number", 162 | "default": 0 163 | }, 164 | "pricePercent7D": { 165 | "type": "number", 166 | "default": 0 167 | }, 168 | "volume24H": { 169 | "type": "string", 170 | "format": "xrpl/xfl", 171 | "default": "0" 172 | }, 173 | "volume7D": { 174 | "type": "string", 175 | "format": "xrpl/xfl", 176 | "default": "0" 177 | }, 178 | "exchanges24H": { 179 | "type": "integer", 180 | "default": 0 181 | }, 182 | "exchanges7D": { 183 | "type": "integer", 184 | "default": 0 185 | }, 186 | "takers24H": { 187 | "type": "integer", 188 | "default": 0 189 | }, 190 | "takers7D": { 191 | "type": "integer", 192 | "default": 0 193 | } 194 | }, 195 | "required": [ 196 | "token", 197 | "tokenCurrencyHex", 198 | "tokenCurrencyUtf8", 199 | "issuerAddress" 200 | ], 201 | "unique": [ 202 | "token" 203 | ], 204 | "index": [ 205 | "tokenCurrencyHex", 206 | "tokenName", 207 | "issuerAddress", 208 | "issuerName", 209 | "trustLevel", 210 | "trustlines", 211 | "trustlinesDelta24H", 212 | "trustlinesPercent24H", 213 | "trustlinesDelta7D", 214 | "trustlinesPercent7D", 215 | "holders", 216 | "holdersDelta24H", 217 | "holdersPercent24H", 218 | "holdersDelta7D", 219 | "holdersPercent7D", 220 | "supply", 221 | "supplyDelta24H", 222 | "supplyPercent24H", 223 | "supplyDelta7D", 224 | "supplyPercent7D", 225 | "marketcap", 226 | "marketcapDelta24H", 227 | "marketcapPercent24H", 228 | "marketcapDelta7D", 229 | "marketcapPercent7D", 
230 | "price", 231 | "pricePercent24H", 232 | "pricePercent7D", 233 | "volume24H", 234 | "volume7D", 235 | "exchanges24H", 236 | "exchanges7D", 237 | "takers24H", 238 | "takers7D" 239 | ] 240 | }, 241 | "Icon": { 242 | "type": "object", 243 | "properties": { 244 | "id": { 245 | "type": "integer", 246 | "id": true 247 | }, 248 | "url": { 249 | "type": "string" 250 | }, 251 | "hash": { 252 | "type": "string" 253 | }, 254 | "fileType": { 255 | "type": "string" 256 | }, 257 | "timeUpdated": { 258 | "type": "integer" 259 | }, 260 | "error": { 261 | "type": "string" 262 | }, 263 | "users": { 264 | "type": "array", 265 | "items": { 266 | "$ref": "#/definitions/IconUser" 267 | } 268 | } 269 | }, 270 | "required": [ 271 | "url" 272 | ], 273 | "unique": [ 274 | "url" 275 | ], 276 | "index": [ 277 | "timeUpdated" 278 | ] 279 | }, 280 | "IconUser": { 281 | "type": "object", 282 | "properties": { 283 | "id": { 284 | "type": "integer", 285 | "id": true 286 | }, 287 | "icon": { 288 | "$ref": "#/definitions/Icon" 289 | }, 290 | "userType": { 291 | "type": "string", 292 | "enum": [ 293 | "account", 294 | "token" 295 | ] 296 | }, 297 | "userId": { 298 | "type": "integer" 299 | } 300 | }, 301 | "required": [ 302 | "icon", 303 | "userType", 304 | "userId" 305 | ], 306 | "unique": [ 307 | ["icon", "userType", "userId"] 308 | ], 309 | "index": [ 310 | ["userType", "userId"] 311 | ] 312 | }, 313 | "Todo": { 314 | "type": "object", 315 | "properties": { 316 | "id": { 317 | "type": "integer", 318 | "id": true 319 | }, 320 | "task": { 321 | "type": "string", 322 | "enum": [ 323 | "account.props", 324 | "account.icons", 325 | "token.props", 326 | "token.exchanges", 327 | "token.metrics.trustlines", 328 | "token.metrics.holders", 329 | "token.metrics.supply", 330 | "token.metrics.marketcap", 331 | "token.icons" 332 | ] 333 | }, 334 | "subject": { 335 | "type": "integer" 336 | } 337 | }, 338 | "required": [ 339 | "task", 340 | "subject" 341 | ], 342 | "unique": [ 343 | ["task", "subject"] 344 | ], 345 | "index": [ 346 | "task" 347 | ] 348 | } 349 | } 350 | } -------------------------------------------------------------------------------- /src/cache/tokens.js: -------------------------------------------------------------------------------- 1 | import log from '@mwni/log' 2 | import { sub, mul, div, min, gt } from '@xrplkit/xfl' 3 | import { currencyHexToUTF8 } from '@xrplkit/tokens' 4 | import { readLedgerAt, readMostRecentLedger } from '../db/helpers/ledgers.js' 5 | import { readTokenMetrics } from '../db/helpers/tokenmetrics.js' 6 | import { readTokenExchangeAligned, readTokenExchangeCount, readTokenExchangeUniqueTakerCount, readTokenVolume } from '../db/helpers/tokenexchanges.js' 7 | import { readAccountProps, readTokenProps } from '../db/helpers/props.js' 8 | 9 | 10 | const maxChangePercent = 999999999 11 | const metricInts = ['trustlines', 'holders'] 12 | 13 | 14 | 15 | export function updateCacheForTokenProps({ ctx, token }){ 16 | if(ctx.backwards) 17 | return 18 | 19 | let props = readTokenProps({ ctx, token }) 20 | let tokenName = props.find(prop => prop.key === 'name')?.value 21 | let changedCache = ctx.db.cache.tokens.createOne({ 22 | data: { 23 | ...getCommonTokenCacheFields({ ctx, token }), 24 | tokenName, 25 | tokenProps: props, 26 | trustLevel: Math.max( 27 | 0, 28 | ...props 29 | .filter(({ key }) => key === 'trust_level') 30 | .map(({ value }) => value) 31 | ) 32 | }, 33 | returnUnchanged: false 34 | }) 35 | 36 | if(changedCache){ 37 | dispatchTokenUpdate({ ctx, token, subject: 'tokenProps' }) 38 | } 39 
| } 40 | 41 | export function updateCacheForAccountProps({ ctx, account }){ 42 | if(ctx.backwards) 43 | return 44 | 45 | let props = readAccountProps({ 46 | ctx, 47 | account 48 | }) 49 | 50 | let issuerName = props.find(prop => prop.key === 'name')?.value 51 | 52 | let tokens = ctx.db.core.tokens.readMany({ 53 | where: { 54 | issuer: account 55 | } 56 | }) 57 | 58 | for(let token of tokens){ 59 | if(!token.issuer) 60 | continue 61 | 62 | let changedCache = ctx.db.cache.tokens.createOne({ 63 | data: { 64 | ...getCommonTokenCacheFields({ ctx, token }), 65 | issuerName, 66 | issuerProps: props 67 | }, 68 | returnUnchanged: false 69 | }) 70 | 71 | if(changedCache){ 72 | dispatchTokenUpdate({ ctx, token, subject: 'issuerProps' }) 73 | } 74 | 75 | updateCacheForTokenProps({ ctx, token }) 76 | } 77 | } 78 | 79 | export function updateCacheForTokenMetrics({ ctx, token, metrics }){ 80 | if(ctx.backwards) 81 | return 82 | 83 | let cache = {} 84 | let sequences = getCommonLedgerSequences({ ctx }) 85 | 86 | let currentValues = readTokenMetrics({ 87 | ctx, 88 | token, 89 | metrics, 90 | ledgerSequence: sequences.current 91 | }) 92 | 93 | let pre24hValues = readTokenMetrics({ 94 | ctx, 95 | token, 96 | metrics, 97 | ledgerSequence: sequences.pre24h 98 | }) 99 | 100 | let pre7dValues = readTokenMetrics({ 101 | ctx, 102 | token, 103 | metrics, 104 | ledgerSequence: sequences.pre7d 105 | }) 106 | 107 | for(let key of Object.keys(metrics)){ 108 | let current = currentValues[key] || 0 109 | let pre24h = pre24hValues[key] || 0 110 | let pre7d = pre7dValues[key] || 0 111 | let delta24h = sub(current, pre24h) 112 | let delta7d = sub(current, pre7d) 113 | 114 | let percent24h = gt(pre24h, 0) 115 | ? Number(min(mul(div(delta24h, pre24h), 100), maxChangePercent)) 116 | : 0 117 | 118 | let percent7d = gt(pre7d, 0) 119 | ? 
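// relative change in percent, clamped to maxChangePercent so near-zero baselines can't produce absurd figures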
Number(min(mul(div(delta7d, pre7d), 100), maxChangePercent)) 120 | : 0 121 | 122 | if(metricInts.includes(key)){ 123 | delta24h = Number(delta24h) 124 | delta7d = Number(delta7d) 125 | }else{ 126 | current = current.toString() 127 | delta24h = delta24h.toString() 128 | delta7d = delta7d.toString() 129 | } 130 | 131 | cache[key] = current 132 | cache[`${key}Delta24H`] = delta24h 133 | cache[`${key}Percent24H`] = percent24h 134 | cache[`${key}Delta7D`] = delta7d 135 | cache[`${key}Percent7D`] = percent7d 136 | } 137 | 138 | let changedCache = ctx.db.cache.tokens.createOne({ 139 | data: { 140 | ...getCommonTokenCacheFields({ ctx, token }), 141 | ...cache 142 | }, 143 | returnUnchanged: false 144 | }) 145 | 146 | if(changedCache){ 147 | dispatchTokenUpdate({ ctx, token, subject: 'metrics' }) 148 | } 149 | } 150 | 151 | export function updateCacheForTokenExchanges({ ctx, token }){ 152 | if(ctx.backwards) 153 | return 154 | 155 | if(token.currency === 'XRP') 156 | return 157 | 158 | let sequences = getCommonLedgerSequences({ ctx }) 159 | 160 | let current = readTokenExchangeAligned({ 161 | ctx, 162 | base: token, 163 | quote: { 164 | currency: 'XRP' 165 | }, 166 | ledgerSequence: sequences.current, 167 | skipDust: true 168 | })?.price || 0 169 | 170 | let pre24h = readTokenExchangeAligned({ 171 | ctx, 172 | base: token, 173 | quote: { 174 | currency: 'XRP' 175 | }, 176 | ledgerSequence: sequences.pre24h, 177 | skipDust: true 178 | })?.price || 0 179 | 180 | let pre7d = readTokenExchangeAligned({ 181 | ctx, 182 | base: token, 183 | quote: { 184 | currency: 'XRP' 185 | }, 186 | ledgerSequence: sequences.pre7d, 187 | skipDust: true 188 | })?.price || 0 189 | 190 | let delta24h = sub(current, pre24h) 191 | let delta7d = sub(current, pre7d) 192 | 193 | let percent24h = gt(pre24h, 0) 194 | ? Number(min(mul(div(delta24h, pre24h), 100), maxChangePercent)) 195 | : 0 196 | 197 | let percent7d = gt(pre7d, 0) 198 | ? 
Number(min(mul(div(delta7d, pre7d), 100), maxChangePercent)) 199 | : 0 200 | 201 | let volume24H = readTokenVolume({ 202 | ctx, 203 | base: token, 204 | quote: { 205 | id: 1, 206 | currency: 'XRP' 207 | }, 208 | sequenceStart: sequences.pre24h, 209 | sequenceEnd: sequences.current 210 | }) 211 | 212 | let volume7D = readTokenVolume({ 213 | ctx, 214 | base: token, 215 | quote: { 216 | id: 1, 217 | currency: 'XRP' 218 | }, 219 | sequenceStart: sequences.pre7d, 220 | sequenceEnd: sequences.current 221 | }) 222 | 223 | let exchanges24H = readTokenExchangeCount({ 224 | ctx, 225 | base: token, 226 | quote: { 227 | id: 1, 228 | currency: 'XRP' 229 | }, 230 | sequenceStart: sequences.pre24h, 231 | sequenceEnd: sequences.current 232 | }) 233 | 234 | let exchanges7D = readTokenExchangeCount({ 235 | ctx, 236 | base: token, 237 | quote: { 238 | id: 1, 239 | currency: 'XRP' 240 | }, 241 | sequenceStart: sequences.pre7d, 242 | sequenceEnd: sequences.current 243 | }) 244 | 245 | let takers24H = readTokenExchangeUniqueTakerCount({ 246 | ctx, 247 | base: token, 248 | quote: { 249 | id: 1, 250 | currency: 'XRP' 251 | }, 252 | sequenceStart: sequences.pre24h, 253 | sequenceEnd: sequences.current 254 | }) 255 | 256 | let takers7D = readTokenExchangeUniqueTakerCount({ 257 | ctx, 258 | base: token, 259 | quote: { 260 | id: 1, 261 | currency: 'XRP' 262 | }, 263 | sequenceStart: sequences.pre7d, 264 | sequenceEnd: sequences.current 265 | }) 266 | 267 | let changedCache = ctx.db.cache.tokens.createOne({ 268 | data: { 269 | ...getCommonTokenCacheFields({ ctx, token }), 270 | price: current.toString(), 271 | pricePercent24H: percent24h, 272 | pricePercent7D: percent7d, 273 | volume24H: volume24H.toString(), 274 | volume7D: volume7D.toString(), 275 | exchanges24H, 276 | exchanges7D, 277 | takers24H, 278 | takers7D 279 | }, 280 | returnUnchanged: false 281 | }) 282 | 283 | if(changedCache){ 284 | dispatchTokenUpdate({ ctx, token, subject: 'metrics' }) 285 | } 286 | } 287 | 288 | export function getCommonTokenCacheFields({ ctx, token }){ 289 | if(!token.id || !token.issuer || !token.issuer.address) 290 | token = ctx.db.core.tokens.readOne({ 291 | where: token, 292 | include: { 293 | issuer: true 294 | } 295 | }) 296 | 297 | return { 298 | token: token.id, 299 | tokenCurrencyHex: token.currency, 300 | tokenCurrencyUtf8: currencyHexToUTF8(token.currency), 301 | issuerAddress: token.issuer.address 302 | } 303 | } 304 | 305 | function getCommonLedgerSequences({ ctx }){ 306 | let currentLedger = readMostRecentLedger({ ctx }) 307 | 308 | return { 309 | current: currentLedger.sequence, 310 | pre24h: readLedgerAt({ 311 | ctx, 312 | time: currentLedger.closeTime - 60 * 60 * 24, 313 | clamp: true 314 | }).sequence, 315 | pre7d: readLedgerAt({ 316 | ctx, 317 | time: currentLedger.closeTime - 60 * 60 * 24 * 7, 318 | clamp: true 319 | }).sequence 320 | } 321 | } 322 | 323 | function dispatchTokenUpdate({ ctx, token, subject }){ 324 | if(!ctx.ipc) 325 | return 326 | 327 | ctx.ipc.emit({ 328 | tokenUpdate: { 329 | token, 330 | subject 331 | } 332 | }) 333 | } -------------------------------------------------------------------------------- /test/unit/db.points.test.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai' 2 | import { createContext } from './env.js' 3 | import { readPoint, writePoint } from '../../src/db/helpers/common.js' 4 | 5 | 6 | const ctx = await createContext() 7 | const account = { address: 'rMwNibdiFaEzsTaFCG1NnmAM3Rv3vHUy5L' } 8 | const token = { 9 
| currency: 'RLUSD', 10 | issuer: { address: 'rMxCKbEDwqr76QuheSUMdEGf4B9xJ8m5De' } 11 | } 12 | const book = { 13 | takerPays: token, 14 | takerGets: { currency: 'XRP', issuer: null } 15 | } 16 | 17 | 18 | describe( 19 | 'Database Points', 20 | () => { 21 | it( 22 | 'should create first non-expirable point at sequence 100', 23 | () => { 24 | writePoint({ 25 | table: ctx.db.core.accountBalances, 26 | selector: { account, token }, 27 | ledgerSequence: 100, 28 | backwards: false, 29 | data: { balance: '10' }, 30 | expirable: false 31 | }) 32 | } 33 | ) 34 | 35 | it( 36 | 'should read latest and exact sequence for first point', 37 | () => { 38 | let pLatest = readPoint({ 39 | table: ctx.db.core.accountBalances, 40 | selector: { account, token }, 41 | }) 42 | expect(pLatest.balance.toString()).to.equal('10') 43 | 44 | let p100 = readPoint({ 45 | table: ctx.db.core.accountBalances, 46 | selector: { account, token }, 47 | ledgerSequence: 100 48 | }) 49 | expect(p100.balance.toString()).to.equal('10') 50 | } 51 | ) 52 | 53 | it( 54 | 'should be idempotent when writing same data at same sequence', 55 | () => { 56 | writePoint({ 57 | table: ctx.db.core.accountBalances, 58 | selector: { account, token }, 59 | ledgerSequence: 100, 60 | backwards: false, 61 | data: { balance: '10' }, 62 | expirable: false 63 | }) 64 | let p100Same = readPoint({ 65 | table: ctx.db.core.accountBalances, 66 | selector: { account, token }, 67 | ledgerSequence: 100 68 | }) 69 | expect(p100Same.balance.toString()).to.equal('10') 70 | } 71 | ) 72 | 73 | it( 74 | 'should overwrite data at the same sequence', 75 | () => { 76 | writePoint({ 77 | table: ctx.db.core.accountBalances, 78 | selector: { account, token }, 79 | ledgerSequence: 100, 80 | backwards: false, 81 | data: { balance: '15' }, 82 | expirable: false 83 | }) 84 | let p100Updated = readPoint({ 85 | table: ctx.db.core.accountBalances, 86 | selector: { account, token }, 87 | ledgerSequence: 100 88 | }) 89 | expect(p100Updated.balance.toString()).to.equal('15') 90 | } 91 | ) 92 | 93 | it( 94 | 'should add later point at 105 and step reads accordingly', 95 | () => { 96 | writePoint({ 97 | table: ctx.db.core.accountBalances, 98 | selector: { account, token }, 99 | ledgerSequence: 105, 100 | backwards: false, 101 | data: { balance: '20' }, 102 | expirable: false 103 | }) 104 | 105 | let p104 = readPoint({ 106 | table: ctx.db.core.accountBalances, 107 | selector: { account, token }, 108 | ledgerSequence: 104 109 | }) 110 | expect(p104.balance.toString()).to.equal('15') 111 | 112 | let p105 = readPoint({ 113 | table: ctx.db.core.accountBalances, 114 | selector: { account, token }, 115 | ledgerSequence: 105 116 | }) 117 | expect(p105.balance.toString()).to.equal('20') 118 | 119 | let p999 = readPoint({ 120 | table: ctx.db.core.accountBalances, 121 | selector: { account, token }, 122 | ledgerSequence: 999 123 | }) 124 | expect(p999.balance.toString()).to.equal('20') 125 | } 126 | ) 127 | 128 | it( 129 | 'should write a later forward non-expirable point at 220', 130 | () => { 131 | writePoint({ 132 | table: ctx.db.core.accountBalances, 133 | selector: { account, token }, 134 | ledgerSequence: 220, 135 | backwards: false, 136 | data: { balance: '70' }, 137 | expirable: false 138 | }) 139 | } 140 | ) 141 | 142 | it( 143 | 'should write an earlier backfilled non-expirable point at 200', 144 | () => { 145 | writePoint({ 146 | table: ctx.db.core.accountBalances, 147 | selector: { account, token }, 148 | ledgerSequence: 200, 149 | backwards: true, 150 | data: {
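// backwards: true marks this as a backfill write – the point at sequence 200 is slotted in behind the already-written point at 220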
balance: '50' }, 151 | expirable: false 152 | }) 153 | } 154 | ) 155 | 156 | it( 157 | 'should read sequences around both non-expirable points correctly', 158 | () => { 159 | let p199 = readPoint({ 160 | table: ctx.db.core.accountBalances, 161 | selector: { account, token }, 162 | ledgerSequence: 199 163 | }) 164 | expect(p199.balance.toString()).to.equal('20') 165 | 166 | let p200 = readPoint({ 167 | table: ctx.db.core.accountBalances, 168 | selector: { account, token }, 169 | ledgerSequence: 200 170 | }) 171 | expect(p200.balance.toString()).to.equal('50') 172 | 173 | let p210 = readPoint({ 174 | table: ctx.db.core.accountBalances, 175 | selector: { account, token }, 176 | ledgerSequence: 210 177 | }) 178 | expect(p210.balance.toString()).to.equal('50') 179 | 180 | let p220 = readPoint({ 181 | table: ctx.db.core.accountBalances, 182 | selector: { account, token }, 183 | ledgerSequence: 220 184 | }) 185 | expect(p220.balance.toString()).to.equal('70') 186 | 187 | let p221 = readPoint({ 188 | table: ctx.db.core.accountBalances, 189 | selector: { account, token }, 190 | ledgerSequence: 221 191 | }) 192 | expect(p221.balance.toString()).to.equal('70') 193 | } 194 | ) 195 | 196 | it( 197 | 'should create expirable open-ended offer at sequence 100', 198 | () => { 199 | writePoint({ 200 | table: ctx.db.core.tokenOffers, 201 | selector: { account, accountSequence: 1, book }, 202 | ledgerSequence: 100, 203 | backwards: false, 204 | data: { quality: '1', size: '100' }, 205 | expirable: true 206 | }) 207 | 208 | let o100 = readPoint({ 209 | table: ctx.db.core.tokenOffers, 210 | selector: { account, accountSequence: 1, book }, 211 | ledgerSequence: 100, 212 | expirable: true 213 | }) 214 | expect(o100.quality.toString()).to.equal('1') 215 | expect(o100.size.toString()).to.equal('100') 216 | } 217 | ) 218 | 219 | it( 220 | 'should read the open-ended offer later at sequence 150', 221 | () => { 222 | let o150 = readPoint({ 223 | table: ctx.db.core.tokenOffers, 224 | selector: { account, accountSequence: 1, book }, 225 | ledgerSequence: 150, 226 | expirable: true 227 | }) 228 | expect(o150.size.toString()).to.equal('100') 229 | } 230 | ) 231 | 232 | it( 233 | 'should evolve expirable offer at 160 closing previous interval', 234 | () => { 235 | writePoint({ 236 | table: ctx.db.core.tokenOffers, 237 | selector: { account, accountSequence: 1, book }, 238 | ledgerSequence: 160, 239 | backwards: false, 240 | data: { quality: '2', size: '80' }, 241 | expirable: true 242 | }) 243 | } 244 | ) 245 | 246 | it( 247 | 'should read old value at 159 before evolution', 248 | () => { 249 | let o159 = readPoint({ 250 | table: ctx.db.core.tokenOffers, 251 | selector: { account, accountSequence: 1, book }, 252 | ledgerSequence: 159, 253 | expirable: true 254 | }) 255 | expect(o159.size.toString()).to.equal('100') 256 | } 257 | ) 258 | 259 | it( 260 | 'should read new value at 160 after evolution', 261 | () => { 262 | let o160 = readPoint({ 263 | table: ctx.db.core.tokenOffers, 264 | selector: { account, accountSequence: 1, book }, 265 | ledgerSequence: 160, 266 | expirable: true 267 | }) 268 | expect(o160.size.toString()).to.equal('80') 269 | } 270 | ) 271 | 272 | it( 273 | 'should expire expirable offer at 170 and close interval', 274 | () => { 275 | writePoint({ 276 | table: ctx.db.core.tokenOffers, 277 | selector: { account, accountSequence: 1, book }, 278 | ledgerSequence: 170, 279 | backwards: false, 280 | data: null, 281 | expirable: true 282 | }) 283 | 284 | let o169 = readPoint({ 285 | table: 
ctx.db.core.tokenOffers, 286 | selector: { account, accountSequence: 1, book }, 287 | ledgerSequence: 169, 288 | expirable: true 289 | }) 290 | expect(o169.size.toString()).to.equal('80') 291 | 292 | let o170 = readPoint({ 293 | table: ctx.db.core.tokenOffers, 294 | selector: { account, accountSequence: 1, book }, 295 | ledgerSequence: 170, 296 | expirable: true 297 | }) 298 | expect(o170).to.equal(undefined) 299 | } 300 | ) 301 | } 302 | ) 303 | --------------------------------------------------------------------------------