├── res
│   └── .gitkeep
├── .gitignore
├── run_actions.sh
├── run_events.sh
├── package.json
├── LICENSE
├── utils.js
├── fetcher.js
└── app.js

/res/.gitkeep:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.env
node_modules
yarn.lock
logs
res/
--------------------------------------------------------------------------------
/run_actions.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# set -e

cd "$(dirname "$0")"
mkdir -p logs
DATE=$(date "+%Y_%m_%d")

export RES_PATH=res/actions
mkdir -p "$RES_PATH"

export ACTION=actions
export PORT=3015
export WS_PORT=3016
export HISTORY_LIMIT=20000

yarn start 2>&1 | tee -a logs/actions_$DATE.txt
--------------------------------------------------------------------------------
/run_events.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# set -e

cd "$(dirname "$0")"
mkdir -p logs
DATE=$(date "+%Y_%m_%d")

export RES_PATH=res/events
mkdir -p "$RES_PATH"

export ACTION=events
export PORT=3005
export WS_PORT=3006
export HISTORY_LIMIT=1000000
# export FILTER='{"status": "SUCCESS", "accountId": "game.hot.tg"}'
export SAVE_LAST_BLOCK=false

yarn start 2>&1 | tee -a logs/events_$DATE.txt
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
{
  "name": "events-api",
  "version": "0.2.3",
  "main": "app.js",
  "license": "Unlicense",
  "dependencies": {
    "dotenv": "^16.4.5",
    "@koa/cors": "^5.0.0",
    "koa": "^2.15.3",
    "koa-bodyparser": "^4.4.1",
    "koa-router": "^12.0.1",
    "ws": "^8.16.0"
  },
  "scripts": {
    "start": "node app",
    "dev": "nodemon -r dotenv/config app"
  },
  "devDependencies": {
    "nodemon": "^2.0.6",
    "prettier": "^2.2.1"
  }
}
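
Note: app.js loads its configuration through dotenv, so the variables exported
by the run scripts can also live in a local .env file (already excluded by
.gitignore). A minimal sketch for the events instance, with values mirroring
run_events.sh:

    ACTION=events
    PORT=3005
    WS_PORT=3006
    RES_PATH=res/events
    HISTORY_LIMIT=1000000
    SAVE_LAST_BLOCK=false
    # FILTER='{"status": "SUCCESS", "accountId": "game.hot.tg"}'
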
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
This is free and unencumbered software released into the public domain.

Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.

In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.

For more information, please refer to <https://unlicense.org>
--------------------------------------------------------------------------------
/utils.js:
--------------------------------------------------------------------------------
const fs = require("fs");

function saveJson(json, filename) {
  try {
    const data = JSON.stringify(json);
    fs.writeFileSync(filename, data);
  } catch (e) {
    console.error("Failed to save JSON:", filename, e);
  }
}

// Returns the parsed JSON, or null if the file is missing or invalid.
// Pass ignore=true to suppress the error log (e.g. on first run).
function loadJson(filename, ignore) {
  try {
    const rawData = fs.readFileSync(filename);
    return JSON.parse(rawData);
  } catch (e) {
    if (!ignore) {
      console.error("Failed to load JSON:", filename, e);
    }
  }
  return null;
}

// An array filter matches a row if any of its elements match (OR);
// an object filter matches by recursive structural subset.
const getFilteredRows = (rows, filter) => {
  return rows.filter((row) =>
    Array.isArray(filter)
      ? filter.some((f) => recursiveFilter(f, row))
      : isObject(filter)
      ? recursiveFilter(filter, row)
      : false
  );
};

const isObject = function (o) {
  return o === Object(o) && !Array.isArray(o) && typeof o !== "function";
};

// True if every key present in `filter` deep-matches the corresponding
// value in `obj`; extra keys in `obj` are ignored.
const recursiveFilter = (filter, obj) => {
  if (isObject(filter) && isObject(obj)) {
    return Object.keys(filter).every((key) =>
      recursiveFilter(filter[key], obj[key])
    );
  } else if (Array.isArray(filter) && Array.isArray(obj)) {
    return filter.every((value, index) => recursiveFilter(value, obj[index]));
  } else {
    return filter === obj;
  }
};

module.exports = {
  getFilteredRows,
  isObject,
  recursiveFilter,
  saveJson,
  loadJson,
};
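
To make the filter semantics concrete, here is an illustrative snippet (not
part of the repository): an object filter matches a row when it is a
structural subset of it, and an array filter matches when any of its elements
does.

    const { getFilteredRows } = require("./utils");

    const rows = [
      { status: "SUCCESS", event: { standard: "nep141", event: "ft_transfer" } },
      { status: "FAILURE", event: { standard: "nep141", event: "ft_transfer" } },
    ];

    // Nested object filter: keeps only the first row.
    getFilteredRows(rows, { status: "SUCCESS", event: { standard: "nep141" } });

    // Array filter acts as an OR over its elements: keeps both rows.
    getFilteredRows(rows, [{ status: "SUCCESS" }, { status: "FAILURE" }]);
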
--------------------------------------------------------------------------------
/fetcher.js:
--------------------------------------------------------------------------------
const MainnetUrl = "https://mainnet.neardata.xyz/v0";
const EventLogPrefix = "EVENT_JSON:";
const FetchTimeoutStart = 10000;
const FetchTimeoutIncrease = 5000;

const ReceiptStatus = {
  Success: "SUCCESS",
  Failure: "FAILURE",
};

// Rejects after `time` ms and aborts the in-flight fetch through the shared
// AbortController; raced against the fetch below.
function timeoutPromise(time, abortController) {
  return new Promise((_resolve, reject) => {
    setTimeout(() => {
      abortController.abort();
      reject(new Error(`Timeout: ${time}ms exceeded`));
    }, time);
  });
}

// Fetches one block with up to 10 attempts, doubling the delay between
// attempts and growing the per-attempt timeout. Returns null if the block is
// unavailable after all attempts.
const fetchBlock = async (blockHeight) => {
  const iters = 10;
  let timeout = 100;
  let fetchTimeout = FetchTimeoutStart;
  const jsonFetch = async (url, abortController) => {
    const start = Date.now();
    const response = await fetch(url, { signal: abortController.signal });
    const text = await response.text();
    const elapsed = Date.now() - start;
    console.log(`Fetched ${text.length} bytes. Elapsed: ${elapsed}ms`);
    return JSON.parse(text);
  };
  for (let i = 0; i < iters; i++) {
    try {
      const url = `${MainnetUrl}/block/${blockHeight}`;
      const abortController = new AbortController();
      console.log("Fetching block", url);
      return await Promise.race([
        jsonFetch(url, abortController),
        timeoutPromise(fetchTimeout, abortController),
      ]);
    } catch (e) {
      console.error("Failed to fetch block", blockHeight, e);
      await new Promise((r) => setTimeout(r, timeout));
      fetchTimeout += FetchTimeoutIncrease;
      timeout *= 2;
    }
  }
  return null;
};

const Fetcher = {
  // Starts from the given height, or from the latest finalized block when no
  // height is provided.
  init: async function (lastBlockHeight) {
    if (!lastBlockHeight) {
      const query = await fetch(`${MainnetUrl}/last_block/final`);
      const block = await query.json();
      lastBlockHeight = block.block.header.height;
    }
    this.lastBlockHeight = lastBlockHeight;
    return this;
  },

  // Extracts events (EVENT_JSON logs) and actions from every receipt
  // execution outcome in the block.
  processBlock: function (block) {
    const res = {
      events: [],
      actions: [],
    };
    if (!block) {
      return res;
    }
    const blockHeight = block.block.header.height;
    const blockHash = block.block.header.hash;
    const blockTimestampNs = block.block.header.timestamp_nanosec;
    const blockTimestampMs = parseFloat(blockTimestampNs) / 1e6;
    for (const shard of block.shards) {
      const shardId = shard.shard_id;
      for (const outcome of shard.receipt_execution_outcomes) {
        const txHash = outcome.tx_hash;
        const {
          predecessor_id: predecessorId,
          receiver_id: accountId,
          receipt_id: receiptId,
          receipt,
        } = outcome.receipt;
        const {
          status: executionStatus,
          gas_burnt: gasBurnt,
          tokens_burnt: tokensBurnt,
          logs,
        } = outcome.execution_outcome.outcome;
        const status =
          "SuccessValue" in executionStatus ||
          "SuccessReceiptId" in executionStatus
            ? ReceiptStatus.Success
            : ReceiptStatus.Failure;
        if ("Action" in receipt) {
          const {
            signer_id: signerId,
            signer_public_key: signerPublicKey,
            actions,
            gas_price: gasPrice,
          } = receipt.Action;
          // Parse logs
          for (let logIndex = 0; logIndex < logs.length; logIndex++) {
            const log = logs[logIndex];
            if (log.startsWith(EventLogPrefix)) {
              let event = null;
              try {
                event = JSON.parse(log.slice(EventLogPrefix.length));
              } catch (e) {
                console.debug("Failed to parse event log", e);
              }
              const fullEvent = {
                blockHeight,
                blockHash,
                blockTimestampMs,
                blockTimestampNs,
                shardId,
                txHash,
                receiptId,
                signerId,
                signerPublicKey,
                accountId,
                predecessorId,
                status,
                logIndex,
                event,
              };
              res.events.push(fullEvent);
            }
          }

          // Parse actions
          for (
            let actionIndex = 0;
            actionIndex < actions.length;
            actionIndex++
          ) {
            const action = actions[actionIndex];
            const fullAction = {
              blockHeight,
              blockHash,
              blockTimestampMs,
              blockTimestampNs,
              shardId,
              txHash,
              receiptId,
              signerId,
              signerPublicKey,
              accountId,
              predecessorId,
              status,
              gasBurnt,
              tokensBurnt,
              gasPrice,
              actionIndex,
              action,
            };
            res.actions.push(fullAction);
          }
        }
      }
    }
    return res;
  },

  // Advances the cursor by one block. A missing (skipped) block still
  // advances the height and yields empty results.
  fetchNextBlock: async function () {
    const block = await fetchBlock(this.lastBlockHeight + 1);
    const res = this.processBlock(block);
    this.lastBlockHeight++;
    return res;
  },
};

module.exports = Fetcher;
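
A minimal standalone usage sketch for the module above (illustrative; app.js
drives the same two calls from its fetch loop):

    const Fetcher = require("./fetcher");

    (async () => {
      // Without an argument, init() starts from the latest finalized block.
      const fetcher = await Fetcher.init();
      while (true) {
        const { events, actions } = await fetcher.fetchNextBlock();
        for (const e of events) {
          // e.event holds the parsed NEP-297 payload, e.g.
          // { standard: "nep141", version: "1.0.0", event: "ft_transfer", data: [...] }
          console.log(e.blockHeight, e.accountId, e.event);
        }
      }
    })();
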
--------------------------------------------------------------------------------
/app.js:
--------------------------------------------------------------------------------
require("dotenv").config();
const fs = require("fs");
const { getFilteredRows, saveJson, loadJson } = require("./utils");

const cors = require("@koa/cors");

const Koa = require("koa");
const app = new Koa();
app.proxy = true;

const Router = require("koa-router");
const router = new Router();

const bodyParser = require("koa-bodyparser");

const Fetcher = require("./fetcher");

const WebSocket = require("ws");

const ResPath = process.env.RES_PATH || "res";
const WsSubsFilename = ResPath + "/ws_subs.json";

// Keep up to ~2% more rows than the history limit, then trim back down.
const PastRowsTrimTo = parseInt(process.env.HISTORY_LIMIT || "1000000");
const PastRowsLimit = Math.round(PastRowsTrimTo * 1.02);

const SaveLastBlock = process.env.SAVE_LAST_BLOCK === "true";
const LastBlockFilename = ResPath + "/last_block.json";

const MaxRowsLimit = 1000;
const DefaultRowsLimit = 100;

(async () => {
  fs.mkdirSync(ResPath, { recursive: true });

  const lastBlockHeight =
    (SaveLastBlock && loadJson(LastBlockFilename, true)) || undefined;

  const action = process.env.ACTION;
  let filter = {};
  try {
    filter = JSON.parse(process.env.FILTER ?? "{}");
  } catch (e) {
    console.error("Failed to parse filter", e);
  }

  const fetcher = await Fetcher.init(lastBlockHeight);
  console.log("Fetcher initialized", fetcher.lastBlockHeight);

  const fetchNext = async () => {
    try {
      const res = await fetcher.fetchNextBlock();
      const rows = action === "actions" ? res.actions : res.events;
      return rows;
    } catch (e) {
      console.error(e);
    }
    return [];
  };

  const pastRows = [];
  // Spawning fetch thread
  let fetchThread, processRows;
  fetchThread = async () => {
    while (true) {
      try {
        const rows = await fetchNext();
        const filteredRows = getFilteredRows(rows, filter);
        pastRows.push(...filteredRows);
        if (pastRows.length > PastRowsLimit) {
          pastRows.splice(0, pastRows.length - PastRowsTrimTo);
        }
        console.log(
          `Added ${filteredRows.length} out of ${rows.length} ${action}. Total ${pastRows.length} ${action}.`
        );
        if (SaveLastBlock) {
          saveJson(fetcher.lastBlockHeight, LastBlockFilename);
        }
        processRows(rows);
      } catch (e) {
        console.error(e);
      }
    }
  };

  // const subs = loadJson(SubsFilename, true) || {};

  const WS_PORT = process.env.WS_PORT || 7071;

  const wss = new WebSocket.Server({ port: WS_PORT });
  console.log("WebSocket server listening on ws://localhost:%d/", WS_PORT);

  const wsClients = new Map();
  const wsSubs = new Map();

  // subs.push({
  //   "filter": [{
  //     "account_id": "nft.nearapps.near",
  //     "status": "SUCCESS",
  //     "standard": "nep171",
  //     "event": "nft_mint",
  //     "data_account_id": "bla.near",
  //   }],
  //   "url": "http://127.0.0.1:3000/event"
  // });

  // Pushes newly fetched rows to every WebSocket subscriber whose filter
  // matches at least one row.
  const processRowsInternal = async (rows) => {
    [...wsSubs.values()].forEach((sub) => {
      const filteredEvents = getFilteredRows(rows, sub.filter);
      if (filteredEvents.length > 0 && wsClients.has(sub.ws)) {
        try {
          sub.ws.send(
            JSON.stringify({
              secret: sub.secret,
              [action]: filteredEvents,
            })
          );
        } catch (e) {
          console.log(`Failed to send ${action} to ws`, e);
        }
      }
    });
  };

  processRows = (rows) => {
    processRowsInternal(rows).catch((e) =>
      console.error("Process Rows failed", e)
    );
  };

  console.log("Starting fetch thread");
  fetchThread().catch((e) => console.error("Fetch thread failed", e));

  const saveWsSubs = () => {
    saveJson(
      [...wsSubs.values()].map(
        ({ xForwardedFor, remoteAddress, secret, filter }) => ({
          xForwardedFor,
          remoteAddress,
          secret,
          filter,
        })
      ),
      WsSubsFilename
    );
  };

  // Returns the most recent rows matching the filter, capped at MaxRowsLimit.
  const getPastRows = (filter, limit) => {
    const filteredRows = getFilteredRows(pastRows, filter);
    limit = Math.min(
      Math.max(parseInt(limit) || DefaultRowsLimit, 0),
      Math.min(MaxRowsLimit, filteredRows.length)
    );
    return filteredRows.slice(filteredRows.length - limit);
  };

  wss.on("connection", (ws, req) => {
    console.log("WS Connection open");
    ws.on("error", console.error);

    wsClients.set(ws, null);

    ws.on("close", () => {
      console.log("connection closed");
      wsClients.delete(ws);
      wsSubs.delete(ws);
      saveWsSubs();
    });

    ws.on("message", (messageAsString) => {
      try {
        const message = JSON.parse(messageAsString);
        if ("filter" in message && "secret" in message) {
          console.log(`WS subscribed to ${action}`);
          wsSubs.set(ws, {
            ws,
            secret: message.secret,
            filter: message.filter,
            xForwardedFor: req.headers["x-forwarded-for"],
            remoteAddress: req.socket.remoteAddress,
          });
          saveWsSubs();
          if (message[`fetch_past_${action}`]) {
            ws.send(
              JSON.stringify({
                secret: message.secret,
                [action]: getPastRows(
                  message.filter,
                  message[`fetch_past_${action}`]
                ),
                note: "past",
              })
            );
          }
        }
      } catch (e) {
        console.log("Bad message", e);
      }
    });
  });

  // // Save subs once a minute
  // setInterval(() => {
  //   saveJson(subs, SubsFilename);
  // }, 60000);

  router.post(`/${action}`, (ctx) => {
    ctx.type = "application/json; charset=utf-8";
    try {
      const body = ctx.request.body;
      if ("filter" in body) {
        ctx.body = JSON.stringify(
          {
            [action]: getPastRows(body.filter, body.limit),
          },
          null,
          2
        );
      } else {
        ctx.body = 'err: Required fields are "filter"';
      }
    } catch (e) {
      ctx.body = `err: ${e}`;
    }
  });
  // router.post("/subscribe", (ctx) => {
  //   ctx.type = "application/json; charset=utf-8";
  //   try {
  //     const body = ctx.request.body;
  //     if ("filter" in body && "url" in body && "secret" in body) {
  //       const secret = body.secret;
  //       if (secret in subs) {
  //         throw new Error(`Secret "${secret}" is already present`);
  //       }
  //       subs[secret] = {
  //         ip: ctx.request.ip,
  //         filter: body.filter,
  //         url: body.url,
  //         secret,
  //       };
  //       saveJson(subs, SubsFilename);
  //       ctx.body = JSON.stringify(
  //         {
  //           ok: true,
  //         },
  //         null,
  //         2
  //       );
  //     } else {
  //       ctx.body = 'err: Required fields are "filter", "url", "secret"';
  //     }
  //   } catch (e) {
  //     ctx.body = `err: ${e}`;
  //   }
  // });
  //
  // router.post("/unsubscribe", (ctx) => {
  //   ctx.type = "application/json; charset=utf-8";
  //   try {
  //     const body = ctx.request.body;
  //     const secret = body.secret;
  //     if (secret in subs) {
  //       delete subs[secret];
  //       saveJson(subs, SubsFilename);
  //       ctx.body = JSON.stringify(
  //         {
  //           ok: true,
  //         },
  //         null,
  //         2
  //       );
  //     } else {
  //       ctx.body = 'err: No subscription found for "secret"';
  //     }
  //   } catch (e) {
  //     ctx.body = `err: ${e}`;
  //   }
  // });

  app
    .use(async (ctx, next) => {
      console.log(ctx.method, ctx.path);
      await next();
    })
    .use(cors())
    .use(bodyParser())
    .use(router.routes())
    .use(router.allowedMethods());

  const PORT = process.env.PORT || 3000;
  app.listen(PORT);
  console.log("Listening on http://localhost:%d/", PORT);
})();
--------------------------------------------------------------------------------
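
For illustration (not part of the repository), a minimal client for the
events instance started by run_events.sh (WS_PORT=3006, PORT=3005). The
server echoes the secret back with every batch of matching rows, and adds
note: "past" to history replays:

    const WebSocket = require("ws");

    const ws = new WebSocket("ws://localhost:3006");
    ws.on("open", () => {
      ws.send(
        JSON.stringify({
          secret: "my-secret", // echoed back so the client can match replies
          filter: { status: "SUCCESS", accountId: "game.hot.tg" },
          fetch_past_events: 50, // optional: replay up to 50 matching past events
        })
      );
    });
    ws.on("message", (data) => console.log(JSON.parse(data)));

    // The same history is served over HTTP:
    (async () => {
      const res = await fetch("http://localhost:3005/events", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ filter: { status: "SUCCESS" }, limit: 10 }),
      });
      console.log(await res.json());
    })();

Each returned row carries the fields assembled in processBlock: blockHeight,
blockHash, blockTimestampMs, blockTimestampNs, shardId, txHash, receiptId,
signerId, signerPublicKey, accountId, predecessorId, and status, plus
logIndex and the parsed event for events, or gasBurnt, tokensBurnt, gasPrice,
actionIndex, and the raw action for actions.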