├── .eslintignore ├── .eslintrc.js ├── .gitignore ├── .prettierignore ├── .prettierrc ├── .vscode └── settings.json ├── CHANGELOG.md ├── CONTRIBUTORS.md ├── LICENSE ├── MIGRATION.md ├── README.md ├── examples ├── .gitignore ├── advanced │ ├── common │ │ ├── client-and-socket-notifications.ts │ │ ├── forever-streaming.ts │ │ ├── graphql-gql-tag.ts │ │ ├── graphql-never-miss-a-beat.js │ │ ├── graphql-never-miss-a-beat.ts │ │ ├── multiple-active-streams.ts │ │ ├── navigating-forks.ts │ │ ├── never-miss-a-beat.ts │ │ └── nodejs-fetch-and-websocket-options.ts │ └── eosio │ │ ├── has-account.ts │ │ ├── search-reconstruct-tree.ts │ │ ├── stream-only-irreversible-events.ts │ │ └── track-ram-usage.ts ├── basic │ ├── eosio │ │ ├── dfuse-community-edition.ts │ │ ├── dfuse-for-eosio.ts │ │ ├── search-your-latest-transactions-graphql.ts │ │ ├── search-your-latest-transactions.ts │ │ ├── state-check-balance.ts │ │ ├── stream-global-state-ws.ts │ │ ├── stream-transfers-graphql.ts │ │ └── stream-transfers-ws.ts │ └── ethereum │ │ ├── search-your-latest-transactions.ts │ │ ├── stream-transactions.ts │ │ └── stream-transfers.ts ├── config.ts └── reference │ ├── common │ ├── api-request.ts │ ├── auth-issue.ts │ └── browser.html │ ├── eosio │ ├── fetch-block-id-by-time.ts │ ├── fetch-transaction.ts │ ├── search-transactions.ts │ ├── state-abi-bin-to-json.ts │ ├── state-abi.ts │ ├── state-key-accounts.ts │ ├── state-permission-links.ts │ ├── state-table-row.ts │ ├── state-table-scopes.ts │ ├── state-table.ts │ ├── state-tables-for-accounts.ts │ ├── state-tables-for-scopes.ts │ ├── stream-action-traces.ts │ ├── stream-head-info.ts │ ├── stream-table-rows.ts │ └── stream-transaction.ts │ └── ethereum │ └── stream-pending-transactions.ts ├── jest.config.js ├── package.json ├── patches └── typedoc-plugin-toc-group+0.0.4.patch ├── rollup.config.es.js ├── rollup.config.js ├── rollup.config.umd.js ├── scripts ├── compress-umd-build.js ├── gh-publish-docs.js ├── gh-push.js └── helpers.js ├── 
src ├── client │ ├── __tests__ │ │ ├── api-token-manager.test.ts │ │ ├── client.test.ts │ │ ├── graphql-stream-client.test.ts │ │ ├── http-client.test.ts │ │ ├── mocks.ts │ │ ├── socket.test.ts │ │ └── stream-client.test.ts │ ├── api-token-manager.ts │ ├── api-token-store.ts │ ├── client.ts │ ├── graphql-stream-client.ts │ ├── http-client.ts │ ├── refresh-scheduler.ts │ ├── socket.ts │ └── stream-client.ts ├── helpers │ ├── __tests__ │ │ └── transaction.test.ts │ ├── message.ts │ ├── promises.ts │ ├── time.ts │ └── transaction.ts ├── index.ts ├── message │ ├── inbound.ts │ └── outbound.ts └── types │ ├── action-trace.ts │ ├── auth-token.ts │ ├── block-id.ts │ ├── client.ts │ ├── common.ts │ ├── error.ts │ ├── graphql-stream-client.ts │ ├── graphql.ts │ ├── head-info.ts │ ├── http-client.ts │ ├── listen.ts │ ├── progress.ts │ ├── search.ts │ ├── socket.ts │ ├── state.ts │ ├── stream-client.ts │ ├── stream.ts │ ├── table-delta.ts │ ├── table-snapshot.ts │ └── transaction.ts ├── tools └── docs │ └── theme │ └── partials │ └── toc.root.hbs ├── tsconfig.json ├── typedoc.json └── yarn.lock /.eslintignore: -------------------------------------------------------------------------------- 1 | .vscode 2 | dist 3 | docs 4 | node_modules 5 | -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: ["@typescript-eslint", "eslint-comments", "jest", "promise"], 3 | extends: [ 4 | "plugin:@typescript-eslint/recommended", 5 | "plugin:eslint-comments/recommended", 6 | "plugin:jest/recommended", 7 | "plugin:promise/recommended", 8 | "prettier", 9 | "prettier/@typescript-eslint", 10 | ], 11 | env: { 12 | node: true, 13 | browser: true, 14 | jest: true, 15 | }, 16 | rules: { 17 | camelcase: "off", 18 | "no-prototype-builtins": "off", 19 | "no-use-before-define": "off", 20 | "eslint-comments/disable-enable-pair": "off", 21 | 
"@typescript-eslint/ban-ts-comment": "off", 22 | "@typescript-eslint/explicit-function-return-type": [ 23 | "error", 24 | { allowExpressions: true, allowTypedFunctionExpressions: true }, 25 | ], 26 | "@typescript-eslint/no-explicit-any": "off", 27 | "@typescript-eslint/explicit-module-boundary-types": "off", 28 | "@typescript-eslint/no-use-before-define": "off", 29 | }, 30 | overrides: [ 31 | { 32 | files: ["*.js"], 33 | rules: { 34 | "@typescript-eslint/explicit-function-return-type": "off", 35 | "@typescript-eslint/no-var-requires": "off", 36 | }, 37 | }, 38 | ], 39 | } 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Autoenv/Direnv 2 | .env 3 | .envrc 4 | 5 | # Build 6 | /docs 7 | /dist 8 | 9 | # Coverage 10 | coverage/ 11 | 12 | # NPM 13 | package-lock.json 14 | 15 | # Rollup 16 | .rpt2_cache 17 | 18 | # System Files 19 | *.orig 20 | .DS_Store 21 | Thumbs.db 22 | 23 | # VSCode 24 | .vscode/launch.json 25 | 26 | # Yarn 27 | yarn-error.log 28 | node_modules/ 29 | 30 | # Webstorm 31 | .idea -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | ## Coverage 2 | /coverage 3 | 4 | ## Docs Theme Partials 5 | /docs/theme/partials/*.hbs 6 | 7 | ## Generated Files 8 | index.js 9 | /build 10 | /dist 11 | /types 12 | *.d.ts 13 | 14 | ## Git 15 | .git 16 | .gitignore 17 | 18 | ## NPM 19 | package-lock.json 20 | 21 | ## Prettier 22 | .prettierignore 23 | 24 | ## VSCode 25 | .vscode 26 | 27 | ## Yarn 28 | package.json 29 | yarn.lock 30 | -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "arrowParens": "always", 3 | "printWidth": 100, 4 | "semi": false 5 | } 6 | 
-------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.formatOnSave": true 3 | } -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | ## Contributors 2 | 3 | - Denis Carriere (https://eosnation.io) 4 | - Matthieu Vachon (https://dfuse.io) 5 | - Frederik Schöll (https://eosnation.io) 6 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 dfuse Platform Inc. (“dfuse”) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /examples/.gitignore: -------------------------------------------------------------------------------- 1 | /personal 2 | /advanced/common/last_cursor.txt -------------------------------------------------------------------------------- /examples/advanced/common/client-and-socket-notifications.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | InboundMessage, 5 | InboundMessageType, 6 | waitFor, 7 | SocketOptions, 8 | GraphqlStreamMessage, 9 | } from "@dfuse/client" 10 | 11 | /** 12 | * In this example, we will showcase how to be get notifications when certain 13 | * events happen in the client and underlying socket. 14 | * 15 | * You probably won't see that much here, unless you are able to 16 | * generate a closing of the connection and then allow it to come 17 | * back. Restarting the network interface while the script is running 18 | * might achieve this. 19 | * 20 | * In this example, you will register a listener for the following events: 21 | * - Socket `onError`: when an error occurs with the connection. You will still receive an `onClose` right aftet this one. 22 | * - Socket `onClose`: when the connection of the `Socket` was closed. 23 | * - Socket `onReconnect`: when the socket has automatically reconnected. 24 | * 25 | * We will also register an `onPostRestart` listener on the `Stream`, which is called after 26 | * a `listen` has been sent back to the remote endpoint due to a socket `onReconnect`. 27 | * 28 | * The example also show all cases that can happen with both streaming methods. 
29 | */ 30 | async function main(): Promise { 31 | const socketOptions: SocketOptions = { 32 | onError(event: any) { 33 | console.log("Socket emitted an error event.", { 34 | message: event.message, 35 | error: event.error, 36 | }) 37 | }, 38 | 39 | onClose(event: any) { 40 | console.log("Socket has closed its connection.", { reason: event.reason, code: event.code }) 41 | }, 42 | 43 | onReconnect() { 44 | console.log("Socket has been reconnected with remote server.") 45 | }, 46 | } 47 | 48 | const client = createDfuseClient({ 49 | apiKey: DFUSE_API_KEY, 50 | network: DFUSE_API_NETWORK, 51 | streamClientOptions: { 52 | socketOptions, 53 | }, 54 | graphqlStreamClientOptions: { 55 | socketOptions, 56 | }, 57 | }) 58 | 59 | const graphqlOperation = `subscription($cursor: String!) { 60 | searchTransactionsForward(query: "action:onblock", cursor: $cursor) { 61 | undo cursor 62 | block { num timestamp } 63 | } 64 | }` 65 | 66 | const graphqlStream = await client.graphql( 67 | graphqlOperation, 68 | (message: GraphqlStreamMessage) => { 69 | if (message.type === "error") { 70 | // When `terminal: true`, an auto-reconnection is automatically performed 71 | console.log("GraphQL stream error.", message.errors, message.terminal) 72 | return 73 | } 74 | 75 | if (message.type === "data") { 76 | console.log( 77 | "GraphQL stream data.", 78 | JSON.stringify({ ...message.data.searchTransactionsForward, cursor: undefined }) 79 | ) 80 | 81 | // Mark latest location where we want to start back at 82 | graphqlStream.mark({ cursor: message.data.searchTransactionsForward.cursor }) 83 | } 84 | 85 | if (message.type === "complete") { 86 | console.log("GraphQL stream completed.") 87 | } 88 | } 89 | ) 90 | 91 | graphqlStream.onPostRestart = () => { 92 | console.log() 93 | console.log( 94 | "<============= GraphQL stream has restarted to its previous `mark()` location =============>" 95 | ) 96 | } 97 | 98 | const wsStream = await client.streamHeadInfo((message: InboundMessage) => { 99 | if 
(message.type === InboundMessageType.ERROR) { 100 | console.log("WebSocket stream error.", message.data) 101 | return 102 | } 103 | 104 | if (message.type === InboundMessageType.LISTENING) { 105 | console.log("WebSocket stream is now listening.") 106 | } 107 | 108 | if (message.type === InboundMessageType.HEAD_INFO) { 109 | console.log("WebSocket stream data.", JSON.stringify(message.data)) 110 | 111 | // Mark latest location where we want to start back at 112 | wsStream.mark({ atBlockNum: message.data.head_block_num }) 113 | } 114 | }) 115 | 116 | wsStream.onPostRestart = () => { 117 | console.log() 118 | console.log( 119 | "<============= WebSocket stream has restarted to its previous `mark()` location =============>" 120 | ) 121 | } 122 | 123 | await waitFor(35000) 124 | await graphqlStream.close() 125 | await wsStream.close() 126 | 127 | client.release() 128 | } 129 | 130 | runMain(main) 131 | -------------------------------------------------------------------------------- /examples/advanced/common/forever-streaming.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, InboundMessage, waitFor, Stream } from "@dfuse/client" 3 | 4 | /** 5 | * In this example, we showcase how the library always keeps your 6 | * streams active. By default, the library will automatically restart all 7 | * your active streams after a reconnection event occurred on the 8 | * underlying socket. 9 | * 10 | * With zero effort from your part, your stream is always receiving 11 | * messages. This pattern can be used when you only really need a never ending 12 | * streaming of messages, whithout caring about any messages that may have 13 | * missed while being disconnected from the socket. 14 | * 15 | * It's possible to deactivate this behavior by using the [[StreamClientOptions]] 16 | * `autoRestartStreamsOnReconnect` and set it to `false`. 
17 | * 18 | * **Important** 19 | * If it's really important to never miss a single message, you will need to also mark 20 | * progress to ensure you reconnect at the right moment. Look at the `never-miss-a-beat.ts` 21 | * example that showcases how to implement a bulletproof data integrity pattern and ensure 22 | * you never miss or skip an important message by mistake. 23 | */ 24 | async function main(): Promise { 25 | const client = createDfuseClient({ 26 | apiKey: DFUSE_API_KEY, 27 | network: DFUSE_API_NETWORK, 28 | }) 29 | 30 | const stream: Stream = await client.streamActionTraces( 31 | { 32 | accounts: "eosio.token", 33 | action_names: "create", 34 | }, 35 | onMessage 36 | ) 37 | 38 | stream.onPostRestart = () => { 39 | console.log("Socket reconnected, your stream(s) have restarted automatically!") 40 | } 41 | 42 | console.log("Socket is now connected.") 43 | 44 | await waitFor(38000) 45 | await stream.close() 46 | 47 | client.release() 48 | } 49 | 50 | function onMessage(message: InboundMessage): void { 51 | if (message.type === "listening") { 52 | // You should see this message a second time when restart of stream occurs 53 | console.log("Stream is now listening.") 54 | return 55 | } 56 | 57 | if (message.type === "action_trace") { 58 | console.log("Streaming transfer.") 59 | } 60 | } 61 | 62 | runMain(main) 63 | -------------------------------------------------------------------------------- /examples/advanced/common/graphql-gql-tag.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, GraphqlResponse } from "@dfuse/client" 3 | import gql from "graphql-tag" 4 | import { print as printGraphqlDocument } from "graphql/language/printer" 5 | 6 | /** 7 | * This example showcases usage of `gql` string template literal 8 | * that parses the GraphQL document before sending it to the server 9 | * so you are sure 
the syntax is correct. This feature depends on 10 | * package `graphql-tag` and `graphql` to be available. The actual 11 | * parsing and turning them into proper JSON can be performed 12 | * at compile time using the appropriate Webpack or Rollup loader. 13 | * 14 | * Those dependencies are totally optional, check out the `examples/basic/gragpql-search-your-latest-transactions.ts` 15 | * file for an example that does not use those depencendies. 16 | */ 17 | type Message = { 18 | searchTransactionsBackward: { 19 | results: { 20 | block: { 21 | num: number 22 | } 23 | trace: { 24 | id: string 25 | matchingActions: { 26 | json: any 27 | }[] 28 | } 29 | }[] 30 | } 31 | } 32 | 33 | async function main(): Promise { 34 | const client = createDfuseClient({ 35 | apiKey: DFUSE_API_KEY, 36 | network: DFUSE_API_NETWORK, 37 | }) 38 | 39 | try { 40 | const response = (await client.graphql(printGraphqlDocument(searchTransferQuery), { 41 | variables: { limit: 10 }, 42 | })) as GraphqlResponse 43 | 44 | console.log(prettifyJson(response)) 45 | } catch (error) { 46 | console.log("An error occurred", error) 47 | } 48 | 49 | client.release() 50 | } 51 | 52 | const searchTransferQuery = gql` 53 | query($limit: Int64!) 
{ 54 | searchTransactionsBackward(query: "receiver:eosio.token action:transfer", limit: $limit) { 55 | results { 56 | block { 57 | num 58 | } 59 | trace { 60 | id 61 | matchingActions { 62 | json 63 | } 64 | } 65 | } 66 | } 67 | } 68 | ` 69 | 70 | runMain(main) 71 | -------------------------------------------------------------------------------- /examples/advanced/common/graphql-never-miss-a-beat.js: -------------------------------------------------------------------------------- 1 | const { runMain } = require("../config") 2 | const { createDfuseClient } = require("@dfuse/client") 3 | const { writeFileSync, readFileSync, existsSync } = require("fs") 4 | const path = require("path") 5 | 6 | global.fetch = require("node-fetch") 7 | global.WebSocket = require("ws") 8 | 9 | /** 10 | * In this example, we will showcase how to implement bulletproof 11 | * data integrity while using the dfuse GraphQL Stream by ensuring 12 | * you never miss a single beat. 13 | * 14 | * This pattern can be used when you want to process messages only 15 | * once, while still ensuring you correctly receive all the blocks, 16 | * transactions and actions you want to process. 17 | * 18 | * We go through an example of how to easily mark the stream progress 19 | * and how the marker is then used when the socket reconnects to 20 | * restart the stream at the exact location you need. 21 | * 22 | * In the example we will implement an action persistence storer, 23 | * having our code restart at the exact correct place a commit had 24 | * occurred. 
25 | */ 26 | const LAST_CURSOR_FILENAME = "last_cursor.txt" 27 | 28 | async function main() { 29 | const client = createDfuseClient({ 30 | apiKey: process.env.DFUSE_API_KEY, 31 | network: process.env.DFUSE_API_NETWORK, 32 | graphqlStreamClientOptions: { 33 | socketOptions: { 34 | reconnectDelayInMs: 250, 35 | }, 36 | }, 37 | }) 38 | 39 | const engine = new Engine(client) 40 | await engine.run() 41 | 42 | client.release() 43 | } 44 | 45 | class Engine { 46 | constructor(client) { 47 | this.client = client 48 | this.pendingActions = [] 49 | this.committedActions = [] 50 | } 51 | 52 | async run() { 53 | console.log("Engine starting") 54 | 55 | /** 56 | * At the engine start, we load back our latest persisted cursor, 57 | * if it exists. This way, we either start fresh because it's the 58 | * very first time to script is run. 59 | * 60 | * Or, already ran but was stopped or crashed while streaming 61 | * data. In this case, our persistence storage (a simple file 62 | * in this demo), will contains our last persisted stored cursor. 63 | */ 64 | let lastPersistedCursor = "" 65 | const lastCursorPath = path.resolve(__dirname, LAST_CURSOR_FILENAME) 66 | if (existsSync(lastCursorPath)) { 67 | lastPersistedCursor = readFileSync(lastCursorPath).toString() 68 | console.log("Read last persisted cursor, start back at cursor " + lastPersistedCursor) 69 | } 70 | 71 | /** 72 | * Two things to note in the operation GraphQL document. 73 | * 74 | * First thing, we use a `$cursor` variable to pass the cursor. This is critical 75 | * for proper functionning of the auto restart feature. On initial start of the 76 | * stream, the `$cursor` variable is used straight from the `variables` options 77 | * of the `graphql` method (which is either empty or the last persisted cursor). 
78 | * However, upon a stream re-connection, the `variables.cursor` is automatically 79 | * updated with the latest marked cursor when provided enabling the stream to 80 | * automatically restart at the exact location it stops, i.e. the `cursor`. 81 | * 82 | * Second thing, we use the `liveMarkerInterval` which with give us a notification each 83 | * 10 blocks. This is useful to update the cursor when your query is low traffic. 84 | * Otherwise, you could restart thousands of blocks behing tip of chain. See 85 | * `onProgress` for further details about cursor saving on this notification. 86 | * 87 | * **Note** The `cursor` value when defined (i.e. not the empty string) always takes 88 | * precedence over `lowBlockNum`/`highBlockNum` boundaries. For example, a query 89 | * `cursor: "", lowBlockNum: 10, highBlockNum: 20` will start from `lowBlockNum` 90 | * then stream up, while `cursor: , lowBlockNum: 10, highBlockNum: 20` 91 | * will start at `` location, maybe transaction #3 within block #15 and 92 | * then reach top boundary and stop there. 93 | */ 94 | const operation = ` 95 | subscription ($cursor: String!) { 96 | searchTransactionsForward(query: "receiver:therealkarma action:transfer", cursor: $cursor, liveMarkerInterval: 10) { 97 | undo cursor 98 | block { id num } 99 | trace { matchingActions { json } } 100 | } 101 | } 102 | ` 103 | 104 | this.stream = await this.client.graphql( 105 | operation, 106 | (message) => { 107 | if (message.type === "data") { 108 | this.onResult(message.data) 109 | } 110 | 111 | if (message.type === "error") { 112 | this.onError(message.errors, message.terminal) 113 | } 114 | 115 | if (message.type === "complete") { 116 | this.onComplete() 117 | } 118 | }, 119 | { 120 | variables: { 121 | /** 122 | * The `cursor` variable is used on initial start of the stream. Afterwards, if the 123 | * stream is marked (via `marker.mark(...)` like in the demo), the marked `cursor` will 124 | * be used upon a reconnection. 
This means `lastPersistedCursor` is only really used 125 | * once and overriden later on by the library. Other variables, if any, are left intact 126 | * and only the cursor is updated to reflect the current marker state. 127 | */ 128 | cursor: lastPersistedCursor, 129 | }, 130 | } 131 | ) 132 | 133 | this.stream.onPostRestart = () => { 134 | console.log() 135 | console.log( 136 | "<============= Stream has reconnected to the socket correctly (at latest `mark()`) =============>" 137 | ) 138 | console.log() 139 | 140 | /** 141 | * When the stream reconnects, we must flush all of the current pending transactions 142 | * as the stream restarts at our last marked block, inclusively. 143 | * 144 | * Since we mark after commit, anything currently in pending was not committed. 145 | * As such, let's flush all pending actions. The dfuse GraphQL Stream API will stream 146 | * them back anyway due to `cursor`. 147 | */ 148 | console.log("Flushing pending action(s) due to refresh") 149 | this.pendingActions = [] 150 | } 151 | 152 | console.log("Stream connected, ready to receive messages") 153 | 154 | // This join the stream, resolving only when the stream completes, which is never in our example 155 | // so we wait forever at that point. 156 | await this.stream.join() 157 | } 158 | 159 | onProgress(blockId, blockNum, cursor) { 160 | console.log(`Live marker received @ ${printBlock(blockId, blockNum)}`) 161 | 162 | // We commit also on progress. The reasoning is that we have now move 10 blocks 163 | // forward through the chain, and we received a corresponding cursor. In the 164 | // commit phase, we will mark the stream with `stream.mark({ cursor })` which 165 | // we ensure that on reconnection, the cursor will start back right at the 166 | // correct progress cursor, this is cost effective and improves slightly the 167 | // reconnection performance as we start closer to the tip of the chain. 
168 | this.commit(cursor) 169 | } 170 | 171 | onResult(message) { 172 | const data = message.searchTransactionsForward 173 | const { id: blockId, num: blockNum } = data.block 174 | 175 | // A message without the trace object being set means we deal with a live marker progress message 176 | if (!data.trace) { 177 | this.onProgress(blockId, blockNum, data.cursor) 178 | return 179 | } 180 | 181 | data.trace.matchingActions.forEach((action) => { 182 | const { from, to, quantity } = action.json 183 | 184 | console.log( 185 | `Pending transfer [${from} -> ${to} ${quantity}] @ ${printBlock(blockId, blockNum)}` 186 | ) 187 | this.pendingActions.push(action.json) 188 | }) 189 | 190 | console.log("Comitting changes after transaction") 191 | this.commit(data.cursor) 192 | } 193 | 194 | onError(errors, terminal) { 195 | console.log("Received an 'error' message", JSON.stringify(errors, nil, " ")) 196 | 197 | if (terminal) { 198 | console.log( 199 | "Received a terminal 'error' message, the stream will automatically reconnects in 250ms" 200 | ) 201 | } 202 | } 203 | 204 | onComplete() { 205 | console.log("Received a 'complete' message, no more results for this stream") 206 | } 207 | 208 | commit(cursor) { 209 | if (this.pendingActions.length > 0) { 210 | console.log(`Committing all actions up to cursor ${cursor}`) 211 | 212 | // Here, in your production code, action would be saved in a database, as well as error handling 213 | this.pendingActions.forEach((action) => this.committedActions.push(action)) 214 | this.pendingActions = [] 215 | } 216 | 217 | /** 218 | * This is one of the most important calls of the example. By marking the stream 219 | * at the right block, upon restarting, the stream will automatically start back 220 | * at this block ensuring you never miss a single action. 221 | */ 222 | this.ensureStream().mark({ cursor }) 223 | 224 | /** 225 | * In a real-word production code, you need to also persist the cursor into 226 | * a persistent storage. 
This is important so when the actual process ends 227 | * or crash, upon restart, you simply load your latest saved `cursor` and 228 | * starts back from that point. 229 | * 230 | * In this demo, we simply save it to a file on the file system. This could be 231 | * easily replaced with a database save, cloud upload, local storage in the 232 | * browser on anything that is persistent across restarts of the script. 233 | */ 234 | writeFileSync(path.resolve(__dirname, LAST_CURSOR_FILENAME), cursor) 235 | } 236 | 237 | ensureStream() { 238 | if (this.stream) { 239 | return this.stream 240 | } 241 | 242 | throw new Error("Stream should be set at this runtime execution point") 243 | } 244 | } 245 | 246 | function printBlock(blockId, blockNum) { 247 | return `${blockId.slice(0, 8)}...${blockId.slice(-8)} (${blockNum})` 248 | } 249 | 250 | runMain(main) 251 | -------------------------------------------------------------------------------- /examples/advanced/common/multiple-active-streams.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | waitFor, 5 | Stream, 6 | dynamicMessageDispatcher, 7 | ActionTraceInboundMessage, 8 | OnStreamMessage, 9 | } from "@dfuse/client" 10 | 11 | type BuyRamBytesData = { 12 | bytes: number 13 | receiver: string 14 | payer: string 15 | } 16 | 17 | type TransferData = { 18 | from: string 19 | to: string 20 | quantity: string 21 | memo: string 22 | } 23 | 24 | /** 25 | * In this example, we showcase how to have multiple active streams 26 | * at the same time. We will listen for `eosio::buyrambytes` action 27 | * on one stream, and for `eosio.token::transfer` notifications performed 28 | * on receiver `eosio.ram`. 
29 | * 30 | * We will also show the differences and impacts of having two separate 31 | * streams, instead of a single one by implementing a single stream that 32 | * listens for both actions in one pass. 33 | * 34 | * You will learn how to have multiple active streams, that multiple 35 | * active streams are independent from each other and that the ordering of messages 36 | * across streams is not guaranteed. 37 | * 38 | * You will also see how to workaround this problem in some circumstances 39 | * by creating a merged stream, filtering required messages from 40 | * a pool of possibilities. Having a single stream will always guarantee the 41 | * ordering of messages. 42 | */ 43 | async function main(): Promise { 44 | const client = createDfuseClient({ 45 | apiKey: DFUSE_API_KEY, 46 | network: DFUSE_API_NETWORK, 47 | }) 48 | 49 | const buyRamData = { accounts: "eosio", action_names: "buyrambytes" } 50 | const buyRamStream: Stream = await client.streamActionTraces( 51 | buyRamData, 52 | dynamicMessageDispatcher({ 53 | listening: onListeningFactory("buy_ram"), 54 | action_trace: onBuyRamAction, 55 | }) 56 | ) 57 | 58 | const ramData = { accounts: "eosio.token", action_names: "transfer", receivers: "eosio.ram" } 59 | const ramStream: Stream = await client.streamActionTraces( 60 | ramData, 61 | dynamicMessageDispatcher({ 62 | listening: onListeningFactory("ram_transfer"), 63 | action_trace: onTransferToEosioRamAction, 64 | }) 65 | ) 66 | 67 | console.log( 68 | "Notice how `Buy RAM` and `RAM cost` happens in random order, due to using 2 independent streams." 
69 | ) 70 | await waitFor(60000) 71 | await buyRamStream.close() 72 | await ramStream.close() 73 | 74 | console.log("") 75 | 76 | const mergedData = { 77 | accounts: "eosio|eosio.token", 78 | action_names: "buyrambytes|transfer", 79 | receivers: "eosio|eosio.token|eosio.ram", 80 | } 81 | const mergedStream: Stream = await client.streamActionTraces( 82 | mergedData, 83 | dynamicMessageDispatcher({ 84 | listening: onListeningFactory("merged"), 85 | action_trace: onMergedAction, 86 | }) 87 | ) 88 | 89 | console.log( 90 | "Notice how `Buy RAM` is always before `RAM cost` thanks to the strict ordering of a single stream." 91 | ) 92 | await waitFor(60000) 93 | await mergedStream.close() 94 | 95 | client.release() 96 | } 97 | 98 | function onListeningFactory(tag: string): OnStreamMessage { 99 | return () => { 100 | console.log(`Stream [${tag}] is now listening.`) 101 | } 102 | } 103 | 104 | function onBuyRamAction(message: ActionTraceInboundMessage): void { 105 | const data = message.data.trace.act.data 106 | console.log(`Buy RAM: ${data.payer} pays ${data.bytes} bytes to ${data.receiver}`) 107 | } 108 | 109 | function onTransferToEosioRamAction(message: ActionTraceInboundMessage): void { 110 | const data = message.data.trace.act.data 111 | console.log(`RAM cost: ${data.from} pays ${data.quantity} for the RAM`) 112 | } 113 | 114 | /** 115 | * This is coming from a stream with multiple possibilities. The default 116 | * logic is that you will receive any action matching one of the various 117 | * combination of forming the three parameters `account/action/receiver`. 118 | * 119 | * In most use cases, you only care about a subset of the 120 | * combinations, as in our example here where we only care about 121 | * two possibilities. 122 | * 123 | * When using a merged stream, you have a strict ordering of the 124 | * action as they appear on the chain, in the correct order. 
So 125 | * buy ram will come in after `eosio.ram` transfer action (as our 126 | * current `newaccount` action is implemented, might be different in 127 | * the future on a different side/siste chain). 128 | */ 129 | function onMergedAction(message: ActionTraceInboundMessage): void { 130 | const action = message.data.trace.act 131 | if (action.account === "eosio" && action.name === "buyrambytes") { 132 | onBuyRamAction(message as ActionTraceInboundMessage) 133 | return 134 | } 135 | 136 | if ( 137 | action.account === "eosio.token" && 138 | action.name === "transfer" && 139 | message.data.trace.receipt.receiver === "eosio.ram" 140 | ) { 141 | onTransferToEosioRamAction(message as ActionTraceInboundMessage) 142 | return 143 | } 144 | 145 | // We don't care about any other possibilities, so let's discard them 146 | } 147 | 148 | runMain(main) 149 | -------------------------------------------------------------------------------- /examples/advanced/common/navigating-forks.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-non-null-assertion */ 2 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 3 | import { 4 | createDfuseClient, 5 | waitFor, 6 | Stream, 7 | DfuseClient, 8 | dynamicMessageDispatcher, 9 | TableDeltaInboundMessage, 10 | TableSnapshotInboundMessage, 11 | } from "@dfuse/client" 12 | 13 | /** 14 | * In this example, we will showcase how to navigate microforks 15 | * by correclty processing the new/undo/redo steps, ensuring that you have 16 | * up-to-date data against the current longest active chain on 17 | * the network. 18 | * 19 | * Microforks can happen in many different scenarios and need 20 | * to be handled correctly to ensure up-to-date information is 21 | * available. 
22 | * 23 | * To learn more about microforks, check out 24 | * https://www.eoscanada.com/en/microforks-everything-you-need-to-know-about-microforks-on-an-eos-blockchain 25 | * for global base knowledge about them. 26 | * 27 | * The dfuse Stream API is able to send you undo/redo steps when 28 | * some blocks are not part of the longest chain anymore (`undo`) or 29 | * in the opposite, become part of the longest chain again (`redo`). 30 | * 31 | * In this example, we keep a list of the 5 last updates to the 32 | * `eosio/global/eosio` table. Upon each `new` step, the update is pushed 33 | * on the stack (last item being popped first if the stack is at max capacity 34 | * of 5 elements). On an `undo` step, we pop the top element from the 35 | * top of the stack. On a `redo` step, we push it back on top, applying 36 | * the same rule as with a `new` step. 37 | * 38 | * @see https://docs.dfuse.io/#websocket-based-api-navigating-forks 39 | */ 40 | async function main(): Promise { 41 | const client = createDfuseClient({ 42 | apiKey: DFUSE_API_KEY, 43 | network: DFUSE_API_NETWORK, 44 | }) 45 | 46 | const engine = new Engine(client) 47 | await engine.start() 48 | 49 | await waitFor(50000) 50 | await engine.stop() 51 | 52 | client.release() 53 | } 54 | 55 | // Only retrieve the actual fields we need, the full row is bigger than that 56 | type EosioGlobalRow = { 57 | total_ram_stake: number 58 | total_unpaid_blocks: number 59 | } 60 | 61 | class Engine { 62 | private client: DfuseClient 63 | private stream?: Stream 64 | 65 | private updates: EosioGlobalRow[] = [] 66 | 67 | constructor(client: DfuseClient) { 68 | this.client = client 69 | } 70 | 71 | public async start(): Promise { 72 | console.log("Engine starting") 73 | this.stream = await this.client.streamTableRows( 74 | { 75 | code: "eosio", 76 | table: "global", 77 | scope: "eosio", 78 | }, 79 | dynamicMessageDispatcher({ 80 | listening: this.onListening, 81 | table_delta: this.onTableDelta, 82 | table_snapshot:
this.onTableSnapshot, 83 | progress: this.onProgress, 84 | }), 85 | { 86 | listen: true, 87 | fetch: true, 88 | // We use progress to display the current state of the table at a regular interval 89 | with_progress: 50, 90 | } 91 | ) 92 | } 93 | 94 | private onListening = (): void => { 95 | console.log("Stream is now listening for action(s)") 96 | } 97 | 98 | private onProgress = (): void => { 99 | printUpdates(this.updates) 100 | } 101 | 102 | private onTableSnapshot = (message: TableSnapshotInboundMessage): void => { 103 | console.log("Initializing first update to initial state of table") 104 | 105 | // We expect a single row to exist on this table 106 | this.updates = [message.data.rows[0].json!] 107 | 108 | printUpdates(this.updates, "") 109 | } 110 | 111 | private onTableDelta = (message: TableDeltaInboundMessage): void => { 112 | switch (message.data.step) { 113 | case "new": 114 | this.pushUpdate(message.data.dbop.new!.json!) 115 | break 116 | 117 | case "undo": 118 | console.log("Ohhhh dealing with undo...") 119 | this.popUpdate() 120 | break 121 | 122 | case "redo": 123 | console.log("Ohhhh dealing with redo...") 124 | this.pushUpdate(message.data.dbop.new!.json!) 
125 | break 126 | } 127 | } 128 | 129 | public async stop(): Promise { 130 | await this.ensureStream().close() 131 | 132 | console.log("Current last 5 updates") 133 | printUpdates(this.updates) 134 | } 135 | 136 | private popUpdate(): void { 137 | if (this.updates.length >= 1) { 138 | this.updates = [...this.updates.slice(0, 4)] 139 | } 140 | } 141 | 142 | private pushUpdate(update: EosioGlobalRow): void { 143 | if (this.updates.length >= 5) { 144 | this.updates = [...this.updates.slice(1), update] 145 | } else { 146 | this.updates = [...this.updates, update] 147 | } 148 | } 149 | 150 | private ensureStream(): Stream { 151 | if (this.stream) { 152 | return this.stream 153 | } 154 | 155 | throw new Error("Stream should be set at this runtime execution point") 156 | } 157 | } 158 | 159 | function printUpdates(updates: EosioGlobalRow[], header?: string): void { 160 | if (header !== "") { 161 | console.log("5 last updates (or less)") 162 | } 163 | 164 | if (!updates || updates.length <= 0) { 165 | console.log("Nothing yet...") 166 | return 167 | } 168 | 169 | updates.forEach((update) => console.log(`- ${printDelta(update)}`)) 170 | console.log() 171 | } 172 | 173 | function printDelta(row: EosioGlobalRow): string { 174 | return `${row.total_ram_stake} / ${row.total_unpaid_blocks}` 175 | } 176 | 177 | runMain(main) 178 | -------------------------------------------------------------------------------- /examples/advanced/common/never-miss-a-beat.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | waitFor, 5 | Stream, 6 | DfuseClient, 7 | dynamicMessageDispatcher, 8 | ProgressInboundMessage, 9 | ActionTraceInboundMessage, 10 | Action, 11 | } from "@dfuse/client" 12 | 13 | /** 14 | * In this example, we will showcase how to implement bulletproof 15 | * data integrity while using the dfuse Stream by ensuring you never 16 | * 
miss a single beat. 17 | * 18 | * This pattern can be used when you want to process messages only 19 | * once, while still ensuring you correctly receive all the blocks, 20 | * transactions and actions you want to process. 21 | * 22 | * We go through an example of how to easily mark the stream progress 23 | * and how the marker is then used when the socket reconnects to 24 | * restart the stream at the exact location you need. 25 | * 26 | * In the example we will implement an action persistence storer, 27 | * having our code restart at the exact correct place a commit had 28 | * occurred. 29 | * 30 | * @see https://docs.dfuse.io/#websocket-based-api-never-missing-a-beat 31 | */ 32 | async function main(): Promise { 33 | const client = createDfuseClient({ 34 | apiKey: DFUSE_API_KEY, 35 | network: DFUSE_API_NETWORK, 36 | streamClientOptions: { 37 | socketOptions: { 38 | reconnectDelayInMs: 250, 39 | }, 40 | }, 41 | }) 42 | 43 | const engine = new Engine(client) 44 | await engine.start() 45 | 46 | await waitFor(50000) 47 | await engine.stop() 48 | 49 | client.release() 50 | } 51 | 52 | type KarmaTransfer = { 53 | from: string 54 | to: string 55 | quantity: string 56 | memo: string 57 | } 58 | 59 | class Engine { 60 | private client: DfuseClient 61 | private stream?: Stream 62 | 63 | private pendingActions: Action[] = [] 64 | private lastCommittedBlockNum = 0 65 | 66 | private committedActions: Action[] = [] 67 | 68 | constructor(client: DfuseClient) { 69 | this.client = client 70 | } 71 | 72 | public async start(): Promise { 73 | const dispatcher = dynamicMessageDispatcher({ 74 | listening: this.onListening, 75 | action_trace: this.onAction, 76 | progress: this.onProgress, 77 | }) 78 | 79 | console.log("Engine starting") 80 | this.stream = await this.client.streamActionTraces( 81 | { 82 | accounts: "therealkarma", 83 | action_names: "transfer", 84 | }, 85 | dispatcher, 86 | { 87 | // You can use the `with_progress` to be sure to commit 88 | // actions at least each 10 
blocks. This is useful if your stream 89 | // is low traffic so you don't need to wait until the next 90 | // action to commit all changes. 91 | with_progress: 10, 92 | } 93 | ) 94 | 95 | this.stream.onPostRestart = () => { 96 | console.log() 97 | console.log( 98 | "<============= Stream has reconnected to the socket correctly (at latest `mark()`) =============>" 99 | ) 100 | console.log() 101 | 102 | // Upon a reconnection, we need to clear previously accumulated actions 103 | this.flushPending() 104 | } 105 | 106 | console.log("Stream connected, ready to receive messages") 107 | } 108 | 109 | private onListening = (): void => { 110 | console.log("Stream is now listening for action(s)") 111 | } 112 | 113 | private onProgress = (message: ProgressInboundMessage): void => { 114 | const { block_id, block_num } = message.data 115 | 116 | /** 117 | * Once a progress message is seen, it means we've seen all messages for 118 | * blocks prior it, so let's commit until this point. 119 | */ 120 | console.log() 121 | console.log("Committing changes due to seeing a message from a progress message") 122 | this.commit(block_id, block_num) 123 | } 124 | 125 | private onAction = (message: ActionTraceInboundMessage): void => { 126 | /** 127 | * Once a message from a block ahead of the last committed block is seen, 128 | * commit all changes up to this point. 
129 | */ 130 | const { block_id, block_num } = message.data 131 | if (block_num > this.lastCommittedBlockNum) { 132 | console.log() 133 | console.log( 134 | "Comitting changes due to seeing a message from a block ahead of our last committed block" 135 | ) 136 | this.commit(block_id, block_num) 137 | } 138 | 139 | const action = message.data.trace.act 140 | const { from, to, quantity } = action.data 141 | 142 | console.log( 143 | `Pending transfer [${from} -> ${to} ${quantity}] @ ${printBlock(block_id, block_num)}` 144 | ) 145 | this.pendingActions.push(message.data.trace.act) 146 | } 147 | 148 | private commit(blockId: string, blockNum: number): void { 149 | console.log(`Committing all actions up to block ${printBlock(blockId, blockNum)}`) 150 | 151 | if (this.pendingActions.length > 0) { 152 | // Here, in your production code, action would be saved in a database, as well as error handling 153 | this.pendingActions.forEach((action) => this.committedActions.push(action)) 154 | } 155 | 156 | console.log(`Bumping last committed block and clearing pending actions`) 157 | this.pendingActions = [] 158 | this.lastCommittedBlockNum = blockNum 159 | 160 | /** 161 | * This is one of the most important calls of the example. By marking the stream 162 | * at the right block, upon restarting, the stream will automatically start back 163 | * at this block ensuring you never miss a single action. 164 | */ 165 | console.log(`Marking stream up to block ${printBlock(blockId, blockNum)}`) 166 | this.ensureStream().mark({ atBlockNum: blockNum }) 167 | 168 | /** 169 | * In a real-word production code, you would also need to persist the 170 | * `this.lastCommittedBlockNum` value to ensure that upon a process 171 | * restart, you start back from this exact value. 172 | */ 173 | 174 | console.log("") 175 | } 176 | 177 | /** 178 | * When the stream reconnects, we must flush all of the current pending transactions 179 | * as the stream restarts at our last marked block, inclusively. 
180 | * 181 | * Since we mark after commit, anything currently in pending was not committed. 182 | * As such, let's flush all pending actions. The dfuse Stream API will stream them back. 183 | */ 184 | public flushPending(): void { 185 | console.log("Flushing pending action(s) due to refresh") 186 | this.pendingActions = [] 187 | } 188 | 189 | public async stop(): Promise { 190 | await this.ensureStream().close() 191 | 192 | console.log("Committed actions") 193 | this.committedActions.forEach((action) => { 194 | const { from, to, quantity } = action.data 195 | console.log(`- Commit transfer [${from} -> ${to} ${quantity}]`) 196 | }) 197 | } 198 | 199 | private ensureStream(): Stream { 200 | if (this.stream) { 201 | return this.stream 202 | } 203 | 204 | throw new Error("Stream should be set at this runtime execution point") 205 | } 206 | } 207 | 208 | function printBlock(blockId: string, blockNum: number): string { 209 | return `${blockId.slice(0, 8)}...${blockId.slice(-8)} (${blockNum})` 210 | } 211 | 212 | runMain(main) 213 | -------------------------------------------------------------------------------- /examples/advanced/common/nodejs-fetch-and-websocket-options.ts: -------------------------------------------------------------------------------- 1 | import { createDfuseClient, InboundMessage, InboundMessageType, waitFor } from "@dfuse/client" 2 | import { IncomingMessage } from "http" 3 | /** 4 | * In this example, we will showcase how to avoid polluting the global 5 | * scope to configure the `fetch` and `WebSocket` values. 6 | * 7 | * Indeed, polluting the global scope is not a recommended practicte, 8 | * especially when alternatives are present to handle those cases. 9 | * 10 | * We will configure the `DfuseClient` instance when creating it via 11 | * the factory to pass the `fetch` and `WebSocket options directly. 12 | * 13 | * **Note** This example completely avoids importing `../config` to ensure 14 | * we really do not pollute the global scope. 
Ensures you have the 15 | * appropriate environment variables set. 16 | */ 17 | 18 | // We add the example imports here to make the example clearer, in your 19 | // own code, group them with yours. 20 | import nodeFetch from "node-fetch" 21 | import WebSocketClient from "ws" 22 | import { runMain } from "../../config" 23 | 24 | async function main(): Promise { 25 | const client = createDfuseClient({ 26 | apiKey: process.env.DFUSE_API_KEY || "", 27 | network: process.env.DFUSE_API_NETWORK || "mainnet.eos.dfuse.io", 28 | httpClientOptions: { 29 | fetch: nodeFetch, 30 | }, 31 | graphqlStreamClientOptions: { 32 | socketOptions: { 33 | // The WebSocket factory used for GraphQL stream must use this special protocols set 34 | // We intend on making the library handle this for you automatically in the future, 35 | // for now, it's required otherwise, the GraphQL will not connect correctly. 36 | webSocketFactory: (url) => webSocketFactory(url, ["graphql-ws"]), 37 | }, 38 | }, 39 | streamClientOptions: { 40 | socketOptions: { 41 | webSocketFactory: (url) => webSocketFactory(url), 42 | }, 43 | }, 44 | }) 45 | 46 | const onMessage = (message: InboundMessage): void => { 47 | if (message.type === InboundMessageType.LISTENING) { 48 | console.log("Stream is now listening.") 49 | } 50 | } 51 | 52 | const stream = await client.streamActionTraces( 53 | { 54 | accounts: "eosio.token", 55 | action_names: "issue", 56 | }, 57 | onMessage 58 | ) 59 | 60 | console.log("Socket is now connected.") 61 | await waitFor(35000) 62 | await stream.close() 63 | 64 | client.release() 65 | } 66 | 67 | /** 68 | * The factory receives the full resolved URL, API token included, 69 | * of the remote endpoint to connect to. 70 | * 71 | * When using the Node.js enviroment in your own 72 | * factory, it is here that you can customize the WebSocket client instance. 
73 | In the factory below, we jump the `maxPayload` size to 10 MiB (matching the `maxPayload` value actually set in the code below), 74 | * which can be useful when streaming really big tables like the 75 | * `voters` table on EOS. 76 | * 77 | * We also add error logging for errors occurring at the HTTP Upgrade 78 | * level before turning the connection into a WebSocket connection. This 79 | * can happen when authorization happens with your API token. 80 | * 81 | * **Note** Don't try to override the `onOpen`, `onClose`, `onError` 82 | * and `onMessage` handler, they are overwritten by the `Socket` instance 83 | * for its own usage. 84 | * 85 | * **Important Web Browser Usage Notice** 86 | * We are in a Node.js context here, the `WebSocketClient` is a 87 | * Node.js implementation of WebSocket Protocol. It does not have 88 | * quite the same API interface. The configuration done below 89 | * will not work in a Browser environment! Check W3C Browser 90 | * WebSocket API to see what is accepted as its second argument. 91 | * 92 | * @see https://developer.mozilla.org/en-US/docs/Web/API/WebSocket/WebSocket#Parameters 93 | */ 94 | async function webSocketFactory(url: string, protocols: string[] = []): Promise { 95 | const webSocket = new WebSocketClient(url, protocols, { 96 | handshakeTimeout: 30 * 1000, // 30s 97 | maxPayload: 10 * 1024 * 1024, // 10 MiB (max accepted value from the server) 98 | }) 99 | 100 | const onUpgrade = (response: IncomingMessage): void => { 101 | console.log("Socket upgrade response status code.", response.statusCode) 102 | 103 | // You need to remove the listener at some point since this factory 104 | // is called at each reconnection with the remote endpoint!
105 | webSocket.removeListener("upgrade", onUpgrade) 106 | } 107 | 108 | webSocket.on("upgrade", onUpgrade) 109 | 110 | return webSocket 111 | } 112 | 113 | runMain(main) 114 | -------------------------------------------------------------------------------- /examples/advanced/eosio/has-account.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, DfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | console.log("Does 'eoscanadacom' exist?", await hasAccount(client, "eoscanadacom")) 8 | console.log("Does 'eosblahblah' exist?", await hasAccount(client, "eosblahblah")) 9 | 10 | client.release() 11 | } 12 | 13 | async function hasAccount(client: DfuseClient, account: string): Promise { 14 | const response = await client.stateTable("eosio", account, "userres") 15 | 16 | // If we get at least one row, the account exists. Otherwise, it doesn't. 17 | return response.rows.length > 0 18 | } 19 | 20 | runMain(main) 21 | -------------------------------------------------------------------------------- /examples/advanced/eosio/search-reconstruct-tree.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK, prettifyJson } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | /** 5 | * This example shows how you can reconstruct the EOSIO execution tree 6 | * when using the dfuse GraphQL Search endpoint. 7 | * 8 | * You will be using `executedActions`, `closestUnnotifiedAncestorAction` 9 | * and the `seq` field. The `executedActions` gives the flat list ordered 10 | * by execution order. 
The `closestUnnotifiedAncestorAction` gives the 11 | * execution parent of the action and finally, the `seq` acts as an id 12 | * for each action so you can map the children to its parent easily. 13 | * 14 | * The algorithm is to simply loop over all executed actions, and add it 15 | * to its parent if `closestUnnotifiedAncestorAction` is present or as 16 | * a top level actions if not. 17 | * 18 | * The example below show the actual implementation of the algorithm 19 | * outlined above. 20 | */ 21 | async function main(): Promise { 22 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 23 | 24 | const operation = `{ 25 | searchTransactionsBackward(query: "action:bet account:diceproxy.bg", limit: 1, lowBlockNum: 78820587, highBlockNum: 78820589) { 26 | results { 27 | trace { 28 | id 29 | executedActions { 30 | seq 31 | receiver 32 | account 33 | name 34 | isNotify 35 | isMatchingQuery 36 | closestUnnotifiedAncestorAction { 37 | seq 38 | } 39 | } 40 | } 41 | } 42 | } 43 | }` 44 | 45 | try { 46 | const response = await client.graphql(operation) 47 | const results = response.data.searchTransactionsBackward.results || [] 48 | if (results.length <= 0) { 49 | return 50 | } 51 | 52 | const trace = results[0].trace 53 | const idToActionMap: Record = {} 54 | const topLevelActions: any[] = [] 55 | 56 | trace.executedActions.forEach((action: any) => { 57 | idToActionMap[action.seq] = action 58 | if (action.closestUnnotifiedAncestorAction) { 59 | const parentAction = idToActionMap[action.closestUnnotifiedAncestorAction.seq] 60 | if (!parentAction.inline_traces) { 61 | parentAction.inline_traces = [] 62 | } 63 | 64 | parentAction.inline_traces.push(action) 65 | } else { 66 | topLevelActions.push(action) 67 | } 68 | 69 | delete action.seq 70 | delete action.closestUnnotifiedAncestorAction 71 | }) 72 | 73 | console.log(prettifyJson(topLevelActions)) 74 | } catch (error) { 75 | console.log("An error occurred", error) 76 | } 77 | 78 | 
client.release() 79 | } 80 | 81 | runMain(main) 82 | -------------------------------------------------------------------------------- /examples/advanced/eosio/stream-only-irreversible-events.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | InboundMessage, 5 | InboundMessageType, 6 | waitFor, 7 | ActionTraceData, 8 | } from "@dfuse/client" 9 | 10 | /** 11 | * In this example, you will use the `irreversible_only` option on your 12 | * stream so that you only receive a notification once the data has been deemed 13 | * irreversible by the chain. 14 | * 15 | * **Note** Only `streamActionTraces` will correctly support the common 16 | * `irreversible_only` flag for now. If you try on anything else, you 17 | * will still receive reversible notifications, be aware! 18 | */ 19 | async function main(): Promise { 20 | const client = createDfuseClient({ 21 | apiKey: DFUSE_API_KEY, 22 | network: DFUSE_API_NETWORK, 23 | }) 24 | 25 | const stream = await client.streamActionTraces( 26 | { accounts: "eosio.token", action_names: "transfer" }, 27 | onMessage, 28 | { 29 | /** 30 | * Request to only obtain irreversible notifications by specifying this 31 | * common flag and setting its value to true. 
32 | */ 33 | irreversible_only: true, 34 | } 35 | ) 36 | 37 | await waitFor(5000) 38 | await stream.close() 39 | 40 | client.release() 41 | } 42 | 43 | function onMessage(message: InboundMessage): void { 44 | if (message.type !== InboundMessageType.ACTION_TRACE) { 45 | return 46 | } 47 | 48 | const { from, to, quantity, memo } = (message.data as ActionTraceData).trace.act.data 49 | console.log(`Irreversible transfer [${from} -> ${to}, ${quantity}] (${memo})`) 50 | } 51 | 52 | runMain(main) 53 | -------------------------------------------------------------------------------- /examples/advanced/eosio/track-ram-usage.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | SearchTransactionRow, 5 | RamOp, 6 | DfuseClient, 7 | flattenActionTraces, 8 | waitFor, 9 | } from "@dfuse/client" 10 | 11 | const account = "eoscanadacom" 12 | const resultPerPage = 50 13 | const maxResults = 500 14 | 15 | /** 16 | * In this example, we showcase how to use a cursor to paginate through 17 | * multiple pages of search results. 18 | * 19 | * At the same time, we go over how to work with the search result rows 20 | * to obtain only the matching actions of the transaction, instead of dealing 21 | * with all the actions in the transaction. 22 | * 23 | * This is usually what most people expect to have from the API (the 24 | * actions of the transaction that matched the search criteria). 
25 | */ 26 | async function main(): Promise { 27 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 28 | const query = `(ram.released:${account} OR ram.consumed:${account})` 29 | 30 | try { 31 | let resultCount = 0 32 | let runningTotal = 0 33 | let cursor = "" 34 | let pageCount = 0 35 | 36 | while (resultCount <= maxResults) { 37 | const page = await fetchPage(client, query, cursor) 38 | pageCount++ 39 | 40 | resultCount += page.transactions.length 41 | cursor = page.cursor 42 | 43 | page.transactions.forEach((result: SearchTransactionRow) => { 44 | console.log(`- https://${inferEosqHost()}/tx/${result.lifecycle.id}`) 45 | 46 | /** 47 | * A transaction is composed of a deep trace of action traces 48 | * representing the execution of the various transaction's actions. 49 | * 50 | * In dfuse API, there is sometimes the need to assiocate some data 51 | * with a particular action trace. For example, a database operation 52 | * backlink through an `action_idx` property to the actual action 53 | * trace that generates this operation. 54 | * 55 | * The `action_idx` is not easy to work with at the transaction 56 | * level since the dfuse API consumer needs to perform a depth-first 57 | * traversal of the execution tree to determine the actual action. 58 | * 59 | * To ease that process, the helper `flattenActionTraces` can be used 60 | * to get a flattened list of action traces, where each index of the list 61 | * map to the correct `action_idx` value. 62 | * 63 | * This can than later be used to easily find the action representing 64 | * a given `action_idx` value. See below for usage with RAM op. 65 | */ 66 | const flattenedActionTraces = flattenActionTraces(result.lifecycle) 67 | 68 | // /** 69 | // * Using dfuse Search API, you received the full transaction as a result. 70 | // * However a transaction may contain 10 different actions, 71 | // * while only 2 out of the 10 actually matches the query. 
72 | // * 73 | // * The `matchingActionTraces` helper can easily be used to extract 74 | // * only the matching action traces out of a `SearchTransactionRow` 75 | // * result. 76 | // */ 77 | // const actionTraces = matchingActionTraces(result) 78 | 79 | // eslint-disable-next-line @typescript-eslint/no-non-null-assertion 80 | result.lifecycle.ramops!.forEach((ramOp: RamOp) => { 81 | // FIXME: Right logic for RAM op! 82 | 83 | runningTotal += ramOp.delta 84 | 85 | const ramText = `${ramOp.op} ${ramOp.delta}` 86 | const action = flattenedActionTraces[ramOp.action_idx] 87 | const actionText = `${action.receipt.receiver}/${action.act.account}:${action.act.name}` 88 | 89 | console.log(` * ${ramText} @ ${actionText} (${runningTotal})`) 90 | }) 91 | 92 | console.log() 93 | }) 94 | 95 | /** 96 | * To determine the end of current pages through dfuse Search API, you 97 | * must conditionally check either if the cursor returns empty or 98 | * if the search result count of this request is lower than our 99 | * expected results per page. 100 | * 101 | * **Caveat** In an ascending search, the dfuse Search stops at 102 | * at current time Irreversible Block or Head Block, but more blocks 103 | * might come in the future. This means that even if there is less 104 | * results than expected per page, future blocks might add more 105 | * results, hence the `cursor` not returning as empty. 106 | * 107 | * **Note** Doing a descending search will yield an empty 108 | * string cursor at some point because you will reach the Genesis 109 | * Block of the chain (Block #1). 
110 | */ 111 | if (page.cursor === "" || page.transactions.length < resultPerPage) { 112 | // No more pages, stop page fetch 113 | break 114 | } 115 | 116 | console.log(`RAM Running Total (${resultCount} transactions included) is ${runningTotal}`) 117 | console.log(`Fetching next page (#${pageCount + 1}) in 5s ...`) 118 | await waitFor(5000) 119 | } 120 | 121 | console.log(`Running total is ${runningTotal}`) 122 | console.log(`Completed after reading ${pageCount} page(s)`) 123 | } catch (error) { 124 | console.log("An error occurred", error) 125 | } 126 | 127 | client.release() 128 | } 129 | 130 | type Page = { 131 | cursor: string 132 | transactions: SearchTransactionRow[] 133 | } 134 | 135 | /** 136 | * Fetches a single page of results for a particular query, starting 137 | * back at `cursor` if present and returning the matching actions out 138 | * of our results. 139 | */ 140 | async function fetchPage(client: DfuseClient, query: string, cursor?: string): Promise { 141 | const searchResult = await client.searchTransactions(query, { 142 | limit: resultPerPage, 143 | cursor, 144 | }) 145 | 146 | return { 147 | cursor: searchResult.cursor, 148 | transactions: searchResult.transactions || [], 149 | } 150 | } 151 | 152 | function inferEosqHost(): string { 153 | if (DFUSE_API_NETWORK === "mainnet") { 154 | return "eosq.app" 155 | } 156 | 157 | if (["jungle", "kylin", "worbli"].includes(DFUSE_API_NETWORK)) { 158 | return `${DFUSE_API_NETWORK}.eosq.app` 159 | } 160 | 161 | return `${DFUSE_API_NETWORK}` 162 | } 163 | 164 | runMain(main) 165 | -------------------------------------------------------------------------------- /examples/basic/eosio/dfuse-community-edition.ts: -------------------------------------------------------------------------------- 1 | import { runMain } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | type AccountTableRow = { 5 | balance: string 6 | } 7 | 8 | async function main(): Promise { 9 | // Here what you need 
to connect to the dfuse Community Edition hosted by EOS Nation 10 | const client = createDfuseClient({ 11 | network: "kylin.dfuse.eosnation.io", 12 | authentication: false, 13 | }) 14 | 15 | try { 16 | const response = await client.stateTable("eosio.token", "eosio", "accounts") 17 | const balance = response.rows[0].json?.balance 18 | const atBlockNum = response.up_to_block_num 19 | 20 | console.log(`Your balance at block ${atBlockNum} is ${balance}`) 21 | } catch (error) { 22 | console.log("An error occurred", error) 23 | } 24 | 25 | client.release() 26 | } 27 | 28 | runMain(main) 29 | -------------------------------------------------------------------------------- /examples/basic/eosio/dfuse-for-eosio.ts: -------------------------------------------------------------------------------- 1 | import { runMain } from "../../config" 2 | import { createDfuseClient, waitFor } from "@dfuse/client" 3 | 4 | /** 5 | * This shows how to configure the `client-js` instance when working with 6 | * a locally dfuse instance running through `dfuseeos` binary using the 7 | * standard configuration. 8 | * 9 | * This example assumes you have dfuse for EOSIO running 10 | * (https://github.com/dfuse-io/dfuse-eosio#getting-started) using the standard 11 | * configuration. 12 | */ 13 | async function main(): Promise { 14 | const client = createDfuseClient({ 15 | network: "localhost:8080", 16 | authentication: false, 17 | secure: false, 18 | }) 19 | 20 | const streamTransfer = `subscription($cursor: String!) 
{ 21 | searchTransactionsForward(query: "receiver:eosio action:onblock", cursor: $cursor) { 22 | undo cursor 23 | trace { 24 | matchingActions { json } 25 | } 26 | } 27 | }` 28 | 29 | const stream = await client.graphql(streamTransfer, (message) => { 30 | if (message.type === "error") { 31 | console.log("An error occurred", message.errors, message.terminal) 32 | } 33 | 34 | if (message.type === "data") { 35 | const data = message.data.searchTransactionsForward 36 | const actions = data.trace.matchingActions 37 | 38 | actions.forEach(({ json }: any) => { 39 | const { 40 | header: { timestamp: timeSlot, producer }, 41 | } = json 42 | console.log(`Action [${producer} @ ${timeSlot}]`) 43 | }) 44 | 45 | stream.mark({ cursor: data.cursor }) 46 | } 47 | 48 | if (message.type === "complete") { 49 | console.log("Stream completed") 50 | } 51 | }) 52 | 53 | await waitFor(5000) 54 | await stream.close() 55 | 56 | client.release() 57 | } 58 | 59 | runMain(main) 60 | -------------------------------------------------------------------------------- /examples/basic/eosio/search-your-latest-transactions-graphql.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, DFUSE_API_NETWORK, prettifyJson } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | const operation = `subscription($cursor: String!) 
{ 8 | searchTransactionsBackward(query:"receiver:consortiumlv action:xschedule db.table:kysimused" lowBlockNum: -5000000, highBlockNum: -1, cursor: $cursor) { 9 | cursor 10 | trace { id matchingActions { json, dbOps(code: "consortiumlv"){operation, oldJSON {object error}} } } 11 | } 12 | }` 13 | 14 | const stream = await client.graphql(operation, (message) => { 15 | if (message.type === "data") { 16 | const { 17 | trace: { matchingActions }, 18 | } = message.data.searchTransactionsBackward 19 | matchingActions.forEach((action: any) => { 20 | console.log(prettifyJson(action.dbOps)) 21 | }) 22 | } 23 | 24 | if (message.type === "error") { 25 | console.log("An error occurred", message.errors, message.terminal) 26 | } 27 | 28 | if (message.type === "complete") { 29 | console.log("Completed") 30 | } 31 | }) 32 | 33 | await stream.join() 34 | await client.release() 35 | } 36 | 37 | main().catch((error) => console.log("Unexpected error", error)) 38 | -------------------------------------------------------------------------------- /examples/basic/eosio/search-your-latest-transactions.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, SearchTransactionRow } from "@dfuse/client" 3 | 4 | const account = "eoscanadacom" 5 | 6 | async function main(): Promise { 7 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 8 | 9 | try { 10 | const response = await client.searchTransactions(`auth:${account}`, { 11 | limit: 10, 12 | sort: "desc", 13 | }) 14 | 15 | console.log() 16 | console.log(`Your latest 10 transactions`) 17 | 18 | if (!response.transactions || response.transactions.length <= 0) { 19 | console.log("Oups nothing found") 20 | return 21 | } 22 | 23 | const transactions = response.transactions || [] 24 | transactions.forEach((result: SearchTransactionRow) => { 25 | console.log( 26 | `- 
${buildEosqLink(result.lifecycle.id)} (Block #${ 27 | result.lifecycle.execution_trace?.block_num 28 | })` 29 | ) 30 | }) 31 | console.log() 32 | } catch (error) { 33 | console.log("An error occurred", error) 34 | } 35 | 36 | client.release() 37 | } 38 | 39 | function buildEosqLink(transactionId: string): string { 40 | let suffix = "" 41 | if (["jungle", "kylin", "worbli"].includes(DFUSE_API_NETWORK)) { 42 | suffix = `.${DFUSE_API_NETWORK}` 43 | } 44 | 45 | return `https://${suffix}eosq.app/tx/${transactionId}` 46 | } 47 | 48 | runMain(main) 49 | -------------------------------------------------------------------------------- /examples/basic/eosio/state-check-balance.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, DfuseClient } from "@dfuse/client" 3 | 4 | const account = "eoscanadacom" 5 | const blockNum = 42_500_250 6 | 7 | async function main(): Promise { 8 | const client = createDfuseClient({ 9 | apiKey: DFUSE_API_KEY, 10 | network: DFUSE_API_NETWORK, 11 | }) 12 | 13 | try { 14 | const { balance: atBalance, blockNum: atBlockNum } = await fetchBalance(client, blockNum) 15 | const { balance: currentBalance, blockNum: currentBlockNum } = await fetchBalance(client) 16 | 17 | console.log(`Your balance at block ${atBlockNum} was ${atBalance}`) 18 | console.log(`Your current balance at block ${currentBlockNum} is ${currentBalance}`) 19 | } catch (error) { 20 | console.log("An error occurred", error) 21 | } 22 | 23 | client.release() 24 | } 25 | 26 | async function fetchBalance( 27 | client: DfuseClient, 28 | atBlock?: number 29 | ): Promise<{ balance?: string; blockNum: number }> { 30 | const options = { blockNum: atBlock === undefined ? 
undefined : atBlock } 31 | const response = await client.stateTable( 32 | "eosio.token", 33 | account, 34 | "accounts", 35 | options 36 | ) 37 | 38 | return { balance: response.rows[0].json?.balance, blockNum: response.up_to_block_num || blockNum } 39 | } 40 | 41 | type AccountTableRow = { 42 | balance: string 43 | } 44 | 45 | runMain(main) 46 | -------------------------------------------------------------------------------- /examples/basic/eosio/stream-global-state-ws.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | InboundMessage, 5 | InboundMessageType, 6 | waitFor, 7 | TableDeltaData, 8 | } from "@dfuse/client" 9 | 10 | async function main(): Promise { 11 | const client = createDfuseClient({ 12 | apiKey: DFUSE_API_KEY, 13 | network: DFUSE_API_NETWORK, 14 | }) 15 | 16 | const stream = await client.streamTableRows( 17 | { code: "eosio", scope: "eosio", table: "global" }, 18 | (message: InboundMessage) => { 19 | if (message.type === InboundMessageType.TABLE_DELTA) { 20 | const { dbop, block_num } = message.data as TableDeltaData 21 | const { total_ram_stake, total_unpaid_blocks } = dbop.new?.json 22 | 23 | console.log( 24 | `Global state change @ #${block_num} [Total RAM Stake ${total_ram_stake}, Total Unpaid Block Count ${total_unpaid_blocks}]` 25 | ) 26 | } 27 | } 28 | ) 29 | 30 | await waitFor(5000) 31 | await stream.close() 32 | 33 | client.release() 34 | } 35 | 36 | runMain(main) 37 | -------------------------------------------------------------------------------- /examples/basic/eosio/stream-transfers-graphql.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 
6 | apiKey: DFUSE_API_KEY, 7 | network: DFUSE_API_NETWORK, 8 | }) 9 | 10 | const streamTransfer = `subscription($cursor: String!) { 11 | searchTransactionsForward(query: "receiver:eosio.token action:transfer", cursor: $cursor) { 12 | undo cursor 13 | trace { 14 | matchingActions { json } 15 | } 16 | } 17 | }` 18 | 19 | const stream = await client.graphql(streamTransfer, (message) => { 20 | if (message.type === "error") { 21 | console.log("An error occurred", message.errors, message.terminal) 22 | } 23 | 24 | if (message.type === "data") { 25 | const data = message.data.searchTransactionsForward 26 | const actions = data.trace.matchingActions 27 | 28 | actions.forEach(({ json }: any) => { 29 | const { from, to, quantity, memo } = json 30 | console.log(`Transfer [${from} -> ${to}, ${quantity}] (${memo})`) 31 | }) 32 | 33 | stream.mark({ cursor: data.cursor }) 34 | } 35 | 36 | if (message.type === "complete") { 37 | console.log("Stream completed") 38 | } 39 | }) 40 | 41 | await waitFor(5000) 42 | await stream.close() 43 | 44 | client.release() 45 | } 46 | 47 | runMain(main) 48 | -------------------------------------------------------------------------------- /examples/basic/eosio/stream-transfers-ws.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { 3 | createDfuseClient, 4 | InboundMessage, 5 | InboundMessageType, 6 | waitFor, 7 | ActionTraceData, 8 | } from "@dfuse/client" 9 | 10 | async function main(): Promise { 11 | const client = createDfuseClient({ 12 | apiKey: DFUSE_API_KEY, 13 | network: DFUSE_API_NETWORK, 14 | }) 15 | 16 | const stream = await client.streamActionTraces( 17 | { accounts: "eosio.token", action_names: "transfer" }, 18 | (message: InboundMessage) => { 19 | if (message.type === InboundMessageType.ACTION_TRACE) { 20 | const { from, to, quantity, memo } = (message.data as ActionTraceData).trace.act.data 21 | 
console.log(`Transfer [${from} -> ${to}, ${quantity}] (${memo})`) 22 | } 23 | } 24 | ) 25 | 26 | await waitFor(5000) 27 | await stream.close() 28 | 29 | client.release() 30 | } 31 | 32 | runMain(main) 33 | -------------------------------------------------------------------------------- /examples/basic/ethereum/search-your-latest-transactions.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | const address = "0x09aC08243f91A0dA89995F9B7af96Ef985aA5807" 5 | 6 | async function main(): Promise { 7 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: "mainnet.eth.dfuse.io" }) 8 | 9 | try { 10 | const searchTransactions = `query ($limit: Int64!) { 11 | searchTransactions(indexName: CALLS, query: "signer:${address}", limit: $limit, sort: DESC) { 12 | edges { 13 | node { hash block { number } } 14 | } 15 | } 16 | }` 17 | 18 | const response = await client.graphql(searchTransactions, { 19 | variables: { limit: "10" }, 20 | }) 21 | 22 | if (response.errors) { 23 | throw response.errors 24 | } 25 | 26 | console.log() 27 | console.log(`Your latest 10 transactions`) 28 | 29 | const edges = response.data.searchTransactions.edges || [] 30 | if (edges.length <= 0) { 31 | console.log("Oups nothing found") 32 | return 33 | } 34 | 35 | edges.forEach(({ node }: any) => { 36 | console.log(`- ${buildEthqLink(node.hash)} (Block #${node.block.number})`) 37 | }) 38 | console.log() 39 | } catch (error) { 40 | console.log("An error occurred", error) 41 | } 42 | 43 | client.release() 44 | } 45 | 46 | function buildEthqLink(transactionId: string): string { 47 | return `https://ethq.app/tx/${transactionId}` 48 | } 49 | 50 | runMain(main) 51 | -------------------------------------------------------------------------------- /examples/basic/ethereum/stream-transactions.ts: 
-------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY } from "../../config" 2 | import { createDfuseClient, waitFor } from "@dfuse/client" 3 | 4 | // The system can easily handles thousands of addresses, so there is no fear to have a big lists 5 | const addresses = ["0x7a250d5630b4cf539739df2c5dacb4c659f2488d"] 6 | 7 | async function main(): Promise { 8 | const client = createDfuseClient({ 9 | apiKey: DFUSE_API_KEY, 10 | network: "mainnet.eth.dfuse.io", 11 | }) 12 | 13 | // TBC Add showcase of SPECULATIVELY_EXECUTED, and CONFIRMED transition 14 | const streamTransaction = ` 15 | subscription($addresses: [String!]!) { 16 | transactions(addresses: $addresses, matchAnyOf: [TO, FROM, ERC20_TO, ERC20_FROM], maxConfirmations: 20) { 17 | hash currentState transitionName 18 | transition { 19 | ... on TrxTransitionInit { transaction { ...Transaction } trace {...Trace } replacedById confirmations } 20 | ... on TrxTransitionPooled { transaction { ...Transaction } } 21 | ... on TrxTransitionReplaced { replacedById } 22 | ... on TrxTransitionMined { trace {...Trace } } 23 | ... 
on TrxTransitionForked { transaction { ...Transaction }} 24 | } 25 | } 26 | } 27 | 28 | fragment Transaction on Transaction { 29 | hash from to value(encoding:ETHER) nonce gasLimit gasPrice(encoding:WEI) inputData 30 | } 31 | 32 | fragment Trace on TransactionTrace { 33 | hash status from to value(encoding:ETHER) nonce gasUsed gasLimit gasPrice(encoding:WEI) matchingCalls { from to value(encoding:ETHER) callType } inputData block { hash number header { timestamp parentHash } } 34 | } 35 | ` 36 | 37 | const stream = await client.graphql( 38 | streamTransaction, 39 | (message) => { 40 | if (message.type === "data") { 41 | const { hash, transitionName, transition, currentState } = message.data.transactions 42 | let confirmed = "" 43 | if (transitionName === "INIT") { 44 | confirmed = `, confirmed ${transition.confirmations}` 45 | } 46 | 47 | // The hash value contains the transaction hash in possible transition, a stream cannot be received without having the hash 48 | log( 49 | `Dealing with transaction ${hash} (transition ${transitionName}, to state ${currentState}${confirmed})` 50 | ) 51 | 52 | // There is no cursor on this stream yet, that means greater care must be taken from the consumer 53 | // perspective as upon reconnection, you receive back transactions that you already seen. As such, 54 | // some de-dupe mechanisms must be put in place on the consumer side. 55 | // 56 | // Further, the actual returned a reconnection is a bit different based on wheter it's our backend 57 | // that restarted or if it was only a network problem between you and our backend. 58 | // 59 | // If the disconnection happens and our backend never restarted in-between, you will received every 60 | // pooled and mined transactions the backend knowns about at time of reconnection (of course only 61 | // those matching the filters provided). 
62 | // 63 | // However, if our backend restarted, you will receive only every mined transactions (we keep all 64 | // transactions mined in the last 250 blocks). 65 | // 66 | // While the stream is guaranteed to never miss a transaction (if you reconnect within 250 blocks), 67 | // there is a potential of not seeing the transition from "unknown" to "pending" state. 68 | 69 | // Transaction that was already in the backend state when connection for the first or after 70 | // a disconnection. Use the `currentState` variable to determine where it is right now. 71 | // 72 | // @example {"hash": "0x...", "currentState": "PENDING", "transitionName": "INIT", "transition":{"transaction":{"hash": "0x....", ...Transaction's fields },"trace":{"hash": "0x....", ...TransactionTrace's fields }}}} 73 | if (transitionName === "INIT") { 74 | if (currentState === "REPLACED") { 75 | // The transaction was replaced by another one and field `transition.replacedById` tells us which transaction 76 | json(JSON.stringify({ type: "replaced", body: transition.replacedById })) 77 | return 78 | } 79 | 80 | if (currentState === "PENDING") { 81 | // The transaction is in the mempool, this might be a transaction your already received in the past or a new one 82 | json(JSON.stringify({ type: "pooled", body: transition.transaction })) 83 | return 84 | } 85 | 86 | if (currentState === "IN_BLOCK") { 87 | // The transaction is inside a block, this might be a transaction trace your already received in the past or a new one 88 | json(JSON.stringify({ type: "mined", body: transition.trace })) 89 | return 90 | } 91 | 92 | stream.close({ 93 | error: new Error( 94 | `Invalid state ${currentState}, only REPLACED, PENDING and IN_BLOCK should be returned, it's an error if this happens` 95 | ), 96 | }) 97 | return 98 | } 99 | 100 | // Transaction has been added to the mempool transitionning from UNKNOWN to PENDING state. 
101 | // 102 | // @example {"hash": "0x...", "currentState": "PENDING", "transitionName": "POOLED", "transition":{"transaction":{"hash": "0x....", ...Transaction's fields }}} 103 | if (transitionName === "POOLED") { 104 | json(JSON.stringify({ type: "pooled", body: transition.transaction })) 105 | return 106 | } 107 | 108 | // Transaction that was in the mempool has been replaced by a new one (gas bump) transitionning from PENDING to REPLACED state. 109 | // 110 | // @example {"hash": "0x...", "currentState": "REPLACED", "transitionName": "REPLACED", "transition":{ "replacedById": "0xabc..."}} 111 | if (transitionName === "REPLACED") { 112 | json(JSON.stringify({ type: "replaced", body: transition.replacedById })) 113 | return 114 | } 115 | 116 | // Transaction has been picked from mempool and is now included in a non-confirmed 117 | // block transitionning from PENDING to IN_BLOCK state. 118 | // 119 | // @example {"hash": "0x...", "currentState": "IN_BLOCK", "transitionName": "MINED", "transition":{"trace":{"hash": "0x....", ...TransactionTrace's fields }}} 120 | if (transitionName === "MINED") { 121 | json(JSON.stringify({ type: "mined", body: transition.trace })) 122 | return 123 | } 124 | 125 | // Transaction was in block that is now forked and not part of the active chain transitionning from IN_BLOCK to FORKED state. 
126 | // 127 | // @example {"hash": "0x...", "currentState": "PENDING", "transitionName": "FORKED", "transition":{"trace":{"hash": "0x....", ...TransactionTrace's fields }}} 128 | if (transitionName === "FORKED") { 129 | json(JSON.stringify({ type: "forked", body: transition.trace })) 130 | return 131 | } 132 | 133 | stream.close({ 134 | error: new Error( 135 | `Invalid transition ${transitionName}, we did not request it, it's an error if this happens` 136 | ), 137 | }) 138 | return 139 | } 140 | 141 | if (message.type === "error") { 142 | log("An error occurred", message.errors, message.terminal) 143 | return 144 | } 145 | 146 | if (message.type === "complete") { 147 | stream.close({ 148 | error: new Error( 149 | "Invalid state, stream should never complete, it's an error if this happens" 150 | ), 151 | }) 152 | return 153 | } 154 | }, 155 | { 156 | variables: { addresses }, 157 | } 158 | ) 159 | 160 | await waitFor(30000) 161 | await stream.close() 162 | 163 | log("Completing stream after 30s") 164 | client.release() 165 | } 166 | 167 | // The log is going to emit to Stderr (hence usage of `console.error`) so that output 168 | // JSON line content can be parsed through `jq`. 
169 | function log(...args: any[]): void { 170 | console.error(...args) 171 | } 172 | 173 | function json(input: unknown): void { 174 | console.log(JSON.stringify(input)) 175 | } 176 | 177 | main().catch((error) => log("Unexpected error", error)) 178 | -------------------------------------------------------------------------------- /examples/basic/ethereum/stream-transfers.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain } from "../../config" 2 | import { createDfuseClient, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 6 | apiKey: DFUSE_API_KEY, 7 | network: "mainnet.eth.dfuse.io", 8 | }) 9 | 10 | const streamTransfer = `subscription($cursor: String) { 11 | searchTransactions(indexName: CALLS, query: "method:'transfer(address,uint256)'", cursor: $cursor) { 12 | undo cursor 13 | node { hash from to value(encoding: ETHER) } 14 | } 15 | }` 16 | 17 | const stream = await client.graphql(streamTransfer, (message) => { 18 | if (message.type === "error") { 19 | console.log("An error occurred", message.errors, message.terminal) 20 | } 21 | 22 | if (message.type === "data") { 23 | const { cursor, node } = message.data.searchTransactions 24 | console.log(`Transfer [${node.from} -> ${node.to}, ${node.value}]`) 25 | 26 | stream.mark({ cursor }) 27 | } 28 | 29 | if (message.type === "complete") { 30 | console.log("Stream completed") 31 | } 32 | }) 33 | 34 | await waitFor(5000) 35 | await stream.close() 36 | 37 | client.release() 38 | } 39 | 40 | runMain(main) 41 | -------------------------------------------------------------------------------- /examples/config.ts: -------------------------------------------------------------------------------- 1 | // tslint:disable: no-var-requires 2 | import * as path from "path" 3 | import dotenv from "dotenv" 4 | 5 | // Only used to ensure client library and examples free all handles, you 6 | // do not need 
this in your own project (but don't hesitate to use it if you 7 | // find it useful though). 8 | if (process.env.DEBUG_LEAKED_HANDLES) { 9 | // eslint-disable-next-line @typescript-eslint/no-var-requires 10 | require("leaked-handles").set({ 11 | fullStack: true, 12 | debugSockets: true, 13 | }) 14 | } 15 | 16 | // The two instructions below are there to alter the global scope of Node.js 17 | // adding to it the `fetch` and `WebSocket` variables. The library by default 18 | // will pick these variables when present on the global scope, reducing the 19 | // amount of configuration to perform, specially in Browswer environment where 20 | // those two variables are present out of the box. 21 | // 22 | // If you project is a web application or react native mobile application, 23 | // this import is not required (as `fetch` is already available there in the 24 | // global scope). 25 | // 26 | // If your project targets a Node.js environment (server), you have the options 27 | // to alter the global scope just like below. You can also avoid polluting the 28 | // global scope by providing the variables as configuration options of the 29 | // dfuse Client. 30 | // 31 | // Check [Configure Node.js](https://github.com/dfuse-io/client/blob/master/README.md#node-js) 32 | // section of the read me for further explanation around this topic. 33 | // 34 | ;(global as any).fetch = require("node-fetch") 35 | ;(global as any).WebSocket = require("ws") 36 | 37 | // You don't require to have this configuration and the associated import 38 | // in your own project (of course, feel free to copy it if you think it helps). 39 | // 40 | // Simply provide the various options of the example(s) hard-coded in your 41 | // code and pull from other configuration storage instances. 
42 | // 43 | // **Note** Does not work in a Browser environment 44 | dotenv.config({ path: path.join(__dirname, "..", ".env") }) 45 | 46 | export const DFUSE_API_KEY = process.env.DFUSE_API_KEY || "" 47 | export const DFUSE_API_NETWORK = process.env.DFUSE_API_NETWORK || "mainnet.eos.dfuse.io" 48 | 49 | if (!DFUSE_API_KEY) { 50 | const messages = [ 51 | "You must define a DFUSE_API_KEY environment variable containing your dfuse API Key.", 52 | "", 53 | "Visit https://app.dfuse.io to register for a free API Key.", 54 | ] 55 | 56 | throw new Error(messages.join("\n")) 57 | } 58 | 59 | export function runMain(runner: () => Promise): void { 60 | /** 61 | * Helper to display `unhandledRejection` rejection errors. 62 | */ 63 | process.on("unhandledRejection", (error) => { 64 | const messages = [ 65 | "An unhandled rejection error has been catched at the process level. This is", 66 | "completely wrong and should never happen in the examples. If you see this behavior,", 67 | "there is probably something very fishy.", 68 | "", 69 | "You should log a bug report if you see this error, attach the debug output by", 70 | "using `DEBUG='dfuse:*' yarn run:example ...`.", 71 | "", 72 | "Read about unhandle rejection error https://stackoverflow.com/q/40500490/697930", 73 | "", 74 | ] 75 | 76 | console.error(messages.join("\n")) 77 | console.error(error) 78 | 79 | throw error 80 | }) 81 | 82 | runner() 83 | // eslint-disable-next-line promise/always-return 84 | .then(() => { 85 | console.log("Example completed.") 86 | process.exit(0) 87 | }) 88 | .catch((error) => { 89 | console.log("An untrapped error occurred.", error) 90 | process.exit(1) 91 | }) 92 | } 93 | 94 | export function prettifyJson(input: unknown): string { 95 | return JSON.stringify(input, undefined, 2) 96 | } 97 | -------------------------------------------------------------------------------- /examples/reference/common/api-request.ts: -------------------------------------------------------------------------------- 1 | 
import { DFUSE_API_KEY, runMain, prettifyJson } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | /** 5 | * The dfuse EOS API proxies most of the standard EOS Chain API RPC calls to 6 | * public nodes. The `dfuseClient.apiRequest` can be used to query 7 | * those endpoints. For example, the `/v1/chain/get_info` call or any other 8 | * EOS Chain RPC calls https://developers.eos.io/eosio-nodeos/reference#chain. 9 | * 10 | * You can provide query params, body and headers to the request. However, they 11 | * are not built-in to the client for us to avoid having to support them directly 12 | * with types and all. 13 | */ 14 | async function main(): Promise { 15 | const client = createDfuseClient({ 16 | apiKey: DFUSE_API_KEY, 17 | network: "mainnet.eos.dfuse.io", 18 | }) 19 | 20 | try { 21 | const response = await client.apiRequest( 22 | "/v1/chain/get_account", 23 | "POST", 24 | {}, 25 | { account_name: "eoscanadacom" } 26 | ) 27 | 28 | console.log("Chain info response", prettifyJson(response)) 29 | } catch (error) { 30 | console.log("An error occurred", error) 31 | } 32 | 33 | client.release() 34 | } 35 | 36 | runMain(main) 37 | -------------------------------------------------------------------------------- /examples/reference/common/auth-issue.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.authIssue() 9 | 10 | console.log("Auth issue response", prettifyJson(response)) 11 | } catch (error) { 12 | console.log("An error occurred", error) 13 | } 14 | 15 | client.release() 16 | } 17 | 18 | runMain(main) 19 | 
-------------------------------------------------------------------------------- /examples/reference/common/browser.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 8 | 9 | 10 | 11 | 12 | 13 | dfuse Client - Browser Example 14 | 15 | 16 | 17 | 18 | 78 | 79 | 80 | 81 |
82 |
83 |

Stream Transfers

84 |

Streams transfers on EOS Mainnet for 5 seconds then stops

85 |
86 |
87 |
88 | Nothing yet, should start in a few milliseconds... 89 | (If not, check Developer Tools console, something's wrong!) 90 |
91 |
92 |
93 | 94 | 151 | 152 | 153 | -------------------------------------------------------------------------------- /examples/reference/eosio/fetch-block-id-by-time.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | // Equivalent with arguments (new Date("2019-03-04T10:36:14.6Z"), "gte") 9 | const response = await client.fetchBlockIdByTime("2019-03-04T10:36:14.6Z", "gt") 10 | 11 | console.log("Block id by time response", prettifyJson(response)) 12 | } catch (error) { 13 | console.log("An error occurred", error) 14 | } 15 | 16 | client.release() 17 | } 18 | 19 | runMain(main) 20 | -------------------------------------------------------------------------------- /examples/reference/eosio/fetch-transaction.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | // This example will work on EOS Mainnet only, change transaction id accordingly to test it out 9 | const response = await client.fetchTransaction( 10 | "1d5f57e9392d045ef4d1d19e6976803f06741e11089855b94efcdb42a1a41253" 11 | ) 12 | 13 | console.log("Transaction lifecycle response", prettifyJson(response)) 14 | } catch (error) { 15 | console.log("An error occurred", error) 16 | } 17 | 18 | client.release() 19 | } 20 | 21 | runMain(main) 22 | -------------------------------------------------------------------------------- /examples/reference/eosio/search-transactions.ts: 
-------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.searchTransactions("auth:eoscanadacom", { 9 | limit: 10, 10 | }) 11 | 12 | console.log(prettifyJson(response)) 13 | } catch (error) { 14 | console.log("An error occurred", error) 15 | } 16 | 17 | client.release() 18 | } 19 | 20 | runMain(main) 21 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-abi-bin-to-json.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const hexRows = [ 9 | "202932c94c833055202932c94c833055701101000000000004454f5300000000109802000000000004454f5300000000", 10 | "202932c94c833055802b35c94c833055102700000000000004454f5300000000102700000000000004454f5300000000", 11 | "202932c94c83305550ab49525fba3055a08601000000000004454f5300000000a08601000000000004454f5300000000", 12 | ] 13 | 14 | const response = await client.stateAbiBinToJson("eosio", "delband", hexRows) 15 | 16 | console.log("State ABI bin -> json response", prettifyJson(response)) 17 | } catch (error) { 18 | console.log("An error occurred", error) 19 | } 20 | 21 | client.release() 22 | } 23 | 24 | runMain(main) 25 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-abi.ts: 
-------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateAbi("eosio.forum") 9 | 10 | console.log("State ABI response", prettifyJson(response)) 11 | } catch (error) { 12 | console.log("An error occurred", error) 13 | } 14 | 15 | client.release() 16 | } 17 | 18 | runMain(main) 19 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-key-accounts.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateKeyAccounts( 9 | "EOS7254KPL1TNuM3dPHeZ5Zqsbgff7gk9UhRrqMfzQZRwami4P4yv" 10 | ) 11 | 12 | console.log("State key accounts response", prettifyJson(response)) 13 | } catch (error) { 14 | console.log("An error occurred", error) 15 | } 16 | 17 | client.release() 18 | } 19 | 20 | runMain(main) 21 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-permission-links.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response 
= await client.statePermissionLinks("eoscanadacom") 9 | 10 | console.log("State permission links response", prettifyJson(response)) 11 | } catch (error) { 12 | console.log("An error occurred", error) 13 | } 14 | 15 | client.release() 16 | } 17 | 18 | runMain(main) 19 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-table-row.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateTableRow("eosio.token", "eoscanadacom", "accounts", "EOS", { 9 | keyType: "symbol_code", 10 | }) 11 | 12 | console.log("State table row response", prettifyJson(response)) 13 | } catch (error) { 14 | console.log("An error occurred", prettifyJson(error)) 15 | } 16 | 17 | client.release() 18 | } 19 | 20 | runMain(main) 21 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-table-scopes.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateTableScopes("eosforumdapp", "proposal") 9 | 10 | console.log("State table scopes response", prettifyJson(response)) 11 | } catch (error) { 12 | console.log("An error occurred", error) 13 | } 14 | 15 | client.release() 16 | } 17 | 18 | runMain(main) 19 | 
-------------------------------------------------------------------------------- /examples/reference/eosio/state-table.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateTable("eosio.token", "EOS", "stat") 9 | 10 | console.log("State table response", prettifyJson(response)) 11 | } catch (error) { 12 | console.log("An error occurred", error) 13 | } 14 | 15 | client.release() 16 | } 17 | 18 | runMain(main) 19 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-tables-for-accounts.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateTablesForAccounts( 9 | ["eosio.token", "trybenetwork", "parslseed123", "zkstokensr4u"], 10 | "eoscanadacom", 11 | "accounts", 12 | { 13 | keyType: "symbol_code", 14 | } 15 | ) 16 | 17 | console.log("State tables for accounts response", prettifyJson(response)) 18 | } catch (error) { 19 | console.log("An error occurred", error) 20 | } 21 | 22 | client.release() 23 | } 24 | 25 | runMain(main) 26 | -------------------------------------------------------------------------------- /examples/reference/eosio/state-tables-for-scopes.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, prettifyJson, 
DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ apiKey: DFUSE_API_KEY, network: DFUSE_API_NETWORK }) 6 | 7 | try { 8 | const response = await client.stateTablesForScopes( 9 | "eosio.token", 10 | ["b1", "eoscanadacom", "eosnewyorkio"], 11 | "accounts" 12 | ) 13 | 14 | console.log("State tables for scopes response", prettifyJson(response)) 15 | } catch (error) { 16 | console.log("An error occurred", error) 17 | } 18 | 19 | client.release() 20 | } 21 | 22 | runMain(main) 23 | -------------------------------------------------------------------------------- /examples/reference/eosio/stream-action-traces.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK, prettifyJson } from "../../config" 2 | import { createDfuseClient, InboundMessage, InboundMessageType, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 6 | apiKey: DFUSE_API_KEY, 7 | network: DFUSE_API_NETWORK, 8 | }) 9 | 10 | const stream = await client.streamActionTraces( 11 | { 12 | accounts: "eosio.token|thekarmadapp|trustdicewin", 13 | with_inline_traces: true, 14 | with_dbops: true, 15 | with_dtrxops: true, 16 | with_ramops: true, 17 | 18 | // Don't confuse with `dbops`, this one streams smart contract's table creation/removal, not changes to rows! 
19 | with_tableops: true, 20 | }, 21 | (message: InboundMessage) => { 22 | if (message.type === InboundMessageType.LISTENING) { 23 | console.log(prettifyJson(message.data)) 24 | return 25 | } 26 | 27 | if (message.type === InboundMessageType.ACTION_TRACE) { 28 | /** 29 | * JSON examples of various fields possibilities (since they might 30 | * not always appear in the streaming time frame): 31 | * 32 | * ``` 33 | * { 34 | * dbops: [ 35 | * // An `ActionTraceDbOp` row update operation 36 | * { 37 | * "op": "UPD", 38 | * "action_idx": 8, 39 | * "opayer": "eosbetbank11", 40 | * "npayer": "eosbetbank11", 41 | * "path": "eosio.token/eosbetbank11/accounts/........ehbo5", 42 | * "old": "d11a231c0000000004454f5300000000", 43 | * "new": "cd1a231c0000000004454f5300000000" 44 | * }, 45 | * 46 | * // An `ActionTraceDbOp` row insertion operation 47 | * { 48 | * "op": "INS", 49 | * "action_idx": 0, 50 | * "npayer": "hj1111111534", 51 | * "path": "eosio.token/hj1111111125/accounts/........ehbo5", 52 | * "new": "c02709000000000004454f5300000000" 53 | * } 54 | * 55 | * // An `ActionTraceDbOp` row removal operation 56 | * { 57 | * "op": "REM", 58 | * "action_idx": 2, 59 | * "opayer": "trustdicewin", 60 | * "path": "trustdicewin/trustdicewin/hash/......1iwm13h", 61 | * "old": "90bd994111e45fc947f7f7d4823081cdf13d05c12f31bf2049aec55e170aa0bcbf66c85c00000000" 62 | * }, 63 | * ] 64 | * } 65 | * ``` 66 | */ 67 | console.log(prettifyJson(message.data)) 68 | return 69 | } 70 | 71 | if (message.type === InboundMessageType.ERROR) { 72 | console.log(prettifyJson(message.data)) 73 | return 74 | } 75 | } 76 | ) 77 | 78 | await waitFor(15000) 79 | await stream.close() 80 | 81 | client.release() 82 | } 83 | 84 | runMain(main) 85 | -------------------------------------------------------------------------------- /examples/reference/eosio/stream-head-info.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK, 
prettifyJson } from "../../config" 2 | import { createDfuseClient, InboundMessage, InboundMessageType, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 6 | apiKey: DFUSE_API_KEY, 7 | network: DFUSE_API_NETWORK, 8 | }) 9 | 10 | const stream = await client.streamHeadInfo((message: InboundMessage) => { 11 | if (message.type === InboundMessageType.LISTENING) { 12 | console.log(prettifyJson(message.data)) 13 | return 14 | } 15 | 16 | if (message.type === InboundMessageType.HEAD_INFO) { 17 | console.log(prettifyJson(message.data)) 18 | return 19 | } 20 | }) 21 | 22 | await waitFor(15000) 23 | await stream.close() 24 | 25 | client.release() 26 | } 27 | 28 | runMain(main) 29 | -------------------------------------------------------------------------------- /examples/reference/eosio/stream-table-rows.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK, prettifyJson } from "../../config" 2 | import { createDfuseClient, InboundMessage, InboundMessageType, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 6 | apiKey: DFUSE_API_KEY, 7 | network: DFUSE_API_NETWORK, 8 | }) 9 | 10 | const stream = await client.streamTableRows( 11 | { code: "eosio", scope: "eosio", table: "global" }, 12 | (message: InboundMessage) => { 13 | if (message.type === InboundMessageType.LISTENING) { 14 | console.log(prettifyJson(message.data)) 15 | return 16 | } 17 | 18 | if (message.type === InboundMessageType.TABLE_SNAPSHOT) { 19 | console.log(prettifyJson(message.data)) 20 | return 21 | } 22 | 23 | if (message.type === InboundMessageType.TABLE_DELTA) { 24 | console.log(prettifyJson(message.data)) 25 | return 26 | } 27 | 28 | if (message.type === InboundMessageType.ERROR) { 29 | console.log(prettifyJson(message.data)) 30 | return 31 | } 32 | }, 33 | { fetch: true } 34 | ) 35 | 36 | await 
waitFor(15000) 37 | await stream.close() 38 | 39 | client.release() 40 | } 41 | 42 | runMain(main) 43 | -------------------------------------------------------------------------------- /examples/reference/eosio/stream-transaction.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK, prettifyJson } from "../../config" 2 | import { createDfuseClient, InboundMessage, InboundMessageType, waitFor } from "@dfuse/client" 3 | 4 | async function main(): Promise { 5 | const client = createDfuseClient({ 6 | apiKey: DFUSE_API_KEY, 7 | network: DFUSE_API_NETWORK, 8 | }) 9 | 10 | const stream = await client.streamTransaction( 11 | { id: "d2233029848840cc67c32a617b7339664a5866bf229a1833afccc3b4653f594a" }, 12 | (message: InboundMessage) => { 13 | if (message.type === InboundMessageType.LISTENING) { 14 | console.log(prettifyJson(message.data)) 15 | return 16 | } 17 | 18 | if (message.type === InboundMessageType.TRANSACTION_LIFECYCLE) { 19 | console.log(prettifyJson(message.data)) 20 | return 21 | } 22 | 23 | if (message.type === InboundMessageType.ERROR) { 24 | console.log(prettifyJson(message.data)) 25 | return 26 | } 27 | } 28 | ) 29 | 30 | await waitFor(5000) 31 | await stream.close() 32 | 33 | client.release() 34 | } 35 | 36 | runMain(main) 37 | -------------------------------------------------------------------------------- /examples/reference/ethereum/stream-pending-transactions.ts: -------------------------------------------------------------------------------- 1 | import { DFUSE_API_KEY, runMain, DFUSE_API_NETWORK } from "../../config" 2 | import { createDfuseClient, waitFor } from "@dfuse/client" 3 | import WebSocketClient from "ws" 4 | 5 | async function main(): Promise { 6 | const client = createDfuseClient({ 7 | apiKey: DFUSE_API_KEY, 8 | network: DFUSE_API_NETWORK, 9 | graphqlStreamClientOptions: { 10 | socketOptions: { 11 | webSocketFactory: (url) => webSocketFactory(url, 
["graphql-ws"]), 12 | }, 13 | }, 14 | streamClientOptions: { 15 | socketOptions: { 16 | webSocketFactory: (url) => webSocketFactory(url), 17 | }, 18 | }, 19 | }) 20 | 21 | // Could be a list coming from a file, we tested successfully with 90K addresses without any problem 22 | const addresses = ["0xA4e5961B58DBE487639929643dCB1Dc3848dAF5E"] 23 | 24 | const streamPendingTrxs = `subscription ($addresses: [String!]!, $fields: FILTER_FIELD!) { 25 | _alphaPendingTransactions(filterAddresses: $addresses, filterField: $fields) { 26 | hash from to gasLimit gasPrice(encoding: ETHER) value(encoding: ETHER) 27 | } 28 | }` 29 | 30 | const stream = await client.graphql( 31 | streamPendingTrxs, 32 | (message) => { 33 | if (message.type === "error") { 34 | console.log("An error occurred", message.errors, message.terminal) 35 | } 36 | 37 | if (message.type === "data") { 38 | const data = message.data._alphaPendingTransactions 39 | const { hash, from, to, value, gasLimit } = data 40 | 41 | console.log(`Pending [${from} -> ${to}, ${value} ETH, Gas ${gasLimit}] (${hash})`) 42 | } 43 | 44 | if (message.type === "complete") { 45 | console.log("Stream completed") 46 | } 47 | }, 48 | { 49 | variables: { 50 | addresses, 51 | fields: "FROM_OR_TO", 52 | }, 53 | } 54 | ) 55 | 56 | await waitFor(10000) 57 | await stream.close() 58 | 59 | client.release() 60 | } 61 | 62 | runMain(main) 63 | 64 | async function webSocketFactory(url: string, protocols: string[] = []): Promise { 65 | console.log("Creating new client with updated max payload") 66 | return new WebSocketClient(url, protocols, { 67 | maxPayload: 45 * 1024 * 1024, // 45 Mib 68 | }) 69 | } 70 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | const { defaults } = require("jest-config") 2 | 3 | module.exports = { 4 | moduleDirectories: ["/node_modules"], 5 | moduleFileExtensions: 
[...defaults.moduleFileExtensions, "ts", "tsx"], 6 | moduleNameMapper: {}, 7 | transform: { 8 | "^.+\\.tsx?$": "ts-jest" 9 | }, 10 | testEnvironment: "jsdom", 11 | testRegex: "((\\.|/)(test|spec))\\.(jsx?|tsx?)$", 12 | setupFilesAfterEnv: ["jest-expect-message"] 13 | 14 | // coverageThreshold: { 15 | // global: { 16 | // branches: 42.4, 17 | // lines: 62.1, 18 | // functions: 33.8, 19 | // statements: 62.0 20 | // } 21 | // }, 22 | // collectCoverage: true, 23 | // coverageReporters: ["json", "html"] 24 | } 25 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@dfuse/client", 3 | "version": "0.3.21", 4 | "description": "dfuse JavaScript/TypeScript Client Library (for [dfuse API](https://docs.dfuse.io/))", 5 | "sideEffects": false, 6 | "main": "dist/lib/index.js", 7 | "module": "dist/dfuse-client.es5.js", 8 | "browser": "dist/dfuse-client.umd.js", 9 | "typings": "dist/types/index.d.ts", 10 | "files": [ 11 | "dist/*" 12 | ], 13 | "repository": "https://github.com/dfuse-io/client-js", 14 | "author": { 15 | "name": "dfuse Developers", 16 | "email": "dev@dfuse.io", 17 | "url": "https://dfuse.io" 18 | }, 19 | "contributors": [ 20 | { 21 | "name": "Denis Carriere", 22 | "email": "denis@eosnation.io", 23 | "url": "https://eosnation.io" 24 | } 25 | ], 26 | "license": "MIT", 27 | "scripts": { 28 | "check": "yarn lint && yarn test && yarn build", 29 | "prepublishOnly": "yarn build && yarn test", 30 | "prebuild": "rimraf docs dist", 31 | "build": "yarn run build:cjs && yarn run build:es && yarn run build:umd && yarn run build:docs", 32 | "build:cjs": "tsc --module commonjs", 33 | "build:es": "rollup -c rollup.config.es.js", 34 | "build:umd": "rollup -c rollup.config.umd.js && node scripts/compress-umd-build.js", 35 | "build:docs": "typedoc", 36 | "publish:latest": "yarn publish --public && node scripts/gh-publish-docs.js", 37 | 
"publish:next": "yarn publish --public --tag next", 38 | "postversion": "node scripts/gh-push.js", 39 | "lint": "eslint . --format=pretty --ext .js,.jsx,.ts,.tsx", 40 | "lint:specific": "eslint . --format=pretty", 41 | "start": "tsc --module commonjs --watch", 42 | "run:example": "yarn run ts-node -O '{\"module\":\"commonjs\"}'", 43 | "test": "jest" 44 | }, 45 | "husky": { 46 | "hooks": { 47 | "_comment": "Use --no-verify to bypass", 48 | "pre-commit": "lint-staged && pretty-quick --staged", 49 | "pre-push": "yarn lint" 50 | } 51 | }, 52 | "lint-staged": { 53 | "linters": { 54 | "**/*.ts": [ 55 | "yarn lint:specific" 56 | ] 57 | }, 58 | "ignore": [ 59 | "**/*.d.ts" 60 | ] 61 | }, 62 | "devDependencies": { 63 | "@types/dotenv": "^4.0.3", 64 | "@types/jest": "^26.0.14", 65 | "@types/jest-expect-message": "^1.0.2", 66 | "@types/node-fetch": "^2.1.6", 67 | "@types/ws": "^6.0.1", 68 | "@typescript-eslint/eslint-plugin": "^4.1.1", 69 | "@typescript-eslint/parser": "^4.1.1", 70 | "dotenv": "^6.1.0", 71 | "eslint": "^7.9.0", 72 | "eslint-config-airbnb-base": "^14.2.0", 73 | "eslint-config-prettier": "^6.11.0", 74 | "eslint-formatter-pretty": "^4.0.0", 75 | "eslint-plugin-eslint-comments": "^3.2.0", 76 | "eslint-plugin-import": "^2.22.0", 77 | "eslint-plugin-jest": "^24.0.1", 78 | "eslint-plugin-promise": "^4.2.1", 79 | "graphql": "^14.5.3", 80 | "graphql-tag": "^2.10.1", 81 | "husky": "^1.1.2", 82 | "jest": "^26.4.2", 83 | "jest-expect-message": "^1.0.2", 84 | "leaked-handles": "^5.2.0", 85 | "lint-staged": "^7.3.0", 86 | "node-fetch": "^2.3.0", 87 | "patch-package": "^6.0.7", 88 | "postinstall-postinstall": "^2.0.0", 89 | "prettier": "^2.1.2", 90 | "pretty-quick": "^3.0.2", 91 | "rimraf": "^2.6.3", 92 | "rollup": "^1.22.0", 93 | "rollup-plugin-commonjs": "^10.1.0", 94 | "rollup-plugin-ignore": "^1.0.5", 95 | "rollup-plugin-json": "^4.0.0", 96 | "rollup-plugin-node-resolve": "^5.2.0", 97 | "rollup-plugin-sourcemaps": "^0.4.2", 98 | "rollup-plugin-terser": "^5.1.2", 99 | 
"rollup-plugin-typescript2": "^0.24.3", 100 | "shelljs": "^0.8.3", 101 | "terser": "^3.17.0", 102 | "ts-jest": "^26.3.0", 103 | "ts-node": "^7.0.1", 104 | "typedoc": "^0.19.2", 105 | "typedoc-default-themes-extension": "^0.0.3", 106 | "typedoc-plugin-toc-group": "^0.0.4", 107 | "typescript": "^4.0.2", 108 | "ws": "^7.0.1" 109 | }, 110 | "dependencies": { 111 | "@types/debug": "^0.0.31", 112 | "debug": "^4.1.0" 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /patches/typedoc-plugin-toc-group+0.0.4.patch: -------------------------------------------------------------------------------- 1 | diff --git a/node_modules/typedoc-plugin-toc-group/plugin.js b/node_modules/typedoc-plugin-toc-group/plugin.js 2 | index 264c301..904e26a 100644 3 | --- a/node_modules/typedoc-plugin-toc-group/plugin.js 4 | +++ b/node_modules/typedoc-plugin-toc-group/plugin.js 5 | @@ -47,7 +47,7 @@ var __decorate = (this && this.__decorate) || function (decorators, target, key, 6 | isHomePage(page) { 7 | if (page && page.url && page.project) { 8 | try { 9 | - if (page.url.indexOf(page.project[exports.PLUGIN_NAME].homePath) > -1) { 10 | + if (page.url.indexOf("index.html") > -1 || page.url.indexOf("globals.html") > -1) { 11 | return true; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /rollup.config.es.js: -------------------------------------------------------------------------------- 1 | const { esBuild } = require("./rollup.config") 2 | 3 | module.exports = esBuild() 4 | -------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | const { builtinModules } = require("module") 2 | const resolve = require("rollup-plugin-node-resolve") 3 | const commonjs = require("rollup-plugin-commonjs") 4 | const sourceMaps = require("rollup-plugin-sourcemaps") 5 | const typescript = 
require("rollup-plugin-typescript2") 6 | const json = require("rollup-plugin-json") 7 | const { terser } = require("rollup-plugin-terser") 8 | const ignore = require("rollup-plugin-ignore") 9 | 10 | const pkg = require("./package.json") 11 | 12 | module.exports = { 13 | esBuild: () => 14 | build({ 15 | output: { file: pkg.module, format: "es", sourcemap: true }, 16 | external: ["crypto", "debug", "fs", "os", "path"] 17 | }), 18 | 19 | umdBuild: () => 20 | build({ 21 | output: { 22 | file: pkg.browser, 23 | name: "dfuseClient", 24 | format: "umd", 25 | sourcemap: true 26 | }, 27 | resolve: { 28 | browser: true 29 | }, 30 | prePlugins: [ignore(builtinModules)], 31 | postPlugins: [terser()] 32 | }) 33 | } 34 | 35 | function build(options) { 36 | return { 37 | input: `src/index.ts`, 38 | output: options.output, 39 | external: options.external || [], 40 | watch: { 41 | include: "src/**" 42 | }, 43 | plugins: [ 44 | ...(options.prePlugins || []), 45 | json(), 46 | typescript({ useTsconfigDeclarationDir: true }), 47 | commonjs(), 48 | resolve(options.resolve), 49 | sourceMaps(), 50 | ...(options.postPlugins || []) 51 | ] 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /rollup.config.umd.js: -------------------------------------------------------------------------------- 1 | const { umdBuild } = require("./rollup.config") 2 | 3 | module.exports = umdBuild() 4 | -------------------------------------------------------------------------------- /scripts/compress-umd-build.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs") 2 | const zlib = require("zlib") 3 | 4 | const directory = "dist" 5 | const filePattern = `${directory}/dfuse-client.umd` 6 | 7 | async function main() { 8 | await zip(`${filePattern}.js`, `${filePattern}.js.gz`) 9 | await zip(`${filePattern}.js.map`, `${filePattern}.js.map.gz`) 10 | 11 | process.exit(0) 12 | } 13 | 14 | async function 
zip(source, destination) { 15 | return new Promise((resolve, reject) => { 16 | fs.createReadStream(source) 17 | .pipe(zlib.createGzip()) 18 | .pipe(fs.createWriteStream(destination)) 19 | .on("finish", resolve) 20 | .on("error", reject) 21 | }) 22 | } 23 | 24 | main().catch((error) => { 25 | console.error(error.stack) 26 | process.exit(1) 27 | }) 28 | -------------------------------------------------------------------------------- /scripts/gh-publish-docs.js: -------------------------------------------------------------------------------- 1 | const { cd, exec, echo, touch } = require("shelljs") 2 | const { githubPushUrl, exitOnError, continueOnError } = require("./helpers") 3 | 4 | echo("Pushing docs...") 5 | 6 | exitOnError(() => { 7 | cd("docs") 8 | touch(".nojekyll") 9 | exec("git init") 10 | exec('git config user.name "--username--"') 11 | exec('git config user.email "--usermail--"') 12 | }) 13 | 14 | continueOnError(() => { 15 | exec("git add .") 16 | exec('git commit -m "docs(docs): update gh-pages"') 17 | }) 18 | 19 | exitOnError(() => { 20 | exec(`git push --force --quiet "${githubPushUrl()}" master:gh-pages`) 21 | }) 22 | 23 | echo("Docs deployed") 24 | -------------------------------------------------------------------------------- /scripts/gh-push.js: -------------------------------------------------------------------------------- 1 | const { exec, echo } = require("shelljs") 2 | const { exitOnError } = require("./helpers") 3 | 4 | echo("Pushing code and tags...") 5 | 6 | exitOnError(() => { 7 | exec(`git push origin --follow-tags`) 8 | }) 9 | 10 | echo("Pushed branch & tags") 11 | -------------------------------------------------------------------------------- /scripts/helpers.js: -------------------------------------------------------------------------------- 1 | const { set } = require("shelljs") 2 | const url = require("url") 3 | 4 | const pkg = require("../package.json") 5 | 6 | module.exports = { 7 | githubPushUrl() { 8 | const repoUrl = 
url.parse(pkg.repository) 9 | 10 | const githubToken = process.env.GITHUB_API_TOKEN 11 | if (githubToken) { 12 | return `https://${githubToken}@${repoUrl.host + repoUrl.path}` 13 | } 14 | 15 | return `git@${repoUrl.host}:${repoUrl.path}.git` 16 | }, 17 | 18 | exitOnError(worker) { 19 | set("-e") 20 | worker() 21 | set("+e") 22 | }, 23 | 24 | continueOnError(worker) { 25 | set("+e") 26 | worker() 27 | set("-e") 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/client/__tests__/api-token-manager.test.ts: -------------------------------------------------------------------------------- 1 | import { 2 | ApiTokenManager, 3 | createApiTokenManager, 4 | createNoopApiTokenManager, 5 | isApiTokenExpired, 6 | } from "../api-token-manager" 7 | import { MockApiTokenStore, MockRefreshScheduler, mock } from "./mocks" 8 | import { ApiTokenInfo } from "../../types/auth-token" 9 | 10 | // In milliseconds 11 | const currentDate = 1000000 12 | 13 | // Expirations is in seconds! 
14 | const nonExpiredApiTokenInfo = { token: "non-expired-far", expires_at: 2000 } 15 | const nonExpiredJustBeforeApiTokenInfo = { token: "non-expired-just-before", expires_at: 1001 } 16 | const expiredTokenInfo = { token: "expired-far", expires_at: 100 } 17 | const expiredRightOnApiTokenInfo = { token: "expired-right-on", expires_at: 1000 } 18 | const expiredJustAfterApiTokenInfo = { token: "expired-just-after", expires_at: 999 } 19 | const noopApiTokenInfo = { token: "aa.bb.cc", expires_at: Number.MAX_SAFE_INTEGER } 20 | 21 | const defaultFetchApiTokenInfo = nonExpiredApiTokenInfo 22 | 23 | describe("ApiTokenManager", () => { 24 | let fetchApiToken: jest.Mock 25 | let onTokenRefresh: jest.Mock 26 | let apiTokenStore: MockApiTokenStore 27 | let refreshScheduler: MockRefreshScheduler 28 | let manager: ApiTokenManager 29 | 30 | beforeEach(() => { 31 | jest.spyOn(Date, "now").mockReturnValue(currentDate) 32 | 33 | fetchApiToken = mock>(() => Promise.resolve(defaultFetchApiTokenInfo)) 34 | onTokenRefresh = mock() 35 | apiTokenStore = new MockApiTokenStore() 36 | refreshScheduler = new MockRefreshScheduler() 37 | 38 | manager = createApiTokenManager( 39 | fetchApiToken, 40 | onTokenRefresh, 41 | 0.95, 42 | apiTokenStore, 43 | refreshScheduler 44 | ) 45 | }) 46 | 47 | it("should call fetchApiToken when no token in storage", async () => { 48 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(undefined)) 49 | 50 | const result = await manager.getTokenInfo() 51 | expect(fetchApiToken).toHaveBeenCalledTimes(1) 52 | expect(result).toEqual(defaultFetchApiTokenInfo) 53 | }) 54 | 55 | it("should return stored token when present in storage and non-expired", async () => { 56 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(nonExpiredApiTokenInfo)) 57 | 58 | const result = await manager.getTokenInfo() 59 | expect(fetchApiToken).toHaveBeenCalledTimes(0) 60 | expect(result).toEqual(nonExpiredApiTokenInfo) 61 | }) 62 | 63 | it("should call fetchApiToken when present in 
storage and expired", async () => { 64 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(expiredTokenInfo)) 65 | 66 | const result = await manager.getTokenInfo() 67 | expect(fetchApiToken).toHaveBeenCalledTimes(1) 68 | expect(result).toEqual(defaultFetchApiTokenInfo) 69 | }) 70 | 71 | it("schedules a refresh when no token in storage", async () => { 72 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(undefined)) 73 | 74 | await manager.getTokenInfo() 75 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(1) 76 | expect(refreshScheduler.scheduleMock.mock.calls[0][0]).toEqual(950) 77 | }) 78 | 79 | it("schedules a refresh when token present in storage but expired", async () => { 80 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(expiredTokenInfo)) 81 | 82 | await manager.getTokenInfo() 83 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(1) 84 | expect(refreshScheduler.scheduleMock.mock.calls[0][0]).toEqual(950) 85 | }) 86 | 87 | it("schedules a refresh when token in storage, not expired, and no previous schedule", async () => { 88 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(nonExpiredApiTokenInfo)) 89 | 90 | await manager.getTokenInfo() 91 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(1) 92 | expect(refreshScheduler.scheduleMock.mock.calls[0][0]).toEqual(950) 93 | }) 94 | 95 | it("does not schedule a refresh when token in storage, not expired and previous schedule exists", async () => { 96 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(nonExpiredApiTokenInfo)) 97 | refreshScheduler.hasScheduledJobMock.mockReturnValue(true) 98 | 99 | await manager.getTokenInfo() 100 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(0) 101 | }) 102 | 103 | // eslint-disable-next-line jest/no-done-callback 104 | it("schedules a refresh when refresh schedule callback is called, even when schedule exists", async (done) => { 105 | 
apiTokenStore.getMock.mockReturnValue(Promise.resolve(expiredTokenInfo)) 106 | 107 | await manager.getTokenInfo() 108 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(1) 109 | 110 | const refreshCallBack = refreshScheduler.scheduleMock.mock.calls[0][1] 111 | 112 | refreshScheduler.hasScheduledJobMock.mockReturnValue(true) 113 | 114 | onTokenRefresh.mockImplementation(() => { 115 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(2) 116 | done() 117 | }) 118 | 119 | refreshCallBack() 120 | }) 121 | 122 | it("notifies onTokenRefresh when token not present in storage", async () => { 123 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(undefined)) 124 | 125 | await manager.getTokenInfo() 126 | expect(onTokenRefresh).toHaveBeenCalledTimes(1) 127 | expect(onTokenRefresh).toHaveBeenCalledWith(defaultFetchApiTokenInfo.token) 128 | }) 129 | 130 | it("notifies onTokenRefresh when token present in storage but expired", async () => { 131 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(expiredTokenInfo)) 132 | 133 | await manager.getTokenInfo() 134 | expect(onTokenRefresh).toHaveBeenCalledTimes(1) 135 | expect(onTokenRefresh).toHaveBeenCalledWith(defaultFetchApiTokenInfo.token) 136 | }) 137 | 138 | it("does not notify onTokenRefresh when token in storage and not expired", async () => { 139 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(nonExpiredApiTokenInfo)) 140 | 141 | await manager.getTokenInfo() 142 | expect(onTokenRefresh).toHaveBeenCalledTimes(0) 143 | }) 144 | 145 | // eslint-disable-next-line jest/no-done-callback 146 | it("notifies onTokenRefresh when refresh schedule callback is called", async (done) => { 147 | apiTokenStore.getMock.mockReturnValue(Promise.resolve(expiredTokenInfo)) 148 | 149 | await manager.getTokenInfo() 150 | expect(refreshScheduler.scheduleMock).toHaveBeenCalledTimes(1) 151 | 152 | const refreshCallBack = refreshScheduler.scheduleMock.mock.calls[0][1] 153 | 154 | 
onTokenRefresh.mockImplementation((token: ApiTokenInfo) => { 155 | expect(token).toEqual(defaultFetchApiTokenInfo.token) 156 | done() 157 | }) 158 | 159 | refreshCallBack() 160 | }) 161 | 162 | it("releases scheduler and api token store on release", () => { 163 | manager.release() 164 | expect(apiTokenStore.releaseMock).toHaveBeenCalledTimes(1) 165 | expect(refreshScheduler.releaseMock).toHaveBeenCalledTimes(1) 166 | }) 167 | }) 168 | 169 | describe("NoopApiTokenManager", () => { 170 | let manager: ApiTokenManager 171 | 172 | beforeEach(() => { 173 | jest.spyOn(Date, "now").mockReturnValue(currentDate) 174 | 175 | manager = createNoopApiTokenManager("aa.bb.cc") 176 | }) 177 | 178 | it("should return hardwired token", async () => { 179 | const result = await manager.getTokenInfo() 180 | expect(result).toEqual(noopApiTokenInfo) 181 | }) 182 | }) 183 | 184 | describe("isApiTokenExpired", () => { 185 | const testCases = [ 186 | { token: nonExpiredApiTokenInfo, isExpired: false }, 187 | { token: nonExpiredJustBeforeApiTokenInfo, isExpired: false }, 188 | { token: expiredTokenInfo, isExpired: true }, 189 | { token: expiredRightOnApiTokenInfo, isExpired: true }, 190 | { token: expiredJustAfterApiTokenInfo, isExpired: true }, 191 | ] 192 | 193 | testCases.forEach((testCase) => { 194 | it(`should pass test case ${testCase.token.token}`, () => { 195 | jest.spyOn(Date, "now").mockReturnValue(currentDate) 196 | 197 | expect(isApiTokenExpired(testCase.token)).toEqual(testCase.isExpired) 198 | }) 199 | }) 200 | }) 201 | -------------------------------------------------------------------------------- /src/client/__tests__/mocks.ts: -------------------------------------------------------------------------------- 1 | import { ApiTokenInfo } from "../../types/auth-token" 2 | import { ApiTokenStore } from "../api-token-store" 3 | import { RefreshScheduler, ScheduleJob } from "../refresh-scheduler" 4 | import { Socket, SocketMessageListener, WebSocket, SocketConnectOptions } from 
"../../types/socket" 5 | import { OutboundMessage } from "../../message/outbound" 6 | import { StreamClient, OnStreamMessage } from "../../types/stream-client" 7 | import { HttpClient, HttpQueryParameters, HttpHeaders } from "../../types/http-client" 8 | import { Stream } from "../../types/stream" 9 | import { GraphqlStreamClient, OnGraphqlStreamMessage } from "../../types/graphql-stream-client" 10 | import { GraphqlDocument, GraphqlVariables } from "../../types/graphql" 11 | 12 | export function mock(implementation?: (...args: any) => T): jest.Mock { 13 | if (implementation === undefined) { 14 | return jest.fn() as jest.Mock 15 | } 16 | 17 | return jest.fn(implementation) 18 | } 19 | 20 | export class MockHttpClient implements HttpClient { 21 | public releaseMock = mock() 22 | public authRequestMock = mock>(() => Promise.resolve()) 23 | public apiRequestMock = mock>(() => Promise.resolve()) 24 | 25 | public release(): void { 26 | this.releaseMock() 27 | } 28 | 29 | public authRequest( 30 | path: string, 31 | method: string, 32 | params?: HttpQueryParameters, 33 | body?: any, 34 | headers?: HttpHeaders 35 | ): Promise { 36 | return this.authRequestMock(path, method, params, body, headers) 37 | } 38 | 39 | public apiRequest( 40 | apiToken: string, 41 | path: string, 42 | method: string, 43 | params?: HttpQueryParameters, 44 | body?: any, 45 | headers?: HttpHeaders 46 | ): Promise { 47 | return this.apiRequestMock(apiToken, path, method, params, body, headers) 48 | } 49 | } 50 | 51 | export class MockGraphqlStreamClient implements GraphqlStreamClient { 52 | public releaseMock = mock() 53 | public setApiTokenMock = jest.fn((/*apiToken: string*/) => { 54 | return 55 | }) 56 | 57 | public registerStreamMock = mock>() 58 | public unregisterStreamMock = mock>(() => Promise.resolve()) 59 | 60 | public release(): void { 61 | this.releaseMock() 62 | } 63 | 64 | public setApiToken(apiToken: string): void { 65 | this.setApiTokenMock(apiToken) 66 | } 67 | 68 | public 
registerStream( 69 | id: string, 70 | document: GraphqlDocument, 71 | variables: GraphqlVariables, 72 | onMessage: OnGraphqlStreamMessage 73 | ): Promise { 74 | return this.registerStreamMock(id, document, variables, onMessage) 75 | } 76 | 77 | public unregisterStream(id: string): Promise { 78 | return this.unregisterStreamMock(id) 79 | } 80 | } 81 | 82 | export class MockStreamClient implements StreamClient { 83 | public releaseMock = mock() 84 | public setApiTokenMock = jest.fn((/*apiToken: string*/) => { 85 | return 86 | }) 87 | 88 | public registerStreamMock = mock>() 89 | public unregisterStreamMock = mock>(() => Promise.resolve()) 90 | 91 | public release(): void { 92 | this.releaseMock() 93 | } 94 | 95 | public setApiToken(apiToken: string): void { 96 | this.setApiTokenMock(apiToken) 97 | } 98 | 99 | public registerStream(message: OutboundMessage, onMessage: OnStreamMessage): Promise { 100 | return this.registerStreamMock(message, onMessage) 101 | } 102 | 103 | public unregisterStream(id: string): Promise { 104 | return this.unregisterStreamMock(id) 105 | } 106 | } 107 | 108 | export class MockSocket implements Socket { 109 | public isConnectedMock = mock(() => true) 110 | public connectMock = mock>() 111 | public disconnectMock = mock>() 112 | public sendMock = mock>() 113 | public setApiTokenMock = mock() 114 | 115 | public getMock = mock>() 116 | 117 | public get isConnected(): boolean { 118 | return this.isConnectedMock() 119 | } 120 | 121 | public connect( 122 | listener: SocketMessageListener, 123 | options: { onReconnect?: () => void } = {} 124 | ): Promise { 125 | return this.connectMock(listener, options).then(() => { 126 | this.isConnectedMock.mockReturnValue(true) 127 | return 128 | }) 129 | } 130 | 131 | public disconnect(): Promise { 132 | return this.disconnectMock().then(() => { 133 | this.isConnectedMock.mockReturnValue(false) 134 | return 135 | }) 136 | } 137 | 138 | public send(message: T): Promise { 139 | return this.sendMock(message) 140 
| } 141 | 142 | public setApiToken(apiToken: string): void { 143 | return this.setApiTokenMock(apiToken) 144 | } 145 | } 146 | 147 | export class MockWebSocket implements WebSocket { 148 | public readonly CLOSED = 0 149 | public readonly CLOSING = 0 150 | public readonly CONNECTING = 0 151 | public readonly OPEN = 0 152 | 153 | public readonly readyState: number 154 | public readonly protocol: string 155 | public readonly url: string 156 | 157 | public onclose?: (event: any) => any 158 | public onerror?: (event: any) => any 159 | public onmessage?: (event: any) => any 160 | public onopen?: (event: any) => any 161 | 162 | public closeMock = mock() 163 | public sendMock = mock() 164 | 165 | constructor(url: string) { 166 | // Our mock does not move around those states, there only to please TypeScript 167 | this.readyState = this.CLOSED 168 | this.protocol = "" 169 | this.url = url 170 | } 171 | 172 | public close(): void { 173 | this.closeMock() 174 | } 175 | 176 | public send(data: string | ArrayBufferLike | Blob | ArrayBufferView): void { 177 | this.sendMock(data) 178 | } 179 | } 180 | 181 | export class MockApiTokenStore implements ApiTokenStore { 182 | public releaseMock = mock() 183 | public setMock = mock>() 184 | public getMock = mock>() 185 | 186 | public release(): void { 187 | this.releaseMock() 188 | } 189 | 190 | public set(apiTokenInfo: ApiTokenInfo): Promise { 191 | return this.setMock(apiTokenInfo) 192 | } 193 | 194 | public get(): Promise { 195 | return this.getMock() 196 | } 197 | } 198 | 199 | export class MockRefreshScheduler implements RefreshScheduler { 200 | public releaseMock = mock() 201 | public hasScheduledJobMock = mock() 202 | public scheduleMock = mock() 203 | 204 | public release(): void { 205 | this.releaseMock() 206 | } 207 | 208 | public hasScheduledJob(): boolean { 209 | return this.hasScheduledJobMock() 210 | } 211 | 212 | public schedule(delayInSeconds: number, job: ScheduleJob): void { 213 | this.scheduleMock(delayInSeconds, job) 
214 | } 215 | } 216 | 217 | export type SocketController = { 218 | send(message: any): void 219 | replier(handler: (outboundMessage: any) => unknown | undefined): void 220 | 221 | notifyReconnection(): void 222 | notifyTermination(initiator: "client" | "server", event: any): void 223 | 224 | setConnected(): void 225 | setDisconnected(): void 226 | } 227 | 228 | export const createSocketController = (socket: MockSocket): SocketController => { 229 | let sender: SocketMessageListener | undefined 230 | let replier: ((outboundMessage: any) => unknown | undefined) | undefined 231 | let reconnecNotifier: (() => void) | undefined 232 | let terminationNotifier: ((initiator: "client" | "server", event: any) => void) | undefined 233 | 234 | socket.sendMock.mockImplementation((message: any) => { 235 | if (replier) { 236 | const reply = replier(message) 237 | if (reply) { 238 | // We use a setTimeout so it will be sent right after the promise as resolve in the return below 239 | setTimeout(() => { 240 | if (sender) { 241 | sender(reply) 242 | } 243 | }) 244 | } 245 | } 246 | return Promise.resolve() 247 | }) 248 | 249 | socket.connectMock.mockImplementation( 250 | (listener: SocketMessageListener, options: SocketConnectOptions) => { 251 | sender = listener 252 | reconnecNotifier = options.onReconnect 253 | terminationNotifier = options.onTermination 254 | return Promise.resolve() 255 | } 256 | ) 257 | 258 | socket.disconnectMock.mockImplementation(() => { 259 | sender = undefined 260 | reconnecNotifier = undefined 261 | terminationNotifier = undefined 262 | return Promise.resolve() 263 | }) 264 | 265 | return { 266 | replier(handler: (outboundMessage: any) => unknown | undefined) { 267 | replier = handler 268 | }, 269 | 270 | send(message: any) { 271 | if (sender) { 272 | sender(message) 273 | } 274 | }, 275 | 276 | notifyReconnection() { 277 | if (reconnecNotifier) { 278 | reconnecNotifier() 279 | } 280 | }, 281 | 282 | notifyTermination(initiator: "client" | "server", event: 
any) { 283 | if (terminationNotifier) { 284 | terminationNotifier(initiator, event) 285 | } 286 | }, 287 | 288 | setConnected() { 289 | socket.isConnectedMock.mockReturnValue(true) 290 | }, 291 | 292 | setDisconnected() { 293 | socket.isConnectedMock.mockReturnValue(false) 294 | }, 295 | } 296 | } 297 | -------------------------------------------------------------------------------- /src/client/api-token-manager.ts: -------------------------------------------------------------------------------- 1 | import { RefreshScheduler } from "./refresh-scheduler" 2 | import { ApiTokenStore } from "./api-token-store" 3 | import debugFactory, { IDebugger } from "debug" 4 | import { ApiTokenInfo } from "../types/auth-token" 5 | 6 | export const DEFAULT_DELAY_BUFFER_PERCENT = 0.95 7 | 8 | export interface ApiTokenManager { 9 | release(): void 10 | getTokenInfo: () => Promise 11 | } 12 | 13 | /** 14 | * Create the standard [[ApiTokenManager]] interface that will manage all the lifecycle 15 | * of a token. 16 | * 17 | * @param fetchTokenInfo The async function that should be used to retrieve a fresh token. 18 | * @param onTokenRefresh The function to call when a token has been refreshed. 19 | * @param delayBuffer The percentage of time to use to schedule the next token refresh 20 | * (i.e. with a value of `0.9` and a token valid for 1000ms from now, 21 | * the next refresh would be scheduled to happen at `now + (0.9 * 1000)`) 22 | * @param apiTokenStore The API token store interface to retrieve token from and to save it back to. 23 | * @param refreshScheduler The scheduler interface that should be used to schedule upcoming refresh token 24 | * and check if a scheduled one already exist. 
25 | * 26 | * @kind Factories 27 | */ 28 | export function createApiTokenManager( 29 | fetchTokenInfo: () => Promise, 30 | onTokenRefresh: (apiToken: string) => void, 31 | delayBuffer: number, 32 | apiTokenStore: ApiTokenStore, 33 | refreshScheduler: RefreshScheduler 34 | ): ApiTokenManager { 35 | return new DefaultApiTokenManager( 36 | fetchTokenInfo, 37 | onTokenRefresh, 38 | delayBuffer, 39 | apiTokenStore, 40 | refreshScheduler 41 | ) 42 | } 43 | 44 | /** 45 | * Create the Noop [[ApiTokenManager]] interface that will manage all the lifecycle 46 | * of a token. 47 | * 48 | * @param token The hardwired token value 49 | * 50 | * @kind Factories 51 | */ 52 | export function createNoopApiTokenManager(token: string): ApiTokenManager { 53 | return new NoopApiTokenManager(token) 54 | } 55 | 56 | /** 57 | * Check wheter the received [[ApiTokenInfo]] parameter is expired or near its 58 | * expiration. 59 | */ 60 | export function isApiTokenExpired(tokenInfo: ApiTokenInfo): boolean { 61 | const now = Date.now() / 1000 62 | return tokenInfo.expires_at <= now 63 | } 64 | 65 | class DefaultApiTokenManager implements ApiTokenManager { 66 | private fetchTokenInfo: () => Promise 67 | private onTokenRefresh: (apiToken: string) => void 68 | private delayBuffer: number 69 | private apiTokenStore: ApiTokenStore 70 | private refreshScheduler: RefreshScheduler 71 | 72 | private fetchTokenPromise?: Promise 73 | private debug: IDebugger 74 | 75 | constructor( 76 | fetchTokenInfo: () => Promise, 77 | onTokenRefresh: (apiToken: string) => void, 78 | delayBuffer: number, 79 | apiTokenStore: ApiTokenStore, 80 | refreshScheduler: RefreshScheduler 81 | ) { 82 | this.fetchTokenInfo = fetchTokenInfo 83 | this.onTokenRefresh = onTokenRefresh 84 | this.delayBuffer = delayBuffer 85 | this.apiTokenStore = apiTokenStore 86 | this.refreshScheduler = refreshScheduler 87 | 88 | this.debug = debugFactory("dfuse:token-manager") 89 | } 90 | 91 | public release(): void { 92 | this.debug("Releasing default 
API token manager") 93 | this.refreshScheduler.release() 94 | this.apiTokenStore.release() 95 | } 96 | 97 | public async getTokenInfo(): Promise { 98 | const tokenInfo = await this.apiTokenStore.get() 99 | if (tokenInfo && !isApiTokenExpired(tokenInfo)) { 100 | this.maybeScheduleNextRefresh(tokenInfo, { forceRefresh: false }) 101 | 102 | this.debug("Returning token found in API token store since it was still valid") 103 | return Promise.resolve(tokenInfo) 104 | } 105 | 106 | this.debug("Returning a fresh API token as it was not present in store or expired") 107 | return await this.refreshToken() 108 | } 109 | 110 | private async refreshToken(): Promise { 111 | const tokenInfo = await this.internalFetchToken() 112 | this.debug( 113 | "Retrieved an API token that is going to expires at %s (%s)", 114 | new Date(tokenInfo.expires_at * 1000), 115 | tokenInfo.token 116 | ) 117 | 118 | this.maybeScheduleNextRefresh(tokenInfo, { forceRefresh: true }) 119 | 120 | try { 121 | this.debug("Storing API token into token storage") 122 | await this.apiTokenStore.set(tokenInfo) 123 | } catch (error) { 124 | this.debug("Storing token into storage failed %s (%o)", error, error) 125 | } 126 | 127 | this.debug("Notifying upstream listener that API token has been refreshed.") 128 | this.onTokenRefresh(tokenInfo.token) 129 | 130 | return tokenInfo 131 | } 132 | 133 | private maybeScheduleNextRefresh( 134 | tokenInfo: ApiTokenInfo, 135 | options: { forceRefresh: boolean } 136 | ): void { 137 | if (!options.forceRefresh && this.refreshScheduler.hasScheduledJob()) { 138 | return 139 | } 140 | 141 | const delayInSeconds = this.getRefreshDelayInSeconds(tokenInfo) 142 | const refreshAt = new Date(Date.now() + delayInSeconds * 1000) 143 | 144 | this.debug( 145 | "Scheduling next token refresh to occur at %s (in %s seconds)", 146 | refreshAt, 147 | delayInSeconds 148 | ) 149 | 150 | this.refreshScheduler.schedule(delayInSeconds, () => this.refreshToken()) 151 | } 152 | 153 | private 
getRefreshDelayInSeconds(tokenInfo: ApiTokenInfo): number { 154 | const nowInSeconds = Math.floor(Date.now() / 1000) 155 | 156 | return (tokenInfo.expires_at - nowInSeconds) * this.delayBuffer 157 | } 158 | 159 | private async internalFetchToken(): Promise { 160 | if (this.fetchTokenPromise !== undefined) { 161 | return this.fetchTokenPromise 162 | } 163 | 164 | this.fetchTokenPromise = new Promise((resolve, reject) => { 165 | this.fetchTokenInfo() 166 | .then((apiTokenInfo: ApiTokenInfo) => { 167 | this.fetchTokenPromise = undefined 168 | resolve(apiTokenInfo) 169 | return 170 | }) 171 | .catch((error: any) => { 172 | this.fetchTokenPromise = undefined 173 | reject(error) 174 | }) 175 | }) 176 | 177 | return this.fetchTokenPromise 178 | } 179 | } 180 | 181 | class NoopApiTokenManager implements ApiTokenManager { 182 | private token: string 183 | private expiresAt: number 184 | private debug: IDebugger 185 | 186 | constructor(token: string) { 187 | this.token = token 188 | this.expiresAt = Number.MAX_SAFE_INTEGER 189 | this.debug = debugFactory("dfuse:token-manager-noop") 190 | } 191 | 192 | public release(): void { 193 | this.debug("Releasing default API token manager") 194 | } 195 | 196 | public async getTokenInfo(): Promise { 197 | return { token: this.token, expires_at: this.expiresAt } 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /src/client/api-token-store.ts: -------------------------------------------------------------------------------- 1 | import { ApiTokenInfo } from "../types/auth-token" 2 | import { DfuseClientError } from "../types/error" 3 | 4 | // These modules will be replaced by empty modules for an UMD build (Browser), check rollup.config.js `umdBuild` 5 | import crypto from "crypto" 6 | import fs from "fs" 7 | import os from "os" 8 | import path from "path" 9 | 10 | /** 11 | * A simple API token store interface supporting async operations. 
This 12 | * interface is used to store the API token when it has been refreshed 13 | * as well as retrieving a token from the store. 14 | * 15 | * By providing your own [[ApiTokenStore]] implementation, you can for 16 | * example easily store the token in the `localStorage` ([[LocalStorageApiTokenStore]]), 17 | * under `~/.dfuse//token.json` file ([[OnDiskApiTokenStore]]) or 18 | * more generically at any path ([[FileApiTokenStore]]). 19 | * 20 | * **Note** The [[OnDiskApiTokenStore]] and [[FileApiTokenStore]] are available 21 | * only on a Node.js environment. 22 | * 23 | * @kind Interfaces 24 | */ 25 | export interface ApiTokenStore { 26 | /** 27 | * Release any resources hold by this [[ApiTokenStore]] instance. Must 28 | * be tolerant to being called multiple times. 29 | * 30 | * Once called, the instance is assumed unsuable and should never 31 | * be invoked anymore. 32 | */ 33 | release(): void 34 | 35 | set: (apiTokenInfo: ApiTokenInfo) => Promise 36 | get: () => Promise 37 | } 38 | 39 | /** 40 | * Represents an in-memory token storage concrete implementation of 41 | * a . This simply keep the token in variable and serves 42 | * it from there. 43 | * 44 | * It is **never** persisted and will be reset upon restart of the Browser tab 45 | * or process, leading to a new token being issued. 46 | * 47 | * You should try hard to use a persistent solution so that you re-use the 48 | * same token as long as it's valid. 49 | */ 50 | export class InMemoryApiTokenStore { 51 | private apiTokenInfo?: ApiTokenInfo 52 | 53 | public release(): void { 54 | return 55 | } 56 | 57 | public async get(): Promise { 58 | return this.apiTokenInfo 59 | } 60 | 61 | public async set(apiTokenInfo: ApiTokenInfo): Promise { 62 | this.apiTokenInfo = apiTokenInfo 63 | } 64 | } 65 | 66 | /** 67 | * Represents an [[ApiTokenStore]] that saves the token as a JSON string 68 | * in the `localStorage` of the Browser. 
69 | * 70 | * Trying to use this class when `window.localStorage` is not a function 71 | * (like in a Node.js environment) will throw an error at construction 72 | * time. Use another implementation. If this error is thrown nonetheless 73 | * in your Browser, local storage is probably not supported there. 74 | * 75 | * It is persisted in the local storage of the Browser it will be picked up 76 | * upon restart of the Browser tab. 77 | */ 78 | export class LocalStorageApiTokenStore implements ApiTokenStore { 79 | private key: string 80 | private apiTokenInfo?: ApiTokenInfo 81 | 82 | constructor(key: string) { 83 | this.key = key 84 | 85 | if (typeof localStorage !== "object") { 86 | const messages = [ 87 | "This environment does not contain a valid `localStorage` object in the global scope to use.", 88 | "", 89 | "You are most likely in a Node.js environment where a global `localStorage` is not available by default.", 90 | "This API token store concrete impelementation is not usable in your environment. You should be", 91 | "providing a different implementation of ApiTokenInfo.", 92 | "", 93 | "If this error occurred when you did not provide yourself the instance, it means our auto-detection", 94 | "mechanism incorrectly thought it could use `LocalStorageApiTokenStore` instance while it should", 95 | "have not. 
Please report a bug about this issue so we can fix it.", 96 | "", 97 | "If you provided the instance yourself, you should read our documentation to better", 98 | "understand what you should provide here.", 99 | "", 100 | "See https://github.com/dfuse-io/client-js", 101 | ] 102 | 103 | throw new DfuseClientError(messages.join("\n")) 104 | } 105 | } 106 | 107 | public release(): void { 108 | return 109 | } 110 | 111 | public async get(): Promise { 112 | if (this.apiTokenInfo !== undefined) { 113 | return this.apiTokenInfo 114 | } 115 | 116 | const raw = localStorage.getItem(this.key) 117 | if (raw == null) { 118 | return undefined 119 | } 120 | 121 | this.apiTokenInfo = JSON.parse(raw) 122 | 123 | return this.apiTokenInfo 124 | } 125 | 126 | public async set(apiTokenInfo: ApiTokenInfo): Promise { 127 | this.apiTokenInfo = apiTokenInfo 128 | localStorage.setItem(this.key, JSON.stringify(apiTokenInfo)) 129 | } 130 | } 131 | 132 | /** 133 | * Represents an [[ApiTokenStore]] implementation that will save 134 | * as a JSON string in plain text in the given file. 135 | * 136 | * The directory structure is created when it does not exists. 
137 | * 138 | * **Note** This cannot be used in a browser environment 139 | */ 140 | export class FileApiTokenStore implements ApiTokenStore { 141 | private filePath: string 142 | private apiTokenInfo?: ApiTokenInfo 143 | 144 | constructor(filePath: string) { 145 | this.filePath = filePath 146 | } 147 | 148 | public release(): void { 149 | return 150 | } 151 | 152 | public async get(): Promise { 153 | if (this.apiTokenInfo !== undefined) { 154 | return this.apiTokenInfo 155 | } 156 | 157 | const data = await readData(this.filePath) 158 | if (data === undefined) { 159 | return undefined 160 | } 161 | 162 | this.apiTokenInfo = JSON.parse(data) 163 | 164 | return this.apiTokenInfo 165 | } 166 | 167 | public async set(apiTokenInfo: ApiTokenInfo): Promise { 168 | this.apiTokenInfo = apiTokenInfo 169 | 170 | await writeData(this.filePath, JSON.stringify(apiTokenInfo)) 171 | } 172 | } 173 | 174 | /** 175 | * Represents an [[ApiTokenStore]] implementation that will save 176 | * as a JSON string in a file located at 177 | * `~/.dfuse//token.json`. 178 | * 179 | * The directory structure is created when it does not exists. 180 | * 181 | * **Note** This cannot be used in a browser environment. 182 | */ 183 | export class OnDiskApiTokenStore extends FileApiTokenStore { 184 | constructor(apiKey: string) { 185 | const homeDirectory = os.homedir() 186 | const sha256sum = crypto.createHash("sha256") 187 | 188 | super(`${homeDirectory}/.dfuse/${sha256sum.update(apiKey).digest("hex")}/token.json`) 189 | } 190 | } 191 | 192 | async function readData(filePath: string): Promise { 193 | return new Promise((resolve, reject) => { 194 | if (!fs.existsSync(filePath)) { 195 | resolve(undefined) 196 | return 197 | } 198 | 199 | fs.readFile(filePath, (error: any, data: any) => { 200 | error ? 
reject(error) : resolve(data) 201 | }) 202 | }) 203 | } 204 | 205 | async function writeData(filePath: string, data: string): Promise { 206 | return new Promise((resolve, reject) => { 207 | try { 208 | mkdirpSync(path.dirname(filePath)) 209 | } catch (error) { 210 | reject(error) 211 | return 212 | } 213 | 214 | fs.writeFile(filePath, data, (error: any) => { 215 | error ? reject(error) : resolve() 216 | }) 217 | }) 218 | } 219 | 220 | async function mkdirpSync(directory: string): Promise { 221 | if (!path.isAbsolute(directory)) { 222 | return 223 | } 224 | 225 | const parent = path.join(directory, "..") 226 | if (parent !== path.join("/") && !fs.existsSync(parent)) { 227 | mkdirpSync(parent) 228 | } 229 | 230 | if (!fs.existsSync(directory)) { 231 | fs.mkdirSync(directory) 232 | } 233 | } 234 | 235 | /** 236 | * Represents an no-op token storage concrete implementation. All operations 237 | * are no-op and this should be used when no authentication is required for a given 238 | * instance. 
239 | */ 240 | export class NoOpApiTokenStore { 241 | public release(): void { 242 | return 243 | } 244 | 245 | public async get(): Promise { 246 | return undefined 247 | } 248 | 249 | public async set(): Promise { 250 | return 251 | } 252 | } 253 | -------------------------------------------------------------------------------- /src/client/refresh-scheduler.ts: -------------------------------------------------------------------------------- 1 | import { IDebugger } from "debug" 2 | import debugFactory from "debug" 3 | 4 | export type ScheduleJob = () => void 5 | 6 | export interface RefreshScheduler { 7 | release(): void 8 | hasScheduledJob(): boolean 9 | schedule(delayInSeconds: number, job: ScheduleJob): void 10 | } 11 | 12 | export function createRefreshScheduler(): RefreshScheduler { 13 | return new DefaultRefreshScheduler() 14 | } 15 | 16 | class DefaultRefreshScheduler { 17 | public renewalTimeout?: any 18 | private debug: IDebugger 19 | 20 | constructor() { 21 | this.debug = debugFactory("dfuse:refresh-scheduler") 22 | } 23 | 24 | public release(): void { 25 | this.debug("Releasing default refresh scheduler") 26 | if (this.renewalTimeout !== undefined) { 27 | this.debug("Clearing refresh timeout interval") 28 | this.clearRefreshTimeout() 29 | } 30 | } 31 | 32 | public hasScheduledJob(): boolean { 33 | return this.renewalTimeout !== undefined 34 | } 35 | 36 | public schedule( 37 | delayInSeconds: number, 38 | job: ScheduleJob, 39 | onJobFailed?: (error: any) => void 40 | ): void { 41 | if (delayInSeconds <= 0) { 42 | this.debug("Delay in seconds should be greater than 0") 43 | return 44 | } 45 | 46 | if (this.renewalTimeout) { 47 | this.debug("Clearing previous sheduled timer") 48 | this.clearRefreshTimeout() 49 | } 50 | 51 | this.renewalTimeout = setTimeout(() => { 52 | try { 53 | this.debug("Executing scheduled job at %s%O", new Date(), job) 54 | job() 55 | } catch (error) { 56 | this.debug("Scheduled job failed (%o)", error) 57 | if (onJobFailed) { 58 | 
onJobFailed(error) 59 | } 60 | } 61 | 62 | this.clearRefreshTimeout() 63 | }, delayInSeconds * 1000) 64 | } 65 | 66 | private clearRefreshTimeout(): void { 67 | clearTimeout(this.renewalTimeout) 68 | this.renewalTimeout = undefined 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/helpers/__tests__/transaction.test.ts: -------------------------------------------------------------------------------- 1 | import { TransactionLifecycle } from "../../types/transaction" 2 | import { ActionTrace } from "../../types/action-trace" 3 | import { flattenActionTraces, matchingActionTraces } from "../transaction" 4 | 5 | describe("flattenActions", () => { 6 | it("flattens action traces when no execution traces correctly", () => { 7 | const transaction = createTransaction([]) 8 | transaction.execution_trace = undefined 9 | 10 | expect(flattenActionTraces(transaction)).toEqual([]) 11 | }) 12 | 13 | it("flattens action traces correctly", () => { 14 | const transaction = createTransaction([ 15 | createActionTrace("eosio.token/eosio.token:transfer", [ 16 | createActionTrace("from/eosio.token:transfer", [ 17 | createActionTrace("contractX/contractX:log") 18 | ]), 19 | createActionTrace("to/eosio.token:transfer", [ 20 | createActionTrace("contractY/contractY:update") 21 | ]) 22 | ]) 23 | ]) 24 | 25 | const toTriplet = (actionTrace: ActionTrace): string => 26 | `${actionTrace.receipt.receiver}/${actionTrace.act.account}:${actionTrace.act.name}` 27 | 28 | expect(flattenActionTraces(transaction).map(toTriplet)).toEqual([ 29 | "eosio.token/eosio.token:transfer", 30 | "from/eosio.token:transfer", 31 | "contractX/contractX:log", 32 | "to/eosio.token:transfer", 33 | "contractY/contractY:update" 34 | ]) 35 | }) 36 | }) 37 | 38 | describe("matchingActions", () => { 39 | it("matching action traces when no execution traces correctly", () => { 40 | const transaction = createTransaction([]) 41 | transaction.execution_trace = undefined 42 | 43 | 
expect(matchingActionTraces({ lifecycle: transaction, action_idx: [] })).toEqual([]) 44 | }) 45 | 46 | it("extracts matching action traces correctly", () => { 47 | const transaction = createTransaction([ 48 | createActionTrace("eosio.token/eosio.token:transfer", [ 49 | createActionTrace("from/eosio.token:transfer", [ 50 | createActionTrace("contractX/contractX:log") 51 | ]), 52 | createActionTrace("to/eosio.token:transfer", [ 53 | createActionTrace("contractY/contractY:update") 54 | ]) 55 | ]) 56 | ]) 57 | 58 | const toTriplet = (actionTrace: ActionTrace): string => 59 | `${actionTrace.receipt.receiver}/${actionTrace.act.account}:${actionTrace.act.name}` 60 | 61 | expect(matchingActionTraces({ lifecycle: transaction, action_idx: [] }).map(toTriplet)).toEqual( 62 | [] 63 | ) 64 | 65 | expect( 66 | matchingActionTraces({ lifecycle: transaction, action_idx: [0, 2, 4] }).map(toTriplet) 67 | ).toEqual([ 68 | "eosio.token/eosio.token:transfer", 69 | "contractX/contractX:log", 70 | "contractY/contractY:update" 71 | ]) 72 | 73 | expect( 74 | matchingActionTraces({ lifecycle: transaction, action_idx: [0, 1, 2, 3, 4] }).map(toTriplet) 75 | ).toEqual([ 76 | "eosio.token/eosio.token:transfer", 77 | "from/eosio.token:transfer", 78 | "contractX/contractX:log", 79 | "to/eosio.token:transfer", 80 | "contractY/contractY:update" 81 | ]) 82 | }) 83 | }) 84 | 85 | function createTransaction(actionTraces: ActionTrace[]): TransactionLifecycle { 86 | return { 87 | id: "123", 88 | transaction_status: "executed", 89 | execution_block_header: { 90 | confirmed: 1, 91 | producer: "eosio", 92 | action_mroot: "", 93 | transaction_mroot: "", 94 | previous: "", 95 | timestamp: "", 96 | header_extensions: [], 97 | new_producers: null, 98 | schedule_version: 1 99 | }, 100 | cancelation_irreversible: false, 101 | creation_irreversible: false, 102 | execution_irreversible: true, 103 | pub_keys: [], 104 | transaction: {} as any, 105 | execution_trace: { 106 | id: "123", 107 | scheduled: false, 108 | 
block_num: 1, 109 | block_time: "", 110 | elapsed: 10, 111 | net_usage: 10, 112 | action_traces: actionTraces 113 | } 114 | } 115 | } 116 | 117 | function createActionTrace( 118 | triplet: string, 119 | childActionTraces?: ActionTrace[] 120 | ): ActionTrace { 121 | const receiverActionParts = triplet.split("/", 2) 122 | const actionParts = receiverActionParts[1].split(":", 2) 123 | 124 | return { 125 | act: { 126 | account: actionParts[0], 127 | name: actionParts[1], 128 | data: {} 129 | }, 130 | block_num: 1, 131 | block_time: "", 132 | console: "", 133 | elapsed: 0, 134 | context_free: false, 135 | receipt: { 136 | abi_sequence: 1, 137 | act_digest: "", 138 | auth_sequence: [], 139 | code_sequence: 1, 140 | recv_sequence: 1, 141 | global_sequence: 2, 142 | receiver: receiverActionParts[0] 143 | }, 144 | trx_id: "123", 145 | inline_traces: childActionTraces 146 | } 147 | } 148 | -------------------------------------------------------------------------------- /src/helpers/message.ts: -------------------------------------------------------------------------------- 1 | import { OnStreamMessage } from "../types/stream-client" 2 | import { InboundMessage } from "../message/inbound" 3 | import { Stream } from "../types/stream" 4 | 5 | /** 6 | * This small utility is useful to implement a dynamic dispatcher 7 | * based on the type of message. That is usefull to avoid having to 8 | * code yourself a `switch (message.type) { ... }` switch case. 9 | * 10 | * Instead, define a series of specific of handlers on a class or 11 | * an object, then when calling the stream method of your choices, 12 | * pass the dynamic dispatcher created by calling this method as 13 | * the message handler. 14 | * 15 | * The created dispatcher upon each message, will check the received 16 | * type to handler map and will dispatch it there. 
17 | * 18 | * ``` 19 | * const dispatch = dynamicMessageDispatcher({ 20 | * listening: this.onListening, 21 | * progress: this.onProgress, 22 | * action_trace: this.onAction, 23 | * }) 24 | * 25 | * client.streamActionTraces({ ... }, dispatcher) 26 | * ``` 27 | */ 28 | export function dynamicMessageDispatcher(typeToDispatcher: { 29 | [messageType: string]: OnStreamMessage 30 | }): OnStreamMessage { 31 | return (message: InboundMessage, stream: Stream) => { 32 | const dispatcher = typeToDispatcher[message.type] 33 | if (dispatcher) { 34 | dispatcher(message, stream) 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/helpers/promises.ts: -------------------------------------------------------------------------------- 1 | export class Deferred { 2 | private activePromise: Promise 3 | private resolver!: (value?: T | PromiseLike) => void 4 | private rejecter!: (reason?: any) => void 5 | 6 | constructor() { 7 | this.activePromise = new Promise((resolve, reject) => { 8 | this.resolver = resolve 9 | this.rejecter = reject 10 | }) 11 | } 12 | 13 | public promise(): Promise { 14 | return this.activePromise 15 | } 16 | 17 | public resolve(value?: T | PromiseLike): void { 18 | this.resolver(value) 19 | } 20 | 21 | public reject(reason?: any): void { 22 | this.rejecter(reason) 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/helpers/time.ts: -------------------------------------------------------------------------------- 1 | export function waitFor(ms: number): Promise { 2 | return new Promise((resolve) => setTimeout(resolve, ms)) 3 | } 4 | -------------------------------------------------------------------------------- /src/helpers/transaction.ts: -------------------------------------------------------------------------------- 1 | import { TransactionLifecycle } from "../types/transaction" 2 | import { ActionTrace } from "../types/action-trace" 3 | import { 
SearchTransactionRow } from "../types/search"

type WalkActionsState = {
  index: number
  actions: ActionTrace[]
}

/**
 * This method flattens the nested action traces of a [[TransactionLifecycle]] into
 * a single flat list. The flat list indexes match the dfuse API action index,
 * which is used within dfuse to identify a particular action trace in
 * various situations (db ops, RAM ops, etc.).
 *
 * The action index of a given action is obtained simply by doing a
 * depth-first traversal of the action traces structure, incrementing
 * a counter at each step and binding the counter to the currently traversed
 * action, becoming the action's index.
 *
 * As an example of this method, assume the following transaction:
 *
 * ```
 * Transaction 123 {
 *   ExecutionTrace {
 *     ActionTraces: [
 *       ActionTrace(eosio.token/eosio.token:transfer) {
 *         InlineTraces: [
 *           ActionTrace(from/eosio.token:transfer) {
 *             InlineTraces: [
 *               ActionTrace(contractX/contractX:log)
 *             ]
 *           }
 *           ActionTrace(to/eosio.token:transfer) {
 *             InlineTraces: [
 *               ActionTrace(contractY/contractY:update)
 *             ]
 *           }
 *         ]
 *       }
 *     ]
 *   }
 * }
 * ```
 *
 * This results in the following flattened actions list being returned:
 *
 * ```
 * [
 *   ActionTrace(eosio.token/eosio.token:transfer),
 *   ActionTrace(from/eosio.token:transfer),
 *   ActionTrace(contractX/contractX:log),
 *   ActionTrace(to/eosio.token:transfer),
 *   ActionTrace(contractY/contractY:update),
 * ]
 * ```
 *
 * @param transaction The transaction lifecycle object to flatten action traces from.
 * @returns A flat list of action traces extracted from the transaction lifecycle for which each
 * index of the list represents the action's index concept of dfuse API.
 */
export function flattenActionTraces(transaction: TransactionLifecycle): ActionTrace[] {
  // A lifecycle without an execution trace (e.g. not yet executed) has no actions.
  if (!transaction.execution_trace) {
    return []
  }

  // Explicitly typed so `actions` is an `ActionTrace[]` and not inferred as `never[]`.
  const state: WalkActionsState = {
    index: -1,
    actions: [],
  }

  transaction.execution_trace.action_traces.forEach((actionTrace) => {
    walkFlattenedActionTraces(actionTrace, state)
  })

  return state.actions
}

// Depth-first walk accumulating every visited action trace (in index order) into `state`.
function walkFlattenedActionTraces(
  rootActionTrace: ActionTrace,
  state: WalkActionsState
): void {
  state.index++
  state.actions.push(rootActionTrace)

  const childActionTraces = rootActionTrace.inline_traces || []

  childActionTraces.forEach((childActionTrace) => {
    walkFlattenedActionTraces(childActionTrace, state)
  })
}

/**
 * This method extracts the matching actions out of a [[SearchTransactionRow]]
 * object.
 *
 * Using the search endpoint, you receive a [[SearchTransactionRow]] that
 * is simply a composite object containing an actual [[TransactionLifecycle]]
 * element as well as a list of action indexes which are the actual
 * ones that matched your query.
 *
 * A single transaction can contain a big amount of actions but usually,
 * only a subset of the actions in a transaction matches your search query.
 *
 * By using this method, you can easily extract the matching actions
 * out of the [[SearchTransactionRow]] object.
 *
 * @param searchRow The search result row to extract matching action traces from.
 * @returns A flat list of action traces extracted from the search result row that matched
 * the query term(s).
 */
export function matchingActionTraces(searchRow: SearchTransactionRow): ActionTrace[] {
  if (!searchRow.lifecycle.execution_trace) {
    return []
  }

  // A `Set` makes the per-action membership test O(1) instead of re-scanning
  // the `action_idx` array once for every traversed action.
  const matchingActionIndexes = new Set(searchRow.action_idx)
  const state: WalkActionsState = {
    index: -1,
    actions: [],
  }

  searchRow.lifecycle.execution_trace.action_traces.forEach((actionTrace) => {
    walkMatchingActionTraces(actionTrace, matchingActionIndexes, state)
  })

  return state.actions
}

// Depth-first walk keeping only the action traces whose depth-first index
// is part of `matchingActionIndexes`.
function walkMatchingActionTraces(
  rootActionTrace: ActionTrace,
  matchingActionIndexes: Set<number>,
  state: WalkActionsState
): void {
  state.index++
  if (matchingActionIndexes.has(state.index)) {
    state.actions.push(rootActionTrace)
  }

  const childActionTraces = rootActionTrace.inline_traces || []

  childActionTraces.forEach((childActionTrace) => {
    walkMatchingActionTraces(childActionTrace, matchingActionIndexes, state)
  })
}
-------------------------------------------------------------------------------- /src/index.ts: --------------------------------------------------------------------------------
export * from "./client/api-token-manager"
export * from "./client/api-token-store"
export * from "./client/client"
export * from "./client/http-client"
export * from "./client/refresh-scheduler"
export * from "./client/socket"
export * from "./client/stream-client"
export * from "./client/graphql-stream-client"

export * from "./helpers/message"
export * from "./helpers/time"
export * from "./helpers/transaction"

export * from "./message/inbound"
export * from "./message/outbound"

export * from "./types/action-trace"
export * from "./types/auth-token"
export * from "./types/block-id"
export * from "./types/client"
export * from "./types/common"
export * from
"./types/error" 23 | export * from "./types/head-info" 24 | export * from "./types/http-client" 25 | export * from "./types/graphql" 26 | export * from "./types/graphql-stream-client" 27 | export * from "./types/listen" 28 | export * from "./types/progress" 29 | export * from "./types/search" 30 | export * from "./types/socket" 31 | export * from "./types/state" 32 | export * from "./types/stream" 33 | export * from "./types/stream-client" 34 | export * from "./types/table-delta" 35 | export * from "./types/table-snapshot" 36 | export * from "./types/transaction" 37 | -------------------------------------------------------------------------------- /src/message/inbound.ts: -------------------------------------------------------------------------------- 1 | import { ProgressData } from "../types/progress" 2 | import { ActionTraceData } from "../types/action-trace" 3 | import { ErrorData } from "../types/error" 4 | import { ListeningData } from "../types/listen" 5 | import { HeadInfoData } from "../types/head-info" 6 | import { TableDeltaData } from "../types/table-delta" 7 | import { TableSnapshotData } from "../types/table-snapshot" 8 | import { TransactionLifecycleData } from "../types/transaction" 9 | 10 | export type InboundMessage = { 11 | type: InboundMessageType 12 | req_id?: string 13 | data: T 14 | } 15 | 16 | // **Important** The key must be the same as the API type but in upper snake case for "in" operation to work 17 | export enum InboundMessageType { 18 | ACTION_TRACE = "action_trace", 19 | ERROR = "error", 20 | LISTENING = "listening", 21 | HEAD_INFO = "head_info", 22 | PING = "ping", 23 | PROGRESS = "progress", 24 | UNLISTENED = "unlistened", 25 | TABLE_DELTA = "table_delta", 26 | TABLE_SNAPSHOT = "table_snapshot", 27 | TRANSACTION_LIFECYCLE = "transaction_lifecycle", 28 | } 29 | 30 | export type ActionTraceInboundMessage> = InboundMessage> 31 | export type ErrorInboundMessage = InboundMessage 32 | export type ListeningInboundMessage = InboundMessage 
33 | export type HeadInfoInboundMessage = InboundMessage 34 | export type PingInboundMessage = InboundMessage 35 | export type ProgressInboundMessage = InboundMessage 36 | export type UnlistenedInboundMessage = InboundMessage 37 | export type TableDeltaInboundMessage> = InboundMessage> 38 | export type TableSnapshotInboundMessage> = InboundMessage< 39 | TableSnapshotData 40 | > 41 | export type TransactionLifecycleInboundMessage = InboundMessage 42 | -------------------------------------------------------------------------------- /src/message/outbound.ts: -------------------------------------------------------------------------------- 1 | import { DfuseClientError } from "../types/error" 2 | 3 | export type OutboundMessage = { 4 | type: OutboundMessageType 5 | req_id: string 6 | listen?: boolean 7 | fetch?: boolean 8 | start_block?: number 9 | with_progress?: number 10 | data: T 11 | } 12 | 13 | // **Important** The key must be the same as the API type but in upper snake case for "in" operation to work 14 | export enum OutboundMessageType { 15 | GET_ACTION_TRACES = "get_action_traces", 16 | GET_TABLE_ROWS = "get_table_rows", 17 | GET_TRANSACTION_LIFECYCLE = "get_transaction_lifecycle", 18 | GET_HEAD_INFO = "get_head_info", 19 | UNLISTEN = "unlisten", 20 | } 21 | 22 | export type StreamOptions = { 23 | req_id?: string 24 | fetch?: boolean 25 | listen?: boolean 26 | start_block?: number 27 | with_progress?: number 28 | irreversible_only?: boolean 29 | } 30 | 31 | export type GetActionTracesMessageData = { 32 | accounts: string 33 | receivers?: string 34 | action_names?: string 35 | with_dbops?: boolean 36 | with_dtrxops?: boolean 37 | with_ramops?: boolean 38 | with_tableops?: boolean 39 | with_inline_traces?: boolean 40 | } 41 | 42 | export function getActionTracesMessage( 43 | data: GetActionTracesMessageData, 44 | streamOptions: StreamOptions = {} 45 | ): OutboundMessage { 46 | return createOutboundMessage(OutboundMessageType.GET_ACTION_TRACES, data, { 47 | 
listen: true, 48 | ...streamOptions, 49 | }) 50 | } 51 | 52 | export type GetTableRowsMessageData = { 53 | code: string 54 | scope: string 55 | table: string 56 | json?: boolean 57 | lower_bound?: string 58 | upper_bound?: string 59 | } 60 | 61 | /** 62 | * @deprecated The message factories are deprecated, there is no need to create your 63 | * own message anymore. This will be removed in a future release. The standard 64 | * client does not use this anymore. 65 | */ 66 | export function getTableRowsMessage( 67 | data: GetTableRowsMessageData, 68 | streamOptions: StreamOptions = {} 69 | ): OutboundMessage { 70 | return createOutboundMessage(OutboundMessageType.GET_TABLE_ROWS, data, { 71 | listen: true, 72 | ...streamOptions, 73 | }) 74 | } 75 | 76 | export type GetTransactionLifecycleMessageData = { 77 | id: string 78 | } 79 | 80 | /** 81 | * @deprecated The message factories are deprecated, there is no need to create your 82 | * own message anymore. This will be removed in a future release. The standard 83 | * client does not use this anymore. 84 | */ 85 | export function getTransactionLifecycleMessage( 86 | data: GetTransactionLifecycleMessageData, 87 | streamOptions: StreamOptions = {} 88 | ): OutboundMessage { 89 | return createOutboundMessage(OutboundMessageType.GET_TRANSACTION_LIFECYCLE, data, { 90 | listen: true, 91 | fetch: true, 92 | ...streamOptions, 93 | }) 94 | } 95 | 96 | /** 97 | * @deprecated The message factories are deprecated, there is no need to create your 98 | * own message anymore. This will be removed in a future release. The standard 99 | * client does not use this anymore.
100 | */ 101 | export function getHeadInfoMessage(streamOptions: StreamOptions = {}): OutboundMessage { 102 | return createOutboundMessage( 103 | OutboundMessageType.GET_HEAD_INFO, 104 | {}, 105 | { listen: true, ...streamOptions } 106 | ) 107 | } 108 | 109 | export type UnlistenMessageData = { 110 | req_id: string 111 | } 112 | 113 | /** 114 | * @deprecated The message factories are deprecated, there is no need to create your 115 | * own message anymore. This will be removed in a future release. The standard 116 | * client does not use this anymore. 117 | */ 118 | export function unlistenMessage(data: UnlistenMessageData): OutboundMessage { 119 | return { 120 | req_id: data.req_id, 121 | type: OutboundMessageType.UNLISTEN, 122 | data, 123 | } 124 | } 125 | 126 | export type OutboundMessageFactory = ( 127 | createOutboundMessage: ( 128 | type: OutboundMessageType, 129 | data: T, 130 | userOptions: StreamOptions 131 | ) => OutboundMessage, 132 | withDefaultOptions: (userOptions: StreamOptions) => StreamOptions 133 | ) => OutboundMessage 134 | 135 | /** 136 | * Exported for consumption from internal packages. This does **not** 137 | * have any **Backward compatibility** policy nor documentation attached 138 | * to it. 139 | * 140 | * It will be moved and made private again when message factories 141 | * above have been removed. 
142 | */ 143 | export function createOutboundMessage( 144 | type: OutboundMessageType, 145 | data: T, 146 | options: StreamOptions 147 | ): OutboundMessage { 148 | const req_id = options.req_id 149 | if (req_id === undefined) { 150 | throw new DfuseClientError("All outbound message should have a 'req_id' value") 151 | } 152 | 153 | return { 154 | type, 155 | req_id, 156 | data, 157 | ...options, 158 | } 159 | } 160 | -------------------------------------------------------------------------------- /src/types/action-trace.ts: -------------------------------------------------------------------------------- 1 | import { RamOp, DTrxOp, TableOp, Int64, Uint64 } from "./common" 2 | 3 | export type ActionTraceData> = { 4 | block_num: number 5 | block_id: string 6 | block_time: string 7 | trx_id: string 8 | idx: number 9 | depth: number 10 | trace: ActionTrace 11 | dbops?: ActionTraceDbOp[] 12 | ramops?: RamOp[] 13 | dtrxops?: DTrxOp[] 14 | tableops?: TableOp[] 15 | } 16 | 17 | export type ActionTrace = { 18 | receipt: ActionReceipt 19 | act: Action 20 | context_free: boolean 21 | elapsed: number 22 | console: string 23 | trx_id: string 24 | block_num: number 25 | block_time: string 26 | producer_block_id?: string 27 | account_ram_deltas?: AccountRamDelta[] 28 | except?: any 29 | inline_traces?: ActionTrace[] 30 | } 31 | 32 | export type Action = { 33 | account: string 34 | name: string 35 | authorization?: Authorization[] 36 | data: T 37 | hex_data?: string 38 | } 39 | 40 | export type Authorization = { 41 | actor: string 42 | permission: string 43 | } 44 | 45 | export type ActionReceipt = { 46 | receiver: string 47 | act_digest: string 48 | global_sequence: Uint64 49 | recv_sequence: Uint64 50 | auth_sequence: [string, number][] 51 | code_sequence: number 52 | abi_sequence: number 53 | } 54 | 55 | export type AccountRamDelta = { 56 | account: string 57 | delta: Int64 58 | } 59 | 60 | /** 61 | * The `dbops` array out of an [[ActionTraceData]] message is completely 62 | * 
different than other [[DbOp]] found in dfuse API (like on [[TableDeltaData]] 63 | * or [[TransactionLifecycle]]). 64 | * 65 | * One for `opayer` or `npayer` will always be present depending on the 66 | * operation, same thing for the `old` and `new` fields: 67 | * 68 | * - When `op == "INS"`, `npayer` and `new` are present 69 | * - When `op == "UPD"`, `opayer`, `old`, `npayer` and `new` are present 70 | * - When `op == "REM"`, `opayer` and `old` are present 71 | * 72 | * The `old` and `new` fields are the hexadecimal string encoded 73 | * representing the row in binary format. 74 | * 75 | * Check the [Decode Hex Data using eosjs example](https://github.com/dfuse-io/example-eosjs-decode-hex) 76 | * for a way to transform the hexadecimal string into a JSON 77 | * structure representing the row. 78 | * 79 | * @see https://github.com/dfuse-io/example-eosjs-decode-hex 80 | */ 81 | export type ActionTraceDbOp = { 82 | op: "INS" | "UPD" | "REM" 83 | action_idx: number 84 | 85 | /** 86 | * The account which was the old payer of the row. Present when `op == "UPD" | "REM"`. 87 | */ 88 | opayer?: string 89 | 90 | /** 91 | * The account which is the new payer of the row. Present when `op == "UPD" | "REM"`. 92 | */ 93 | npayer?: string 94 | 95 | /** 96 | * The full path of the database row, it's a string with four elements 97 | * of the row path separated with the `/` character being respectively 98 | * from left to right: account, scope, table, row primary key (name encoded). 99 | * 100 | * ``` 101 | * "eosio.token/trustdicewin/accounts/........ehbo5" 102 | * 103 | * { 104 | * account: "eosio.token", 105 | * scope: "trustdicewin", 106 | * table: "accounts", 107 | * key: "........ehbo5", // Name encoded value representing `EOS` SymbolCode 108 | * } 109 | * ``` 110 | */ 111 | path: string 112 | 113 | /** 114 | * The hexadecimal string encoded representing the old version of the 115 | * row in binary format. Present when `op == "UPD" | "REM"`. 
116 | */ 117 | old?: string 118 | 119 | /** 120 | * The hexadecimal string encoded representing the new version of the 121 | * row in binary format. Present when `op == "INS" | "UPD"`. 122 | */ 123 | new?: string 124 | } 125 | -------------------------------------------------------------------------------- /src/types/auth-token.ts: -------------------------------------------------------------------------------- 1 | export type AuthTokenResponse = ApiTokenInfo 2 | 3 | export type ApiTokenInfo = { 4 | token: string 5 | expires_at: number 6 | } 7 | -------------------------------------------------------------------------------- /src/types/block-id.ts: -------------------------------------------------------------------------------- 1 | export type ComparisonOperator = "gt" | "gte" | "lt" | "lte" | "eq" 2 | 3 | export type BlockIdByTimeResponse = { 4 | block: { 5 | id: string 6 | num: number 7 | time: string 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /src/types/common.ts: -------------------------------------------------------------------------------- 1 | import { ErrorData } from "./error" 2 | import { Transaction } from "./transaction" 3 | 4 | /** 5 | * A `int64_t` natively in `nodeos` but can become a string when > 32 bits number 6 | * due to how `nodeos` serializes numbers to JSON. 7 | * 8 | * This is likely because JavaScript largest number possible is 53 bits large which 9 | * makes it impossible to hold a full `int64_t` type. To overcome that, `nodeos` 10 | * will output a string when number is too large to preserve precision. 11 | */ 12 | export type Int64 = number | string 13 | 14 | /** 15 | * A `uint64_t` natively in `nodeos` but can become a string when > 32 bits number 16 | * due to how `nodeos` serializes numbers to JSON. 17 | * 18 | * This is likely because JavaScript largest number possible is 53 bits large which 19 | * makes it impossible to hold a full `uint64_t` type.
To overcome that, `nodeos` 20 | * will output a string when number is too large to preserve precision. 21 | */ 22 | export type Uint64 = number | string 23 | 24 | /** 25 | * Represents a node in the creation tree. 26 | * first number represents the creation node index 27 | * second number represents the parent node index (-1 for root) 28 | * third number represents the action index 29 | */ 30 | export type CreationNode = [number, number, number] 31 | 32 | export type DTrxOp = { 33 | op: "CREATE" | "PUSH_CREATE" | "MODIFY_CREATE" | "MODIFY_CANCEL" | "CANCEL" 34 | action_idx: number 35 | sender: string 36 | sender_id: string 37 | payer: string 38 | published_at: string 39 | delay_until: string 40 | expiration_at: string 41 | trx_id: string 42 | trx?: Transaction 43 | } 44 | 45 | export type ExtDTrxOp = { 46 | src_trx_id: string 47 | block_num: number 48 | block_id: string 49 | block_time: string 50 | } & DTrxOp 51 | 52 | /** 53 | * @deprecated Renamed to `DbOp` 54 | */ 55 | export type DBOp = DbOp 56 | 57 | export type DbOp = { 58 | /** 59 | * This is a quirk of dfuse API, it's recommended to do a lower case comparison on the `op` field. 60 | */ 61 | op: "ins" | "INS" | "upd" | "UPD" | "rem" | "REM" 62 | action_idx: number 63 | account: string 64 | table: string 65 | scope: string 66 | key: string 67 | old?: DbRow 68 | new?: DbRow 69 | } 70 | 71 | /** 72 | * One of `error`, `hex` or `json` field will be present depending 73 | * on the actual request made. 74 | */ 75 | export type DbRow = { 76 | payer: string 77 | blockNum?: string 78 | error?: ErrorData 79 | hex?: string 80 | json?: T 81 | } 82 | 83 | export type RamOp = { 84 | /** 85 | * @deprecated This field is scheduled for removal, newer RamOp will have this field set to 86 | * "deprecated". 
87 | * 88 | * Use the `family` and `action` fields instead 89 | */ 90 | op: string 91 | family: string 92 | action: string 93 | action_idx: number 94 | payer: string 95 | delta: number 96 | usage: number 97 | } 98 | 99 | /** 100 | * @deprecated Renamed to `RamOp` 101 | */ 102 | export type RAMOp = RamOp 103 | 104 | export type TableOp = { 105 | op: "INS" | "REM" 106 | action_idx: number 107 | payer: string 108 | path: string 109 | } 110 | -------------------------------------------------------------------------------- /src/types/error.ts: -------------------------------------------------------------------------------- 1 | export type ErrorData = { 2 | code: string 3 | trace_id?: string 4 | message: string 5 | details?: { [key: string]: any } 6 | } 7 | 8 | export class DfuseError extends Error { 9 | public description: string 10 | public cause?: Error 11 | 12 | constructor(message: string, cause?: Error) { 13 | super(message) 14 | 15 | this.description = message 16 | this.cause = cause 17 | } 18 | } 19 | 20 | export class DfuseApiError extends DfuseError implements ErrorData { 21 | public code: string 22 | public trace_id?: string 23 | public message: string 24 | public details?: { [key: string]: any } 25 | 26 | constructor(data: ErrorData, cause?: Error) { 27 | super(data.message, cause) 28 | 29 | this.code = data.code 30 | this.trace_id = data.trace_id 31 | this.message = data.message 32 | this.details = data.details 33 | } 34 | } 35 | 36 | // DfuseGenericApiError represents API requests error for calls that are proxyed to external entities 37 | // like `nodeos` or does not respect standard dfuse API error format. 
38 | export class DfuseGenericApiError extends DfuseError { 39 | public code: number 40 | public data: unknown 41 | public body: string 42 | public headers: any 43 | 44 | // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types 45 | constructor(code: number, body: string, data: unknown, headers?: any, cause?: Error) { 46 | let message = "Failed to execute API call" 47 | if (data && typeof data === "object" && (data as any).message) { 48 | message = (data as any).message 49 | } 50 | 51 | super(message, cause) 52 | 53 | this.code = code 54 | this.data = data 55 | this.body = body 56 | this.headers = headers 57 | } 58 | } 59 | 60 | export class DfuseClientError extends DfuseError { 61 | constructor(message: string, cause?: Error) { 62 | super(message, cause) 63 | } 64 | } 65 | 66 | export class DfuseSocketError extends DfuseError { 67 | constructor(message: string, cause?: Error) { 68 | super(message, cause) 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/types/graphql-stream-client.ts: -------------------------------------------------------------------------------- 1 | import { Stream } from "./stream" 2 | import { GraphqlDocument, GraphqlVariables } from "./graphql" 3 | 4 | /** 5 | * The stream client is an interface used to interact with dfuse Stream API. 6 | * 7 | * The stream client interface shall be responsible of managing the registration 8 | * unregistration of the dfuse Stream as well as managing the full lifecycle of 9 | * a dfuse Stream currently active. 10 | * 11 | * @group Interfaces 12 | */ 13 | export interface GraphqlStreamClient { 14 | /** 15 | * Release any resources hold by this [[GraphqlStreamClient]] instance. Must 16 | * be tolerant to being called multiple times. 17 | * 18 | * Once called, the instance is assumed unsuable and should never 19 | * be invoked anymore. 
20 | */ 21 | release(): void 22 | 23 | /** 24 | * Update the API token that should be used to communicate with the dfuse Stream 25 | * API. This token is assumed to be fresh and valid. 26 | * 27 | * @param apiToken The new API token to use from now on. 28 | */ 29 | setApiToken(apiToken: string): void 30 | 31 | /** 32 | * Register a dfuse Stream with the remote endpoint and receives message back from 33 | * the stream via the `onMessage` parameter. 34 | * 35 | * By calling this method, the socket will connect to remote endpoint if it's not 36 | * already the case. As soon as the method is called, the specific dfuse Stream 37 | * listening message is send to remote endpoint. 38 | * 39 | * On success, you will receive a [[Stream]] interface that you can use to 40 | * interact with the stream (mark progeess, restart, close). 41 | * 42 | * On error, the promise will reject with the actual error thrown. 43 | * 44 | * @param message The specific [[OutboundMessage]] used to register the stream with the dfuse remote endpoint. 45 | * @param onMessage The callback that is invoked for each [[InboundMessage]] received bound to this stream. 46 | */ 47 | registerStream( 48 | id: string, 49 | document: GraphqlDocument, 50 | variables: GraphqlVariables, 51 | onMessage: OnGraphqlStreamMessage 52 | ): Promise 53 | 54 | /** 55 | * Unregister the stream represented by this stream's id. 56 | * 57 | * This will send the `stop` message to the remote endpoint effectively 58 | * stopping the dfuse GraphQL Subscription as well as the flow of message. 59 | * 60 | * All stream should be unregistered when not required anymore to clean up 61 | * resources and ensure no more extra bandwidth are required. 62 | * 63 | * @param id The stream's id that should be unregister from the stream client. 
64 | */ 65 | unregisterStream(id: string): Promise 66 | } 67 | 68 | export type GraphqlStreamMessage = 69 | | DataGraphqlStreamMessage 70 | | ErrorGraphqlStreamMessage 71 | | CompleteGraphqlStreamMessage 72 | 73 | /** 74 | * Represents a valid data result for which the payload of type `T` will 75 | * be available for consumption in the `data` field. 76 | */ 77 | export type DataGraphqlStreamMessage = { type: "data"; data: T } 78 | 79 | /** 80 | * Represents an error message received from the stream. Both resolvers 81 | * error as well as stream error will fall into this type. When `terminal` 82 | * is set to `true`, this message is a stream error meaning the stream 83 | * should terminate and cannot continue. 84 | * 85 | * **Note** Only when it's a terminal error and auto restart on error is set to 86 | * true on the GraphQL stream client that the stream will auto-restart. 87 | */ 88 | export type ErrorGraphqlStreamMessage = { 89 | type: "error" 90 | errors: Error[] 91 | terminal: boolean 92 | } 93 | 94 | /** 95 | * Represents the completion of the streaming in a correct manner. This message 96 | * means that messages will never be received anymore for this stream, even if 97 | * it's restarted. 98 | */ 99 | export type CompleteGraphqlStreamMessage = { type: "complete" } 100 | 101 | /** 102 | * Handler invoked when a message is routed to this exact stream via the matching 103 | * of the message id and the stream id. If this is invoked, you are guaranteed to 104 | * receive a message for your stream. 105 | * 106 | * @param message The actual inbound GraphQL message received destined to you. 107 | * @param stream The actual stream object on which the handler is defined, can be used to 108 | * mark the stream at the right location or close it eagerly.
109 | */ 110 | export type OnGraphqlStreamMessage = ( 111 | message: GraphqlStreamMessage, 112 | stream: Stream 113 | ) => void 114 | export type OnGraphqlStreamRestart = () => void 115 | -------------------------------------------------------------------------------- /src/types/graphql.ts: -------------------------------------------------------------------------------- 1 | export type GraphqlOperationType = "query" | "mutation" | "subscription" 2 | 3 | // See https://github.com/graphql/graphql-js/blob/master/src/language/ast.js#L1 for actual object like Document 4 | // 5 | // **Note** We are a bit more broad to avoid important all types yet, since it's not really required 6 | export type GraphqlDocument = 7 | | string 8 | | { 9 | kind: "Document" 10 | loc?: Record 11 | definitions: ReadonlyArray 12 | } 13 | 14 | export type GraphqlInboundMessageType = 15 | | "connection_ack" 16 | | "connection_error" 17 | | "ka" 18 | | "data" 19 | | "error" 20 | | "complete" 21 | 22 | export type GraphqlInboundMessage = 23 | | { type: "connection_ack" } 24 | | { type: "connection_error"; payload: Error } 25 | // The 'ka' means 'connection_keep_alive', it's set as `ka` to save bandwidth 26 | | { type: "ka" } 27 | | { 28 | id: string 29 | type: "data" 30 | payload: { data: any; errors?: Error[] } 31 | } 32 | | { 33 | id: string 34 | type: "error" 35 | payload: Error 36 | } 37 | | { 38 | id: string 39 | type: "complete" 40 | } 41 | 42 | export type GraphqlOutboundMessage = 43 | | { type: "connection_init"; payload?: Record } 44 | | { type: "stop"; id: string } 45 | | GraphqlStartOutboundMessage 46 | | { type: "connection_terminate" } 47 | 48 | export type GraphqlStartOutboundMessage = { 49 | id: string 50 | type: "start" 51 | payload: { 52 | query: GraphqlDocument 53 | variables?: GraphqlVariables 54 | operationName?: string 55 | } 56 | } 57 | 58 | export type GraphqlVariables = 59 | | Record 60 | | (() => Promise>) 61 | | (() => Record) 62 | | undefined 63 | 64 | export type 
GraphqlResponse = { 65 | data: T 66 | errors?: GraphqlResponseError[] 67 | } 68 | 69 | export type GraphqlResponseError = { 70 | message: string 71 | locations?: { line: number; column: number }[] 72 | path?: (string | number)[] 73 | extensions?: Record 74 | } 75 | -------------------------------------------------------------------------------- /src/types/head-info.ts: -------------------------------------------------------------------------------- 1 | export type HeadInfoData = { 2 | last_irreversible_block_num: number 3 | last_irreversible_block_id: string 4 | head_block_num: number 5 | head_block_id: string 6 | head_block_time: string 7 | head_block_producer: string 8 | } 9 | -------------------------------------------------------------------------------- /src/types/http-client.ts: -------------------------------------------------------------------------------- 1 | export const V1_AUTH_ISSUE = "/v1/auth/issue" 2 | 3 | export const V0_FETCH_BLOCK_ID_BY_TIME = "/v0/block_id/by_time" 4 | export const V0_FETCH_TRANSACTION = "/v0/transactions/:id" 5 | 6 | export const V0_STATE_ABI = "/v0/state/abi" 7 | export const V0_STATE_ABI_BIN_TO_JSON = "/v0/state/abi/bin_to_json" 8 | 9 | export const V0_STATE_KEY_ACCOUNTS = "/v0/state/key_accounts" 10 | export const V0_STATE_PERMISSION_LINKS = "/v0/state/permission_links" 11 | 12 | export const V0_STATE_TABLE = "/v0/state/table" 13 | export const V0_STATE_TABLE_ROW = "/v0/state/table/row" 14 | export const V0_STATE_TABLES_SCOPES = "/v0/state/tables/scopes" 15 | export const V0_STATE_TABLES_ACCOUNTS = "/v0/state/tables/accounts" 16 | export const V0_STATE_TABLE_SCOPES = "/v0/state/table_scopes" 17 | 18 | export const V0_SEARCH_TRANSACTIONS = "/v0/search/transactions" 19 | 20 | /** 21 | * An interface used to interact with dfuse REST API. 
22 | * 23 | * Created mainly to abstract implementation details of dealing 24 | * with HTTP request/response, enable consumer of the library to 25 | * provide their own implementation of an HTTP client (think about 26 | * providing `Fetch` interface instead). 27 | * 28 | * @group Interfaces 29 | */ 30 | export interface HttpClient { 31 | /** 32 | * Release any resources hold by this [[HttpClient]] instance. Must 33 | * be tolerant to being called multiple times. 34 | * 35 | * Once called, the instance is assumed unsuable and should never 36 | * be invoked anymore. 37 | */ 38 | release(): void 39 | 40 | /** 41 | * Make an anonymous request (unauthenticated) to the dfuse Authentication endpoint. 42 | * Usually used only for issuing an API token from an API key. 43 | * 44 | * @param path (required) The HTTP path on the endpoint 45 | * @param method (required) The HTTP method to perform the request agaisnt 46 | * @param params (defaults `{}`) The HTTP query parameters to append to the url, they will 47 | * be url-encoded and included in the final remote url. Has no effect when empty or undefined. 48 | * @param body (defaults `undefined`) The HTTP body to include in the request, assumed to be a 49 | * JSON object that will be serialized to a string. Not included in the HTTP request when `undefined`. 50 | * @param headers (defaults `{}`) The extra HTTP headers to include in the request. Those will be merged 51 | * with default ones (`{ Authorization: ... }`) and they override them if same key are specified. 52 | * 53 | * @returns A `Promise` that will resolve to the response body if it passes. Will reject with a 54 | * [[DfuseApiError]] if it fits the dfuse Error Format or a generic `DfuseError` is it's something 55 | * not fitting our expected format. 
56 | */ 57 | authRequest( 58 | path: string, 59 | method: string, 60 | params?: HttpQueryParameters, 61 | body?: any, 62 | headers?: HttpHeaders 63 | ): Promise 64 | 65 | /** 66 | * Make an authenticated request (unauthenticated) to the dfuse REST endpoint. 67 | * 68 | * Upon a succesfull call, the actual response body (expected to be a valid JSON) will 69 | * be returned to the caller. 70 | * 71 | * Upon an error, a [[DfuseError]] is returned, will be a [[DfuseApiError]] if the response's 72 | * body exist, it's a valid JSON string and it fits the dfuse error format. 73 | * 74 | * @param apiToken The API token used to to interact with the API endpoint. The token will be turned 75 | * into a proper HTTP header `Authorization: Bearer ...`. 76 | * @param path (required) The HTTP path on the endpoint 77 | * @param method (required) The HTTP method to perform the request agaisnt 78 | * @param params (defaults `{}`) The HTTP query parameters to append to the url, they will 79 | * be url-encoded and included in the final remote url. Has no effect when empty or undefined. 80 | * @param body (defaults `undefined`) The HTTP body to include in the request, assumed to be a 81 | * JSON object that will be serialized to a string. Not included in the HTTP request when `undefined`. 82 | * @param headers (defaults `{}`) The extra HTTP headers to include in the request. Those will be merged 83 | * with default ones (`{ Authorization: ... }`) and they override them if same key are specified. 84 | * 85 | * @returns A `Promise` that will resolve to the response body if it passes. Will reject with a 86 | * [[DfuseApiError]] if it fits the dfuse Error Format or a generic `DfuseError` is it's something 87 | * not fitting our expected format. 
88 | */ 89 | apiRequest( 90 | apiToken: string | undefined, 91 | path: string, 92 | method: string, 93 | params?: HttpQueryParameters, 94 | body?: any, 95 | headers?: HttpHeaders 96 | ): Promise 97 | } 98 | 99 | export type HttpHeaders = Record 100 | export type HttpQueryParameters = Record 101 | 102 | /** 103 | * This interface is the bare minimum as required by our internal usage. 104 | * 105 | * This is copied to ensure minimal compatiblity with `fetch` is required 106 | * and thus, it's required to provide a full clone of `fetch` handling. 107 | * To avoid that problem of over-complexifying , we define a small interface of what we really use 108 | * inside the library. It's the only part's that are needed. 109 | * 110 | * Passing the `window.fetch` (in the Browser) or `global.fetch` (polyfilled in Node.js) 111 | * should always be accepted as a valid usage. 112 | * 113 | * @ignore 114 | */ 115 | export type Fetch = (url: string, options?: RequestInit) => Promise 116 | 117 | export type RequestInit = { 118 | body?: any 119 | headers?: any 120 | method?: string 121 | } 122 | 123 | /** 124 | * @ignore 125 | */ 126 | export interface HttpBody { 127 | json(): Promise 128 | text(): Promise 129 | } 130 | 131 | /** 132 | * @ignore 133 | */ 134 | export type HttpResponse = { 135 | readonly headers: any 136 | readonly ok: boolean 137 | readonly status: number 138 | readonly statusText: string 139 | readonly url: string 140 | } & HttpBody 141 | -------------------------------------------------------------------------------- /src/types/listen.ts: -------------------------------------------------------------------------------- 1 | export type ListeningData = { 2 | next_block: number 3 | } 4 | -------------------------------------------------------------------------------- /src/types/progress.ts: -------------------------------------------------------------------------------- 1 | export type ProgressData = { 2 | block_num: number 3 | block_id: string 4 | } 5 | 
-------------------------------------------------------------------------------- /src/types/search.ts: -------------------------------------------------------------------------------- 1 | import { TransactionLifecycle } from "./transaction" 2 | 3 | export type SearchSortType = "asc" | "desc" 4 | 5 | export type SearchTransactionsResponse = { 6 | cursor: string 7 | transactions?: SearchTransactionRow[] 8 | forked_head_warning: boolean 9 | } 10 | 11 | export type SearchTransactionRow = { 12 | lifecycle: TransactionLifecycle 13 | action_idx: number[] 14 | } 15 | -------------------------------------------------------------------------------- /src/types/socket.ts: -------------------------------------------------------------------------------- 1 | export type SocketConnectOptions = { 2 | onReconnect?: () => void 3 | onTermination?: (initiator: "client" | "server", event: CloseEvent) => void 4 | } 5 | 6 | /** 7 | * An abstraction over a WebSocket object to deal more easily with the 8 | * WebSocket protocol. 9 | * 10 | * This interface will usually abstract connection/disconnection as well as 11 | * dealing with re-connection and disconnection error and handling all events 12 | * of the WebSocket API (baseline is the W3C WebSocket API). 13 | * 14 | * @group Interfaces 15 | */ 16 | export interface Socket { 17 | /** 18 | * A property to determine if the current socket implementation is actually 19 | * connected with the remote endpoint or not. If the [[Socket]] is actually 20 | * connected, consumer can assume messages can be sent through the WebSocket. 21 | */ 22 | isConnected: boolean 23 | 24 | /** 25 | * Perform an actual connection with the remote WebSocket endoint. This 26 | * will usually construct a `WebSocket` instance and initiate the 27 | * connection with the remote endpoint. 28 | * 29 | * The method receives a listener which will receive all messages 30 | * sent through the WebSocket by the remote endpoint. 
31 | * 32 | * @param listener The actual callback that will receive all the messages sent by the 33 | * remote endpoint through the WebSocket. 34 | * @param options The options that can be passed to the connect method for certain functionalities. 35 | * @param options.onReconnect An optional callback that can be passed to be notified **after** the 36 | * socket has successfully re-connected with the remote endpoint. 37 | * @param options.onTermination An optional callback that can be passed to be notified when the socket 38 | * has now terminated, i.e. that it is now disconnected (whether via a client or server termination) 39 | * and that it will not try to auto-reconnect anymore. The callback will receive the initiator of the disconnection, 40 | * either `'client'` or `'server'` and the `CloseEvent` received by the WebSocket as-is. 41 | */ 42 | connect(listener: SocketMessageListener, options?: SocketConnectOptions): Promise 43 | 44 | /** 45 | * Disconnects the actual socket. This closes the underlying socket 46 | * and cleans up all resources. 47 | */ 48 | disconnect(): Promise 49 | 50 | /** 51 | * Send a message through the WebSocket. The message is stringified 52 | * to JSON before being sent to the remote endpoint. 53 | * 54 | * @param message The actual message to send to the remote endpoint. 55 | */ 56 | send(message: T): Promise 57 | 58 | /** 59 | * Assigns a new API token to the stream client instance, meaning the 60 | * previous one is not good anymore. 61 | * 62 | * This usually indicates the previous token is not valid anymore. This 63 | * does not re-trigger a re-connection automatically. 64 | * 65 | * Instead, it should change the internal state of the [[Socket]] instance, 66 | * and once a re-connection is requested (by the client or by the server), 67 | * the new API token should be used to re-construct the WebSocket url to 68 | * contact.
69 | * 70 | * @param apiToken The new API token that should be now set as the active token 71 | */ 72 | setApiToken(apiToken: string): void 73 | } 74 | 75 | export type SocketMessageListener = (message: unknown) => void 76 | 77 | /** 78 | * This is copied here because the actual WebSocket is defined differently 79 | * under a Node.js environment then in a Browser environment. As such, 80 | * importing one or the other causes problem and used in the wrong 81 | * environment. 82 | * 83 | * To avoid problem, we define a small interface of what we really use 84 | * inside the library. It's the only part's that are needed. 85 | * 86 | * @ignore 87 | */ 88 | export type WebSocket = { 89 | onclose?: ((this: WebSocket, ev: any) => any) | null 90 | onerror?: ((this: WebSocket, ev: any) => any) | null 91 | onmessage?: ((this: WebSocket, ev: any) => any) | null 92 | onopen?: ((this: WebSocket, ev: any) => any) | null 93 | 94 | readonly readyState: number 95 | readonly protocol: string 96 | readonly url: string 97 | 98 | close(code?: number, reason?: string): void 99 | send(data: string | ArrayBufferLike | Blob | ArrayBufferView): void 100 | 101 | readonly CLOSED: number 102 | readonly CLOSING: number 103 | readonly CONNECTING: number 104 | readonly OPEN: number 105 | } 106 | 107 | export type WebSocketFactory = (url: string) => Promise 108 | -------------------------------------------------------------------------------- /src/types/state.ts: -------------------------------------------------------------------------------- 1 | import { DbRow } from "./common" 2 | 3 | export type StateKeyType = "name" | "hex" | "hex_be" | "uint64" | "symbol" | "symbol_code" 4 | 5 | export type StateAbiResponse = { 6 | block_num: number 7 | account: string 8 | abi: Abi 9 | } 10 | 11 | /** 12 | * The actual ABI JSON representation as returned by EOSIO platform. Extracted 13 | * from [EOSIO/eosjs](https://github.com/EOSIO/eosjs) library. 
14 | * 15 | * @see https://github.com/EOSIO/eosjs/blob/develop/src/eosjs-rpc-interfaces.ts#L4 16 | */ 17 | export type Abi = { 18 | version: string 19 | types: AbiType[] 20 | structs: AbiStruct[] 21 | actions: AbiAction[] 22 | tables: AbiTable[] 23 | ricardian_clauses: AbiRicardianClause[] 24 | error_messages: AbiErrorMessage[] 25 | abi_extensions: AbiExtension[] 26 | variants?: AbiVariant[] 27 | } 28 | 29 | export type AbiType = { 30 | new_type_name: string 31 | type: string 32 | } 33 | 34 | export type AbiStruct = { 35 | name: string 36 | base: string 37 | fields: AbiStructField[] 38 | } 39 | 40 | export type AbiStructField = { 41 | name: string 42 | type: string 43 | } 44 | 45 | export type AbiAction = { 46 | name: string 47 | type: string 48 | ricardian_contract: string 49 | } 50 | 51 | export type AbiTable = { 52 | name: string 53 | type: string 54 | index_type: string 55 | key_names?: string[] 56 | key_types?: string[] 57 | } 58 | 59 | export type AbiRicardianClause = { 60 | id: string 61 | body: string 62 | } 63 | 64 | export type AbiErrorMessage = { 65 | error_code: string 66 | error_msg: string 67 | } 68 | 69 | export type AbiExtension = { 70 | tag: number 71 | value: string 72 | } 73 | 74 | export type AbiVariant = { 75 | name: string 76 | types: string[] 77 | } 78 | 79 | export type StateAbiToJsonResponse = { 80 | block_num: number 81 | account: string 82 | table: string 83 | rows: T[] 84 | } 85 | 86 | export type StateKeyAccountsResponse = { 87 | block_num: number 88 | account_names: string[] 89 | } 90 | 91 | export type StatePermissionLinksResponse = { 92 | up_to_block_id?: string 93 | up_to_block_num?: number 94 | last_irreversible_block_id: string 95 | last_irreversible_block_num: number 96 | linked_permissions: LinkedPermission[] 97 | } 98 | 99 | export type LinkedPermission = { 100 | contract: string 101 | action: string 102 | permission_name: string 103 | } 104 | 105 | export type StateTableScopesResponse = { 106 | block_num: number 107 | scopes: 
string[] 108 | } 109 | 110 | export type StateResponse = { 111 | up_to_block_id?: string 112 | up_to_block_num?: number 113 | last_irreversible_block_id: string 114 | last_irreversible_block_num: number 115 | abi?: Abi 116 | rows: DbRow[] 117 | } 118 | 119 | export type MultiStateResponse = { 120 | up_to_block_id?: string 121 | up_to_block_num?: number 122 | last_irreversible_block_id: string 123 | last_irreversible_block_num: number 124 | abi?: Abi 125 | tables: TableRows[] 126 | } 127 | 128 | export type TableRows = { 129 | account: string 130 | scope: string 131 | rows: DbRow[] 132 | } 133 | 134 | export type StateTableRowResponse = { 135 | up_to_block_id?: string 136 | up_to_block_num?: number 137 | last_irreversible_block_id: string 138 | last_irreversible_block_num: number 139 | abi?: Abi 140 | row: DbRow 141 | } 142 | -------------------------------------------------------------------------------- /src/types/stream-client.ts: -------------------------------------------------------------------------------- 1 | import { InboundMessage } from "../message/inbound" 2 | import { OutboundMessage } from "../message/outbound" 3 | import { Stream } from "./stream" 4 | 5 | /** 6 | * The stream client is an interface used to interact with dfuse Stream API. 7 | * 8 | * The stream client interface shall be responsible for managing the registration 9 | * and unregistration of the dfuse Stream as well as managing the full lifecycle of 10 | * a dfuse Stream currently active. 11 | * 12 | * @group Interfaces 13 | */ 14 | export interface StreamClient { 15 | /** 16 | * Release any resources held by this [[StreamClient]] instance. Must 17 | * be tolerant to being called multiple times. 18 | * 19 | * Once called, the instance is assumed unusable and should never 20 | * be invoked anymore. 21 | */ 22 | release(): void 23 | 24 | /** 25 | * Update the API token that should be used to communicate with the dfuse Stream 26 | * API. This token is assumed to be fresh and valid.
27 | * 28 | * @param apiToken The new API token to use from now on. 29 | */ 30 | setApiToken(apiToken: string): void 31 | 32 | /** 33 | * Register a dfuse Stream with the remote endpoint and receive messages back from 34 | * the stream via the `onMessage` parameter. 35 | * 36 | * By calling this method, the socket will connect to the remote endpoint if it's not 37 | * already the case. As soon as the method is called, the specific dfuse Stream 38 | * listening message is sent to the remote endpoint. 39 | * 40 | * On success, you will receive a [[Stream]] interface that you can use to 41 | * interact with the stream (mark progress, restart, close). 42 | * 43 | * On error, the promise will reject with the actual error thrown. 44 | * 45 | * @param message The specific [[OutboundMessage]] used to register the stream with the dfuse remote endpoint. 46 | * @param onMessage The callback that is invoked for each [[InboundMessage]] received bound to this stream. 47 | */ 48 | registerStream(message: OutboundMessage, onMessage: OnStreamMessage): Promise 49 | 50 | /** 51 | * Unregister the stream represented by this stream's id. 52 | * 53 | * This will send the `unlisten` message to the remote endpoint effectively 54 | * stopping the dfuse Stream as well as the flow of messages. 55 | * 56 | * All streams should be unregistered when not required anymore to clean up 57 | * resources and ensure no extra bandwidth is required. 58 | * 59 | * @param id The stream's id that should be unregistered from the stream client. 60 | */ 61 | unregisterStream(id: string): Promise 62 | } 63 | 64 | /** 65 | * Handler invoked when a message is routed to this exact stream via the matching 66 | * of the message id and the stream id. If this is invoked, you are guaranteed to 67 | * receive a message for your stream.
68 | * 69 | * @param message The actual inbound WebSocket message received, destined to you 70 | * @param stream The actual stream object on which the handler is defined, can be used to 71 | * mark the stream at the right location or close it eagerly. 72 | */ 73 | export type OnStreamMessage = (message: InboundMessage, stream: Stream) => void 74 | export type OnStreamRestart = () => void 75 | -------------------------------------------------------------------------------- /src/types/stream.ts: -------------------------------------------------------------------------------- 1 | import { OnStreamRestart } from "./stream-client" 2 | 3 | /** 4 | * A [[Stream]] represents a single open streaming pipeline with dfuse API. 5 | * On a single WebSocket connection, there are multiple streams active, 6 | * each with its own request ID. Routing of messages from the WebSocket to 7 | * the right stream instance is done this way. 8 | * 9 | * With this interface, you can control some aspects of the lifecycle of 10 | * a dfuse Stream. You can `restart` it at a given location when the socket 11 | * reconnects. You can also `close` the stream once it's not needed anymore. 12 | * 13 | * @group Interfaces 14 | */ 15 | export interface Stream { 16 | /** 17 | * Represents the request id used to identify this stream across all 18 | * viable streams. 19 | * 20 | * Should be unique among a common pool of Streams. 21 | */ 22 | readonly id: string 23 | 24 | /** 25 | * The current [[OnStreamRestart]] callback currently registered 26 | * by this [[Stream]]. When set to something, it will be invoked after a successful 27 | * restart of a stream (to be precise, after successfully sending the `listen` dfuse Stream 28 | * message on the WebSocket, without knowing if the remote end has actually received it 29 | * yet). 30 | * 31 | * There can be only one active `onPostRestart` handler for a given [[Stream]] 32 | * when set on the [[Stream]] instance.
33 | */ 34 | onPostRestart?: OnStreamRestart 35 | 36 | /** 37 | * The current active marker as last marked on this stream. If undefined, 38 | * it means the marker was never set. 39 | */ 40 | currentActiveMarker(): undefined | StreamMarker 41 | 42 | /** 43 | * Restart a stream after it has been disconnected. This re-sends the original 44 | * registration message along with any start marker information (`start_block` 45 | * argument for the dfuse Stream API or `cursor` variable for the dfuse GraphQL API). 46 | * 47 | * If you pass a [[StreamMarker]], the marker is used to determine 48 | * the right value to pick. 49 | * 50 | * If you do not pass any argument, the last marker set on this stream 51 | * instance (by calling [[mark]]) will be used if defined. 52 | * 53 | * If there is no argument and the stream was never marked for progress, 54 | * then it sends the original message as-is, and blocks not seen 55 | * while disconnected will not be re-processed. If it's not valid 56 | * for your use case, ensure to either `mark` the stream to start 57 | * back at that point, or use a `marker` when re-connecting. 58 | * 59 | * If the stream marker is invalid for the stream client, this will 60 | * reject the promise with an appropriate error message. 61 | * 62 | * @param marker The marker used to decide where to `restart` the 63 | * stream, see [[StreamMarker]]. 64 | */ 65 | restart(marker?: StreamMarker): Promise 66 | 67 | /** 68 | * Close the stream. Once closed, the stream is not usable as an object 69 | * and should be discarded. 70 | * 71 | * This closes the socket connection at the same time in the event there 72 | * are no more streams connected. 73 | * 74 | * One can pass the optional `options.error` value to let the stream client 75 | * managing the stream know if this was caused by an error or not. When the `options.error` 76 | * is set, it will usually be passed to the `join` promise which will be 77 | * rejected.
78 | * 79 | * @param options (optional) Optional parameters 80 | * @param options.error (defaults `undefined`) The error that caused this stream to be closed, if any. 81 | */ 82 | close(options?: { error?: Error }): Promise 83 | 84 | /** 85 | * Join the corresponding stream, waiting for its completion or for an 86 | * error to occur. This promise will resolve only when the stream terminates, 87 | * via any code path. 88 | * 89 | * The code paths that can terminate a stream: 90 | * - Someone called the `close` method on the stream. 91 | * - The stream received a `complete` message indicating the end of the stream. 92 | * - The stream received a terminating `error` message that forces the stream to stop. 93 | * - The socket disconnects (whatever the cause, client or server side) and automatic re-connection is not enabled. 94 | * 95 | * In the event that disconnection was abnormal or that the stream was closed 96 | * with the `options.error` being set (i.e. `stream.close({ error: new Error(...) })`) 97 | * then the promise will reject with the error being set. 98 | */ 99 | join(): Promise 100 | 101 | /** 102 | * Mark the stream at this given block num. Marking tells the 103 | * library that every block before this one (`atBlockNum` exclusive) was 104 | * seen and should not be processed anymore. 105 | * 106 | * When restarting, if you provide no argument, the stream will restart 107 | * at this exact marker, giving you blocks that were missed while 108 | * disconnected. 109 | * 110 | * @param marker The marker object to use to mark the stream. Can be either 111 | * the object `{ cursor: string }` or the object `{ atBlockNum: number }`. 112 | */ 113 | mark(marker: StreamMarker): void 114 | } 115 | 116 | /** 117 | * Represents a marker of a stream which indicates where the stream 118 | * is currently at in its processing of messages. 119 | * 120 | * The marker can be later re-used to restart a [[Stream]] at the right 121 | * location.
122 | */ 123 | export type StreamMarker = { cursor: string } | { atBlockNum: number } 124 | -------------------------------------------------------------------------------- /src/types/table-delta.ts: -------------------------------------------------------------------------------- 1 | import { DbOp } from "./common" 2 | 3 | export type TableDeltaData> = { 4 | block_num: number 5 | block_id: string 6 | dbop: DbOp 7 | step: "new" | "undo" | "redo" 8 | } 9 | -------------------------------------------------------------------------------- /src/types/table-snapshot.ts: -------------------------------------------------------------------------------- 1 | import { DbRow } from "./common" 2 | 3 | export type TableSnapshotData> = { 4 | rows: DbRow[] 5 | } 6 | -------------------------------------------------------------------------------- /src/types/transaction.ts: -------------------------------------------------------------------------------- 1 | import { ActionTrace, Action } from "./action-trace" 2 | import { ExtDTrxOp, DTrxOp, DbOp, RamOp, TableOp, CreationNode } from "./common" 3 | 4 | export type TransactionLifecycleData = { 5 | lifecycle: TransactionLifecycle 6 | } 7 | 8 | export type TransactionStatus = 9 | | "pending" 10 | | "delayed" 11 | | "canceled" 12 | | "expired" 13 | | "executed" 14 | | "soft_fail" 15 | | "hard_fail" 16 | 17 | export type TransactionLifecycle = { 18 | id: string 19 | transaction: Transaction 20 | transaction_status: TransactionStatus 21 | execution_trace?: TransactionTrace 22 | execution_block_header?: BlockHeader 23 | created_by?: ExtDTrxOp 24 | canceled_by?: ExtDTrxOp 25 | execution_irreversible: boolean 26 | creation_irreversible: boolean 27 | cancelation_irreversible: boolean 28 | dtrxops?: DTrxOp[] 29 | dbops?: DbOp[] 30 | ramops?: RamOp[] 31 | tableops?: TableOp[] 32 | pub_keys?: string[] 33 | creation_tree?: CreationNode[] 34 | } 35 | 36 | export type Transaction = { 37 | expiration: string 38 | ref_block_num: number 39 | 
ref_block_prefix: number 40 | max_net_usage_words: number 41 | max_cpu_usage_ms: number 42 | delay_sec: number 43 | context_free_actions: Action[] 44 | actions: Action[] 45 | transaction_extensions: any[] 46 | signatures?: string[] 47 | context_free_data?: Action[] 48 | } 49 | 50 | export type TransactionTrace = { 51 | id: string 52 | block_num: number 53 | block_time: string 54 | producer_block_id?: string 55 | receipt?: TransactionReceipt 56 | elapsed: number 57 | net_usage: number 58 | scheduled: boolean 59 | action_traces: ActionTrace[] 60 | failed_dtrx_trace?: TransactionTrace 61 | except?: any 62 | } 63 | 64 | export type TransactionReceipt = { 65 | status: TransactionStatus 66 | cpu_usage_us: number 67 | net_usage_words: number 68 | } 69 | 70 | export type BlockHeader = { 71 | timestamp: string 72 | producer: string 73 | confirmed: number 74 | previous: string 75 | transaction_mroot: string 76 | action_mroot: string 77 | schedule_version: number 78 | new_producers: null 79 | header_extensions: any[] 80 | } 81 | -------------------------------------------------------------------------------- /tools/docs/theme/partials/toc.root.hbs: -------------------------------------------------------------------------------- 1 | {{#if isInPath}} 2 | 3 |
    4 | {{/if}} 5 |
  • 6 | {{#if groupTitle}} 7 |

    {{{wbr groupTitle}}}

    8 | {{else}} 9 | {{{wbr title}}} 10 | {{/if}} 11 | {{#if children}} 12 |
      13 | {{#each children}} 14 | {{> toc}} 15 | {{/each}} 16 |
    17 | {{/if}} 18 |
  • 19 | {{#if isInPath}} 20 |
21 |
    22 | {{/if}} -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Basic Options */ 4 | "target": "es6", 5 | "module": "es2015", 6 | "lib": ["es2015", "es2016", "es2017", "dom"], 7 | "declaration": true, 8 | "declarationDir": "dist/types", 9 | "sourceMap": true, 10 | "outDir": "dist/lib", 11 | /* Strict Type-Checking Options */ 12 | "strict": true, 13 | 14 | /* Module Resolution Options */ 15 | "allowSyntheticDefaultImports": true, 16 | "moduleResolution": "node", 17 | "esModuleInterop": true, 18 | "resolveJsonModule": true 19 | }, 20 | "files": ["src/index.ts"], 21 | 22 | "include": ["./src/**/*.ts"] 23 | } 24 | -------------------------------------------------------------------------------- /typedoc.json: -------------------------------------------------------------------------------- 1 | { 2 | "mode": "file", 3 | "out": "docs", 4 | "exclude": "**/*+.test.ts", 5 | "excludeNotExported": true, 6 | "excludePrivate": true, 7 | "listInvalidSymbolLinks": true, 8 | "theme": "./tools/docs/theme" 9 | } 10 | --------------------------------------------------------------------------------