├── .nvmrc ├── test └── Gateway.test.ts ├── .eslintignore ├── src ├── express.d.ts ├── utility │ ├── sleep.utility.ts │ ├── log.utility.ts │ ├── ans.utility.ts │ ├── file.utility.ts │ └── encoding.utility.ts ├── middleware │ ├── json.middleware.ts │ ├── cors.middleware.ts │ └── log.middleware.ts ├── route │ ├── proxy.route.ts │ └── data.route.ts ├── database │ ├── connection.database.ts │ ├── block.database.ts │ ├── knex.batch.database.ts │ ├── import.database.ts │ ├── transaction.database.ts │ ├── batch.database.ts │ └── sync.database.ts ├── graphql │ ├── server.graphql.ts │ ├── query.graphql.ts │ ├── resolver.graphql.ts │ └── types.ts ├── Import.ts ├── types │ └── arweave.types.ts ├── Gateway.ts ├── query │ ├── node.query.ts │ ├── transaction.query.ts │ └── block.query.ts └── Snapshot.ts ├── codegen.yml ├── .gitignore ├── .travis.yml ├── .dockerignore ├── .env.dev ├── docker └── gateway.dockerfile ├── .env.docker ├── .eslintrc.json ├── knexfile.ts ├── docker-compose.yml ├── tsconfig.json ├── LICENSE ├── SNAPSHOT.md ├── migrations └── 20200404025828_initialize.ts ├── package.json ├── README.md ├── DEV.md ├── bin └── wait.sh └── types.graphql /.nvmrc: -------------------------------------------------------------------------------- 1 | 12.20.1 -------------------------------------------------------------------------------- /test/Gateway.test.ts: -------------------------------------------------------------------------------- 1 | export { 2 | 3 | } -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | src/express.d.ts 2 | src/graphql/types.ts 3 | -------------------------------------------------------------------------------- /src/express.d.ts: -------------------------------------------------------------------------------- 1 | declare namespace Express { 2 | export interface Request { 3 | id?: string; 4 | } 5 | } 6 | 
-------------------------------------------------------------------------------- /src/utility/sleep.utility.ts: -------------------------------------------------------------------------------- 1 | export function sleep(ms: number) { 2 | return new Promise(resolve => setTimeout(resolve, ms)); 3 | } -------------------------------------------------------------------------------- /codegen.yml: -------------------------------------------------------------------------------- 1 | schema: ./types.graphql 2 | generates: 3 | ./src/graphql/types.ts: 4 | plugins: 5 | - typescript 6 | - typescript-resolvers -------------------------------------------------------------------------------- /src/middleware/json.middleware.ts: -------------------------------------------------------------------------------- 1 | import {json} from 'body-parser'; 2 | 3 | export const jsonMiddleware = json({ 4 | limit: '15mb', 5 | type: () => true, 6 | }); 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | dist 2 | node_modules 3 | .env 4 | .cache 5 | cache 6 | 7 | snapshot 8 | .snapshot 9 | .import 10 | 11 | **/*.log 12 | **/.DS_Store 13 | **/Thumbs.db 14 | -------------------------------------------------------------------------------- /src/utility/log.utility.ts: -------------------------------------------------------------------------------- 1 | import {createLogger, transports, format} from 'winston'; 2 | 3 | export const log = createLogger({ 4 | level: 'info', 5 | transports: new transports.Console({ 6 | format: format.simple(), 7 | }), 8 | }); 9 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | 3 | node_js: 4 | - "12" 5 | 6 | before_script: 7 | - npm install yarn --global 8 | 9 | services: 10 | - docker 11 | 12 | 
script: 13 | - yarn dev:lint 14 | - yarn docker:start 15 | - yarn docker:stop -------------------------------------------------------------------------------- /src/route/proxy.route.ts: -------------------------------------------------------------------------------- 1 | import {Request, Response} from 'express'; 2 | import {grabNode} from '../query/node.query'; 3 | 4 | export async function proxyRoute(req: Request, res: Response) { 5 | return res.redirect(308, `${grabNode()}/${req.path}`); 6 | } 7 | -------------------------------------------------------------------------------- /src/utility/ans.utility.ts: -------------------------------------------------------------------------------- 1 | import Arweave from 'arweave'; 2 | import deepHash from 'arweave/node/lib/deepHash'; 3 | import arweaveBundles from 'arweave-bundles'; 4 | 5 | export const ansDeps = { 6 | utils: Arweave.utils, 7 | crypto: Arweave.crypto, 8 | deepHash: deepHash, 9 | }; 10 | 11 | export const ansBundles = arweaveBundles(ansDeps); 12 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Ignore Everything 2 | ** 3 | 4 | # Include Essential Gateway Files 5 | !/bin 6 | !/.env 7 | !/types.graphql 8 | !/codegen.yml 9 | !/knexfile.ts 10 | !/yarn.lock 11 | !/package.json 12 | !/package-lock.json 13 | !/tsconfig.json 14 | !/src/** 15 | !/migrations/** 16 | 17 | # Ignore Unnecessary Files 18 | **/*.log 19 | **/.DS_Store 20 | **/Thumbs.db -------------------------------------------------------------------------------- /src/middleware/cors.middleware.ts: -------------------------------------------------------------------------------- 1 | import {Request, Response, NextFunction} from 'express'; 2 | 3 | export function corsMiddleware(req: Request, res: Response, next: NextFunction) { 4 | res.header('Access-Control-Allow-Origin', '*'); 5 | res.header('Access-Control-Allow-Methods', 
req.method); 6 | res.header('Access-Control-Allow-Headers', 'Content-Type'); 7 | 8 | return next(); 9 | } 10 | -------------------------------------------------------------------------------- /src/utility/file.utility.ts: -------------------------------------------------------------------------------- 1 | import {existsSync, mkdirSync, readdirSync, unlinkSync} from 'fs'; 2 | import { join } from 'path'; 3 | 4 | export function mkdir(path: string) { 5 | if (!existsSync(path)) { 6 | mkdirSync(path); 7 | } 8 | } 9 | 10 | export function clean(path: string) { 11 | const files = readdirSync(path); 12 | for (const file in files) { 13 | unlinkSync(join(path, file)); 14 | } 15 | } -------------------------------------------------------------------------------- /src/database/connection.database.ts: -------------------------------------------------------------------------------- 1 | import knex from 'knex'; 2 | import {config} from 'dotenv'; 3 | 4 | config(); 5 | 6 | export const connection: knex = knex({ 7 | client: 'pg', 8 | pool: {min: 10, max: 10000}, 9 | connection: { 10 | host: process.env.DATABASE_HOST, 11 | port: parseInt(process.env.DATABASE_PORT || '5432'), 12 | database: process.env.DATABASE_NAME, 13 | user: process.env.DATABASE_USER, 14 | password: process.env.DATABASE_PASSWORD, 15 | }, 16 | }); 17 | -------------------------------------------------------------------------------- /.env.dev: -------------------------------------------------------------------------------- 1 | ARWEAVE_NODES=["http://lon-1.eu-west-1.arweave.net:1984","http://lon-2.eu-west-1.arweave.net:1984","http://lon-3.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-5.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] 2 | 3 | DATABASE_HOST=0.0.0.0 4 | DATABASE_PORT=5432 5 | DATABASE_USER=arweave 6 | DATABASE_PASSWORD=arweave 7 | DATABASE_NAME=arweave 8 | 9 | ENVIRONMENT=public 10 | PORT=3000 11 | 12 | PARALLEL=4 13 | SNAPSHOT=0 14 | 15 | 
INDICES=["App-Name", "app", "domain", "namespace"] -------------------------------------------------------------------------------- /docker/gateway.dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:12 2 | LABEL Arweave Team 3 | 4 | WORKDIR /app 5 | 6 | COPY bin/wait.sh bin/wait.sh 7 | COPY .env .env 8 | COPY types.graphql types.graphql 9 | COPY codegen.yml codegen.yml 10 | COPY knexfile.ts knexfile.ts 11 | COPY package.json package.json 12 | COPY tsconfig.json tsconfig.json 13 | COPY src src 14 | COPY migrations migrations 15 | 16 | RUN chmod +x bin/wait.sh 17 | RUN yarn 18 | RUN yarn dev:build 19 | 20 | CMD ["./bin/wait.sh", "$DATABASE_HOST:$DATABASE_PORT", "--", "yarn", "start"] -------------------------------------------------------------------------------- /.env.docker: -------------------------------------------------------------------------------- 1 | ARWEAVE_NODES=["http://lon-1.eu-west-1.arweave.net:1984","http://lon-2.eu-west-1.arweave.net:1984","http://lon-3.eu-west-1.arweave.net:1984","http://lon-4.eu-west-1.arweave.net:1984","http://lon-5.eu-west-1.arweave.net:1984","http://lon-6.eu-west-1.arweave.net:1984"] 2 | 3 | DATABASE_HOST=postgres 4 | DATABASE_PORT=5432 5 | DATABASE_USER=arweave 6 | DATABASE_PASSWORD=arweave 7 | DATABASE_NAME=arweave 8 | 9 | ENVIRONMENT=public 10 | PORT=3000 11 | 12 | PARALLEL=4 13 | SNAPSHOT=0 14 | 15 | INDICES=["App-Name", "app", "domain", "namespace"] -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es2021": true, 4 | "node": true 5 | }, 6 | "extends": [ 7 | "google" 8 | ], 9 | "parser": "@typescript-eslint/parser", 10 | "parserOptions": { 11 | "ecmaVersion": 12, 12 | "sourceType": "module" 13 | }, 14 | "plugins": [ 15 | "@typescript-eslint" 16 | ], 17 | "rules": { 18 | "quotes": [2, "single", { "avoidEscape": true }], 
19 | "require-jsdoc": 0, 20 | "max-len": 0, 21 | "camelcase": 0 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /src/route/data.route.ts: -------------------------------------------------------------------------------- 1 | import {Request, Response} from 'express'; 2 | import {getData} from '../query/node.query'; 3 | 4 | export const dataRouteRegex = /^\/?([a-zA-Z0-9-_]{43})\/?$|^\/?([a-zA-Z0-9-_]{43})\/(.*)$/i; 5 | export const pathRegex = /^\/?([a-z0-9-_]{43})/i; 6 | 7 | export async function dataRoute(req: Request, res: Response) { 8 | const path = req.path.match(pathRegex) || []; 9 | const transaction = path.length > 1 ? path[1] : ''; 10 | const payload = await getData(transaction); 11 | 12 | return res.status(200).send(payload); 13 | } 14 | -------------------------------------------------------------------------------- /src/graphql/server.graphql.ts: -------------------------------------------------------------------------------- 1 | import {readFileSync} from 'fs'; 2 | import {ApolloServer, ApolloServerExpressConfig, gql} from 'apollo-server-express'; 3 | import {connection} from '../database/connection.database'; 4 | import {resolvers} from './resolver.graphql'; 5 | 6 | const typeDefs = gql(readFileSync(`${process.cwd()}/types.graphql`, 'utf8')); 7 | 8 | export function graphServer(opts: ApolloServerExpressConfig = {}) { 9 | return new ApolloServer({ 10 | typeDefs, 11 | resolvers, 12 | debug: false, 13 | context: ({req}) => { 14 | return { 15 | req, 16 | connection, 17 | }; 18 | }, 19 | ...opts, 20 | }); 21 | } 22 | -------------------------------------------------------------------------------- /knexfile.ts: -------------------------------------------------------------------------------- 1 | import {config} from 'dotenv'; 2 | import {Config} from 'knex'; 3 | 4 | config(); 5 | 6 | export default { 7 | client: 'pg', 8 | connection: { 9 | host: process.env.DATABASE_HOST, 10 | port: 
parseInt(process.env.DATABASE_PORT || '5432'), 11 | database: process.env.DATABASE_NAME, 12 | user: process.env.DATABASE_USER, 13 | password: process.env.DATABASE_PASSWORD, 14 | }, 15 | pool: { 16 | min: 1, 17 | max: 10, 18 | }, 19 | migrations: { 20 | tableName: 'migrations', 21 | loadExtensions: ['.ts'], 22 | extension: 'ts', 23 | directory: './migrations', 24 | schemaName: 'public', 25 | }, 26 | } as Config; 27 | -------------------------------------------------------------------------------- /src/Import.ts: -------------------------------------------------------------------------------- 1 | import {log} from './utility/log.utility'; 2 | import {importBlocks, importTransactions, importTags} from './database/import.database'; 3 | 4 | export async function importSnapshot() { 5 | await importBlocks(`${process.cwd()}/snapshot/block.csv`); 6 | log.info('[snapshot] successfully imported block.csv'); 7 | 8 | await importTransactions(`${process.cwd()}/snapshot/transaction.csv`); 9 | log.info('[snapshot] successfully imported transaction.csv'); 10 | 11 | await importTags(`${process.cwd()}/snapshot/tags.csv`); 12 | log.info('[snapshot] successfully imported tags.csv'); 13 | 14 | process.exit(); 15 | } 16 | 17 | (async () => await importSnapshot())(); 18 | -------------------------------------------------------------------------------- /src/middleware/log.middleware.ts: -------------------------------------------------------------------------------- 1 | import morgan from 'morgan'; 2 | import id from 'shortid'; 3 | import {Request, Response, NextFunction} from 'express'; 4 | 5 | export function logConfigurationMiddleware(req: Request, res: Response, next: NextFunction) { 6 | const trace = id.generate(); 7 | 8 | req.id = trace; 9 | res.header('X-Trace', trace); 10 | 11 | return next(); 12 | } 13 | 14 | morgan.token('trace', (req: Request) => { 15 | return req.id || 'UNKNOWN'; 16 | }); 17 | 18 | export const logMiddleware = morgan('[http] :remote-addr - :remote-user [:date] 
":method :url HTTP/:http-version" :status :res[content-length] :response-time ms ":referrer" ":user-agent" [trace=:trace]'); 19 | -------------------------------------------------------------------------------- /src/types/arweave.types.ts: -------------------------------------------------------------------------------- 1 | import {Base64UrlEncodedString, WinstonString} from '../utility/encoding.utility'; 2 | 3 | export interface Tag { 4 | name: Base64UrlEncodedString; 5 | value: Base64UrlEncodedString; 6 | } 7 | 8 | export interface Transaction { 9 | format: number; 10 | id: string; 11 | signature: string; 12 | owner: string; 13 | target: string; 14 | data: Base64UrlEncodedString; 15 | reward: WinstonString; 16 | last_tx: string; 17 | tags: Tag[]; 18 | quantity: WinstonString; 19 | data_size: number; 20 | data_root: string; 21 | data_tree: string[]; 22 | } 23 | 24 | export type TransactionHeader = Omit; 25 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | # Postgres Configuration 3 | postgres: 4 | image: postgres:12 5 | volumes: 6 | - database:/var/lib/postgresql/data 7 | expose: 8 | - ${DATABASE_PORT} 9 | ports: 10 | - ${DATABASE_PORT}:${DATABASE_PORT} 11 | environment: 12 | POSTGRES_USER: ${DATABASE_USER} 13 | POSTGRES_PASSWORD: ${DATABASE_PASSWORD} 14 | POSTGRES_DB: ${DATABASE_NAME} 15 | command: -p ${DATABASE_PORT} 16 | # Gateway Configuration 17 | server: 18 | build: 19 | context: . 
20 | dockerfile: ./docker/gateway.dockerfile 21 | links: 22 | - postgres 23 | ports: 24 | - ${PORT}:${PORT} 25 | environment: 26 | PORT: ${PORT} 27 | 28 | volumes: 29 | database: -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "module": "CommonJS", 5 | "moduleResolution": "node", 6 | "lib": ["ESNext"], 7 | "declaration": true, 8 | "declarationMap": true, 9 | "sourceMap": true, 10 | "outDir": "./dist", 11 | "composite": true, 12 | "removeComments": true, 13 | "importHelpers": true, 14 | "downlevelIteration": true, 15 | "isolatedModules": true, 16 | "strict": true, 17 | "allowSyntheticDefaultImports": true, 18 | "esModuleInterop": true, 19 | "preserveSymlinks": true, 20 | "allowUmdGlobalAccess": true, 21 | "forceConsistentCasingInFileNames": true 22 | }, 23 | "include": [ 24 | "src/**/*.ts", 25 | "test/**/*.ts" 26 | ], 27 | "exclude": [ 28 | "node_modules" 29 | ] 30 | } 31 | -------------------------------------------------------------------------------- /src/database/block.database.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import {pick} from 'lodash'; 3 | import {BlockType} from '../query/block.query'; 4 | 5 | export interface BlockDatabaseType { 6 | id: string; 7 | previous_block: string; 8 | mined_at: string; 9 | height: number; 10 | txs: string; 11 | extended: string; 12 | } 13 | 14 | export const blockExtendedFields = [ 15 | 'diff', 16 | 'hash', 17 | 'reward_addr', 18 | 'last_retarget', 19 | 'tx_root', 20 | 'tx_tree', 21 | 'reward_pool', 22 | 'weave_size', 23 | 'block_size', 24 | 'cumulative_diff', 25 | 'hash_list_merkle', 26 | 'tags', 27 | ]; 28 | 29 | export function formatBlock(block: BlockType): BlockDatabaseType { 30 | return { 31 | id: block.indep_hash, 32 | height: block.height, 33 | 
previous_block: block.previous_block, 34 | txs: JSON.stringify(block.txs), 35 | mined_at: moment(block.timestamp * 1000).format(), 36 | extended: JSON.stringify(pick(block, blockExtendedFields)), 37 | }; 38 | } 39 | -------------------------------------------------------------------------------- /src/Gateway.ts: -------------------------------------------------------------------------------- 1 | import 'colors'; 2 | import express, {Express} from 'express'; 3 | import {config} from 'dotenv'; 4 | import {corsMiddleware} from './middleware/cors.middleware'; 5 | import {jsonMiddleware} from './middleware/json.middleware'; 6 | import {log} from './utility/log.utility'; 7 | import {graphServer} from './graphql/server.graphql'; 8 | import {proxyRoute} from './route/proxy.route'; 9 | import {dataRouteRegex, dataRoute} from './route/data.route'; 10 | import {startSync} from './database/sync.database'; 11 | 12 | config(); 13 | 14 | export const app: Express = express(); 15 | 16 | export function start() { 17 | app.set('trust proxy', 1); 18 | app.use(corsMiddleware); 19 | app.use(jsonMiddleware); 20 | 21 | graphServer({introspection: true, playground: true}).applyMiddleware({app, path: '/graphql'}); 22 | 23 | app.get(dataRouteRegex, dataRoute); 24 | app.all('*', proxyRoute); 25 | 26 | app.listen(process.env.PORT || 3000, () => { 27 | log.info(`[app] started on http://localhost:${process.env.PORT || 3000}`); 28 | startSync(); 29 | }); 30 | } 31 | 32 | 33 | (async () => await start())(); 34 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Arweave Team 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, 
modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /src/query/node.query.ts: -------------------------------------------------------------------------------- 1 | import {get} from 'superagent'; 2 | 3 | export const NODES = process.env.ARWEAVE_NODES ? 
JSON.parse(process.env.ARWEAVE_NODES) : ['http://lon-1.eu-west-1.arweave.net:1984']; 4 | 5 | export function grabNode() { 6 | return NODES[Math.floor(Math.random() * NODES.length)]; 7 | } 8 | 9 | export interface InfoType { 10 | network: string; 11 | version: number; 12 | release: number; 13 | height: number; 14 | current: string; 15 | blocks: number; 16 | peers: number; 17 | queue_length: number; 18 | node_state_latency: number; 19 | } 20 | 21 | export async function getNodeInfo(): Promise { 22 | const payload = await get(`${grabNode()}/info`); 23 | const body = JSON.parse(payload.text); 24 | 25 | return { 26 | network: body.network, 27 | version: body.version, 28 | release: body.release, 29 | height: body.height, 30 | current: body.current, 31 | blocks: body.blocks, 32 | peers: body.peers, 33 | queue_length: body.queue_length, 34 | node_state_latency: body.node_state_latency, 35 | }; 36 | } 37 | 38 | export async function getData(id: string): Promise { 39 | const payload = await get(`${grabNode()}/${id}`); 40 | return payload.body; 41 | } 42 | -------------------------------------------------------------------------------- /src/database/knex.batch.database.ts: -------------------------------------------------------------------------------- 1 | import {Transaction, QueryBuilder} from 'knex'; 2 | import {BlockType} from '../query/block.query'; 3 | import {TransactionType} from '../query/transaction.query'; 4 | import {formatBlock} from './block.database'; 5 | import {formatTransaction, DatabaseTag} from './transaction.database'; 6 | 7 | export function createBatchItem(batchScope: Transaction, table: string, data: object, conflictKey: string = 'id'): QueryBuilder { 8 | return batchScope 9 | .insert(data) 10 | .into(table) 11 | .onConflict(conflictKey as any) 12 | .ignore(); 13 | } 14 | 15 | export function createBatchItemForTag(batchScope: Transaction, table: string, data: object): QueryBuilder { 16 | return batchScope 17 | .insert(data) 18 | .into(table); 19 | } 20 
| 21 | export function createBlockBatchItem(batchScope: Transaction, block: BlockType): QueryBuilder { 22 | const formattedBlock = formatBlock(block); 23 | return createBatchItem(batchScope, 'blocks', formattedBlock); 24 | } 25 | 26 | export function createTransactionBatchItem(batchScope: Transaction, transaction: TransactionType): QueryBuilder { 27 | const formattedTransaction = formatTransaction(transaction); 28 | return createBatchItem(batchScope, 'transactions', formattedTransaction); 29 | } 30 | 31 | export function createTagBatchItem(batchScope: Transaction, tag: DatabaseTag): QueryBuilder { 32 | return createBatchItemForTag(batchScope, 'tags', tag); 33 | } 34 | -------------------------------------------------------------------------------- /src/query/transaction.query.ts: -------------------------------------------------------------------------------- 1 | import {get} from 'superagent'; 2 | import {Base64UrlEncodedString, WinstonString, fromB64Url} from '../utility/encoding.utility'; 3 | import {grabNode} from './node.query'; 4 | 5 | export interface Tag { 6 | name: Base64UrlEncodedString; 7 | value: Base64UrlEncodedString; 8 | } 9 | 10 | export interface TransactionType { 11 | format: number; 12 | id: string; 13 | height?: number; 14 | last_tx: string; 15 | owner: string; 16 | tags: Array; 17 | target: string; 18 | quantity: WinstonString; 19 | data: Base64UrlEncodedString; 20 | data_size: string; 21 | data_tree: Array; 22 | data_root: string; 23 | reward: string; 24 | signature: string; 25 | } 26 | 27 | export async function transaction(id: string): Promise { 28 | const payload = await get(`${grabNode()}/tx/${id}`); 29 | const body = JSON.parse(payload.text); 30 | 31 | return { 32 | format: body.format, 33 | id: body.id, 34 | last_tx: body.last_tx, 35 | owner: body.owner, 36 | tags: body.tags, 37 | target: body.target, 38 | quantity: body.quantity, 39 | data: body.data, 40 | data_size: body.data_size, 41 | data_tree: body.data_tree, 42 | data_root: 
body.data_root, 43 | reward: body.reward, 44 | signature: body.signature, 45 | }; 46 | } 47 | 48 | export function tagValue(tags: Array, name: string): string { 49 | for (let i = 0; i < tags.length; i++) { 50 | const tag = tags[i]; 51 | if (fromB64Url(tag.name).toString().toLowerCase() === name.toLowerCase()) { 52 | return fromB64Url(tag.value).toString(); 53 | } 54 | } 55 | 56 | return ''; 57 | } 58 | -------------------------------------------------------------------------------- /SNAPSHOT.md: -------------------------------------------------------------------------------- 1 | # Snapshot Guide 2 | 3 | Use this guide to generate your own snapshots or import a snapshot. 4 | 5 | ## Generate a Snapshot 6 | 7 | You can generate a snapshot while syncing your node by enabling snapshots with the `SNAPSHOT=1` variable in your environment file. 8 | 9 | However, you can have an instance solely dedicated to creating a snapshot file by running `yarn dev:snapshot`. 10 | 11 | You can configure the level of block synchronization by modifying the `PARALLEL` variable. 12 | 13 | **Examples** 14 | 15 | ```bash 16 | # Sync 4 blocks at a time when running yarn dev:snapshot 17 | PARALLEL=4 18 | SNAPSHOT=1 19 | ``` 20 | 21 | ```bash 22 | # Sync 8 blocks at a time 23 | PARALLEL=8 24 | SNAPSHOT=1 25 | ``` 26 | 27 | When generating a snapshot. Output will appear in the `snapshot` folder. You can tar.gz the archive by running. 28 | 29 | ```bash 30 | tar -zcvf snapshot.tar.gz snapshot 31 | ``` 32 | 33 | You can then upload the snapshot to Arweave by running. 34 | 35 | ```bash 36 | arweave deploy snapshot.tar.gz 37 | ``` 38 | 39 | ## Importing a Snapshot 40 | 41 | If you want to import a snapshot. You need to make sure import the `.csv` files into the `snapshot` folder. It should look something like. 42 | 43 | ```bash 44 | snapshot/block.csv 45 | snapshot/transaction.csv 46 | snapshot/tags.csv 47 | ``` 48 | 49 | If you're downloading a `.tar.gz` file. You can decompress it by running. 
50 | 51 | ```bash 52 | tar -zxf snapshot.tar.gz -C snapshot 53 | ``` 54 | 55 | You can then run the import command. 56 | 57 | ```bash 58 | yarn dev:import 59 | ``` 60 | 61 | If successful, it should output. 62 | 63 | ```bash 64 | info: [snapshot] successfully imported block.csv 65 | info: [snapshot] successfully imported transaction.csv 66 | info: [snapshot] successfully imported tags.csv 67 | ``` 68 | 69 | Make sure when running an actual Gateway you copy the `.snapshot` file from the `snapshot` folder into the root directory. 70 | 71 | ```bash 72 | cp snapshot/.snapshot .snapshot 73 | ``` -------------------------------------------------------------------------------- /src/database/import.database.ts: -------------------------------------------------------------------------------- 1 | import {config} from 'dotenv'; 2 | import {connection} from '../database/connection.database'; 3 | import {transactionFields} from '../database/transaction.database'; 4 | 5 | config(); 6 | 7 | export const indices = JSON.parse(process.env.INDICES || '[]') as Array; 8 | 9 | export async function importBlocks(path: string) { 10 | return new Promise(async (resolve) => { 11 | await connection.raw(` 12 | COPY 13 | blocks 14 | (id, previous_block, mined_at, height, txs, extended) 15 | FROM 16 | '${path}' 17 | WITH 18 | ( 19 | FORMAT CSV, 20 | ESCAPE '\\', 21 | DELIMITER ',', 22 | FORCE_NULL(height) 23 | ) 24 | `); 25 | 26 | return resolve(true); 27 | }); 28 | } 29 | 30 | export async function importTransactions(path: string) { 31 | return new Promise(async (resolve) => { 32 | const fields = transactionFields 33 | .concat(indices) 34 | .map(field => `"${field}"`); 35 | 36 | await connection.raw(` 37 | COPY 38 | transactions 39 | (${fields.join(',')}) 40 | FROM 41 | '${path}' 42 | WITH 43 | ( 44 | FORMAT CSV, 45 | ESCAPE '\\', 46 | DELIMITER ',', 47 | FORCE_NULL("format", "height", "data_size") 48 | )`); 49 | 50 | return resolve(true); 51 | }); 52 | } 53 | 54 | export async function 
importTags(path: string) { 55 | return new Promise(async (resolve) => { 56 | await connection.raw(` 57 | COPY 58 | tags 59 | (tx_id, index, name, value) 60 | FROM 61 | '${path}' 62 | WITH 63 | ( 64 | FORMAT CSV, 65 | ESCAPE '\\', 66 | DELIMITER ',', 67 | FORCE_NULL(index) 68 | ) 69 | `); 70 | 71 | return resolve(true); 72 | }); 73 | } 74 | -------------------------------------------------------------------------------- /migrations/20200404025828_initialize.ts: -------------------------------------------------------------------------------- 1 | import * as Knex from 'knex'; 2 | import {config} from 'dotenv'; 3 | 4 | config(); 5 | 6 | export async function up(knex: Knex) { 7 | const indices = JSON.parse(process.env.INDICES || '[]'); 8 | 9 | return knex.schema 10 | .withSchema(process.env.ENVIRONMENT || 'public') 11 | .createTable('transactions', (table) => { 12 | table.string('id', 64).notNullable(); 13 | table.text('owner'); 14 | table.jsonb('tags'); 15 | table.string('target', 64); 16 | table.string('quantity'); 17 | table.string('reward'); 18 | table.text('signature'); 19 | table.string('last_tx', 64); 20 | table.integer('data_size', 8); 21 | table.string('content_type'); 22 | table.integer('format', 2); 23 | table.integer('height', 4); 24 | table.string('owner_address'); 25 | table.string('data_root', 64); 26 | table.string('parent', 64); 27 | table.timestamp('created_at').defaultTo(knex.fn.now()); 28 | 29 | for (let i = 0; i < indices.length; i++) { 30 | const index = indices[i]; 31 | table.string(index, 64); 32 | table.index(index, `index_${index}_transactions`, 'BTREE'); 33 | } 34 | 35 | table.primary(['id'], 'pkey_transactions'); 36 | }) 37 | .createTable('blocks', (table) => { 38 | table.string('id', 64).notNullable(); 39 | table.integer('height', 4).notNullable(); 40 | table.timestamp('mined_at').notNullable(); 41 | table.string('previous_block').notNullable(); 42 | table.jsonb('txs').notNullable(); 43 | table.jsonb('extended'); 44 | 
table.timestamp('created_at').defaultTo(knex.fn.now()); 45 | 46 | table.primary(['id'], 'pkey_blocks'); 47 | }) 48 | .createTable('tags', (table) => { 49 | table.string('tx_id', 64).notNullable(); 50 | table.integer('index').notNullable(); 51 | table.string('name'); 52 | table.text('value'); 53 | table.timestamp('created_at').defaultTo(knex.fn.now()); 54 | 55 | table.primary(['tx_id', 'index'], 'pkey_tags'); 56 | table.index(['name', 'value'], 'index_name_value', 'BTREE'); 57 | }); 58 | } 59 | 60 | export async function down(knex: Knex) { 61 | return knex.schema 62 | .withSchema(process.env.ENVIRONMENT || 'public') 63 | .dropTableIfExists('transactions') 64 | .dropTableIfExists('blocks') 65 | .dropTableIfExists('tags'); 66 | } 67 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@arweave/gateway", 3 | "version": "0.2.0", 4 | "main": "dist/src/Gateway.js", 5 | "repository": "git@github.com:ArweaveTeam/gateway.git", 6 | "author": "Arweave ", 7 | "license": "MIT", 8 | "scripts": { 9 | "dev:lint": "eslint src/**.ts migrations/*.ts knexfile.ts", 10 | "dev:gql": "npx graphql-codegen --config codegen.yml", 11 | "dev:build": "npm run dev:gql && tsc", 12 | "dev:start": "npm run dev:build && node dist/src/Gateway.js", 13 | "dev:restart": "npm run migrate:down && npm run migrate:latest && npm run dev:start", 14 | "dev:snapshot": "npm run dev:build && node dist/src/Snapshot.js", 15 | "dev:import": "npm run dev:build && node dist/src/Import.js", 16 | "migrate:down": "knex migrate:down", 17 | "migrate:up": "knex migrate:up", 18 | "migrate:latest": "knex migrate:latest", 19 | "migrate:reset": "knex migrate:down && knex migrate:latest", 20 | "docker:start": "docker-compose up --build -d", 21 | "docker:stop": "docker-compose down -v", 22 | "start": "npm run migrate:latest && node dist/src/Gateway.js" 23 | }, 24 | "dependencies": { 25 | 
"apollo-server-express": "^2.19.2", 26 | "arweave": "^1.10.6", 27 | "arweave-bundles": "^1.0.3", 28 | "body-parser": "^1.19.0", 29 | "colors": "^1.4.0", 30 | "dotenv": "^8.2.0", 31 | "express": "^4.17.1", 32 | "graphql": "^15.5.0", 33 | "graphql-fields": "^2.0.3", 34 | "knex": "^0.21.17", 35 | "lodash": "^4.17.20", 36 | "moment": "^2.29.1", 37 | "morgan": "^1.10.0", 38 | "pg": "^8.5.1", 39 | "progress": "^2.0.3", 40 | "rfc4648": "^1.4.0", 41 | "shortid": "^2.2.16", 42 | "superagent": "^6.1.0", 43 | "winston": "^3.3.3" 44 | }, 45 | "devDependencies": { 46 | "@graphql-codegen/cli": "1.17.7", 47 | "@graphql-codegen/typescript": "1.17.7", 48 | "@graphql-codegen/typescript-resolvers": "1.17.7", 49 | "@types/event-stream": "^3.3.34", 50 | "@types/express": "^4.17.11", 51 | "@types/graphql-fields": "^1.3.3", 52 | "@types/lodash": "^4.14.168", 53 | "@types/morgan": "^1.9.2", 54 | "@types/node": "^14.14.22", 55 | "@types/progress": "^2.0.3", 56 | "@types/shortid": "^0.0.29", 57 | "@types/superagent": "^4.1.10", 58 | "@typescript-eslint/eslint-plugin": "^4.14.1", 59 | "@typescript-eslint/parser": "^4.14.1", 60 | "eslint": "^7.18.0", 61 | "eslint-config-google": "^0.14.0", 62 | "ts-node": "^9.1.1", 63 | "typescript": "^4.1.3" 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Arweave Gateway 2 | 3 | ![License](https://img.shields.io/badge/license-MIT-blue.svg) 4 | [![Build Status](https://travis-ci.org/ArweaveTeam/gateway.svg?branch=master)](https://travis-ci.org/ArweaveTeam/gateway) 5 | [![codecov](https://codecov.io/gh/ArweaveTeam/gateway/branch/master/graph/badge.svg)](https://codecov.io/gh/ArweaveTeam/gateway) 6 | 7 | ## Requirements 8 | 9 | 1. A Unix OS 10 | 11 | 2. Docker and Docker Compose LTS 12 | 13 | ### Suggested Hardware 14 | 15 | There are several million transactions on the Arweave chain. 
In order to serve content effectively on the gateway, you'll need a reasonably powerful machine. The ideal specs for a Gateway are as follows:
94 | 95 | ```bash 96 | https://arweave.dev/graphql 97 | ``` 98 | -------------------------------------------------------------------------------- /src/database/transaction.database.ts: -------------------------------------------------------------------------------- 1 | import {config} from 'dotenv'; 2 | import {DataItemJson} from 'arweave-bundles'; 3 | import {pick} from 'lodash'; 4 | import {TransactionType, tagValue} from '../query/transaction.query'; 5 | import {fromB64Url, sha256B64Url} from '../utility/encoding.utility'; 6 | 7 | config(); 8 | 9 | export const indices = JSON.parse(process.env.INDICES || '[]'); 10 | 11 | export interface ANSTransaction { 12 | id: string; 13 | owner: string; 14 | content_type: string; 15 | target: string; 16 | tags: string; 17 | } 18 | 19 | export interface DatabaseTag { 20 | tx_id: string; 21 | index: number; 22 | name: string | undefined; 23 | value: string | undefined; 24 | } 25 | 26 | export const transactionFields = [ 27 | 'format', 28 | 'id', 29 | 'signature', 30 | 'owner', 31 | 'owner_address', 32 | 'target', 33 | 'reward', 34 | 'last_tx', 35 | 'height', 36 | 'tags', 37 | 'quantity', 38 | 'content_type', 39 | 'data_size', 40 | 'data_root', 41 | ]; 42 | 43 | export function formatTransaction(transaction: TransactionType) { 44 | const indexFields: any = {}; 45 | 46 | for (let i = 0; i < indices.length; i++) { 47 | const index = indices[i]; 48 | const value = tagValue(transaction.tags, index); 49 | 50 | if (value) { 51 | indexFields[index] = value; 52 | } 53 | } 54 | 55 | return pick( 56 | { 57 | ...transaction, 58 | ...indexFields, 59 | content_type: tagValue(transaction.tags, 'content-type'), 60 | format: transaction.format || 0, 61 | data_size: transaction.data_size || transaction.data ? 
fromB64Url(transaction.data).byteLength : undefined, 62 | tags: JSON.stringify(transaction.tags), 63 | owner_address: sha256B64Url(fromB64Url(transaction.owner)), 64 | }, 65 | transactionFields.concat(indices), 66 | ); 67 | } 68 | 69 | export function formatAnsTransaction(ansTransaction: DataItemJson) { 70 | const indexFields: any = {}; 71 | 72 | for (let i = 0; i < indices.length; i++) { 73 | const index = indices[i]; 74 | const value = tagValue(ansTransaction.tags, index); 75 | 76 | if (value) { 77 | indexFields[index] = value; 78 | } 79 | } 80 | 81 | return pick( 82 | { 83 | ...indexFields, 84 | id: ansTransaction.id, 85 | owner: ansTransaction.owner, 86 | content_type: 'ANS-102', 87 | target: ansTransaction.target, 88 | tags: JSON.stringify(ansTransaction.tags), 89 | }, 90 | transactionFields.concat(indices), 91 | ); 92 | } 93 | -------------------------------------------------------------------------------- /src/query/block.query.ts: -------------------------------------------------------------------------------- 1 | import {get} from 'superagent'; 2 | import {grabNode} from './node.query'; 3 | 4 | export interface BlockType { 5 | nonce: string; 6 | previous_block: string; 7 | timestamp: number; 8 | last_retarget: number; 9 | diff: string; 10 | height: number; 11 | hash: string; 12 | indep_hash: string; 13 | txs: Array; 14 | tx_root: string; 15 | tx_tree: Array; 16 | wallet_list: string; 17 | reward_addr: string; 18 | tags: Array; 19 | reward_pool: number; 20 | weave_size: number; 21 | block_size: number; 22 | cumulative_diff: string; 23 | hash_list_merkle: string; 24 | poa: { 25 | option: string; 26 | tx_path: string; 27 | chunk: string; 28 | }; 29 | } 30 | 31 | export async function block(height: number): Promise { 32 | const payload = await get(`${grabNode()}/block/height/${height}`); 33 | const body = JSON.parse(payload.text); 34 | 35 | return { 36 | nonce: body.nonce, 37 | previous_block: body.previous_block, 38 | timestamp: body.timestamp, 39 | 
last_retarget: body.last_retarget, 40 | diff: body.diff, 41 | height: body.height, 42 | hash: body.hash, 43 | indep_hash: body.indep_hash, 44 | txs: body.txs, 45 | tx_root: body.tx_root, 46 | tx_tree: body.tx_tree, 47 | wallet_list: body.wallet_list, 48 | reward_addr: body.reward_addr, 49 | tags: body.tags, 50 | reward_pool: body.reward_pool, 51 | weave_size: body.weave_size, 52 | block_size: body.block_size, 53 | cumulative_diff: body.cumulative_diff, 54 | hash_list_merkle: body.hash_list_merkle, 55 | poa: { 56 | option: body.poa?.option, 57 | tx_path: body.poa?.tx_path, 58 | chunk: body.poa?.chunk, 59 | }, 60 | }; 61 | } 62 | 63 | export async function currentBlock(): Promise { 64 | const payload = await get(`${grabNode()}/block/current`); 65 | const body = JSON.parse(payload.text); 66 | 67 | return { 68 | nonce: body.nonce, 69 | previous_block: body.previous_block, 70 | timestamp: body.timestamp, 71 | last_retarget: body.last_retarget, 72 | diff: body.diff, 73 | height: body.height, 74 | hash: body.hash, 75 | indep_hash: body.indep_hash, 76 | txs: body.txs, 77 | tx_root: body.tx_root, 78 | tx_tree: body.tx_tree, 79 | wallet_list: body.wallet_list, 80 | reward_addr: body.reward_addr, 81 | tags: body.tags, 82 | reward_pool: body.reward_pool, 83 | weave_size: body.weave_size, 84 | block_size: body.block_size, 85 | cumulative_diff: body.cumulative_diff, 86 | hash_list_merkle: body.hash_list_merkle, 87 | poa: { 88 | option: body.poa.option, 89 | tx_path: body.poa.tx_path, 90 | chunk: body.poa.chunk, 91 | }, 92 | }; 93 | } 94 | -------------------------------------------------------------------------------- /src/database/batch.database.ts: -------------------------------------------------------------------------------- 1 | import {DataItemJson} from 'arweave-bundles'; 2 | import {Transaction, QueryBuilder} from 'knex'; 3 | import {connection} from './connection.database'; 4 | import {createBatchItem, createBlockBatchItem, createTransactionBatchItem, 
createTagBatchItem} from './knex.batch.database'; 5 | import {utf8DecodeTag} from '../utility/encoding.utility'; 6 | import {ansBundles} from '../utility/ans.utility'; 7 | import {BlockType} from '../query/block.query'; 8 | import {getData} from '../query/node.query'; 9 | import {transaction, TransactionType, tagValue} from '../query/transaction.query'; 10 | import {DatabaseTag, ANSTransaction, formatAnsTransaction} from './transaction.database'; 11 | 12 | export function processTransaction(batchScope: Transaction, payload: TransactionType): QueryBuilder[] { 13 | const batch: QueryBuilder[] = []; 14 | 15 | batch.push(createTransactionBatchItem(batchScope, payload)); 16 | 17 | const id = payload.id; 18 | const tags = payload.tags; 19 | 20 | for (let i = 0; i < tags.length; i++) { 21 | const tag = tags[i]; 22 | const {name, value} = utf8DecodeTag(tag); 23 | 24 | const formattedTag: DatabaseTag = { 25 | tx_id: id, 26 | index: i, 27 | name: name || '', 28 | value: value || '', 29 | }; 30 | 31 | batch.push(createTagBatchItem(batchScope, formattedTag)); 32 | } 33 | 34 | return batch; 35 | } 36 | 37 | export async function processANSTransaction(batchScope: Transaction, ansTxs: DataItemJson[]): Promise { 38 | const batch: QueryBuilder[] = []; 39 | 40 | for (let i = 0; i < ansTxs.length; i++) { 41 | const ansTx = ansTxs[i]; 42 | const formattedAnsTx = formatAnsTransaction(ansTx); 43 | 44 | batch.push(createBatchItem(batchScope, 'transactions', formattedAnsTx)); 45 | 46 | const ansTags = ansTx.tags; 47 | 48 | for (let ii = 0; ii < ansTags.length; ii++) { 49 | const ansTag = ansTags[ii]; 50 | const {name, value} = await ansBundles.decodeTag(ansTag); 51 | 52 | const formattedTag: DatabaseTag = { 53 | tx_id: ansTx.id, 54 | index: ii, 55 | name: name || '', 56 | value: value || '', 57 | }; 58 | 59 | batch.push(createTagBatchItem(batchScope, formattedTag)); 60 | } 61 | } 62 | 63 | return batch; 64 | } 65 | 66 | export async function storeBlock(block: BlockType) { 67 | return 
await connection.transaction(async (batchScope) => { 68 | let batch = []; 69 | 70 | batch.push(createBlockBatchItem(batchScope, block)); 71 | 72 | for (let i = 0; i < block.txs.length; i++) { 73 | const tx = block.txs[i]; 74 | const payload = await transaction(tx); 75 | 76 | batch = batch.concat(processTransaction(batchScope, payload)); 77 | 78 | const ans102 = tagValue(payload.tags, 'Bundle-Type') === 'ANS-102'; 79 | 80 | if (ans102) { 81 | try { 82 | const ansPayload = await getData(payload.id); 83 | const ansTxs = await ansBundles.unbundleData(ansPayload); 84 | 85 | batch = batch.concat(await processANSTransaction(batchScope, ansTxs)); 86 | } catch (error) { 87 | console.error(error); 88 | } 89 | } 90 | } 91 | 92 | try { 93 | await Promise.all(batch); 94 | } catch (error) { 95 | console.error(error); 96 | } 97 | 98 | return true; 99 | }); 100 | } 101 | -------------------------------------------------------------------------------- /DEV.md: -------------------------------------------------------------------------------- 1 | # Gateway Development Guide 2 | 3 | If you want to develop and contribute to the Gateway source code, use this guide as a reference for development and starting a server. If you're looking to deploy a Gateway. We suggest using the normal guide found [here.](./README.md) 4 | 5 | ## Requirements 6 | 7 | 1. A Unix OS 8 | 9 | 2. Postgres v10+ 10 | 11 | 3. Node.js v12.20.1 12 | 13 | ## Node Version 14 | 15 | *Please note there may be some problems with Node v14 LTS or later. If necessary run.* 16 | 17 | ```bash 18 | # Install Node.js v12 LTS 19 | nvm install 12 20 | # Or just use v12 LTS if already installed 21 | nvm use 12 22 | ``` 23 | 24 | ## Configuring Postgres 25 | 26 | Before you begin, you'll need to create and configure the Database and User. 
If at any point you want to increase the parallelization level of block synchronization, change the `PARALLEL` variable.
Assuming everything above went smoothly, you can now run:
ISO8601DateTimeString; 29 | sortOrder?: TxSortOrder; 30 | status?: 'any' | 'confirmed' | 'pending'; 31 | pendingMinutes?: number; 32 | minHeight?: number; 33 | maxHeight?: number; 34 | } 35 | 36 | export async function generateQuery(params: QueryParams): Promise { 37 | const {to, from, tags, id, ids, status, select} = params; 38 | const {limit = 10, blocks = false, sortOrder = 'HEIGHT_DESC'} = params; 39 | const {offset = 0, minHeight = -1, maxHeight = -1} = params; 40 | 41 | const query = connection 42 | .queryBuilder() 43 | .select(select || {id: 'transactions.id', height: 'transactions.height', tags: 'transactions.tags'}) 44 | .from('transactions'); 45 | 46 | if (id) { 47 | query.where('transactions.id', id); 48 | } 49 | 50 | if (ids) { 51 | query.whereIn('transactions.id', ids); 52 | } 53 | 54 | if (blocks) { 55 | query.leftJoin('blocks', 'transactions.height', 'blocks.height'); 56 | } 57 | 58 | if (status === 'confirmed') { 59 | query.whereNotNull('transactions.height'); 60 | } 61 | 62 | if (to) { 63 | query.whereIn('transactions.target', to); 64 | } 65 | 66 | if (from) { 67 | query.whereIn('transactions.owner_address', from); 68 | } 69 | 70 | if (tags) { 71 | for (let i = 0; i < tags.length; i++) { 72 | const tag = tags[i]; 73 | const tagAlias = `${i}_${i}`; 74 | let indexed = false; 75 | 76 | for (let ii = 0; ii < indices.length; ii++) { 77 | const index = indices[ii]; 78 | 79 | if (tag.name === index) { 80 | indexed = true; 81 | 82 | if (tag.op === 'EQ') { 83 | query.whereIn(`transactions.${index}`, tag.values); 84 | } 85 | 86 | if (tag.op === 'NEQ') { 87 | query.whereNotIn(`transactions.${index}`, tag.values); 88 | } 89 | } 90 | } 91 | 92 | if (indexed === false) { 93 | query.join(`tags as ${tagAlias}`, (join) => { 94 | join.on('transactions.id', `${tagAlias}.tx_id`); 95 | 96 | join.andOnIn(`${tagAlias}.name`, [tag.name]); 97 | 98 | if (tag.op === 'EQ') { 99 | join.andOnIn(`${tagAlias}.value`, tag.values); 100 | } 101 | 102 | if (tag.op === 'NEQ') { 103 | 
join.andOnNotIn(`${tagAlias}.value`, tag.values); 104 | } 105 | }); 106 | } 107 | } 108 | } 109 | 110 | if (minHeight >= 0) { 111 | query.where('transactions.height', '>=', minHeight); 112 | } 113 | 114 | if (maxHeight >= 0) { 115 | query.where('transactions.height', '<=', maxHeight); 116 | } 117 | 118 | query.limit(limit).offset(offset); 119 | 120 | if (Object.keys(orderByClauses).includes(sortOrder)) { 121 | query.orderByRaw(orderByClauses[sortOrder]); 122 | } 123 | 124 | return query; 125 | } 126 | -------------------------------------------------------------------------------- /src/graphql/resolver.graphql.ts: -------------------------------------------------------------------------------- 1 | import moment from 'moment'; 2 | import {IResolvers} from 'apollo-server-express'; 3 | import {QueryTransactionsArgs} from './types'; 4 | import {ISO8601DateTimeString, winstonToAr, utf8DecodeTag} from '../utility/encoding.utility'; 5 | import {TransactionHeader} from '../types/arweave.types'; 6 | import {QueryParams, generateQuery} from './query.graphql'; 7 | 8 | type Resolvers = IResolvers; 9 | 10 | const DEFAULT_PAGE_SIZE = 10; 11 | const MAX_PAGE_SIZE = 100; 12 | 13 | const fieldMap = { 14 | id: 'transactions.id', 15 | anchor: 'transactions.last_tx', 16 | recipient: 'transactions.target', 17 | tags: 'transactions.tags', 18 | fee: 'transactions.reward', 19 | quantity: 'transactions.quantity', 20 | data_size: 'transactions.data_size', 21 | data_type: 'transactions.content_type', 22 | parent: 'transactions.parent', 23 | owner: 'transactions.owner', 24 | owner_address: 'transactions.owner_address', 25 | signature: 'transactions.signature', 26 | block_id: 'blocks.id', 27 | block_timestamp: 'blocks.mined_at', 28 | block_height: 'blocks.height', 29 | block_previous: 'blocks.previous_block', 30 | }; 31 | 32 | export const resolvers: Resolvers = { 33 | Query: { 34 | transaction: async (parent, queryParams, {req, connection}) => { 35 | req.log.info('[graphql/v2] 
transaction/request', queryParams); 36 | 37 | const params: QueryParams = { 38 | id: queryParams.id, 39 | blocks: true, 40 | select: fieldMap, 41 | }; 42 | 43 | const result = (await generateQuery(params)).first(); 44 | 45 | return await result as TransactionHeader; 46 | }, 47 | transactions: async (parent, queryParams: QueryTransactionsArgs, {req, connection}, info) => { 48 | const {timestamp, offset} = parseCursor(queryParams.after || newCursor()); 49 | const pageSize = Math.min(queryParams.first || DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE); 50 | 51 | const params: QueryParams = { 52 | limit: pageSize + 1, 53 | offset: offset, 54 | ids: queryParams.ids || undefined, 55 | to: queryParams.recipients || undefined, 56 | from: queryParams.owners || undefined, 57 | tags: queryParams.tags || undefined, 58 | blocks: true, 59 | since: timestamp, 60 | select: fieldMap, 61 | minHeight: queryParams.block?.min || undefined, 62 | maxHeight: queryParams.block?.max || undefined, 63 | sortOrder: queryParams.sort || undefined, 64 | }; 65 | 66 | const results = (await generateQuery(params)) as TransactionHeader[]; 67 | const hasNextPage = results.length > pageSize; 68 | 69 | return { 70 | pageInfo: { 71 | hasNextPage, 72 | }, 73 | edges: async () => { 74 | return results.slice(0, pageSize).map((result: any, index) => { 75 | return { 76 | cursor: encodeCursor({timestamp, offset: offset + index + 1}), 77 | node: result, 78 | }; 79 | }); 80 | }, 81 | }; 82 | }, 83 | }, 84 | Transaction: { 85 | tags: (parent) => { 86 | return parent.tags.map(utf8DecodeTag); 87 | }, 88 | recipient: (parent) => { 89 | return parent.recipient.trim(); 90 | }, 91 | data: (parent) => { 92 | return { 93 | size: parent.data_size || 0, 94 | type: parent.data_type, 95 | }; 96 | }, 97 | quantity: (parent) => { 98 | return { 99 | ar: winstonToAr(parent.quantity || 0), 100 | winston: parent.quantity || 0, 101 | }; 102 | }, 103 | fee: (parent) => { 104 | return { 105 | ar: winstonToAr(parent.fee || 0), 106 | winston: 
parent.fee || 0, 107 | }; 108 | }, 109 | block: (parent) => { 110 | if (parent.block_id) { 111 | return { 112 | id: parent.block_id, 113 | previous: parent.block_previous, 114 | timestamp: moment(parent.block_timestamp).unix(), 115 | height: parent.block_height, 116 | }; 117 | } 118 | }, 119 | owner: (parent) => { 120 | return { 121 | address: parent.owner_address, 122 | key: parent.owner, 123 | }; 124 | }, 125 | parent: (parent) => { 126 | if (parent.parent) { 127 | return { 128 | id: parent.parent, 129 | }; 130 | } 131 | }, 132 | }, 133 | }; 134 | 135 | export interface Cursor { 136 | timestamp: ISO8601DateTimeString; 137 | offset: number; 138 | } 139 | 140 | export const newCursor = (): string => encodeCursor({timestamp: moment().toISOString(), offset: 0}); 141 | 142 | export const encodeCursor = ({timestamp, offset}: Cursor): string => { 143 | const string = JSON.stringify([timestamp, offset]); 144 | return Buffer.from(string).toString('base64'); 145 | }; 146 | 147 | export const parseCursor = (cursor: string): Cursor => { 148 | try { 149 | const [timestamp, offset] = JSON.parse(Buffer.from(cursor, 'base64').toString()) as [ISO8601DateTimeString, number]; 150 | return {timestamp, offset}; 151 | } catch (error) { 152 | throw new Error('invalid cursor'); 153 | } 154 | }; 155 | -------------------------------------------------------------------------------- /src/utility/encoding.utility.ts: -------------------------------------------------------------------------------- 1 | import Ar from 'arweave/node/ar'; 2 | import {base32} from 'rfc4648'; 3 | import {createHash} from 'crypto'; 4 | import {Readable, PassThrough, Transform} from 'stream'; 5 | import {Tag} from '../types/arweave.types'; 6 | 7 | const ar = new Ar(); 8 | 9 | export type Base64EncodedString = string; 10 | export type Base64UrlEncodedString = string; 11 | export type WinstonString = string; 12 | export type ArString = string; 13 | export type ISO8601DateTimeString = string; 14 | 15 | export class 
Base64DUrlecode extends Transform { 16 | protected extra: string; 17 | protected bytesProcessed: number; 18 | 19 | constructor() { 20 | super({decodeStrings: false, objectMode: false}); 21 | this.extra = ''; 22 | this.bytesProcessed = 0; 23 | } 24 | 25 | _transform(chunk: Buffer, encoding: any, cb: Function) { 26 | const conbinedChunk = 27 | this.extra + 28 | chunk 29 | .toString('base64') 30 | .replace(/-/g, '+') 31 | .replace(/_/g, '/') 32 | .replace(/(\r\n|\n|\r)/gm, ''); 33 | 34 | this.bytesProcessed += chunk.byteLength; 35 | 36 | const remaining = chunk.length % 4; 37 | 38 | this.extra = conbinedChunk.slice(chunk.length - remaining); 39 | 40 | const buf = Buffer.from( 41 | conbinedChunk.slice(0, chunk.length - remaining), 42 | 'base64', 43 | ); 44 | this.push(buf); 45 | cb(); 46 | } 47 | 48 | _flush(cb: Function) { 49 | if (this.extra.length) { 50 | this.push(Buffer.from(this.extra, 'base64')); 51 | } 52 | 53 | cb(); 54 | } 55 | } 56 | 57 | export const sha256 = (buffer: Buffer): Buffer => { 58 | return createHash('sha256').update(buffer).digest(); 59 | }; 60 | 61 | export function toB64url(buffer: Buffer): Base64UrlEncodedString { 62 | return buffer 63 | .toString('base64') 64 | .replace(/\+/g, '-') 65 | .replace(/\//g, '_') 66 | .replace(/=/g, ''); 67 | } 68 | 69 | export function fromB64Url(input: Base64UrlEncodedString): Buffer { 70 | const paddingLength = input.length % 4 === 0 ? 
0 : 4 - (input.length % 4); 71 | 72 | const base64 = input 73 | .replace(/-/g, '+') 74 | .replace(/_/g, '/') 75 | .concat('='.repeat(paddingLength)); 76 | 77 | return Buffer.from(base64, 'base64'); 78 | } 79 | 80 | export function fromB32(input: string): Buffer { 81 | return Buffer.from( 82 | base32.parse(input, { 83 | loose: true, 84 | }), 85 | ); 86 | } 87 | 88 | export function toB32(input: Buffer): string { 89 | return base32.stringify(input, {pad: false}).toLowerCase(); 90 | } 91 | 92 | export function sha256B64Url(input: Buffer): string { 93 | return toB64url(createHash('sha256').update(input).digest()); 94 | } 95 | 96 | export const streamToBuffer = async (stream: Readable): Promise => { 97 | let buffer = Buffer.alloc(0); 98 | return new Promise((resolve, reject) => { 99 | stream.on('data', (chunk: Buffer) => { 100 | buffer = Buffer.concat([buffer, chunk]); 101 | }); 102 | 103 | stream.on('end', () => { 104 | resolve(buffer); 105 | }); 106 | }); 107 | }; 108 | 109 | export const streamToString = async (stream: Readable): Promise => { 110 | return (await streamToBuffer(stream)).toString('utf-8'); 111 | }; 112 | 113 | export const bufferToJson = (input: Buffer): T => { 114 | return JSON.parse(input.toString('utf8')); 115 | }; 116 | 117 | export const jsonToBuffer = (input: object): Buffer => { 118 | return Buffer.from(JSON.stringify(input)); 119 | }; 120 | 121 | export const streamToJson = async (input: Readable): Promise => { 122 | return bufferToJson(await streamToBuffer(input)); 123 | }; 124 | 125 | export const isValidUTF8 = function(buffer: Buffer) { 126 | return Buffer.compare(Buffer.from(buffer.toString(), 'utf8'), buffer) === 0; 127 | }; 128 | 129 | export const streamDecoderb64url = (readable: Readable): Readable => { 130 | const outputStream = new PassThrough({objectMode: false}); 131 | 132 | const decoder = new Base64DUrlecode(); 133 | 134 | readable.pipe(decoder).pipe(outputStream); 135 | 136 | return outputStream; 137 | }; 138 | export const 
bufferToStream = (buffer: Buffer) => { 139 | return new Readable({ 140 | objectMode: false, 141 | read() { 142 | this.push(buffer); 143 | this.push(null); 144 | }, 145 | }); 146 | }; 147 | 148 | export const winstonToAr = (amount: string) => { 149 | return ar.winstonToAr(amount); 150 | }; 151 | 152 | export const arToWinston = (amount: string) => { 153 | return ar.arToWinston(amount); 154 | }; 155 | 156 | export const utf8DecodeTag = ( 157 | tag: Tag, 158 | ): { name: string | undefined; value: string | undefined } => { 159 | let name; 160 | let value; 161 | try { 162 | const nameBuffer = fromB64Url(tag.name); 163 | if (isValidUTF8(nameBuffer)) { 164 | name = nameBuffer.toString('utf8'); 165 | } 166 | const valueBuffer = fromB64Url(tag.value); 167 | if (isValidUTF8(valueBuffer)) { 168 | value = valueBuffer.toString('utf8'); 169 | } 170 | } catch (error) {} 171 | return { 172 | name, 173 | value, 174 | }; 175 | }; 176 | -------------------------------------------------------------------------------- /bin/wait.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Use this script to test if a given TCP host/port are available 3 | 4 | WAITFORIT_cmdname=${0##*/} 5 | 6 | echoerr() { if [[ $WAITFORIT_QUIET -ne 1 ]]; then echo "$@" 1>&2; fi } 7 | 8 | usage() 9 | { 10 | cat << USAGE >&2 11 | Usage: 12 | $WAITFORIT_cmdname host:port [-s] [-t timeout] [-- command args] 13 | -h HOST | --host=HOST Host or IP under test 14 | -p PORT | --port=PORT TCP port under test 15 | Alternatively, you specify the host and port as host:port 16 | -s | --strict Only execute subcommand if the test succeeds 17 | -q | --quiet Don't output any status messages 18 | -t TIMEOUT | --timeout=TIMEOUT 19 | Timeout in seconds, zero for no timeout 20 | -- COMMAND ARGS Execute command with args after the test finishes 21 | USAGE 22 | exit 1 23 | } 24 | 25 | wait_for() 26 | { 27 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 28 | echoerr 
"$WAITFORIT_cmdname: waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 29 | else 30 | echoerr "$WAITFORIT_cmdname: waiting for $WAITFORIT_HOST:$WAITFORIT_PORT without a timeout" 31 | fi 32 | WAITFORIT_start_ts=$(date +%s) 33 | while : 34 | do 35 | if [[ $WAITFORIT_ISBUSY -eq 1 ]]; then 36 | nc -z $WAITFORIT_HOST $WAITFORIT_PORT 37 | WAITFORIT_result=$? 38 | else 39 | (echo -n > /dev/tcp/$WAITFORIT_HOST/$WAITFORIT_PORT) >/dev/null 2>&1 40 | WAITFORIT_result=$? 41 | fi 42 | if [[ $WAITFORIT_result -eq 0 ]]; then 43 | WAITFORIT_end_ts=$(date +%s) 44 | echoerr "$WAITFORIT_cmdname: $WAITFORIT_HOST:$WAITFORIT_PORT is available after $((WAITFORIT_end_ts - WAITFORIT_start_ts)) seconds" 45 | break 46 | fi 47 | sleep 1 48 | done 49 | return $WAITFORIT_result 50 | } 51 | 52 | wait_for_wrapper() 53 | { 54 | # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692 55 | if [[ $WAITFORIT_QUIET -eq 1 ]]; then 56 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --quiet --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & 57 | else 58 | timeout $WAITFORIT_BUSYTIMEFLAG $WAITFORIT_TIMEOUT $0 --child --host=$WAITFORIT_HOST --port=$WAITFORIT_PORT --timeout=$WAITFORIT_TIMEOUT & 59 | fi 60 | WAITFORIT_PID=$! 61 | trap "kill -INT -$WAITFORIT_PID" INT 62 | wait $WAITFORIT_PID 63 | WAITFORIT_RESULT=$? 
64 | if [[ $WAITFORIT_RESULT -ne 0 ]]; then 65 | echoerr "$WAITFORIT_cmdname: timeout occurred after waiting $WAITFORIT_TIMEOUT seconds for $WAITFORIT_HOST:$WAITFORIT_PORT" 66 | fi 67 | return $WAITFORIT_RESULT 68 | } 69 | 70 | # process arguments 71 | while [[ $# -gt 0 ]] 72 | do 73 | case "$1" in 74 | *:* ) 75 | WAITFORIT_hostport=(${1//:/ }) 76 | WAITFORIT_HOST=${WAITFORIT_hostport[0]} 77 | WAITFORIT_PORT=${WAITFORIT_hostport[1]} 78 | shift 1 79 | ;; 80 | --child) 81 | WAITFORIT_CHILD=1 82 | shift 1 83 | ;; 84 | -q | --quiet) 85 | WAITFORIT_QUIET=1 86 | shift 1 87 | ;; 88 | -s | --strict) 89 | WAITFORIT_STRICT=1 90 | shift 1 91 | ;; 92 | -h) 93 | WAITFORIT_HOST="$2" 94 | if [[ $WAITFORIT_HOST == "" ]]; then break; fi 95 | shift 2 96 | ;; 97 | --host=*) 98 | WAITFORIT_HOST="${1#*=}" 99 | shift 1 100 | ;; 101 | -p) 102 | WAITFORIT_PORT="$2" 103 | if [[ $WAITFORIT_PORT == "" ]]; then break; fi 104 | shift 2 105 | ;; 106 | --port=*) 107 | WAITFORIT_PORT="${1#*=}" 108 | shift 1 109 | ;; 110 | -t) 111 | WAITFORIT_TIMEOUT="$2" 112 | if [[ $WAITFORIT_TIMEOUT == "" ]]; then break; fi 113 | shift 2 114 | ;; 115 | --timeout=*) 116 | WAITFORIT_TIMEOUT="${1#*=}" 117 | shift 1 118 | ;; 119 | --) 120 | shift 121 | WAITFORIT_CLI=("$@") 122 | break 123 | ;; 124 | --help) 125 | usage 126 | ;; 127 | *) 128 | echoerr "Unknown argument: $1" 129 | usage 130 | ;; 131 | esac 132 | done 133 | 134 | if [[ "$WAITFORIT_HOST" == "" || "$WAITFORIT_PORT" == "" ]]; then 135 | echoerr "Error: you need to provide a host and port to test." 136 | usage 137 | fi 138 | 139 | WAITFORIT_TIMEOUT=${WAITFORIT_TIMEOUT:-15} 140 | WAITFORIT_STRICT=${WAITFORIT_STRICT:-0} 141 | WAITFORIT_CHILD=${WAITFORIT_CHILD:-0} 142 | WAITFORIT_QUIET=${WAITFORIT_QUIET:-0} 143 | 144 | # Check to see if timeout is from busybox? 
145 | WAITFORIT_TIMEOUT_PATH=$(type -p timeout) 146 | WAITFORIT_TIMEOUT_PATH=$(realpath $WAITFORIT_TIMEOUT_PATH 2>/dev/null || readlink -f $WAITFORIT_TIMEOUT_PATH) 147 | 148 | WAITFORIT_BUSYTIMEFLAG="" 149 | if [[ $WAITFORIT_TIMEOUT_PATH =~ "busybox" ]]; then 150 | WAITFORIT_ISBUSY=1 151 | # Check if busybox timeout uses -t flag 152 | # (recent Alpine versions don't support -t anymore) 153 | if timeout &>/dev/stdout | grep -q -e '-t '; then 154 | WAITFORIT_BUSYTIMEFLAG="-t" 155 | fi 156 | else 157 | WAITFORIT_ISBUSY=0 158 | fi 159 | 160 | if [[ $WAITFORIT_CHILD -gt 0 ]]; then 161 | wait_for 162 | WAITFORIT_RESULT=$? 163 | exit $WAITFORIT_RESULT 164 | else 165 | if [[ $WAITFORIT_TIMEOUT -gt 0 ]]; then 166 | wait_for_wrapper 167 | WAITFORIT_RESULT=$? 168 | else 169 | wait_for 170 | WAITFORIT_RESULT=$? 171 | fi 172 | fi 173 | 174 | if [[ $WAITFORIT_CLI != "" ]]; then 175 | if [[ $WAITFORIT_RESULT -ne 0 && $WAITFORIT_STRICT -eq 1 ]]; then 176 | echoerr "$WAITFORIT_cmdname: strict mode, refusing to execute subprocess" 177 | exit $WAITFORIT_RESULT 178 | fi 179 | exec "${WAITFORIT_CLI[@]}" 180 | else 181 | exit $WAITFORIT_RESULT 182 | fi -------------------------------------------------------------------------------- /types.graphql: -------------------------------------------------------------------------------- 1 | type Query { 2 | """ 3 | Get a transaction by its id 4 | """ 5 | transaction(id: ID!): Transaction 6 | """ 7 | Get a paginated set of matching transactions using filters. 8 | """ 9 | transactions( 10 | """ 11 | Find transactions from a list of ids. 12 | """ 13 | ids: [ID!] 14 | """ 15 | Find transactions from a list of owner wallet addresses, or wallet owner public keys. 16 | """ 17 | owners: [String!] 18 | """ 19 | Find transactions from a list of recipient wallet addresses. 20 | """ 21 | recipients: [String!] 22 | """ 23 | Find transactions using tags. 24 | """ 25 | tags: [TagFilter!] 26 | """ 27 | Only match the first n transactions. 
28 | """ 29 | first: Int = 10 30 | """ 31 | A pagination cursor value, for fetching subsequent pages from a result set. 32 | """ 33 | after: String 34 | """ 35 | Find transactions within the given block range 36 | """ 37 | block: BlockFilter 38 | """ 39 | Optionally specify the result sort order. 40 | """ 41 | sort: SortOrder = HEIGHT_DESC 42 | ): TransactionConnection! 43 | } 44 | """ 45 | Optionally reverse the result sort order from `HEIGHT_DESC` (default) to `HEIGHT_ASC`. 46 | """ 47 | enum SortOrder { 48 | """ 49 | Results are sorted by the transaction block height in ascending order, with the oldest transactions appearing first, and the most recent and pending/unconfirmed appearing last. 50 | """ 51 | HEIGHT_ASC 52 | """ 53 | Results are sorted by the transaction block height in descending order, with the most recent and unconfirmed/pending transactions appearing first. 54 | """ 55 | HEIGHT_DESC 56 | } 57 | 58 | """ 59 | Find transactions with the following tag name and value 60 | """ 61 | input TagFilter { 62 | """ 63 | The tag name 64 | """ 65 | name: String! 66 | """ 67 | An array of values to match against. If multiple values are passed then transactions with _any_ matching tag value from the set will be returned. 68 | 69 | e.g. 70 | 71 | \`{name: "app-name", values: ["app-1"]}\` 72 | 73 | Returns all transactions where the \`app-name\` tag has a value of \`app-1\`. 74 | 75 | \`{name: "app-name", values: ["app-1", "app-2", "app-3"]}\` 76 | 77 | Returns all transactions where the \`app-name\` tag has a value of either \`app-1\` _or_ \`app-2\` _or_ \`app-3\`. 78 | """ 79 | values: [String!]! 80 | 81 | """ 82 | The operator to apply to the tag filter. Defaults to EQ (equal). 
83 | """ 84 | op: TagOperator = EQ 85 | } 86 | 87 | """ 88 | Find transactions within the given block range 89 | """ 90 | input BlockFilter { 91 | """ 92 | Minimum block height to filter from 93 | """ 94 | min: Int 95 | """ 96 | Maximum block height to filter to 97 | """ 98 | max: Int 99 | } 100 | 101 | """ 102 | Paginated result set using the GraphQL cursor spec, 103 | see: https://relay.dev/graphql/connections.htm. 104 | """ 105 | type TransactionConnection { 106 | pageInfo: PageInfo! 107 | edges: [TransactionEdge!]! 108 | } 109 | 110 | """ 111 | Paginated result set using the GraphQL cursor spec. 112 | """ 113 | type TransactionEdge { 114 | """ 115 | The cursor value for fetching the next page. 116 | 117 | Pass this to the \`after\` parameter in \`transactions(after: $cursor)\`, the next page will start from the next item after this. 118 | """ 119 | cursor: String! 120 | """ 121 | A transaction object. 122 | """ 123 | node: Transaction! 124 | } 125 | 126 | """ 127 | Paginated page info using the GraphQL cursor spec. 128 | """ 129 | type PageInfo { 130 | hasNextPage: Boolean! 131 | } 132 | 133 | type Transaction { 134 | id: ID! 135 | 136 | anchor: String! 137 | signature: String! 138 | recipient: String! 139 | 140 | owner: Owner! 141 | fee: Amount! 142 | quantity: Amount! 143 | data: MetaData! 144 | tags: [Tag!]! 145 | """ 146 | Transactions with a null block are recent and unconfirmed, if they aren't mined into a block within 60 minutes they will be removed from results. 147 | """ 148 | block: Block 149 | """ 150 | Transactions with parent are Bundled Data Items as defined in the ANS-102 data spec. https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md 151 | """ 152 | parent: Parent 153 | } 154 | 155 | """ 156 | The parent transaction for bundled transactions, 157 | see: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md. 158 | """ 159 | type Parent { 160 | id: ID! 
161 | } 162 | 163 | """ 164 | The block in which the transaction was included. 165 | """ 166 | type Block { 167 | id: ID! 168 | timestamp: Int! 169 | height: Int! 170 | previous: ID! 171 | } 172 | 173 | """ 174 | Basic metadata about the transaction data payload. 175 | """ 176 | type MetaData { 177 | """ 178 | Size of the associated data in bytes. 179 | """ 180 | size: Int! 181 | """ 182 | Type is derived from the \`content-type\` tag on a transaction. 183 | """ 184 | type: String 185 | } 186 | """ 187 | Representation of a value transfer between wallets, in both winston and ar. 188 | """ 189 | type Amount { 190 | """ 191 | Amount as a winston string e.g. \`"1000000000000"\`. 192 | """ 193 | winston: String! 194 | """ 195 | Amount as an AR string e.g. \`"0.000000000001"\`. 196 | """ 197 | ar: String! 198 | } 199 | 200 | """ 201 | Representation of a transaction owner. 202 | """ 203 | type Owner { 204 | """ 205 | The owner's wallet address. 206 | """ 207 | address: String! 208 | """ 209 | The owner's public key as a base64url encoded string. 210 | """ 211 | key: String! 212 | } 213 | 214 | type Tag { 215 | """ 216 | UTF-8 tag name 217 | """ 218 | name: String! 219 | """ 220 | UTF-8 tag value 221 | """ 222 | value: String! 223 | } 224 | 225 | """ 226 | The operator to apply to a tag value. 
227 | """ 228 | enum TagOperator { 229 | """ 230 | Equal 231 | """ 232 | EQ 233 | """ 234 | Not equal 235 | """ 236 | NEQ 237 | } 238 | 239 | # """ 240 | # Transaction statuses 241 | # """ 242 | # enum Status { 243 | # """ 244 | # Transaction is included in a block 245 | # """ 246 | # CONFIRMED 247 | # """ 248 | # Transaction is not yet included in a block 249 | # """ 250 | # PENDING 251 | # } -------------------------------------------------------------------------------- /src/Snapshot.ts: -------------------------------------------------------------------------------- 1 | import ProgressBar from 'progress'; 2 | import {DataItemJson} from 'arweave-bundles'; 3 | import {config} from 'dotenv'; 4 | import {existsSync, readFileSync, writeFileSync, createWriteStream} from 'fs'; 5 | import {ansBundles} from './utility/ans.utility'; 6 | import {mkdir} from './utility/file.utility'; 7 | import {log} from './utility/log.utility'; 8 | import {sleep} from './utility/sleep.utility'; 9 | import {getNodeInfo, getData} from './query/node.query'; 10 | import {block} from './query/block.query'; 11 | import {transaction, tagValue, Tag} from './query/transaction.query'; 12 | import {formatBlock} from './database/block.database'; 13 | import {transactionFields, DatabaseTag, formatTransaction, formatAnsTransaction} from './database/transaction.database'; 14 | 15 | config(); 16 | mkdir('snapshot'); 17 | 18 | export const indices = JSON.parse(process.env.INDICES || '[]') as Array; 19 | export const parallelization = parseInt(process.env.PARALLEL || '8'); 20 | 21 | export let SIGINT: boolean = false; 22 | export let SIGKILL: boolean = false; 23 | 24 | export let bar: ProgressBar; 25 | export let topHeight = 0; 26 | 27 | export const streams = { 28 | block: createWriteStream('snapshot/block.csv', {flags: 'a'}), 29 | transaction: createWriteStream('snapshot/transaction.csv', {flags: 'a'}), 30 | tags: createWriteStream('snapshot/tags.csv', {flags: 'a'}), 31 | }; 32 | 33 | export function 
configureSnapshotBar(start: number, end: number) { 34 | bar = new ProgressBar( 35 | ':current/:total blocks synced [:bar] :percent :etas', 36 | { 37 | complete: '|', 38 | incomplete: ' ', 39 | total: end - start, 40 | }, 41 | ); 42 | } 43 | 44 | export async function snapshot() { 45 | if (existsSync('.snapshot')) { 46 | log.info('[snapshot] existing snapshot state found'); 47 | const snapshotState = parseInt(readFileSync('.snapshot').toString()); 48 | 49 | if (!isNaN(snapshotState)) { 50 | const nodeInfo = await getNodeInfo(); 51 | configureSnapshotBar(snapshotState, nodeInfo.height); 52 | topHeight = nodeInfo.height; 53 | log.info(`[snapshot] snapshot is currently at height ${snapshotState}, resuming sync to ${topHeight}`); 54 | bar.tick(); 55 | await parallelize(snapshotState + 1); 56 | } else { 57 | log.info('[snapshot] snapshot state is malformed. Please make sure it is a number'); 58 | process.exit(); 59 | } 60 | } else { 61 | const nodeInfo = await getNodeInfo(); 62 | configureSnapshotBar(0, nodeInfo.height); 63 | topHeight = nodeInfo.height; 64 | log.info(`[snapshot] new snapshot is being generated, syncing from block 0 to ${topHeight}`); 65 | bar.tick(); 66 | await parallelize(0); 67 | } 68 | } 69 | 70 | export async function parallelize(height: number) { 71 | if (height >= topHeight) { 72 | log.info('[snapshot] fully synced, monitoring for new blocks'); 73 | await sleep(30000); 74 | const nodeInfo = await getNodeInfo(); 75 | if (nodeInfo.height > topHeight) { 76 | log.info(`[snapshot] updated height from ${topHeight} to ${nodeInfo.height} syncing new blocks`); 77 | } 78 | topHeight = nodeInfo.height; 79 | await parallelize(height); 80 | } else { 81 | const batch = []; 82 | 83 | for (let i = height; i < height + parallelization && i < topHeight; i++) { 84 | batch.push(storeBlock(i)); 85 | } 86 | 87 | SIGINT = true; 88 | 89 | await Promise.all(batch); 90 | 91 | if (!bar.complete) { 92 | bar.tick(batch.length); 93 | } 94 | 95 | writeFileSync('.snapshot', 
(height + batch.length).toString()); 96 | writeFileSync('snapshot/.snapshot', (height + batch.length).toString()); 97 | 98 | SIGINT = false; 99 | 100 | if (SIGKILL === false) { 101 | await parallelize(height + batch.length); 102 | } 103 | } 104 | } 105 | 106 | export async function storeBlock(height: number) { 107 | try { 108 | const currentBlock = await block(height); 109 | const fb = formatBlock(currentBlock); 110 | const input = `"${fb.id}","${fb.previous_block}","${fb.mined_at}","${fb.height}","${fb.txs.replace(/"/g, '\\"')}","${fb.extended.replace(/"/g, '\\"')}"\n`; 111 | 112 | streams.block.write(input); 113 | 114 | if (height > 0) { 115 | await storeTransactions(JSON.parse(fb.txs) as Array, height); 116 | } 117 | } catch (error) { 118 | log.info(`[snapshot] could not retrieve block at height ${height}, retrying`); 119 | if (SIGKILL === false) { 120 | await storeBlock(height); 121 | } 122 | } 123 | } 124 | 125 | export async function storeTransactions(txs: Array, height: number) { 126 | const batch = []; 127 | 128 | for (let i = 0; i < txs.length; i++) { 129 | const tx = txs[i]; 130 | batch.push(storeTransaction(tx, height)); 131 | } 132 | 133 | await Promise.all(batch); 134 | } 135 | 136 | export async function storeTransaction(tx: string, height: number) { 137 | try { 138 | const currentTransaction = await transaction(tx); 139 | const ft = formatTransaction(currentTransaction); 140 | const preservedTags = JSON.parse(ft.tags) as Array; 141 | ft.tags = `${ft.tags.replace(/"/g, '\\"')}`; 142 | 143 | const fields = transactionFields 144 | .map((field) => `"${ft[field] ? ft[field] : ''}"`) 145 | .concat(indices.map((ifield) => `"${ft[ifield] ? 
ft[ifield] : ''}"`)); 146 | 147 | const input = `${fields.join(',')}\n`; 148 | 149 | streams.transaction.write(input); 150 | 151 | storeTags(ft.id, preservedTags); 152 | 153 | const ans102 = tagValue(preservedTags, 'Bundle-Type') === 'ANS-102'; 154 | 155 | if (ans102) { 156 | try { 157 | const ansPayload = await getData(ft.id); 158 | const ansTxs = await ansBundles.unbundleData(ansPayload); 159 | 160 | await processANSTransaction(ansTxs); 161 | } catch (error) { 162 | console.log(''); 163 | log.info(`[snapshot] malformed ANS payload at height ${height} for tx ${ft.id}`); 164 | } 165 | } 166 | } catch (error) { 167 | console.log(''); 168 | log.info(`[snapshot] could not retrieve tx ${tx} at height ${height}`); 169 | } 170 | } 171 | 172 | export async function processANSTransaction(ansTxs: Array) { 173 | for (let i = 0; i < ansTxs.length; i++) { 174 | const ansTx = ansTxs[i]; 175 | const ft = formatAnsTransaction(ansTx); 176 | ft.tags = `${ft.tags.replace(/"/g, '\\"')}`; 177 | 178 | const ansTags = ansTx.tags; 179 | 180 | const fields = transactionFields 181 | .map((field) => `"${ft[field] ? ft[field] : ''}"`) 182 | .concat(indices.map((ifield) => `"${ft[ifield] ? 
ft[ifield] : ''}"`)); 183 | 184 | const input = `${fields.join(',')}\n`; 185 | 186 | streams.transaction.write(input); 187 | 188 | for (let ii = 0; ii < ansTags.length; ii++) { 189 | const ansTag = ansTags[ii]; 190 | const {name, value} = ansTag; 191 | 192 | const tag: DatabaseTag = { 193 | tx_id: ansTx.id, 194 | index: ii, 195 | name: name || '', 196 | value: value || '', 197 | }; 198 | 199 | const input = `"${tag.tx_id}","${tag.index}","${tag.name}","${tag.value}"\n`; 200 | 201 | streams.tags.write(input); 202 | } 203 | } 204 | } 205 | 206 | export function storeTags(tx_id: string, tags: Array) { 207 | for (let i = 0; i < tags.length; i++) { 208 | const tag = tags[i]; 209 | 210 | const input = `"${tx_id}","${i}","${tag.name}","${tag.value}"\n`; 211 | 212 | streams.tags.write(input); 213 | } 214 | } 215 | (async () => await snapshot())(); 216 | 217 | process.on('SIGINT', () => { 218 | SIGKILL = true; 219 | setInterval(() => { 220 | if (SIGINT === false) { 221 | streams.block.end(); 222 | streams.transaction.end(); 223 | streams.tags.end(); 224 | process.exit(); 225 | } 226 | }, 100); 227 | }); 228 | -------------------------------------------------------------------------------- /src/database/sync.database.ts: -------------------------------------------------------------------------------- 1 | import ProgressBar from 'progress'; 2 | import {DataItemJson} from 'arweave-bundles'; 3 | import {existsSync, readFileSync, writeFileSync, createWriteStream} from 'fs'; 4 | import {config} from 'dotenv'; 5 | import {log} from '../utility/log.utility'; 6 | import {ansBundles} from '../utility/ans.utility'; 7 | import {mkdir} from '../utility/file.utility'; 8 | import {sleep} from '../utility/sleep.utility'; 9 | import {getNodeInfo} from '../query/node.query'; 10 | import {block} from '../query/block.query'; 11 | import {transaction, tagValue, Tag} from '../query/transaction.query'; 12 | import {getData} from '../query/node.query'; 13 | import {importBlocks, 
importTransactions, importTags} from './import.database'; 14 | import {formatBlock} from '../database/block.database'; 15 | import {transactionFields, DatabaseTag, formatTransaction, formatAnsTransaction} from '../database/transaction.database'; 16 | 17 | config(); 18 | mkdir('snapshot'); 19 | mkdir('cache'); 20 | 21 | export const indices = JSON.parse(process.env.INDICES || '[]') as Array; 22 | export const storeSnapshot = process.env.SNAPSHOT === '1' ? true : false; 23 | export const parallelization = parseInt(process.env.PARALLEL || '8'); 24 | 25 | export let SIGINT: boolean = false; 26 | export let SIGKILL: boolean = false; 27 | export let bar: ProgressBar; 28 | export let topHeight = 0; 29 | 30 | export const streams = { 31 | block: { 32 | snapshot: createWriteStream('snapshot/block.csv', {flags: 'a'}), 33 | cache: createWriteStream('cache/block.csv'), 34 | }, 35 | transaction: { 36 | snapshot: createWriteStream('snapshot/transaction.csv', {flags: 'a'}), 37 | cache: createWriteStream('cache/transaction.csv'), 38 | }, 39 | tags: { 40 | snapshot: createWriteStream('snapshot/tags.csv', {flags: 'a'}), 41 | cache: createWriteStream('cache/tags.csv'), 42 | }, 43 | }; 44 | 45 | export function configureSyncBar(start: number, end: number) { 46 | bar = new ProgressBar( 47 | ':current/:total blocks synced [:bar] :percent :etas', 48 | { 49 | complete: '|', 50 | incomplete: ' ', 51 | total: end - start, 52 | }, 53 | ); 54 | } 55 | 56 | export async function startSync() { 57 | log.info(`[database] starting sync, parallelization is set to ${parallelization}`); 58 | if (storeSnapshot) { 59 | log.info(`[snapshot] also writing new blocks to the snapshot folder`); 60 | } 61 | 62 | if (existsSync('.snapshot')) { 63 | log.info('[database] existing sync state found'); 64 | const state = parseInt(readFileSync('.snapshot').toString()); 65 | 66 | if (!isNaN(state)) { 67 | const nodeInfo = await getNodeInfo(); 68 | configureSyncBar(state, nodeInfo.height); 69 | topHeight = 
nodeInfo.height; 70 | log.info(`[database] database is currently at height ${state}, resuming sync to ${topHeight}`); 71 | bar.tick(); 72 | await parallelize(state + 1); 73 | } else { 74 | log.info('[database] sync state is malformed. Please make sure it is a number'); 75 | process.exit(); 76 | } 77 | } else { 78 | const nodeInfo = await getNodeInfo(); 79 | configureSyncBar(0, nodeInfo.height); 80 | topHeight = nodeInfo.height; 81 | log.info(`[database] syncing from block 0 to ${topHeight}`); 82 | bar.tick(); 83 | await parallelize(0); 84 | } 85 | } 86 | 87 | export async function parallelize(height: number) { 88 | if (height >= topHeight) { 89 | log.info(`[database] fully synced, monitoring for new blocks`); 90 | await sleep(30000); 91 | const nodeInfo = await getNodeInfo(); 92 | if (nodeInfo.height > topHeight) { 93 | log.info(`[database] updated height from ${topHeight} to ${nodeInfo.height} syncing new blocks`) 94 | } 95 | topHeight = nodeInfo.height; 96 | await parallelize(height); 97 | } else { 98 | const batch = []; 99 | 100 | for (let i = height; i < height + parallelization && i < topHeight; i++) { 101 | batch.push(storeBlock(i)); 102 | } 103 | 104 | SIGINT = true; 105 | 106 | await Promise.all(batch); 107 | 108 | await importBlocks(`${process.cwd()}/cache/block.csv`); 109 | await importTransactions(`${process.cwd()}/cache/transaction.csv`); 110 | await importTags(`${process.cwd()}/cache/tags.csv`); 111 | 112 | streams.block.cache = createWriteStream('cache/block.csv'); 113 | streams.transaction.cache = createWriteStream('cache/transaction.csv'); 114 | streams.tags.cache = createWriteStream('cache/tags.csv'); 115 | 116 | if (!bar.complete) { 117 | bar.tick(batch.length); 118 | } 119 | 120 | writeFileSync('.snapshot', (height + batch.length).toString()); 121 | 122 | SIGINT = false; 123 | 124 | if (SIGKILL === false) { 125 | await parallelize(height + batch.length); 126 | } 127 | } 128 | } 129 | 130 | export async function storeBlock(height: number) { 131 | 
try { 132 | const currentBlock = await block(height); 133 | const fb = formatBlock(currentBlock); 134 | const input = `"${fb.id}","${fb.previous_block}","${fb.mined_at}","${fb.height}","${fb.txs.replace(/"/g, '\\"')}","${fb.extended.replace(/"/g, '\\"')}"\n`; 135 | 136 | streams.block.cache.write(input); 137 | 138 | if (storeSnapshot) { 139 | streams.block.snapshot.write(input); 140 | } 141 | 142 | if (height > 0) { 143 | await storeTransactions(JSON.parse(fb.txs) as Array, height); 144 | } 145 | } catch (error) { 146 | log.info(`[snapshot] could not retrieve block at height ${height}, retrying`); 147 | if (SIGKILL === false) { 148 | await storeBlock(height); 149 | } 150 | } 151 | } 152 | 153 | export async function storeTransactions(txs: Array, height: number) { 154 | const batch = []; 155 | 156 | for (let i = 0; i < txs.length; i++) { 157 | const tx = txs[i]; 158 | batch.push(storeTransaction(tx, height)); 159 | } 160 | 161 | await Promise.all(batch); 162 | } 163 | 164 | export async function storeTransaction(tx: string, height: number) { 165 | try { 166 | const currentTransaction = await transaction(tx); 167 | const ft = formatTransaction(currentTransaction); 168 | const preservedTags = JSON.parse(ft.tags) as Array; 169 | ft.tags = `${ft.tags.replace(/"/g, '\\"')}`; 170 | 171 | const fields = transactionFields 172 | .map((field) => `"${ft[field] ? ft[field] : ''}"`) 173 | .concat(indices.map((ifield) => `"${ft[ifield] ? 
ft[ifield] : ''}"`)); 174 | 175 | const input = `${fields.join(',')}\n`; 176 | 177 | streams.transaction.cache.write(input); 178 | 179 | if (storeSnapshot) { 180 | streams.transaction.snapshot.write(input); 181 | } 182 | 183 | storeTags(ft.id, preservedTags); 184 | 185 | const ans102 = tagValue(preservedTags, 'Bundle-Type') === 'ANS-102'; 186 | 187 | if (ans102) { 188 | try { 189 | const ansPayload = await getData(ft.id); 190 | const ansTxs = await ansBundles.unbundleData(ansPayload); 191 | 192 | await processANSTransaction(ansTxs); 193 | } catch (error) { 194 | console.log(''); 195 | log.info(`[database] malformed ANS payload at height ${height} for tx ${ft.id}`); 196 | } 197 | } 198 | } catch (error) { 199 | console.log(''); 200 | log.info(`[database] could not retrieve tx ${tx} at height ${height}`); 201 | } 202 | } 203 | 204 | export async function processANSTransaction(ansTxs: Array) { 205 | for (let i = 0; i < ansTxs.length; i++) { 206 | const ansTx = ansTxs[i]; 207 | const ft = formatAnsTransaction(ansTx); 208 | ft.tags = `${ft.tags.replace(/"/g, '\\"')}`; 209 | 210 | const ansTags = ansTx.tags; 211 | 212 | const fields = transactionFields 213 | .map((field) => `"${ft[field] ? ft[field] : ''}"`) 214 | .concat(indices.map((ifield) => `"${ft[ifield] ? 
ft[ifield] : ''}"`)); 215 | 216 | const input = `${fields.join(',')}\n`; 217 | 218 | streams.transaction.cache.write(input); 219 | 220 | if (storeSnapshot) { 221 | streams.transaction.snapshot.write(input); 222 | } 223 | 224 | for (let ii = 0; ii < ansTags.length; ii++) { 225 | const ansTag = ansTags[ii]; 226 | const {name, value} = ansTag; 227 | 228 | const tag: DatabaseTag = { 229 | tx_id: ansTx.id, 230 | index: ii, 231 | name: name || '', 232 | value: value || '', 233 | }; 234 | 235 | const input = `"${tag.tx_id}","${tag.index}","${tag.name}","${tag.value}"\n`; 236 | 237 | streams.tags.cache.write(input); 238 | 239 | if (storeSnapshot) { 240 | streams.tags.snapshot.write(input); 241 | } 242 | } 243 | } 244 | } 245 | 246 | export function storeTags(tx_id: string, tags: Array) { 247 | for (let i = 0; i < tags.length; i++) { 248 | const tag = tags[i]; 249 | 250 | const input = `"${tx_id}","${i}","${tag.name}","${tag.value}"\n`; 251 | 252 | streams.tags.cache.write(input); 253 | 254 | if (storeSnapshot) { 255 | streams.tags.snapshot.write(input); 256 | } 257 | } 258 | } 259 | 260 | process.on('SIGINT', () => { 261 | log.info('[database] ensuring all blocks are stored before exit, you may see some extra output in console'); 262 | SIGKILL = true; 263 | setInterval(() => { 264 | if (SIGINT === false) { 265 | log.info('[database] block sync state preserved, now exiting'); 266 | process.exit(); 267 | } 268 | }, 100); 269 | }); -------------------------------------------------------------------------------- /src/graphql/types.ts: -------------------------------------------------------------------------------- 1 | import { GraphQLResolveInfo } from 'graphql'; 2 | export type Maybe = T | null; 3 | export type Exact = { [K in keyof T]: T[K] }; 4 | export type RequireFields = { [X in Exclude]?: T[X] } & { [P in K]-?: NonNullable }; 5 | /** All built-in and custom scalars, mapped to their actual values */ 6 | export type Scalars = { 7 | ID: string; 8 | String: string; 9 | 
Boolean: boolean; 10 | Int: number; 11 | Float: number; 12 | }; 13 | 14 | export type Query = { 15 | __typename?: 'Query'; 16 | /** Get a transaction by its id */ 17 | transaction?: Maybe; 18 | /** Get a paginated set of matching transactions using filters. */ 19 | transactions: TransactionConnection; 20 | }; 21 | 22 | 23 | export type QueryTransactionArgs = { 24 | id: Scalars['ID']; 25 | }; 26 | 27 | 28 | export type QueryTransactionsArgs = { 29 | ids?: Maybe>; 30 | owners?: Maybe>; 31 | recipients?: Maybe>; 32 | tags?: Maybe>; 33 | first?: Maybe; 34 | after?: Maybe; 35 | block?: Maybe; 36 | sort?: Maybe; 37 | }; 38 | 39 | /** Optionally reverse the result sort order from `HEIGHT_DESC` (default) to `HEIGHT_ASC`. */ 40 | export enum SortOrder { 41 | /** Results are sorted by the transaction block height in ascending order, with the oldest transactions appearing first, and the most recent and pending/unconfirmed appearing last. */ 42 | HeightAsc = 'HEIGHT_ASC', 43 | /** Results are sorted by the transaction block height in descending order, with the most recent and unconfirmed/pending transactions appearing first. */ 44 | HeightDesc = 'HEIGHT_DESC' 45 | } 46 | 47 | /** Find transactions with the folowing tag name and value */ 48 | export type TagFilter = { 49 | /** The tag name */ 50 | name: Scalars['String']; 51 | /** 52 | * An array of values to match against. If multiple values are passed then transactions with _any_ matching tag value from the set will be returned. 53 | * 54 | * e.g. 55 | * 56 | * \`{name: "app-name", values: ["app-1"]}\` 57 | * 58 | * Returns all transactions where the \`app-name\` tag has a value of \`app-1\`. 59 | * 60 | * \`{name: "app-name", values: ["app-1", "app-2", "app-3"]}\` 61 | * 62 | * Returns all transactions where the \`app-name\` tag has a value of either \`app-1\` _or_ \`app-2\` _or_ \`app-3\`. 63 | */ 64 | values: Array; 65 | /** The operator to apply to to the tag filter. Defaults to EQ (equal). 
*/ 66 | op?: Maybe; 67 | }; 68 | 69 | /** Find transactions within the given block range */ 70 | export type BlockFilter = { 71 | /** Minimum block height to filter from */ 72 | min?: Maybe; 73 | /** Maximum block height to filter to */ 74 | max?: Maybe; 75 | }; 76 | 77 | /** 78 | * Paginated result set using the GraphQL cursor spec, 79 | * see: https://relay.dev/graphql/connections.htm. 80 | */ 81 | export type TransactionConnection = { 82 | __typename?: 'TransactionConnection'; 83 | pageInfo: PageInfo; 84 | edges: Array; 85 | }; 86 | 87 | /** Paginated result set using the GraphQL cursor spec. */ 88 | export type TransactionEdge = { 89 | __typename?: 'TransactionEdge'; 90 | /** 91 | * The cursor value for fetching the next page. 92 | * 93 | * Pass this to the \`after\` parameter in \`transactions(after: $cursor)\`, the next page will start from the next item after this. 94 | */ 95 | cursor: Scalars['String']; 96 | /** A transaction object. */ 97 | node: Transaction; 98 | }; 99 | 100 | /** Paginated page info using the GraphQL cursor spec. */ 101 | export type PageInfo = { 102 | __typename?: 'PageInfo'; 103 | hasNextPage: Scalars['Boolean']; 104 | }; 105 | 106 | export type Transaction = { 107 | __typename?: 'Transaction'; 108 | id: Scalars['ID']; 109 | anchor: Scalars['String']; 110 | signature: Scalars['String']; 111 | recipient: Scalars['String']; 112 | owner: Owner; 113 | fee: Amount; 114 | quantity: Amount; 115 | data: MetaData; 116 | tags: Array; 117 | /** Transactions with a null block are recent and unconfirmed, if they aren't mined into a block within 60 minutes they will be removed from results. */ 118 | block?: Maybe; 119 | /** Transactions with parent are Bundled Data Items as defined in the ANS-102 data spec. 
https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md */ 120 | parent?: Maybe; 121 | }; 122 | 123 | /** 124 | * The parent transaction for bundled transactions, 125 | * see: https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-102.md. 126 | */ 127 | export type Parent = { 128 | __typename?: 'Parent'; 129 | id: Scalars['ID']; 130 | }; 131 | 132 | /** The block in which the transaction was included. */ 133 | export type Block = { 134 | __typename?: 'Block'; 135 | id: Scalars['ID']; 136 | timestamp: Scalars['Int']; 137 | height: Scalars['Int']; 138 | previous: Scalars['ID']; 139 | }; 140 | 141 | /** Basic metadata about the transaction data payload. */ 142 | export type MetaData = { 143 | __typename?: 'MetaData'; 144 | /** Size of the associated data in bytes. */ 145 | size: Scalars['Int']; 146 | /** Type is derrived from the \`content-type\` tag on a transaction. */ 147 | type?: Maybe; 148 | }; 149 | 150 | /** Representation of a value transfer between wallets, in both winson and ar. */ 151 | export type Amount = { 152 | __typename?: 'Amount'; 153 | /** Amount as a winston string e.g. \`"1000000000000"\`. */ 154 | winston: Scalars['String']; 155 | /** Amount as an AR string e.g. \`"0.000000000001"\`. */ 156 | ar: Scalars['String']; 157 | }; 158 | 159 | /** Representation of a transaction owner. */ 160 | export type Owner = { 161 | __typename?: 'Owner'; 162 | /** The owner's wallet address. */ 163 | address: Scalars['String']; 164 | /** The owner's public key as a base64url encoded string. */ 165 | key: Scalars['String']; 166 | }; 167 | 168 | export type Tag = { 169 | __typename?: 'Tag'; 170 | /** UTF-8 tag name */ 171 | name: Scalars['String']; 172 | /** UTF-8 tag value */ 173 | value: Scalars['String']; 174 | }; 175 | 176 | /** The operator to apply to a tag value. 
*/ 177 | export enum TagOperator { 178 | /** Equal */ 179 | Eq = 'EQ', 180 | /** Not equal */ 181 | Neq = 'NEQ' 182 | } 183 | 184 | 185 | 186 | export type ResolverTypeWrapper = Promise | T; 187 | 188 | 189 | export type LegacyStitchingResolver = { 190 | fragment: string; 191 | resolve: ResolverFn; 192 | }; 193 | 194 | export type NewStitchingResolver = { 195 | selectionSet: string; 196 | resolve: ResolverFn; 197 | }; 198 | export type StitchingResolver = LegacyStitchingResolver | NewStitchingResolver; 199 | export type Resolver = 200 | | ResolverFn 201 | | StitchingResolver; 202 | 203 | export type ResolverFn = ( 204 | parent: TParent, 205 | args: TArgs, 206 | context: TContext, 207 | info: GraphQLResolveInfo 208 | ) => Promise | TResult; 209 | 210 | export type SubscriptionSubscribeFn = ( 211 | parent: TParent, 212 | args: TArgs, 213 | context: TContext, 214 | info: GraphQLResolveInfo 215 | ) => AsyncIterator | Promise>; 216 | 217 | export type SubscriptionResolveFn = ( 218 | parent: TParent, 219 | args: TArgs, 220 | context: TContext, 221 | info: GraphQLResolveInfo 222 | ) => TResult | Promise; 223 | 224 | export interface SubscriptionSubscriberObject { 225 | subscribe: SubscriptionSubscribeFn<{ [key in TKey]: TResult }, TParent, TContext, TArgs>; 226 | resolve?: SubscriptionResolveFn; 227 | } 228 | 229 | export interface SubscriptionResolverObject { 230 | subscribe: SubscriptionSubscribeFn; 231 | resolve: SubscriptionResolveFn; 232 | } 233 | 234 | export type SubscriptionObject = 235 | | SubscriptionSubscriberObject 236 | | SubscriptionResolverObject; 237 | 238 | export type SubscriptionResolver = 239 | | ((...args: any[]) => SubscriptionObject) 240 | | SubscriptionObject; 241 | 242 | export type TypeResolveFn = ( 243 | parent: TParent, 244 | context: TContext, 245 | info: GraphQLResolveInfo 246 | ) => Maybe | Promise>; 247 | 248 | export type IsTypeOfResolverFn = (obj: T, info: GraphQLResolveInfo) => boolean | Promise; 249 | 250 | export type NextResolverFn = 
() => Promise; 251 | 252 | export type DirectiveResolverFn = ( 253 | next: NextResolverFn, 254 | parent: TParent, 255 | args: TArgs, 256 | context: TContext, 257 | info: GraphQLResolveInfo 258 | ) => TResult | Promise; 259 | 260 | /** Mapping between all available schema types and the resolvers types */ 261 | export type ResolversTypes = { 262 | Query: ResolverTypeWrapper<{}>; 263 | ID: ResolverTypeWrapper; 264 | String: ResolverTypeWrapper; 265 | Int: ResolverTypeWrapper; 266 | SortOrder: SortOrder; 267 | TagFilter: TagFilter; 268 | BlockFilter: BlockFilter; 269 | TransactionConnection: ResolverTypeWrapper; 270 | TransactionEdge: ResolverTypeWrapper; 271 | PageInfo: ResolverTypeWrapper; 272 | Boolean: ResolverTypeWrapper; 273 | Transaction: ResolverTypeWrapper; 274 | Parent: ResolverTypeWrapper; 275 | Block: ResolverTypeWrapper; 276 | MetaData: ResolverTypeWrapper; 277 | Amount: ResolverTypeWrapper; 278 | Owner: ResolverTypeWrapper; 279 | Tag: ResolverTypeWrapper; 280 | TagOperator: TagOperator; 281 | }; 282 | 283 | /** Mapping between all available schema types and the resolvers parents */ 284 | export type ResolversParentTypes = { 285 | Query: {}; 286 | ID: Scalars['ID']; 287 | String: Scalars['String']; 288 | Int: Scalars['Int']; 289 | TagFilter: TagFilter; 290 | BlockFilter: BlockFilter; 291 | TransactionConnection: TransactionConnection; 292 | TransactionEdge: TransactionEdge; 293 | PageInfo: PageInfo; 294 | Boolean: Scalars['Boolean']; 295 | Transaction: Transaction; 296 | Parent: Parent; 297 | Block: Block; 298 | MetaData: MetaData; 299 | Amount: Amount; 300 | Owner: Owner; 301 | Tag: Tag; 302 | }; 303 | 304 | export type QueryResolvers = { 305 | transaction?: Resolver, ParentType, ContextType, RequireFields>; 306 | transactions?: Resolver>; 307 | }; 308 | 309 | export type TransactionConnectionResolvers = { 310 | pageInfo?: Resolver; 311 | edges?: Resolver, ParentType, ContextType>; 312 | __isTypeOf?: IsTypeOfResolverFn; 313 | }; 314 | 315 | export type 
TransactionEdgeResolvers = { 316 | cursor?: Resolver; 317 | node?: Resolver; 318 | __isTypeOf?: IsTypeOfResolverFn; 319 | }; 320 | 321 | export type PageInfoResolvers = { 322 | hasNextPage?: Resolver; 323 | __isTypeOf?: IsTypeOfResolverFn; 324 | }; 325 | 326 | export type TransactionResolvers = { 327 | id?: Resolver; 328 | anchor?: Resolver; 329 | signature?: Resolver; 330 | recipient?: Resolver; 331 | owner?: Resolver; 332 | fee?: Resolver; 333 | quantity?: Resolver; 334 | data?: Resolver; 335 | tags?: Resolver, ParentType, ContextType>; 336 | block?: Resolver, ParentType, ContextType>; 337 | parent?: Resolver, ParentType, ContextType>; 338 | __isTypeOf?: IsTypeOfResolverFn; 339 | }; 340 | 341 | export type ParentResolvers = { 342 | id?: Resolver; 343 | __isTypeOf?: IsTypeOfResolverFn; 344 | }; 345 | 346 | export type BlockResolvers = { 347 | id?: Resolver; 348 | timestamp?: Resolver; 349 | height?: Resolver; 350 | previous?: Resolver; 351 | __isTypeOf?: IsTypeOfResolverFn; 352 | }; 353 | 354 | export type MetaDataResolvers = { 355 | size?: Resolver; 356 | type?: Resolver, ParentType, ContextType>; 357 | __isTypeOf?: IsTypeOfResolverFn; 358 | }; 359 | 360 | export type AmountResolvers = { 361 | winston?: Resolver; 362 | ar?: Resolver; 363 | __isTypeOf?: IsTypeOfResolverFn; 364 | }; 365 | 366 | export type OwnerResolvers = { 367 | address?: Resolver; 368 | key?: Resolver; 369 | __isTypeOf?: IsTypeOfResolverFn; 370 | }; 371 | 372 | export type TagResolvers = { 373 | name?: Resolver; 374 | value?: Resolver; 375 | __isTypeOf?: IsTypeOfResolverFn; 376 | }; 377 | 378 | export type Resolvers = { 379 | Query?: QueryResolvers; 380 | TransactionConnection?: TransactionConnectionResolvers; 381 | TransactionEdge?: TransactionEdgeResolvers; 382 | PageInfo?: PageInfoResolvers; 383 | Transaction?: TransactionResolvers; 384 | Parent?: ParentResolvers; 385 | Block?: BlockResolvers; 386 | MetaData?: MetaDataResolvers; 387 | Amount?: AmountResolvers; 388 | Owner?: OwnerResolvers; 
389 | Tag?: TagResolvers; 390 | }; 391 | 392 | 393 | /** 394 | * @deprecated 395 | * Use "Resolvers" root object instead. If you wish to get "IResolvers", add "typesPrefix: I" to your config. 396 | */ 397 | export type IResolvers = Resolvers; 398 | --------------------------------------------------------------------------------