├── .editorconfig ├── .github └── workflows │ ├── build-test.yml │ └── release.yml ├── .gitignore ├── LICENSE ├── README.md ├── jest.config.ts ├── package.json ├── src ├── bin │ └── index.ts ├── checkpoint.ts ├── codegen.ts ├── container.ts ├── graphql │ ├── controller.ts │ ├── index.ts │ └── resolvers.ts ├── index.ts ├── knex.ts ├── orm │ ├── index.ts │ └── model.ts ├── pg.ts ├── providers │ ├── base.ts │ ├── evm │ │ ├── index.ts │ │ ├── indexer.ts │ │ ├── provider.ts │ │ └── types.ts │ ├── index.ts │ └── starknet │ │ ├── index.ts │ │ ├── indexer.ts │ │ ├── provider.ts │ │ ├── types.ts │ │ └── utils.ts ├── register.ts ├── schemas.ts ├── stores │ └── checkpoints.ts ├── types.ts └── utils │ ├── checkpoint.ts │ ├── database.ts │ ├── graphql.ts │ ├── helpers.ts │ └── logger.ts ├── test ├── fixtures │ ├── .keep │ └── checkpointConfig.fixture.ts └── unit │ ├── __snapshots__ │ └── codegen.test.ts.snap │ ├── codegen.test.ts │ ├── graphql │ ├── __snapshots__ │ │ └── controller.test.ts.snap │ └── controller.test.ts │ ├── knex.test.ts │ ├── providers │ └── starknet │ │ ├── __snapshots__ │ │ └── utils.test.ts.snap │ │ ├── fixtures.ts │ │ └── utils.test.ts │ ├── stores │ ├── __snapshots__ │ │ └── checkpoints.test.ts.snap │ └── checkpoints.test.ts │ └── utils │ ├── __snapshots__ │ └── checkpoint.test.ts.snap │ ├── checkpoint.test.ts │ ├── database.test.ts │ └── helpers.test.ts ├── tsconfig.json └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | indent_style = space 5 | indent_size = 2 6 | end_of_line = LF 7 | charset = utf-8 8 | trim_trailing_whitespace = true 9 | insert_final_newline = true 10 | 11 | [*.md] 12 | trim_trailing_whitespace = false 13 | -------------------------------------------------------------------------------- /.github/workflows/build-test.yml: -------------------------------------------------------------------------------- 1 | name: Run tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | build-test: 13 | runs-on: ubuntu-24.04 14 | steps: 15 | - uses: actions/checkout@v3 16 | - uses: actions/setup-node@v3 17 | with: 18 | node-version: '18' 19 | cache: 'yarn' 20 | - run: yarn install 21 | - run: yarn lint 22 | - run: yarn build 23 | - run: yarn test 24 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_run: 5 | workflows: [Run tests] 6 | types: [completed] 7 | branches: [master] 8 | 9 | jobs: 10 | release: 11 | runs-on: ubuntu-24.04 12 | if: ${{ github.event.workflow_run.conclusion == 'success' }} 13 | steps: 14 | - uses: actions/checkout@v3 15 | - uses: actions/setup-node@v3 16 | with: 17 | node-version: '18' 18 | cache: 'yarn' 19 | - run: yarn install 20 | - run: yarn build 21 | - id: publish 22 | name: Release to NPM 23 | if: github.ref == 'refs/heads/master' 24 | uses: JS-DevTools/npm-publish@v1 25 | with: 26 | token: ${{ secrets.NPM_TOKEN }} 27 | access: public 28 | tag: beta 29 | - name: Tag successful release 30 | if: steps.publish.outputs.version != steps.publish.outputs.old-version 31 | uses: rickstaa/action-create-tag@v1 32 | with: 33 | tag: v${{ steps.publish.outputs.version }} 34 | message: 'Latest v${{ steps.publish.outputs.version }}' 35 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | node_modules 3 | dist 4 | build 5 | .env 6 | coverage 7 | 8 | # Remove some common IDE working directories 9 | .idea 10 | .vscode 11 | *.log 12 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Snapshot Labs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Checkpoint 2 | 3 | Checkpoint is a library for indexing data of StarkNet contracts and making it accessible through GraphQL. Checkpoint is inspired by The Graph and focused on providing similar functionality for StarkNet. 4 | 5 | ## Installation 6 | 7 | Checkpoint is an NPM package that can be installed through the following command: 8 | 9 | ```tsx 10 | npm install @snapshot-labs/checkpoint@beta 11 | ``` 12 | 13 | ## Documentation 14 | 15 | - [How it works](https://docs.checkpoint.fyi/#how-it-works) 16 | - [Quickstart guide](https://docs.checkpoint.fyi/guides/quickstart) 17 | - Core concepts 18 | - [Configuration](https://docs.checkpoint.fyi/core-concepts/checkpoint-configuration) 19 | - [Entity schema](https://docs.checkpoint.fyi/core-concepts/entity-schema) 20 | - [Data writers](https://docs.checkpoint.fyi/core-concepts/checkpoint-writers) 21 | 22 | ## Contribution guideline 23 | 24 | Join Snapshot's [Discord](https://discord.gg/snapshot) to further discuss and engage with the team. 
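
## Usage sketch

The snippet below is an illustrative sketch of wiring Checkpoint up, based on the public methods in `src/checkpoint.ts`. The named exports, the entity schema, and the origin of `config`/`indexer` are assumptions rather than documented API — see the quickstart guide above for the canonical flow.

```ts
// Sketch only: the named exports and the source of `config` / `indexer` are assumed.
import express from 'express';
import Checkpoint, { BaseIndexer, CheckpointConfig } from '@snapshot-labs/checkpoint';

declare const config: CheckpointConfig; // sources, abis, templates, ...
declare const indexer: BaseIndexer;     // e.g. a Starknet or EVM provider indexer

const schema = `
type Vote {
  id: ID!
  voter: String!
}
`;

async function run() {
  // Reads the connection string from DATABASE_URL unless opts.dbConnection is passed.
  const checkpoint = new Checkpoint(schema);

  // One indexer per network; the config maps contract events to writer functions.
  checkpoint.addIndexer('starknet', config, indexer);

  // Call reset() on the first run or after schema / writer changes.
  await checkpoint.reset();

  // Expose the generated GraphQL API as an express handler.
  const app = express();
  app.use('/graphql', checkpoint.graphql);
  app.listen(3000);

  // Starts block-by-block indexing; this promise only settles if indexing stops.
  checkpoint.start();
}

run();
```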
25 | 26 | ## LICENSE 27 | 28 | MIT 29 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | /* 2 | * For a detailed explanation regarding each configuration property and type check, visit: 3 | * https://jestjs.io/docs/configuration 4 | */ 5 | 6 | export default { 7 | clearMocks: true, 8 | collectCoverage: true, 9 | coverageDirectory: 'coverage', 10 | coverageProvider: 'v8', 11 | coveragePathIgnorePatterns: ['/node_modules/', '/dist/', '/test/fixtures/'], 12 | 13 | preset: 'ts-jest', 14 | testEnvironment: 'node', 15 | setupFiles: ['dotenv/config'], 16 | testPathIgnorePatterns: ['/node_modules/', '/dist/', '/test/fixtures/'], 17 | moduleFileExtensions: ['js', 'ts'] 18 | }; 19 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@snapshot-labs/checkpoint", 3 | "version": "0.1.0-beta.59", 4 | "license": "MIT", 5 | "bin": { 6 | "checkpoint": "dist/src/bin/index.js" 7 | }, 8 | "main": "dist/src/index.js", 9 | "types": "dist/src/index.d.ts", 10 | "scripts": { 11 | "lint": "eslint src/ test/ --ext .ts --fix", 12 | "build": "tsc", 13 | "prepare": "yarn build", 14 | "prepublishOnly": "yarn run lint", 15 | "test": "jest" 16 | }, 17 | "eslintConfig": { 18 | "extends": "@snapshot-labs" 19 | }, 20 | "prettier": "@snapshot-labs/prettier-config", 21 | "dependencies": { 22 | "@ethersproject/abi": "^5.7.0", 23 | "@ethersproject/address": "^5.7.0", 24 | "@ethersproject/keccak256": "^5.7.0", 25 | "@ethersproject/providers": "^5.7.2", 26 | "@ethersproject/strings": "^5.7.0", 27 | "@graphql-tools/schema": "^8.5.1", 28 | "connection-string": "^4.3.5", 29 | "dataloader": "^2.1.0", 30 | "express-graphql": "^0.12.0", 31 | "graphql": "^16.5.0", 32 | "graphql-fields": "^2.0.3", 33 | "graphql-parse-resolve-info": "^4.12.3", 34 | "json-to-graphql-query": "^2.2.4", 35 | "knex": "^3.1.0", 36 | "object-hash": "^3.0.0", 37 | "pg": "^8.10.0", 38 | "pino": "^8.3.1", 39 | "pino-pretty": "^8.1.0", 40 | "pluralize": "^8.0.0", 41 | "starknet": "~5.19.3", 42 | "yargs": "^17.7.2", 43 | "zod": "^3.21.4" 44 | }, 45 | "devDependencies": { 46 | "@snapshot-labs/eslint-config": "^0.1.0-beta.7", 47 | "@snapshot-labs/prettier-config": "^0.1.0-beta.7", 48 | "@types/bn.js": "^5.1.0", 49 | "@types/jest": "^29.5.0", 50 | "@types/node": "^18.11.6", 51 | "@types/pg": "^8.6.6", 52 | "dotenv": "^16.0.1", 53 | "eslint": "^8.28.0", 54 | "jest": "^29.5.0", 55 | "jest-mock-extended": "^3.0.4", 56 | "prettier": "^2.7.1", 57 | "sqlite3": "^5.1.6", 58 | "ts-jest": "^29.1.0", 59 | "ts-node": "^10.9.1", 60 | "typescript": "^4.9.3" 61 | }, 62 | "files": [ 63 | "dist/**/*", 64 | "src/**/*" 65 | ] 66 | } 67 | -------------------------------------------------------------------------------- /src/bin/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import path from 'path'; 4 | import fs from 'fs/promises'; 5 | import process from 'process'; 6 | import yargs from 'yargs/yargs'; 7 | import { hideBin } from 'yargs/helpers'; 8 | import { codegen } from '../codegen'; 9 | import { OverridesConfig } from '../types'; 10 | import { extendSchema } from '../utils/graphql'; 11 | import { GqlEntityController } from '../graphql/controller'; 12 | import { printSchema } from 'graphql'; 13 | 14 | const DEFAULT_CONFIG_PATH = 
'src/overrides.json'; 15 | const DEFAULT_SCHEMA_PATH = 'src/schema.gql'; 16 | const OUTPUT_DIRECTORY = '.checkpoint'; 17 | 18 | async function generate(schemaFile: string, overridesConfigFile: string, format: string) { 19 | if (format !== 'typescript' && format !== 'javascript') { 20 | throw new Error('Invalid output format'); 21 | } 22 | 23 | const cwd = process.cwd(); 24 | const schemaFilePath = path.join(cwd, schemaFile); 25 | const overridesConfigFilePath = path.join(cwd, overridesConfigFile); 26 | 27 | console.log('Generating models from schema:', schemaFile); 28 | 29 | let config: OverridesConfig = {}; 30 | try { 31 | config = await import(overridesConfigFilePath); 32 | } catch (err) {} 33 | 34 | let schema = await fs.readFile(schemaFilePath, 'utf8'); 35 | schema = extendSchema(schema); 36 | 37 | const controller = new GqlEntityController(schema, config); 38 | 39 | const generatedModels = codegen(controller, config, format); 40 | 41 | const outputFile = format === 'typescript' ? 'models.ts' : 'models.js'; 42 | const modelsOutputPath = path.join(OUTPUT_DIRECTORY, outputFile); 43 | 44 | await fs.mkdir(path.join(cwd, OUTPUT_DIRECTORY), { recursive: true }); 45 | await fs.writeFile(path.join(cwd, modelsOutputPath), generatedModels); 46 | 47 | console.log('Models generated to', modelsOutputPath); 48 | 49 | console.log('Generating query schema'); 50 | const querySchema = controller.generateSchema(); 51 | const schemaOutputPath = path.join(OUTPUT_DIRECTORY, 'schema.gql'); 52 | await fs.writeFile(path.join(cwd, schemaOutputPath), printSchema(querySchema)); 53 | 54 | console.log('Schema generated to', schemaOutputPath); 55 | } 56 | 57 | yargs(hideBin(process.argv)) 58 | .command( 59 | 'generate', 60 | 'generate models from schema', 61 | yargs => { 62 | return yargs 63 | .option('schema-file', { 64 | alias: 's', 65 | type: 'string', 66 | default: DEFAULT_SCHEMA_PATH, 67 | description: 'Schema file path' 68 | }) 69 | .option('overrides-config-file', { 70 | alias: 'o', 71 | type: 'string', 72 | default: DEFAULT_CONFIG_PATH, 73 | description: 'Overrides config file path' 74 | }) 75 | .option('output-format', { 76 | alias: 'f', 77 | type: 'string', 78 | default: 'typescript', 79 | description: 'Output format (typescript or javascript)' 80 | }); 81 | }, 82 | async argv => { 83 | try { 84 | await generate(argv['schema-file'], argv['overrides-config-file'], argv['output-format']); 85 | } catch (err) { 86 | console.error('Error generating models:', err); 87 | process.exit(1); 88 | } 89 | } 90 | ) 91 | .demandCommand(1, 'You need to specify a command') 92 | .parse(); 93 | -------------------------------------------------------------------------------- /src/checkpoint.ts: -------------------------------------------------------------------------------- 1 | import { Knex } from 'knex'; 2 | import { Pool as PgPool } from 'pg'; 3 | import getGraphQL from './graphql'; 4 | import { GqlEntityController } from './graphql/controller'; 5 | import { CheckpointsStore } from './stores/checkpoints'; 6 | import { BaseIndexer } from './providers'; 7 | import { createLogger, Logger, LogLevel } from './utils/logger'; 8 | import { extendSchema } from './utils/graphql'; 9 | import { createKnex } from './knex'; 10 | import { createPgPool } from './pg'; 11 | import { checkpointConfigSchema } from './schemas'; 12 | import { register } from './register'; 13 | import { CheckpointConfig, CheckpointOptions } from './types'; 14 | import { Container } from './container'; 15 | 16 | export default class Checkpoint { 17 | private 
readonly entityController: GqlEntityController; 18 | private readonly log: Logger; 19 | 20 | private containers: Map<string, Container> = new Map(); 21 | 22 | private schema: string; 23 | private dbConnection: string; 24 | private knex: Knex; 25 | private pgPool?: PgPool; 26 | private checkpointsStore?: CheckpointsStore; 27 | private opts?: CheckpointOptions; 28 | 29 | constructor(schema: string, opts?: CheckpointOptions) { 30 | this.schema = extendSchema(schema); 31 | this.entityController = new GqlEntityController(this.schema, opts?.overridesConfig); 32 | 33 | this.log = createLogger({ 34 | base: { component: 'checkpoint' }, 35 | level: opts?.logLevel || LogLevel.Error, 36 | ...(opts?.prettifyLogs 37 | ? { 38 | transport: { 39 | target: 'pino-pretty' 40 | } 41 | } 42 | : {}) 43 | }); 44 | 45 | const dbConnection = opts?.dbConnection || process.env.DATABASE_URL; 46 | if (!dbConnection) { 47 | throw new Error( 48 | 'a valid connection string or DATABASE_URL environment variable is required to connect to the database' 49 | ); 50 | } 51 | 52 | this.knex = createKnex(dbConnection); 53 | this.dbConnection = dbConnection; 54 | 55 | register.setKnex(this.knex); 56 | } 57 | 58 | public addIndexer(name: string, config: CheckpointConfig, indexer: BaseIndexer) { 59 | const validationResult = checkpointConfigSchema.safeParse(config); 60 | if (validationResult.success === false) { 61 | throw new Error(`Checkpoint config is invalid: ${validationResult.error.message}`); 62 | } 63 | 64 | const container = new Container( 65 | name, 66 | this.log, 67 | this.knex, 68 | this.store, 69 | this.entityController, 70 | config, 71 | indexer, 72 | this.schema, 73 | this.opts 74 | ); 75 | 76 | container.validateConfig(); 77 | 78 | this.containers.set(name, container); 79 | } 80 | 81 | public getBaseContext() { 82 | return { 83 | log: this.log.child({ component: 'resolver' }), 84 | knex: this.knex, 85 | pg: this.pg 86 | }; 87 | } 88 | 89 | public getSchema() { 90 | return this.entityController.generateSchema({ addResolvers: true }); 91 | } 92 | 93 | /** 94 | * Returns an express handler that exposes a GraphQL API to query entities defined 95 | * in the schema. 96 | * 97 | */ 98 | public get graphql() { 99 | const schema = this.getSchema(); 100 | 101 | return getGraphQL(schema, this.getBaseContext(), this.entityController.generateSampleQuery()); 102 | } 103 | 104 | /** 105 | * Starts the indexer. 106 | * 107 | * The indexer will invoke the respective writer functions when a contract 108 | * event is found. 109 | * 110 | */ 111 | public async start() { 112 | this.log.debug('starting'); 113 | 114 | await Promise.all([...this.containers.values()].map(container => container.start())); 115 | } 116 | 117 | /** 118 | * Reset will clear the last synced block information 119 | * and force Checkpoint to start indexing from the start 120 | * block. 121 | * 122 | * This will also clear all indexed GraphQL entity records. 123 | * 124 | * This should be called when there has been a change to the GraphQL schema 125 | * or a change to the writer functions logic, so indexing will re-run from 126 | * the starting block. Also, it should be called the first time Checkpoint 127 | * is being initialized.
128 | * 129 | */ 130 | public async reset() { 131 | this.log.debug('reset'); 132 | await this.store.createStore(); 133 | 134 | for (const container of this.containers.values()) { 135 | await container.reset(); 136 | } 137 | 138 | await this.entityController.createEntityStores(this.knex); 139 | } 140 | 141 | /** 142 | * Resets Checkpoint's internal tables (including checkpoints). 143 | * 144 | * Calling this function will cause the next run of Checkpoint to start syncing 145 | * from the start, block-by-block, until new checkpoints are found. 146 | * 147 | */ 148 | public async resetMetadata() { 149 | this.log.debug('reset metadata'); 150 | 151 | await this.store.resetStore(); 152 | 153 | for (const container of this.containers.values()) { 154 | await container.resetMetadata(); 155 | } 156 | } 157 | 158 | /** 159 | * Registers the blocks where a contract's events can be found. 160 | * This will be used as a skip list for checkpoints while 161 | * indexing relevant blocks. Using this seed function can significantly 162 | * reduce the time for Checkpoint to re-index blocks. 163 | * 164 | * This should be called before the start() method is called. 165 | * 166 | */ 167 | public async seedCheckpoints( 168 | indexerName: string, 169 | checkpointBlocks: { contract: string; blocks: number[] }[] 170 | ): Promise<void> { 171 | await this.store.createStore(); 172 | 173 | const container = this.containers.get(indexerName); 174 | 175 | if (!container) { 176 | throw new Error(`Container ${indexerName} not found`); 177 | } 178 | 179 | container.seedCheckpoints(checkpointBlocks); 180 | } 181 | 182 | private get store(): CheckpointsStore { 183 | if (this.checkpointsStore) { 184 | return this.checkpointsStore; 185 | } 186 | 187 | return (this.checkpointsStore = new CheckpointsStore(this.knex, this.log)); 188 | } 189 | 190 | private get pg(): PgPool { 191 | if (this.pgPool) { 192 | return this.pgPool; 193 | } 194 | 195 | this.pgPool = createPgPool(this.dbConnection); 196 | return this.pgPool; 197 | } 198 | } 199 | -------------------------------------------------------------------------------- /src/codegen.ts: -------------------------------------------------------------------------------- 1 | import { 2 | GraphQLField, 3 | GraphQLFloat, 4 | GraphQLID, 5 | GraphQLInt, 6 | GraphQLList, 7 | GraphQLNonNull, 8 | GraphQLObjectType, 9 | GraphQLScalarType, 10 | GraphQLString, 11 | GraphQLType, 12 | isListType 13 | } from 'graphql'; 14 | import pluralize from 'pluralize'; 15 | import { GqlEntityController } from './graphql/controller'; 16 | import { getDerivedFromDirective } from './utils/graphql'; 17 | import { OverridesConfig } from './types'; 18 | 19 | type TypeInfo = { 20 | type: string; 21 | initialValue: any; 22 | }; 23 | 24 | type DecimalTypes = NonNullable<OverridesConfig['decimal_types']>; 25 | 26 | const DEFAULT_DECIMAL_TYPES = { 27 | Decimal: { 28 | p: 10, 29 | d: 2 30 | }, 31 | BigDecimal: { 32 | p: 20, 33 | d: 8 34 | } 35 | }; 36 | 37 | export const getTypeInfo = ( 38 | type: GraphQLType, 39 | decimalTypes: DecimalTypes = DEFAULT_DECIMAL_TYPES 40 | ): TypeInfo => { 41 | if (type instanceof GraphQLNonNull) { 42 | throw new Error('Type must be a raw type'); 43 | } 44 | 45 | switch (type) { 46 | case GraphQLInt: 47 | case GraphQLFloat: 48 | return { type: 'number', initialValue: 0 }; 49 | case GraphQLString: 50 | case GraphQLID: 51 | return { type: 'string', initialValue: '' }; 52 | } 53 | 54 | if (type instanceof GraphQLScalarType) { 55 | switch (type.name) { 56 | case 'BigInt': 57 | return { type: 'bigint', initialValue: 0 }; 58 | case 'Boolean':
return { type: 'boolean', initialValue: false }; 60 | case 'Text': 61 | return { type: 'string', initialValue: '' }; 62 | } 63 | 64 | if (type.name in decimalTypes) { 65 | return { type: 'string', initialValue: '0' }; 66 | } 67 | } 68 | 69 | if (type instanceof GraphQLObjectType) { 70 | return { type: 'string', initialValue: '' }; 71 | } 72 | 73 | if (type instanceof GraphQLList) { 74 | const nonNullNestedType = 75 | type.ofType instanceof GraphQLNonNull ? type.ofType.ofType : type.ofType; 76 | 77 | return { type: `${getTypeInfo(nonNullNestedType, decimalTypes).type}[]`, initialValue: '[]' }; 78 | } 79 | 80 | throw new Error('Unknown type'); 81 | }; 82 | 83 | export const getInitialValue = ( 84 | type: GraphQLType, 85 | decimalTypes: DecimalTypes = DEFAULT_DECIMAL_TYPES 86 | ) => { 87 | if (!(type instanceof GraphQLNonNull)) { 88 | return null; 89 | } 90 | 91 | return getTypeInfo(type.ofType, decimalTypes).initialValue; 92 | }; 93 | 94 | export const getBaseType = ( 95 | type: GraphQLType, 96 | decimalTypes: DecimalTypes = DEFAULT_DECIMAL_TYPES 97 | ) => { 98 | const nonNullType = type instanceof GraphQLNonNull ? type.ofType : type; 99 | 100 | return getTypeInfo(nonNullType, decimalTypes).type; 101 | }; 102 | 103 | export const getJSType = ( 104 | field: GraphQLField, 105 | decimalTypes: DecimalTypes = DEFAULT_DECIMAL_TYPES 106 | ) => { 107 | const nonNullType = field.type instanceof GraphQLNonNull ? field.type.ofType : field.type; 108 | const isNullable = !(field.type instanceof GraphQLNonNull); 109 | const isList = nonNullType instanceof GraphQLList; 110 | const baseType = getBaseType(nonNullType, decimalTypes); 111 | 112 | return { isNullable, isList, baseType }; 113 | }; 114 | 115 | export const codegen = ( 116 | controller: GqlEntityController, 117 | config: OverridesConfig, 118 | format: 'typescript' | 'javascript' 119 | ) => { 120 | const decimalTypes = config.decimal_types || DEFAULT_DECIMAL_TYPES; 121 | 122 | const preamble = `import { Model } from '@snapshot-labs/checkpoint';\n\n`; 123 | 124 | let contents = `${preamble}`; 125 | 126 | controller.schemaObjects.forEach((type, i, arr) => { 127 | const modelName = type.name; 128 | 129 | contents += `export class ${modelName} extends Model {\n`; 130 | contents += ` static tableName = '${pluralize(modelName.toLowerCase())}';\n\n`; 131 | 132 | const typeFields = controller.getTypeFields(type); 133 | const idField = typeFields.find(field => field.name === 'id'); 134 | const idType = idField ? getJSType(idField, decimalTypes) : null; 135 | 136 | if ( 137 | !idType || 138 | !['string', 'number'].includes(idType.baseType) || 139 | idType.isNullable || 140 | idType.isList 141 | ) { 142 | throw new Error(`Model ${modelName} must have an id field of type string or number`); 143 | } 144 | 145 | contents += 146 | format === 'javascript' 147 | ? ` constructor(id, indexerName) {\n` 148 | : ` constructor(id: ${idType.baseType}, indexerName: string) {\n`; 149 | contents += ` super(${modelName}.tableName, indexerName);\n\n`; 150 | typeFields.forEach(field => { 151 | const fieldType = field.type instanceof GraphQLNonNull ? field.type.ofType : field.type; 152 | if ( 153 | isListType(fieldType) && 154 | fieldType.ofType instanceof GraphQLObjectType && 155 | getDerivedFromDirective(field) 156 | ) { 157 | return; 158 | } 159 | 160 | const rawInitialValue = getInitialValue(field.type, decimalTypes); 161 | const initialValue = field.name === 'id' ? 
'id' : JSON.stringify(rawInitialValue); 162 | contents += ` this.initialSet('${field.name}', ${initialValue});\n`; 163 | }); 164 | contents += ` }\n\n`; 165 | 166 | contents += 167 | format === 'javascript' 168 | ? ` static async loadEntity(id, indexerName) {\n` 169 | : ` static async loadEntity(id: ${idType.baseType}, indexerName: string): Promise<${modelName} | null> {\n`; 170 | contents += ` const entity = await super._loadEntity(${modelName}.tableName, id, indexerName);\n`; 171 | contents += ` if (!entity) return null;\n\n`; 172 | contents += ` const model = new ${modelName}(id, indexerName);\n`; 173 | contents += ` model.setExists();\n\n`; 174 | contents += ` for (const key in entity) {\n`; 175 | contents += ` const value = entity[key] !== null && typeof entity[key] === 'object'\n`; 176 | contents += ` ? JSON.stringify(entity[key])\n`; 177 | contents += ` : entity[key];\n`; 178 | contents += ` model.set(key, value);\n`; 179 | contents += ` }\n\n`; 180 | contents += ` return model;\n`; 181 | contents += ` }\n\n`; 182 | 183 | typeFields.forEach(field => { 184 | const fieldType = field.type instanceof GraphQLNonNull ? field.type.ofType : field.type; 185 | if ( 186 | isListType(fieldType) && 187 | fieldType.ofType instanceof GraphQLObjectType && 188 | getDerivedFromDirective(field) 189 | ) { 190 | return; 191 | } 192 | 193 | const { isNullable, isList, baseType } = getJSType(field, decimalTypes); 194 | const typeAnnotation = isNullable ? `${baseType} | null` : baseType; 195 | 196 | contents += 197 | format === 'javascript' 198 | ? ` get ${field.name}() {\n` 199 | : ` get ${field.name}(): ${typeAnnotation} {\n`; 200 | contents += ` return ${ 201 | isList ? `JSON.parse(this.get('${field.name}'))` : `this.get('${field.name}')` 202 | };\n`; 203 | contents += ` }\n\n`; 204 | 205 | contents += 206 | format === 'javascript' 207 | ? ` set ${field.name}(value) {\n` 208 | : ` set ${field.name}(value: ${typeAnnotation}) {\n`; 209 | contents += ` this.set('${field.name}', ${isList ? `JSON.stringify(value)` : 'value'});\n`; 210 | contents += ` }\n\n`; 211 | }); 212 | 213 | contents = contents.slice(0, -1); 214 | contents += i === arr.length - 1 ? 
'}\n' : '}\n\n'; 215 | }); 216 | 217 | return contents; 218 | }; 219 | -------------------------------------------------------------------------------- /src/container.ts: -------------------------------------------------------------------------------- 1 | import { BaseIndexer, BlockNotFoundError, Instance, ReorgDetectedError } from './providers'; 2 | import { CheckpointConfig, CheckpointOptions, ContractSourceConfig, TemplateSource } from './types'; 3 | import { CheckpointRecord, CheckpointsStore, MetadataId } from './stores/checkpoints'; 4 | import { Logger } from './utils/logger'; 5 | import { getConfigChecksum, getContractsFromConfig } from './utils/checkpoint'; 6 | import { GqlEntityController } from './graphql/controller'; 7 | import { Knex } from 'knex'; 8 | import { sleep } from './utils/helpers'; 9 | import { register } from './register'; 10 | import { getTableName } from './utils/database'; 11 | 12 | const SCHEMA_VERSION = 1; 13 | 14 | const BLOCK_PRELOAD_START_RANGE = 1000; 15 | const BLOCK_RELOAD_MIN_RANGE = 10; 16 | const BLOCK_PRELOAD_STEP = 100; 17 | const BLOCK_PRELOAD_TARGET = 10; 18 | const BLOCK_PRELOAD_OFFSET = 50; 19 | 20 | const CHECK_LATEST_BLOCK_INTERVAL = 50; 21 | 22 | const DEFAULT_FETCH_INTERVAL = 2000; 23 | 24 | export class Container implements Instance { 25 | private indexerName: string; 26 | 27 | public config: CheckpointConfig; 28 | public opts?: CheckpointOptions; 29 | public schema: string; 30 | 31 | private store: CheckpointsStore; 32 | private readonly log: Logger; 33 | private readonly indexer: BaseIndexer; 34 | private readonly entityController: GqlEntityController; 35 | private knex: Knex; 36 | 37 | private activeTemplates: TemplateSource[] = []; 38 | private cpBlocksCache: number[] | null = []; 39 | private blockHashCache: { blockNumber: number; hash: string } | null = null; 40 | 41 | private preloadStep: number = BLOCK_PRELOAD_START_RANGE; 42 | private preloadedBlocks: number[] = []; 43 | private preloadEndBlock = 0; 44 | 45 | constructor( 46 | indexerName: string, 47 | log: Logger, 48 | knex: Knex, 49 | store: CheckpointsStore, 50 | entityController: GqlEntityController, 51 | config: CheckpointConfig, 52 | indexer: BaseIndexer, 53 | schema: string, 54 | opts?: CheckpointOptions 55 | ) { 56 | this.indexerName = indexerName; 57 | this.log = log.child({ component: 'container', indexer: indexerName }); 58 | this.knex = knex; 59 | this.store = store; 60 | this.entityController = entityController; 61 | this.config = config; 62 | this.indexer = indexer; 63 | this.schema = schema; 64 | this.opts = opts; 65 | 66 | this.indexer.init({ 67 | instance: this, 68 | log: this.log, 69 | abis: config.abis 70 | }); 71 | } 72 | 73 | public get sourceContracts() { 74 | return this.indexer.getProvider().formatAddresses(getContractsFromConfig(this.config)); 75 | } 76 | 77 | public getCurrentSources(blockNumber: number) { 78 | if (!this.config.sources) return []; 79 | 80 | return this.config.sources.filter(source => source.start <= blockNumber); 81 | } 82 | 83 | private async getNextCheckpointBlock(blockNum: number): Promise { 84 | if (this.cpBlocksCache && this.cpBlocksCache.length !== 0) { 85 | return this.cpBlocksCache.shift() || null; 86 | } 87 | 88 | const checkpointBlocks = await this.store.getNextCheckpointBlocks( 89 | this.indexerName, 90 | blockNum, 91 | this.sourceContracts, 92 | 15 93 | ); 94 | 95 | if (checkpointBlocks.length === 0) return null; 96 | 97 | this.cpBlocksCache = checkpointBlocks; 98 | return this.cpBlocksCache.shift() || null; 99 | } 100 | 101 | 
private async getBlockHash(blockNumber: number): Promise<string | null> { 102 | if (this.blockHashCache && this.blockHashCache.blockNumber === blockNumber) { 103 | return this.blockHashCache.hash; 104 | } 105 | 106 | return this.store.getBlockHash(this.indexerName, blockNumber); 107 | } 108 | 109 | private addSource(source: ContractSourceConfig) { 110 | if (!this.config.sources) this.config.sources = []; 111 | 112 | this.config.sources.push(source); 113 | this.cpBlocksCache = []; 114 | } 115 | 116 | public async executeTemplate( 117 | name: string, 118 | { contract, start }: { contract: string; start: number }, 119 | persist = true 120 | ) { 121 | const template = this.config.templates?.[name]; 122 | 123 | if (!template) { 124 | this.log.warn({ name }, 'template not found'); 125 | return; 126 | } 127 | 128 | const existingTemplate = this.activeTemplates.find( 129 | template => 130 | template.template === name && 131 | template.contractAddress === contract && 132 | template.startBlock === start 133 | ); 134 | 135 | if (existingTemplate) return; 136 | this.activeTemplates.push({ template: name, contractAddress: contract, startBlock: start }); 137 | 138 | if (persist) { 139 | await this.store.insertTemplateSource(this.indexerName, contract, start, name); 140 | } 141 | 142 | this.addSource({ 143 | contract, 144 | start, 145 | abi: template.abi, 146 | events: template.events 147 | }); 148 | } 149 | 150 | getWriterHelpers() { 151 | return { 152 | executeTemplate: this.executeTemplate.bind(this) 153 | }; 154 | } 155 | 156 | public async setBlockHash(blockNum: number, hash: string) { 157 | this.blockHashCache = { blockNumber: blockNum, hash }; 158 | 159 | return this.store.setBlockHash(this.indexerName, blockNum, hash); 160 | } 161 | 162 | public async setLastIndexedBlock(block: number) { 163 | await this.store.setMetadata(this.indexerName, MetadataId.LastIndexedBlock, block); 164 | } 165 | 166 | public async insertCheckpoints(checkpoints: CheckpointRecord[]) { 167 | await this.store.insertCheckpoints(this.indexerName, checkpoints); 168 | } 169 | 170 | /** 171 | * Starts the indexer. 172 | * 173 | * The indexer will invoke the respective writer functions when a contract 174 | * event is found.
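 *
 * Illustrative sketch of a source entry wiring events to writers (the `name`
 * field and all values here are assumptions; `contract`, `start`, `abi` and
 * `events[].fn` mirror the config fields used elsewhere in this file):
 *
 *   sources: [{
 *     contract: '0x0123...',   // contract to watch (placeholder address)
 *     start: 500000,           // block to start indexing from
 *     abi: 'Factory',          // key that must exist in config.abis
 *     events: [{ name: 'SpaceCreated', fn: 'handleSpaceCreated' }]
 *   }]
 *
 * `handleSpaceCreated` must be one of the handlers returned by the indexer's
 * getHandlers(), otherwise validateConfig() will throw.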
175 | * 176 | */ 177 | public async start() { 178 | await this.validateStore(); 179 | await this.indexer.getProvider().init(); 180 | 181 | const templateSources = await this.store.getTemplateSources(this.indexerName); 182 | await Promise.all( 183 | templateSources.map(source => 184 | this.executeTemplate( 185 | source.template, 186 | { 187 | contract: source.contractAddress, 188 | start: source.startBlock 189 | }, 190 | false 191 | ) 192 | ) 193 | ); 194 | 195 | const blockNum = await this.getStartBlockNum(); 196 | this.preloadEndBlock = 197 | (await this.indexer.getProvider().getLatestBlockNumber()) - BLOCK_PRELOAD_OFFSET; 198 | 199 | return await this.next(blockNum); 200 | } 201 | 202 | private async preload(blockNum: number) { 203 | if (this.preloadedBlocks.length > 0) return this.preloadedBlocks.shift() as number; 204 | 205 | let currentBlock = blockNum; 206 | 207 | while (currentBlock <= this.preloadEndBlock) { 208 | const endBlock = Math.min(currentBlock + this.preloadStep, this.preloadEndBlock); 209 | let checkpoints: CheckpointRecord[]; 210 | try { 211 | this.log.info({ start: currentBlock, end: endBlock }, 'preloading blocks'); 212 | checkpoints = await this.indexer.getProvider().getCheckpointsRange(currentBlock, endBlock); 213 | } catch (e) { 214 | this.log.error( 215 | { blockNumber: currentBlock, err: e }, 216 | 'error occurred during checkpoint fetching' 217 | ); 218 | await sleep(this.config.fetch_interval || DEFAULT_FETCH_INTERVAL); 219 | continue; 220 | } 221 | 222 | const increase = 223 | checkpoints.length > BLOCK_PRELOAD_TARGET ? -BLOCK_PRELOAD_STEP : +BLOCK_PRELOAD_STEP; 224 | this.preloadStep = Math.max(BLOCK_RELOAD_MIN_RANGE, this.preloadStep + increase); 225 | 226 | if (checkpoints.length > 0) { 227 | this.preloadedBlocks = [ 228 | ...new Set(checkpoints.map(cp => cp.blockNumber).sort((a, b) => a - b)) 229 | ]; 230 | return this.preloadedBlocks.shift() as number; 231 | } 232 | 233 | currentBlock = endBlock + 1; 234 | } 235 | 236 | return null; 237 | } 238 | 239 | private async next(blockNum: number) { 240 | let checkpointBlock, preloadedBlock; 241 | if (!this.config.tx_fn && !this.config.global_events) { 242 | checkpointBlock = await this.getNextCheckpointBlock(blockNum); 243 | 244 | if (checkpointBlock) { 245 | blockNum = checkpointBlock; 246 | } else if (blockNum <= this.preloadEndBlock) { 247 | preloadedBlock = await this.preload(blockNum); 248 | blockNum = preloadedBlock || this.preloadEndBlock + 1; 249 | } 250 | } 251 | 252 | if (!checkpointBlock && !preloadedBlock) { 253 | if (blockNum % CHECK_LATEST_BLOCK_INTERVAL === 0) { 254 | try { 255 | const latestBlock = await this.indexer.getProvider().getLatestBlockNumber(); 256 | 257 | this.log.info({ latestBlock, behind: latestBlock - blockNum }, 'checking latest block'); 258 | 259 | if (latestBlock > blockNum + BLOCK_PRELOAD_OFFSET * 2) { 260 | this.log.info( 261 | { latestBlock, blockNum }, 262 | `fell more than ${BLOCK_PRELOAD_OFFSET * 2} blocks behind, reverting to preload` 263 | ); 264 | 265 | this.preloadEndBlock = latestBlock - BLOCK_PRELOAD_OFFSET; 266 | } 267 | } catch (e) { 268 | this.log.error( 269 | { blockNumber: blockNum, err: e }, 270 | 'error occurred during latest block check, ignoring for now' 271 | ); 272 | } 273 | } 274 | } 275 | 276 | this.log.debug({ blockNumber: blockNum }, 'next block'); 277 | 278 | try { 279 | register.setCurrentBlock(this.indexerName, BigInt(blockNum)); 280 | 281 | const initialSources = this.getCurrentSources(blockNum); 282 | const parentHash = await 
this.getBlockHash(blockNum - 1); 283 | const nextBlockNumber = await this.indexer.getProvider().processBlock(blockNum, parentHash); 284 | const sources = this.getCurrentSources(nextBlockNumber); 285 | 286 | if (initialSources.length !== sources.length) { 287 | this.preloadedBlocks = []; 288 | } 289 | 290 | return this.next(nextBlockNumber); 291 | } catch (err) { 292 | if (err instanceof BlockNotFoundError) { 293 | if (this.config.optimistic_indexing) { 294 | try { 295 | await this.indexer.getProvider().processPool(blockNum); 296 | } catch (err) { 297 | this.log.error({ blockNumber: blockNum, err }, 'error occurred during pool processing'); 298 | } 299 | } 300 | } else if (err instanceof ReorgDetectedError) { 301 | const nextBlockNumber = await this.handleReorg(blockNum); 302 | return this.next(nextBlockNumber); 303 | } else { 304 | this.log.error({ blockNumber: blockNum, err }, 'error occurred during block processing'); 305 | } 306 | 307 | if (checkpointBlock && this.cpBlocksCache) { 308 | this.cpBlocksCache.unshift(checkpointBlock); 309 | } 310 | 311 | if (preloadedBlock && this.preloadedBlocks) { 312 | this.preloadedBlocks.unshift(preloadedBlock); 313 | } 314 | 315 | await sleep(this.config.fetch_interval || DEFAULT_FETCH_INTERVAL); 316 | return this.next(blockNum); 317 | } 318 | } 319 | 320 | private async handleReorg(blockNumber: number) { 321 | this.log.info({ blockNumber }, 'handling reorg'); 322 | 323 | let current = blockNumber - 1; 324 | let lastGoodBlock: null | number = null; 325 | while (lastGoodBlock === null) { 326 | try { 327 | const storedBlockHash = await this.store.getBlockHash(this.indexerName, current); 328 | const currentBlockHash = await this.indexer.getProvider().getBlockHash(current); 329 | 330 | if (storedBlockHash === null || storedBlockHash === currentBlockHash) { 331 | lastGoodBlock = current; 332 | } else { 333 | current -= 1; 334 | } 335 | } catch (e) { 336 | this.log.error({ blockNumber: current, err: e }, 'error occurred during block hash check'); 337 | await sleep(this.config.fetch_interval || DEFAULT_FETCH_INTERVAL); 338 | } 339 | } 340 | 341 | const entities = await this.entityController.schemaObjects; 342 | const tables = entities.map(entity => getTableName(entity.name.toLowerCase())); 343 | 344 | await this.knex.transaction(async trx => { 345 | for (const tableName of tables) { 346 | await trx 347 | .table(tableName) 348 | .where('_indexer', this.indexerName) 349 | .andWhereRaw('lower(block_range) > ?', [lastGoodBlock]) 350 | .delete(); 351 | 352 | await trx 353 | .table(tableName) 354 | .where('_indexer', this.indexerName) 355 | .andWhereRaw('block_range @> int8(??)', [lastGoodBlock]) 356 | .update({ 357 | block_range: this.knex.raw('int8range(lower(block_range), NULL)') 358 | }); 359 | } 360 | }); 361 | 362 | // TODO: when we have full transaction support, we should include this in the transaction 363 | await this.store.removeFutureData(this.indexerName, lastGoodBlock); 364 | 365 | this.cpBlocksCache = null; 366 | this.blockHashCache = null; 367 | 368 | this.log.info({ blockNumber: lastGoodBlock }, 'reorg resolved'); 369 | 370 | return lastGoodBlock + 1; 371 | } 372 | 373 | public async reset() { 374 | await this.store.setMetadata(this.indexerName, MetadataId.LastIndexedBlock, 0); 375 | await this.store.setMetadata(this.indexerName, MetadataId.SchemaVersion, SCHEMA_VERSION); 376 | await this.store.removeBlocks(this.indexerName); 377 | } 378 | 379 | public async resetMetadata() { 380 | await this.store.setMetadata(this.indexerName, 
MetadataId.SchemaVersion, SCHEMA_VERSION); 381 | } 382 | 383 | /** 384 | * Registers the blocks where a contract's events can be found. 385 | * This will be used as a skip list for checkpoints while 386 | * indexing relevant blocks. Using this seed function can significantly 387 | * reduce the time for Checkpoint to re-index blocks. 388 | * 389 | * This should be called before the start() method is called. 390 | * 391 | */ 392 | public async seedCheckpoints( 393 | checkpointBlocks: { contract: string; blocks: number[] }[] 394 | ): Promise<void> { 395 | const checkpoints: CheckpointRecord[] = []; 396 | 397 | let finalBlock = 0; 398 | checkpointBlocks.forEach(cp => { 399 | cp.blocks.forEach(blockNumber => { 400 | finalBlock = Math.max(finalBlock, blockNumber); 401 | checkpoints.push({ 402 | blockNumber, 403 | contractAddress: cp.contract 404 | }); 405 | }); 406 | }); 407 | 408 | await this.store.insertCheckpoints(this.indexerName, checkpoints); 409 | } 410 | 411 | public getConfigStartBlock() { 412 | if (this.config.start && (this.config.tx_fn || this.config.global_events)) { 413 | return this.config.start; 414 | } 415 | 416 | return Math.min(...(this.config.sources?.map(source => source.start) || [])); 417 | } 418 | 419 | public async getStartBlockNum() { 420 | const start = this.getConfigStartBlock(); 421 | const lastBlock = 422 | (await this.store.getMetadataNumber(this.indexerName, MetadataId.LastIndexedBlock)) ?? 0; 423 | 424 | const nextBlock = lastBlock + 1; 425 | 426 | return nextBlock > start ? nextBlock : start; 427 | } 428 | 429 | public validateConfig() { 430 | const sources = this.config.sources ?? []; 431 | const templates = Object.values(this.config.templates ?? {}); 432 | 433 | const usedAbis = [ 434 | ...sources.map(source => source.abi), 435 | ...templates.map(template => template.abi) 436 | ].filter(abi => abi) as string[]; 437 | const usedWriters = [ 438 | ...sources.flatMap(source => source.events), 439 | ...templates.flatMap(template => template.events) 440 | ]; 441 | 442 | const missingAbis = usedAbis.filter(abi => !this.config.abis?.[abi]); 443 | const missingWriters = usedWriters.filter( 444 | writer => !this.indexer.getHandlers().includes(writer.fn) 445 | ); 446 | 447 | if (missingAbis.length > 0) { 448 | throw new Error( 449 | `Following ABIs are used (${missingAbis.join(', ')}), but they are missing in opts.abis` 450 | ); 451 | } 452 | 453 | if (missingWriters.length > 0) { 454 | throw new Error( 455 | `Following writers are used (${missingWriters 456 | .map(writer => writer.fn) 457 | .join(', ')}), but they are not defined` 458 | ); 459 | } 460 | } 461 | 462 | public async validateStore() { 463 | const networkIdentifier = await this.indexer.getProvider().getNetworkIdentifier(); 464 | const configChecksum = getConfigChecksum(this.config); 465 | 466 | const storedNetworkIdentifier = await this.store.getMetadata( 467 | this.indexerName, 468 | MetadataId.NetworkIdentifier 469 | ); 470 | const storedStartBlock = await this.store.getMetadataNumber( 471 | this.indexerName, 472 | MetadataId.StartBlock 473 | ); 474 | const storedConfigChecksum = await this.store.getMetadata( 475 | this.indexerName, 476 | MetadataId.ConfigChecksum 477 | ); 478 | const storedSchemaVersion = await this.store.getMetadataNumber( 479 | this.indexerName, 480 | MetadataId.SchemaVersion 481 | ); 482 | 483 | const hasNetworkChanged = 484 | storedNetworkIdentifier && storedNetworkIdentifier !== networkIdentifier; 485 | const hasStartBlockChanged = 486 | storedStartBlock && storedStartBlock !==
this.getConfigStartBlock(); 487 | const hasConfigChanged = storedConfigChecksum && storedConfigChecksum !== configChecksum; 488 | const hasSchemaChanged = storedSchemaVersion !== SCHEMA_VERSION; 489 | 490 | if ( 491 | (hasNetworkChanged || hasStartBlockChanged || hasConfigChanged || hasSchemaChanged) && 492 | this.opts?.resetOnConfigChange 493 | ) { 494 | await this.resetMetadata(); 495 | await this.reset(); 496 | 497 | await this.store.setMetadata( 498 | this.indexerName, 499 | MetadataId.NetworkIdentifier, 500 | networkIdentifier 501 | ); 502 | await this.store.setMetadata( 503 | this.indexerName, 504 | MetadataId.StartBlock, 505 | this.getConfigStartBlock() 506 | ); 507 | await this.store.setMetadata(this.indexerName, MetadataId.ConfigChecksum, configChecksum); 508 | } else if (hasNetworkChanged) { 509 | this.log.error( 510 | `network identifier changed from ${storedNetworkIdentifier} to ${networkIdentifier}. 511 | You probably should reset the database by calling .reset() and resetMetadata(). 512 | You can also set resetOnConfigChange to true in Checkpoint options to do this automatically.` 513 | ); 514 | 515 | throw new Error('network identifier changed'); 516 | } else if (hasStartBlockChanged) { 517 | this.log.error( 518 | `start block changed from ${storedStartBlock} to ${this.getConfigStartBlock()}. 519 | You probably should reset the database by calling .reset() and resetMetadata(). 520 | You can also set resetOnConfigChange to true in Checkpoint options to do this automatically.` 521 | ); 522 | 523 | throw new Error('start block changed'); 524 | } else if (hasConfigChanged) { 525 | this.log.error( 526 | `config checksum changed from ${storedConfigChecksum} to ${configChecksum} due to a change in the config. 527 | You probably should reset the database by calling .reset() and resetMetadata(). 528 | You can also set resetOnConfigChange to true in Checkpoint options to do this automatically.` 529 | ); 530 | 531 | throw new Error('config changed'); 532 | } else if (hasSchemaChanged) { 533 | this.log.error( 534 | `schema version changed from ${storedSchemaVersion} to ${SCHEMA_VERSION}. 535 | You probably should reset the database by calling .reset() and resetMetadata().
536 | You can also set resetOnConfigChange to true in Checkpoint options to do this automatically.` 537 | ); 538 | 539 | throw new Error('schema changed'); 540 | } else { 541 | if (!storedNetworkIdentifier) { 542 | await this.store.setMetadata( 543 | this.indexerName, 544 | MetadataId.NetworkIdentifier, 545 | networkIdentifier 546 | ); 547 | } 548 | 549 | if (!storedStartBlock) { 550 | await this.store.setMetadata( 551 | this.indexerName, 552 | MetadataId.StartBlock, 553 | this.getConfigStartBlock() 554 | ); 555 | } 556 | 557 | if (!storedConfigChecksum) { 558 | await this.store.setMetadata(this.indexerName, MetadataId.ConfigChecksum, configChecksum); 559 | } 560 | } 561 | } 562 | } 563 | -------------------------------------------------------------------------------- /src/graphql/controller.ts: -------------------------------------------------------------------------------- 1 | import { 2 | buildSchema, 3 | GraphQLEnumType, 4 | GraphQLField, 5 | GraphQLFieldConfig, 6 | GraphQLFieldConfigMap, 7 | GraphQLFieldResolver, 8 | GraphQLFloat, 9 | GraphQLID, 10 | GraphQLInputObjectType, 11 | GraphQLInputObjectTypeConfig, 12 | GraphQLInt, 13 | GraphQLList, 14 | GraphQLNonNull, 15 | GraphQLObjectType, 16 | GraphQLOutputType, 17 | GraphQLScalarType, 18 | GraphQLSchema, 19 | GraphQLString, 20 | isLeafType, 21 | isListType, 22 | Source 23 | } from 'graphql'; 24 | import { Knex } from 'knex'; 25 | import pluralize from 'pluralize'; 26 | import { KnexType } from '../knex'; 27 | import { 28 | generateQueryForEntity, 29 | multiEntityQueryName, 30 | singleEntityQueryName, 31 | getNonNullType, 32 | getDerivedFromDirective 33 | } from '../utils/graphql'; 34 | import { OverridesConfig } from '../types'; 35 | import { querySingle, queryMulti, getNestedResolver } from './resolvers'; 36 | import { CheckpointsGraphQLObject, MetadataGraphQLObject } from '.'; 37 | import { addResolversToSchema } from '@graphql-tools/schema'; 38 | 39 | /** 40 | * Type for single and multiple query resolvers 41 | */ 42 | interface EntityQueryResolvers { 43 | singleEntityResolver: typeof querySingle; 44 | multipleEntityResolver: typeof queryMulti; 45 | } 46 | 47 | const GraphQLOrderDirection = new GraphQLEnumType({ 48 | name: 'OrderDirection', 49 | values: { 50 | asc: { value: 'ASC' }, 51 | desc: { value: 'DESC' } 52 | } 53 | }); 54 | 55 | type WhereResult = { 56 | where: { 57 | type: GraphQLInputObjectType; 58 | }; 59 | orderByValues: Record<string, { value: string }>; 60 | }; 61 | 62 | /** 63 | * Controller for performing actions based on the graphql schema provided to its 64 | * constructor. It exposes public functions to generate graphql or database 65 | * items based on the entities identified in the schema. 66 | * 67 | * Note: Entities refer to Object types with an `id` field defined within the 68 | * graphql schema. 69 | */ 70 | export class GqlEntityController { 71 | private readonly schema: GraphQLSchema; 72 | private readonly decimalTypes: NonNullable<OverridesConfig['decimal_types']>; 73 | private _schemaObjects?: GraphQLObjectType[]; 74 | 75 | constructor(typeDefs: string | Source, config?: OverridesConfig) { 76 | this.schema = buildSchema(typeDefs); 77 | this.decimalTypes = config?.decimal_types || { 78 | Decimal: { 79 | p: 10, 80 | d: 2 81 | }, 82 | BigDecimal: { 83 | p: 20, 84 | d: 8 85 | } 86 | }; 87 | } 88 | 89 | /** 90 | * Creates a graphql Query object generated from the objects defined within 91 | * the schema.
92 | * For each of the objects, two queries are created, one for querying the object 93 | * by its id and the second for querying multiple objects based on a couple 94 | * of parameters. 95 | * 96 | * For example, given the input schema: 97 | * ``` 98 | * type Vote { 99 | * id: Int! 100 | * name: String 101 | * } 102 | * ``` 103 | * 104 | * The generated queries will be like: 105 | * ``` 106 | * type Query { 107 | * votes( 108 | * first: Int 109 | * skip: Int 110 | * orderBy: String 111 | * orderDirection: String 112 | * where: WhereVote 113 | * ): [Vote] 114 | * vote(id: Int!): Vote 115 | * } 116 | * 117 | * input WhereVote { 118 | * id: Int 119 | * id_in: [Int] 120 | * name: String 121 | * name_in: [String] 122 | * } 123 | * 124 | * ``` 125 | * 126 | */ 127 | public generateQueryFields( 128 | schemaObjects?: GraphQLObjectType[], 129 | resolvers: EntityQueryResolvers = { 130 | singleEntityResolver: querySingle, 131 | multipleEntityResolver: queryMulti 132 | } 133 | ): GraphQLFieldConfigMap<any, any> { 134 | schemaObjects = schemaObjects || this.schemaObjects; 135 | 136 | const queryFields: GraphQLFieldConfigMap<any, any> = {}; 137 | 138 | schemaObjects.forEach(type => { 139 | queryFields[singleEntityQueryName(type)] = this.getSingleEntityQueryConfig( 140 | type, 141 | resolvers.singleEntityResolver 142 | ); 143 | queryFields[multiEntityQueryName(type)] = this.getMultipleEntityQueryConfig( 144 | type, 145 | resolvers.multipleEntityResolver 146 | ); 147 | }); 148 | 149 | return queryFields; 150 | } 151 | 152 | /** 153 | * Generates entity resolvers for subqueries. 154 | * Returned resolvers use a format compatible with addResolversToSchema. 155 | * { 156 | * Proposal: { 157 | * space: () => {} 158 | * } 159 | * } 160 | */ 161 | public generateEntityResolvers(fields: GraphQLFieldConfigMap<any, any>) { 162 | return this.schemaObjects.reduce((entities, obj) => { 163 | entities[obj.name] = this.getTypeFields(obj).reduce((resolvers, field) => { 164 | const nonNullType = getNonNullType(field.type); 165 | 166 | if (isListType(nonNullType)) { 167 | const itemType = getNonNullType(nonNullType.ofType); 168 | 169 | if (itemType instanceof GraphQLObjectType) { 170 | resolvers[field.name] = getNestedResolver(multiEntityQueryName(itemType)); 171 | } 172 | } 173 | 174 | if (nonNullType instanceof GraphQLObjectType) { 175 | resolvers[field.name] = fields[singleEntityQueryName(nonNullType)].resolve; 176 | } 177 | 178 | return resolvers; 179 | }, {}); 180 | 181 | return entities; 182 | }, {}); 183 | } 184 | 185 | /** 186 | * Creates a store for each of the objects in the schema. 187 | * For now, it only creates database tables for each of the objects. 188 | * It also creates a checkpoint table to track checkpoints visited. 189 | * 190 | * For example, given a schema like: 191 | * ```graphql 192 | * type Vote { 193 | * id: Int!
194 | * name: String 195 | * } 196 | * ``` 197 | * 198 | * will execute the following SQL: 199 | * ```sql 200 | * DROP TABLE IF EXISTS votes; 201 | * CREATE TABLE votes ( 202 | * id VARCHAR(128) NOT NULL, 203 | * name VARCHAR(128), 204 | * PRIMARY KEY (id) , 205 | * INDEX id (id), 206 | * INDEX name (name) 207 | * ); 208 | * ``` 209 | * 210 | */ 211 | public async createEntityStores(knex: Knex): Promise<{ builder: Knex.SchemaBuilder }> { 212 | let builder = knex.schema; 213 | 214 | if (knex.client.config.client === 'pg') { 215 | builder = builder.raw('CREATE EXTENSION IF NOT EXISTS btree_gist'); 216 | } 217 | 218 | if (this.schemaObjects.length === 0) { 219 | return { builder }; 220 | } 221 | 222 | this.schemaObjects.map(type => { 223 | const tableName = pluralize(type.name.toLowerCase()); 224 | 225 | builder = builder.dropTableIfExists(tableName).createTable(tableName, t => { 226 | t.uuid('uid').primary().defaultTo(knex.fn.uuid()); 227 | t.specificType('block_range', 'int8range').notNullable(); 228 | 229 | this.getTypeFields(type).forEach(field => { 230 | const fieldType = field.type instanceof GraphQLNonNull ? field.type.ofType : field.type; 231 | if ( 232 | isListType(fieldType) && 233 | fieldType.ofType instanceof GraphQLObjectType && 234 | getDerivedFromDirective(field) 235 | ) { 236 | return; 237 | } 238 | const sqlType = this.getSqlType(field.type); 239 | 240 | let column = 241 | 'options' in sqlType 242 | ? t[sqlType.name](field.name, ...sqlType.options) 243 | : t[sqlType.name](field.name); 244 | 245 | if (field.type instanceof GraphQLNonNull) { 246 | column = column.notNullable(); 247 | } 248 | 249 | if (!['text', 'json'].includes(sqlType.name)) { 250 | column.index(); 251 | } 252 | }); 253 | }); 254 | 255 | if (knex.client.config.client === 'pg') { 256 | builder = builder.raw( 257 | knex 258 | .raw( 259 | 'ALTER TABLE ?? ADD EXCLUDE USING GIST (id WITH =, _indexer WITH =, block_range WITH &&)', 260 | [tableName] 261 | ) 262 | .toQuery() 263 | ); 264 | } 265 | }); 266 | 267 | await builder; 268 | 269 | return { builder }; 270 | } 271 | 272 | /** 273 | * Generates a query based on the first entity discovered 274 | * in a schema. If no entities are found in the schema 275 | * it returns undefined. 276 | * 277 | */ 278 | public generateSampleQuery(): string | undefined { 279 | if (this.schemaObjects.length === 0) { 280 | return undefined; 281 | } 282 | 283 | const firstEntityQuery = generateQueryForEntity(this.schemaObjects[0]); 284 | const queryComment = ` 285 | # Welcome to Checkpoint. Try running the below example query from 286 | # your defined entity. 287 | `; 288 | return `${queryComment}\n${firstEntityQuery}`; 289 | } 290 | 291 | /** 292 | * Returns a list of objects defined within the graphql typedefs. 293 | * The types returned are introspection objects that can be used 294 | * for inspecting the fields and types. 295 | * 296 | * Note that the returned objects do not include the Query object type, if defined.
297 | * 298 | */ 299 | public get schemaObjects(): GraphQLObjectType[] { 300 | if (this._schemaObjects) { 301 | return this._schemaObjects; 302 | } 303 | 304 | this._schemaObjects = Object.values(this.schema.getTypeMap()).filter(type => { 305 | return ( 306 | type instanceof GraphQLObjectType && type.name != 'Query' && !type.name.startsWith('__') 307 | ); 308 | }) as GraphQLObjectType[]; 309 | 310 | return this._schemaObjects; 311 | } 312 | 313 | public getTypeFields( 314 | type: GraphQLObjectType 315 | ): GraphQLField[] { 316 | return Object.values(type.getFields()); 317 | } 318 | 319 | private getSingleEntityQueryConfig( 320 | type: GraphQLObjectType, 321 | resolver: GraphQLFieldResolver 322 | ): GraphQLFieldConfig { 323 | return { 324 | type, 325 | args: { 326 | id: { 327 | type: new GraphQLNonNull(this.getEntityIdType(type)) 328 | }, 329 | indexer: { 330 | type: GraphQLString 331 | }, 332 | block: { 333 | type: GraphQLInt 334 | } 335 | }, 336 | resolve: resolver 337 | }; 338 | } 339 | 340 | private getMultipleEntityQueryConfig( 341 | type: GraphQLObjectType, 342 | resolver: GraphQLFieldResolver 343 | ): GraphQLFieldConfig { 344 | const getWhereType = ( 345 | nestedType: GraphQLObjectType, 346 | prefix?: string 347 | ): WhereResult => { 348 | const name = prefix ? `${prefix}_${nestedType.name}_filter` : `${nestedType.name}_filter`; 349 | 350 | const orderByValues = {}; 351 | const whereInputConfig: GraphQLInputObjectTypeConfig = { 352 | name, 353 | fields: {} 354 | }; 355 | 356 | this.getTypeFields(nestedType).forEach(field => { 357 | // all field types in a where input variable must be optional 358 | // so we try to extract the non null type here. 359 | let nonNullFieldType = getNonNullType(field.type); 360 | 361 | if (nonNullFieldType instanceof GraphQLObjectType) { 362 | const fields = nestedType.getFields(); 363 | const idField = fields['id']; 364 | 365 | if ( 366 | idField && 367 | idField.type instanceof GraphQLNonNull && 368 | idField.type.ofType instanceof GraphQLScalarType && 369 | ['String', 'Int', 'ID'].includes(idField.type.ofType.name) 370 | ) { 371 | if (!prefix) { 372 | whereInputConfig.fields[`${field.name}_`] = getWhereType( 373 | nonNullFieldType, 374 | nestedType.name 375 | ).where; 376 | } 377 | 378 | nonNullFieldType = getNonNullType(idField.type); 379 | } 380 | } 381 | 382 | if (isListType(nonNullFieldType)) { 383 | const itemType = nonNullFieldType.ofType; 384 | 385 | if (!isLeafType(itemType)) { 386 | return; 387 | } 388 | 389 | whereInputConfig.fields[`${field.name}`] = { type: nonNullFieldType }; 390 | whereInputConfig.fields[`${field.name}_not`] = { type: nonNullFieldType }; 391 | 392 | if (itemType === GraphQLString) { 393 | // those are the only supported operators for string arrays because PostgreSQL intersection 394 | // for jsonb is only supported for string arrays 395 | whereInputConfig.fields[`${field.name}_contains`] = { type: nonNullFieldType }; 396 | whereInputConfig.fields[`${field.name}_not_contains`] = { type: nonNullFieldType }; 397 | } 398 | } 399 | 400 | // avoid setting up where filters for non scalar types 401 | if (!isLeafType(nonNullFieldType)) { 402 | return; 403 | } 404 | 405 | if (nonNullFieldType === GraphQLInt) { 406 | whereInputConfig.fields[`${field.name}_gt`] = { type: GraphQLInt }; 407 | whereInputConfig.fields[`${field.name}_gte`] = { type: GraphQLInt }; 408 | whereInputConfig.fields[`${field.name}_lt`] = { type: GraphQLInt }; 409 | whereInputConfig.fields[`${field.name}_lte`] = { type: GraphQLInt }; 410 | } 411 | 412 | if ( 413 | 
(nonNullFieldType instanceof GraphQLScalarType && nonNullFieldType.name === 'BigInt') || 414 | this.decimalTypes[nonNullFieldType.name] 415 | ) { 416 | whereInputConfig.fields[`${field.name}_gt`] = { type: nonNullFieldType }; 417 | whereInputConfig.fields[`${field.name}_gte`] = { type: nonNullFieldType }; 418 | whereInputConfig.fields[`${field.name}_lt`] = { type: nonNullFieldType }; 419 | whereInputConfig.fields[`${field.name}_lte`] = { type: nonNullFieldType }; 420 | } 421 | 422 | if ( 423 | nonNullFieldType === GraphQLString || 424 | (nonNullFieldType as GraphQLScalarType).name === 'Text' 425 | ) { 426 | whereInputConfig.fields[`${field.name}_contains`] = { type: GraphQLString }; 427 | whereInputConfig.fields[`${field.name}_not_contains`] = { type: GraphQLString }; 428 | whereInputConfig.fields[`${field.name}_contains_nocase`] = { type: GraphQLString }; 429 | whereInputConfig.fields[`${field.name}_not_contains_nocase`] = { type: GraphQLString }; 430 | } 431 | 432 | if ((nonNullFieldType as GraphQLScalarType).name !== 'Text') { 433 | whereInputConfig.fields[`${field.name}`] = { type: nonNullFieldType }; 434 | whereInputConfig.fields[`${field.name}_not`] = { type: nonNullFieldType }; 435 | whereInputConfig.fields[`${field.name}_in`] = { 436 | type: new GraphQLList(nonNullFieldType) 437 | }; 438 | whereInputConfig.fields[`${field.name}_not_in`] = { 439 | type: new GraphQLList(nonNullFieldType) 440 | }; 441 | } 442 | 443 | orderByValues[field.name] = { value: field.name }; 444 | }); 445 | 446 | const result = { 447 | where: { type: new GraphQLInputObjectType(whereInputConfig) }, 448 | orderByValues 449 | }; 450 | 451 | return result; 452 | }; 453 | 454 | const { where, orderByValues } = getWhereType(type); 455 | 456 | const OrderByEnum = new GraphQLEnumType({ 457 | name: `${type.name}_orderBy`, 458 | values: orderByValues 459 | }); 460 | 461 | return { 462 | type: new GraphQLNonNull(new GraphQLList(new GraphQLNonNull(type))), 463 | args: { 464 | first: { 465 | type: GraphQLInt 466 | }, 467 | skip: { 468 | type: GraphQLInt 469 | }, 470 | orderBy: { 471 | type: OrderByEnum 472 | }, 473 | orderDirection: { 474 | type: GraphQLOrderDirection 475 | }, 476 | indexer: { 477 | type: GraphQLString 478 | }, 479 | block: { 480 | type: GraphQLInt 481 | }, 482 | where 483 | }, 484 | resolve: resolver 485 | }; 486 | } 487 | 488 | private getEntityIdType(type: GraphQLObjectType): GraphQLScalarType { 489 | const idField = type.getFields().id; 490 | if (!idField) { 491 | throw new Error( 492 | `'id' field is missing in type '${type.name}'. All types are required to have an id field.` 493 | ); 494 | } 495 | 496 | if (!(idField.type instanceof GraphQLNonNull)) { 497 | throw new Error(`'id' field for type ${type.name} must be non nullable.`); 498 | } 499 | 500 | const nonNullType = idField.type.ofType; 501 | 502 | // verify only scalar types are used 503 | if (!(nonNullType instanceof GraphQLScalarType)) { 504 | throw new Error(`'id' field for type ${type.name} is not a scalar type.`); 505 | } 506 | 507 | return nonNullType; 508 | } 509 | 510 | /** 511 | * Return a knex column type and options for the graphql type. 512 | * 513 | * It throws if the type is not a recognized type. 
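   *
   * Illustrative mapping, summarising the cases handled below (not an exhaustive contract):
   * Int -> integer, Float -> float(23), String/ID -> string(256), Text -> text,
   * BigInt -> bigint, Boolean -> boolean, configured decimal scalars -> decimal(p, d),
   * list types -> jsonb, and object references -> the column type of the referenced `id` field.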
514 | */ 515 | private getSqlType(type: GraphQLOutputType): KnexType { 516 | if (type instanceof GraphQLNonNull) { 517 | type = type.ofType; 518 | } 519 | 520 | switch (type) { 521 | case GraphQLInt: 522 | return { name: 'integer' }; 523 | case GraphQLFloat: 524 | return { name: 'float', options: [23] }; 525 | case GraphQLString: 526 | case GraphQLID: 527 | return { name: 'string', options: [256] }; 528 | } 529 | 530 | if (type instanceof GraphQLObjectType) { 531 | const fields = type.getFields(); 532 | const idField = fields['id']; 533 | 534 | if ( 535 | idField && 536 | idField.type instanceof GraphQLNonNull && 537 | idField.type.ofType instanceof GraphQLScalarType 538 | ) { 539 | if (['String', 'ID'].includes(idField.type.ofType.name)) { 540 | return { name: 'string', options: [256] }; 541 | } else if (idField.type.ofType.name === 'Int') { 542 | return { name: 'integer' }; 543 | } 544 | } 545 | } 546 | 547 | // check for TEXT scalar type 548 | if (type instanceof GraphQLScalarType && type.name === 'Text') { 549 | return { name: 'text' }; 550 | } 551 | 552 | if (type instanceof GraphQLScalarType && type.name === 'BigInt') { 553 | return { name: 'bigint' }; 554 | } 555 | 556 | if (type instanceof GraphQLScalarType && type.name === 'Boolean') { 557 | return { name: 'boolean' }; 558 | } 559 | 560 | if (type instanceof GraphQLScalarType && this.decimalTypes[type.name]) { 561 | const decimalType = this.decimalTypes[type.name]; 562 | return { name: 'decimal', options: [decimalType.p, decimalType.d] }; 563 | } 564 | 565 | if (type instanceof GraphQLList) { 566 | return { name: 'jsonb' }; 567 | } 568 | 569 | throw new Error(`sql type for ${type} is not supported`); 570 | } 571 | 572 | generateSchema(opts?: { addResolvers?: boolean }) { 573 | const entityQueryFields = this.generateQueryFields(); 574 | const coreQueryFields = this.generateQueryFields([ 575 | MetadataGraphQLObject, 576 | CheckpointsGraphQLObject 577 | ]); 578 | 579 | const query = new GraphQLObjectType({ 580 | name: 'Query', 581 | fields: { 582 | ...entityQueryFields, 583 | ...coreQueryFields 584 | } 585 | }); 586 | 587 | const schema = new GraphQLSchema({ query }); 588 | 589 | if (opts?.addResolvers) { 590 | return addResolversToSchema({ 591 | schema, 592 | resolvers: this.generateEntityResolvers(entityQueryFields) 593 | }); 594 | } 595 | 596 | return schema; 597 | } 598 | } 599 | -------------------------------------------------------------------------------- /src/graphql/index.ts: -------------------------------------------------------------------------------- 1 | import { graphqlHTTP } from 'express-graphql'; 2 | import { 3 | GraphQLID, 4 | GraphQLInt, 5 | GraphQLNonNull, 6 | GraphQLObjectType, 7 | GraphQLSchema, 8 | GraphQLString 9 | } from 'graphql'; 10 | import DataLoader from 'dataloader'; 11 | import { ResolverContextInput } from './resolvers'; 12 | import { getTableName, applyQueryFilter, QueryFilter, applyDefaultOrder } from '../utils/database'; 13 | 14 | /** 15 | * Creates getLoader function that will return existing, or create a new dataloader 16 | * for specific entity. 17 | * createGetLoader should be called per-request so each request has its own caching 18 | * and batching. 
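 * Minimal usage sketch (the "space" entity name and the filter values are illustrative only):
 *   const getLoader = createGetLoader({ log, knex, pg });
 *   // load() resolves to an array of matching rows for the given id
 *   const rows = await getLoader('space', 'id', { block: 1000, indexer: 'default' }).load('1');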
19 | */ 20 | export const createGetLoader = (context: ResolverContextInput) => { 21 | const loaders = {}; 22 | 23 | return (name: string, field = 'id', filter: QueryFilter) => { 24 | const key = `${name}-${field}`; 25 | 26 | if (!loaders[key]) { 27 | loaders[key] = new DataLoader(async ids => { 28 | const tableName = getTableName(name); 29 | 30 | let query = context.knex 31 | .select('*') 32 | .from(tableName) 33 | .whereIn(field, ids as string[]); 34 | 35 | query = applyQueryFilter(query, tableName, filter); 36 | query = applyDefaultOrder(query, tableName); 37 | 38 | context.log.debug({ sql: query.toQuery(), ids }, 'executing batched query'); 39 | 40 | const results = await query; 41 | 42 | const resultsMap = results.reduce((acc, result) => { 43 | if (!acc[result[field]]) acc[result[field]] = []; 44 | 45 | acc[result[field]].push(result); 46 | 47 | return acc; 48 | }, {}); 49 | 50 | return ids.map((id: any) => resultsMap[id] || []); 51 | }); 52 | } 53 | 54 | return loaders[key]; 55 | }; 56 | }; 57 | 58 | /** 59 | * Creates an graphql http handler for the query passed a parameters. 60 | * Returned middleware can be used with express. 61 | */ 62 | export default function get( 63 | schema: GraphQLSchema, 64 | context: ResolverContextInput, 65 | sampleQuery?: string 66 | ) { 67 | return graphqlHTTP(() => ({ 68 | schema, 69 | context: { 70 | ...context, 71 | getLoader: createGetLoader(context) 72 | }, 73 | graphiql: { 74 | defaultQuery: sampleQuery 75 | } 76 | })); 77 | } 78 | 79 | /** 80 | * This objects name and field maps to the values of the _metadata 81 | * database store 82 | * 83 | */ 84 | export const MetadataGraphQLObject = new GraphQLObjectType({ 85 | name: '_Metadata', 86 | description: 'Core metadata values used internally by Checkpoint', 87 | fields: { 88 | id: { type: new GraphQLNonNull(GraphQLID), description: 'example: last_indexed_block' }, 89 | indexer: { type: new GraphQLNonNull(GraphQLString) }, 90 | value: { type: GraphQLString } 91 | } 92 | }); 93 | 94 | /** 95 | * This objects name and field maps to the values of the _checkpoints 96 | * database store. 
And is used to generate entity queries for graphql 97 | * 98 | */ 99 | export const CheckpointsGraphQLObject = new GraphQLObjectType({ 100 | name: '_Checkpoint', 101 | description: 'Contract and Block where its event is found.', 102 | fields: { 103 | id: { 104 | type: new GraphQLNonNull(GraphQLID), 105 | description: 'id computed as last 5 bytes of sha256(contract+block)' 106 | }, 107 | indexer: { type: new GraphQLNonNull(GraphQLString) }, 108 | block_number: { 109 | type: new GraphQLNonNull(GraphQLInt) 110 | }, 111 | contract_address: { 112 | type: new GraphQLNonNull(GraphQLString) 113 | } 114 | } 115 | }); 116 | -------------------------------------------------------------------------------- /src/graphql/resolvers.ts: -------------------------------------------------------------------------------- 1 | import { 2 | GraphQLField, 3 | GraphQLList, 4 | GraphQLNonNull, 5 | GraphQLObjectType, 6 | GraphQLResolveInfo, 7 | GraphQLScalarType, 8 | isListType, 9 | isObjectType, 10 | isScalarType 11 | } from 'graphql'; 12 | import { 13 | parseResolveInfo, 14 | simplifyParsedResolveInfoFragmentWithType 15 | } from 'graphql-parse-resolve-info'; 16 | import { Knex } from 'knex'; 17 | import { Pool as PgPool } from 'pg'; 18 | import { getNonNullType, getDerivedFromDirective } from '../utils/graphql'; 19 | import { getTableName, applyQueryFilter, QueryFilter, applyDefaultOrder } from '../utils/database'; 20 | import { Logger } from '../utils/logger'; 21 | import type DataLoader from 'dataloader'; 22 | 23 | type BaseArgs = { 24 | block?: number; 25 | indexer?: string; 26 | }; 27 | 28 | type SingleEntitySource = Record & { 29 | _args: BaseArgs; 30 | }; 31 | 32 | type Result = Record & { 33 | _args?: BaseArgs; 34 | }; 35 | 36 | type SingleEntityResolverArgs = BaseArgs & { 37 | id: string; 38 | }; 39 | 40 | type MultiEntityResolverArgs = BaseArgs & { 41 | first?: number; 42 | skip?: number; 43 | orderBy?: string; 44 | orderDirection?: string; 45 | where?: Record; 46 | }; 47 | 48 | export type ResolverContextInput = { 49 | log: Logger; 50 | knex: Knex; 51 | pg: PgPool; 52 | }; 53 | 54 | export type ResolverContext = ResolverContextInput & { 55 | getLoader: ( 56 | name: string, 57 | field: string, 58 | queryFilter: QueryFilter 59 | ) => DataLoader; 60 | }; 61 | 62 | export async function queryMulti( 63 | parent: undefined, 64 | args: MultiEntityResolverArgs, 65 | context: ResolverContext, 66 | info: GraphQLResolveInfo 67 | ): Promise { 68 | const { log, knex } = context; 69 | 70 | const nonNullType = getNonNullType(info.returnType); 71 | if (!isListType(nonNullType)) throw new Error('unexpected return type'); 72 | const returnType = getNonNullType(nonNullType.ofType) as GraphQLObjectType; 73 | const jsonFields = getJsonFields(returnType); 74 | 75 | const tableName = getTableName(returnType.name.toLowerCase()); 76 | 77 | const nestedEntitiesMappings = {} as Record>; 78 | 79 | let query = knex.select(`${tableName}.*`).from(tableName); 80 | query = applyQueryFilter(query, tableName, { 81 | block: args.block, 82 | indexer: args.indexer 83 | }); 84 | 85 | const handleWhere = (query: Knex.QueryBuilder, prefix: string, where: Record) => { 86 | const isFieldList = (fieldName: string) => { 87 | const fieldType = getNonNullType(returnType.getFields()[fieldName].type); 88 | return isListType(fieldType); 89 | }; 90 | 91 | Object.entries(where).map((w: [string, any]) => { 92 | // TODO: we could generate where as objects { name, column, operator, value } 93 | // so we don't have to cut it there 94 | 95 | if 
(w[0].endsWith('_not')) { 96 | const fieldName = w[0].slice(0, -4); 97 | const isList = isFieldList(fieldName); 98 | 99 | if (isList) { 100 | query = query.whereRaw(`NOT :field: @> :value::jsonb OR NOT :field: <@ :value::jsonb`, { 101 | field: `${prefix}.${fieldName}`, 102 | value: JSON.stringify(w[1]) 103 | }); 104 | } else { 105 | query = query.where(`${prefix}.${fieldName}`, '!=', w[1]); 106 | } 107 | } else if (w[0].endsWith('_gt')) { 108 | query = query.where(`${prefix}.${w[0].slice(0, -3)}`, '>', w[1]); 109 | } else if (w[0].endsWith('_gte')) { 110 | query = query.where(`${prefix}.${w[0].slice(0, -4)}`, '>=', w[1]); 111 | } else if (w[0].endsWith('_lt')) { 112 | query = query.where(`${prefix}.${w[0].slice(0, -3)}`, '<', w[1]); 113 | } else if (w[0].endsWith('_lte')) { 114 | query = query.where(`${prefix}.${w[0].slice(0, -4)}`, '<=', w[1]); 115 | } else if (w[0].endsWith('_not_contains')) { 116 | const fieldName = w[0].slice(0, -13); 117 | const isList = isFieldList(fieldName); 118 | 119 | if (isList) { 120 | const arrayBindings = w[1].map(() => '?').join(', '); 121 | query = query.whereRaw(`NOT ?? \\?| array[${arrayBindings}]`, [ 122 | `${prefix}.${fieldName}`, 123 | ...w[1] 124 | ]); 125 | } else { 126 | query = query.not.whereLike(`${prefix}.${fieldName}`, `%${w[1]}%`); 127 | } 128 | } else if (w[0].endsWith('_not_contains_nocase')) { 129 | query = query.not.whereILike(`${prefix}.${w[0].slice(0, -20)}`, `%${w[1]}%`); 130 | } else if (w[0].endsWith('_contains')) { 131 | const fieldName = w[0].slice(0, -9); 132 | const isList = isFieldList(fieldName); 133 | 134 | if (isList) { 135 | const arrayBindings = w[1].map(() => '?').join(', '); 136 | query = query.whereRaw(`?? \\?& array[${arrayBindings}]`, [ 137 | `${prefix}.${fieldName}`, 138 | ...w[1] 139 | ]); 140 | } else { 141 | query = query.whereLike(`${prefix}.${fieldName}`, `%${w[1]}%`); 142 | } 143 | } else if (w[0].endsWith('_contains_nocase')) { 144 | query = query.whereILike(`${prefix}.${w[0].slice(0, -16)}`, `%${w[1]}%`); 145 | } else if (w[0].endsWith('_not_in')) { 146 | query = query.not.whereIn(`${prefix}.${w[0].slice(0, -7)}`, w[1]); 147 | } else if (w[0].endsWith('_in')) { 148 | query = query.whereIn(`${prefix}.${w[0].slice(0, -3)}`, w[1]); 149 | } else if (typeof w[1] === 'object' && w[0].endsWith('_')) { 150 | const fieldName = w[0].slice(0, -1); 151 | const nestedReturnType = getNonNullType( 152 | returnType.getFields()[fieldName].type as GraphQLObjectType 153 | ); 154 | const nestedTableName = getTableName(nestedReturnType.name.toLowerCase()); 155 | 156 | const fields = Object.values(nestedReturnType.getFields()) 157 | .filter(field => { 158 | const baseType = getNonNullType(field.type); 159 | 160 | return ( 161 | isScalarType(baseType) || 162 | isObjectType(baseType) || 163 | (isListType(baseType) && !getDerivedFromDirective(field)) 164 | ); 165 | }) 166 | .map(field => field.name); 167 | 168 | nestedEntitiesMappings[fieldName] = { 169 | [`${fieldName}.id`]: `${nestedTableName}.id`, 170 | ...Object.fromEntries( 171 | fields.map(field => [`${fieldName}.${field}`, `${nestedTableName}.${field}`]) 172 | ) 173 | }; 174 | 175 | query = query 176 | .columns(nestedEntitiesMappings[fieldName]) 177 | .innerJoin(nestedTableName, `${tableName}.${fieldName}`, '=', `${nestedTableName}.id`) 178 | .whereRaw('?? 
= ??', [`${tableName}._indexer`, `${nestedTableName}._indexer`]); 179 | 180 | query = applyQueryFilter(query, nestedTableName, { 181 | block: args.block, 182 | indexer: args.indexer 183 | }); 184 | 185 | handleWhere(query, nestedTableName, w[1]); 186 | } else { 187 | const fieldName = w[0]; 188 | const isList = isFieldList(fieldName); 189 | 190 | if (isList) { 191 | query = query.whereRaw(`:field: @> :value::jsonb AND :field: <@ :value::jsonb`, { 192 | field: `${prefix}.${fieldName}`, 193 | value: JSON.stringify(w[1]) 194 | }); 195 | } else { 196 | query = query.where(`${prefix}.${fieldName}`, w[1]); 197 | } 198 | } 199 | }); 200 | }; 201 | 202 | if (args.where) { 203 | handleWhere(query, tableName, args.where); 204 | } 205 | 206 | if (args.orderBy) { 207 | query = query.orderBy( 208 | `${tableName}.${args.orderBy}`, 209 | args.orderDirection?.toLowerCase() || 'desc' 210 | ); 211 | } 212 | 213 | query = applyDefaultOrder(query, tableName); 214 | 215 | query = query.limit(args?.first || 1000).offset(args?.skip || 0); 216 | log.debug({ sql: query.toQuery(), args }, 'executing multi query'); 217 | 218 | const result = await query; 219 | return result.map(item => { 220 | const nested = Object.fromEntries( 221 | Object.entries(nestedEntitiesMappings).map(([fieldName, mapping]) => { 222 | return [ 223 | fieldName, 224 | Object.fromEntries( 225 | Object.entries(mapping).map(([to, from]) => { 226 | const exploded = from.split('.'); 227 | const key = exploded[exploded.length - 1]; 228 | 229 | return [key, item[to]]; 230 | }) 231 | ) 232 | ]; 233 | }) 234 | ); 235 | 236 | return { 237 | ...formatItem(item, jsonFields), 238 | ...nested, 239 | _args: { 240 | block: args.block, 241 | indexer: args.indexer 242 | } 243 | }; 244 | }); 245 | } 246 | 247 | export async function querySingle( 248 | parent: SingleEntitySource | undefined, 249 | args: SingleEntityResolverArgs, 250 | context: ResolverContext, 251 | info: GraphQLResolveInfo 252 | ): Promise { 253 | const queryFilter = { 254 | block: parent?._args.block ?? args.block, 255 | indexer: parent?._args.indexer ?? 
args.indexer 256 | }; 257 | 258 | const returnType = getNonNullType(info.returnType) as GraphQLObjectType; 259 | const jsonFields = getJsonFields(returnType); 260 | 261 | const parentResolvedValue = parent?.[info.fieldName]; 262 | 263 | if (parentResolvedValue === null) return null; 264 | const alreadyResolvedInParent = typeof parentResolvedValue === 'object'; 265 | if (alreadyResolvedInParent) { 266 | return { 267 | ...formatItem(parentResolvedValue, jsonFields), 268 | _args: queryFilter 269 | }; 270 | } 271 | 272 | const parsed = parseResolveInfo(info); 273 | if (parsed && parentResolvedValue) { 274 | // @ts-ignore 275 | const simplified = simplifyParsedResolveInfoFragmentWithType(parsed, returnType); 276 | 277 | if (Object.keys(simplified.fields).length === 1 && simplified.fields['id']) { 278 | return { id: parentResolvedValue, _args: queryFilter }; 279 | } 280 | } 281 | 282 | const id = parentResolvedValue || args.id; 283 | const items = await context.getLoader(returnType.name.toLowerCase(), 'id', queryFilter).load(id); 284 | if (items.length === 0) { 285 | throw new Error(`Row not found: ${id}`); 286 | } 287 | 288 | return { 289 | ...formatItem(items[0], jsonFields), 290 | _args: queryFilter 291 | }; 292 | } 293 | 294 | export const getNestedResolver = 295 | (columnName: string) => 296 | async ( 297 | parent: Result, 298 | args: unknown, 299 | context: ResolverContext, 300 | info: GraphQLResolveInfo 301 | ): Promise => { 302 | const { getLoader } = context; 303 | 304 | const queryFilter = { 305 | block: parent._args?.block, 306 | indexer: parent._args?.indexer 307 | }; 308 | 309 | const returnType = getNonNullType(info.returnType) as 310 | | GraphQLList 311 | | GraphQLList>; 312 | const jsonFields = getJsonFields(getNonNullType(returnType.ofType) as GraphQLObjectType); 313 | 314 | const parentType = getNonNullType(info.parentType) as GraphQLObjectType; 315 | const field = parentType.getFields()[info.fieldName]; 316 | 317 | const fieldType = 318 | info.returnType instanceof GraphQLNonNull ? info.returnType.ofType : info.returnType; 319 | if (!isListType(fieldType)) return []; 320 | 321 | const derivedFromDirective = getDerivedFromDirective(field); 322 | 323 | let result: Record[] = []; 324 | if (!derivedFromDirective) { 325 | const loaderResult = await getLoader(columnName, 'id', queryFilter).loadMany( 326 | parent[info.fieldName] 327 | ); 328 | 329 | // NOTE: loader returns array of arrays when used with loadMany, because in some cases, 330 | // for example when fetching derived entities we expect multiple results for a single id 331 | // this is why we need to flatten it. 
In the future it would be nice to have clearer API 332 | result = loaderResult.flat(); 333 | } else { 334 | const fieldArgument = derivedFromDirective.arguments?.find(arg => arg.name.value === 'field'); 335 | if (!fieldArgument || fieldArgument.value.kind !== 'StringValue') { 336 | throw new Error(`field ${field.name} is missing field in derivedFrom directive`); 337 | } 338 | 339 | result = await getLoader(columnName, fieldArgument.value.value, queryFilter).load(parent.id); 340 | } 341 | 342 | return result.map(item => ({ 343 | ...formatItem(item, jsonFields), 344 | _args: queryFilter 345 | })); 346 | }; 347 | 348 | function getJsonFields(type: GraphQLObjectType) { 349 | return Object.values(type.getFields()).filter(field => { 350 | const baseType = getNonNullType(field.type); 351 | 352 | return isListType(baseType) && baseType.ofType instanceof GraphQLScalarType; 353 | }); 354 | } 355 | 356 | function formatItem(item: Record, jsonFields: GraphQLField[]) { 357 | const formatted = { ...item }; 358 | 359 | jsonFields.forEach(field => { 360 | if (typeof formatted[field.name] === 'string') { 361 | formatted[field.name] = JSON.parse(formatted[field.name]); 362 | } 363 | }); 364 | 365 | return formatted; 366 | } 367 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import Checkpoint from './checkpoint'; 2 | export { LogLevel } from './utils/logger'; 3 | export { createGetLoader } from './graphql'; 4 | export { Model } from './orm'; 5 | export * from './providers'; 6 | export * from './types'; 7 | 8 | export default Checkpoint; 9 | -------------------------------------------------------------------------------- /src/knex.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import knex, { Knex } from 'knex'; 3 | import { ConnectionString } from 'connection-string'; 4 | 5 | export type KnexType = 6 | | { 7 | name: 'integer' | 'bigint' | 'boolean' | 'text' | 'json' | 'jsonb'; 8 | } 9 | | { 10 | name: 'decimal'; 11 | options: [number, number]; 12 | } 13 | | { 14 | name: 'float'; 15 | options: [number]; 16 | } 17 | | { 18 | name: 'string'; 19 | options: [number]; 20 | }; 21 | 22 | const PROTOCOLS = { 23 | postgres: 'pg', 24 | postgresql: 'pg' 25 | }; 26 | 27 | export function getConnectionData(connectionString: string) { 28 | const connectionConfig = new ConnectionString(connectionString); 29 | if (!connectionConfig.protocol || !connectionConfig.hosts || !connectionConfig.path) { 30 | throw new Error('invalid connection string provided'); 31 | } 32 | 33 | const client = PROTOCOLS[connectionConfig.protocol]; 34 | if (!client) { 35 | throw new Error(`Supplied protocol ${connectionConfig.protocol} is not supported`); 36 | } 37 | 38 | const sslConfig: { rejectUnauthorized?: boolean; sslmode?: string; ca?: string } = {}; 39 | if ( 40 | connectionConfig.params?.sslaccept === 'strict' || 41 | connectionConfig.params?.ssl === 'rejectUnauthorized' 42 | ) { 43 | sslConfig.rejectUnauthorized = true; 44 | } 45 | if (connectionConfig.params?.sslmode) { 46 | sslConfig.sslmode = connectionConfig.params.sslmode; 47 | } 48 | 49 | if (process.env.CA_CERT) { 50 | sslConfig.ca = process.env.CA_CERT; 51 | } else if (process.env.CA_CERT_FILE) { 52 | sslConfig.ca = fs.readFileSync(process.env.CA_CERT_FILE).toString(); 53 | } 54 | 55 | return { 56 | client, 57 | connection: { 58 | database: connectionConfig.path[0], 59 | user: 
connectionConfig.user, 60 | password: connectionConfig.password, 61 | host: connectionConfig.hosts[0].name, 62 | port: connectionConfig.hosts[0].port, 63 | ssl: Object.keys(sslConfig).length > 0 ? sslConfig : undefined 64 | } 65 | }; 66 | } 67 | 68 | export function createKnexConfig(connectionString: string): Knex.Config { 69 | return getConnectionData(connectionString); 70 | } 71 | 72 | export function createKnex(config: string | Knex.Config) { 73 | const parsedConfig = typeof config === 'string' ? createKnexConfig(config) : config; 74 | 75 | return knex(parsedConfig); 76 | } 77 | -------------------------------------------------------------------------------- /src/orm/index.ts: -------------------------------------------------------------------------------- 1 | export { default as Model } from './model'; 2 | -------------------------------------------------------------------------------- /src/orm/model.ts: -------------------------------------------------------------------------------- 1 | import { register } from '../register'; 2 | 3 | export default class Model { 4 | private tableName: string; 5 | private indexerName: string; 6 | private values = new Map(); 7 | private valuesImplicitlySet = new Set(); 8 | private exists = false; 9 | 10 | constructor(tableName: string, indexerName: string) { 11 | this.tableName = tableName; 12 | this.indexerName = indexerName; 13 | } 14 | 15 | private async _update() { 16 | const knex = register.getKnex(); 17 | const currentBlock = register.getCurrentBlock(this.indexerName); 18 | 19 | const diff = Object.fromEntries( 20 | [...this.values.entries()].filter(([key]) => this.valuesImplicitlySet.has(key)) 21 | ); 22 | 23 | return knex.transaction(async trx => { 24 | await trx 25 | .table(this.tableName) 26 | .where('id', this.get('id')) 27 | .andWhere('_indexer', this.indexerName) 28 | .andWhereRaw('upper_inf(block_range)') 29 | .update({ 30 | block_range: knex.raw('int8range(lower(block_range), ?)', [currentBlock]) 31 | }); 32 | 33 | const newEntity = { 34 | ...Object.fromEntries(this.values.entries()), 35 | ...diff 36 | }; 37 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 38 | const { uid, ...currentValues } = newEntity; 39 | 40 | await trx.table(this.tableName).insert({ 41 | ...currentValues, 42 | block_range: knex.raw('int8range(?, NULL)', [currentBlock]) 43 | }); 44 | }); 45 | } 46 | 47 | private async _insert() { 48 | const currentBlock = register.getCurrentBlock(this.indexerName); 49 | 50 | const entity = Object.fromEntries(this.values.entries()); 51 | 52 | return register 53 | .getKnex() 54 | .table(this.tableName) 55 | .insert({ 56 | ...entity, 57 | _indexer: this.indexerName, 58 | block_range: register.getKnex().raw('int8range(?, NULL)', [currentBlock]) 59 | }); 60 | } 61 | 62 | private async _delete() { 63 | const currentBlock = register.getCurrentBlock(this.indexerName); 64 | 65 | return register 66 | .getKnex() 67 | .table(this.tableName) 68 | .where('id', this.get('id')) 69 | .andWhere('_indexer', this.indexerName) 70 | .andWhereRaw('upper_inf(block_range)') 71 | .update({ 72 | block_range: register.getKnex().raw('int8range(lower(block_range), ?)', [currentBlock]) 73 | }); 74 | } 75 | 76 | setExists() { 77 | this.exists = true; 78 | } 79 | 80 | initialSet(key: string, value: any) { 81 | this.values.set(key, value); 82 | } 83 | 84 | get(key: string): any { 85 | return this.values.get(key) ?? 
null; 86 | } 87 | 88 | set(key: string, value: any) { 89 | this.values.set(key, value); 90 | this.valuesImplicitlySet.add(key); 91 | } 92 | 93 | static async _loadEntity( 94 | tableName: string, 95 | id: string | number, 96 | indexerName: string 97 | ): Promise | null> { 98 | const knex = register.getKnex(); 99 | 100 | const entity = await knex 101 | .table(tableName) 102 | .select('*') 103 | .where('id', id) 104 | .andWhere('_indexer', indexerName) 105 | .andWhereRaw('upper_inf(block_range)') 106 | .first(); 107 | if (!entity) return null; 108 | 109 | return entity; 110 | } 111 | 112 | async save() { 113 | if (this.exists) return this._update(); 114 | return this._insert(); 115 | } 116 | 117 | async delete() { 118 | if (this.exists) this._delete(); 119 | } 120 | } 121 | -------------------------------------------------------------------------------- /src/pg.ts: -------------------------------------------------------------------------------- 1 | import { Pool } from 'pg'; 2 | import { getConnectionData } from './knex'; 3 | 4 | /** 5 | * Attempts to connect to the database by the connection string. 6 | * 7 | * This returns a pg pool connection object. 8 | */ 9 | export const createPgPool = (connectionString: string): Pool => { 10 | const { connection } = getConnectionData(connectionString); 11 | 12 | const config = { 13 | ...connection, 14 | connectionTimeoutMillis: 30000 // 30 seconds 15 | }; 16 | 17 | return new Pool(config); 18 | }; 19 | -------------------------------------------------------------------------------- /src/providers/base.ts: -------------------------------------------------------------------------------- 1 | import { CheckpointRecord } from '../stores/checkpoints'; 2 | import { Logger } from '../utils/logger'; 3 | import { CheckpointConfig, ContractSourceConfig } from '../types'; 4 | 5 | export type Instance = { 6 | config: CheckpointConfig; 7 | getCurrentSources(blockNumber: number): ContractSourceConfig[]; 8 | setBlockHash(blockNum: number, hash: string); 9 | setLastIndexedBlock(blockNum: number); 10 | insertCheckpoints(checkpoints: CheckpointRecord[]); 11 | getWriterHelpers(): { 12 | executeTemplate( 13 | template: string, 14 | config: { contract: string; start: number }, 15 | persist?: boolean 16 | ); 17 | }; 18 | }; 19 | 20 | export class BlockNotFoundError extends Error { 21 | constructor() { 22 | super('Block not found'); 23 | this.name = 'BlockNotFoundError'; 24 | } 25 | } 26 | 27 | export class ReorgDetectedError extends Error { 28 | constructor() { 29 | super('Reorg detected'); 30 | this.name = 'ReorgDetectedError'; 31 | } 32 | } 33 | 34 | export class BaseProvider { 35 | protected readonly instance: Instance; 36 | protected readonly log: Logger; 37 | protected readonly abis: Record = {}; 38 | 39 | constructor({ 40 | instance, 41 | log, 42 | abis 43 | }: { 44 | instance: Instance; 45 | log: Logger; 46 | abis?: Record; 47 | }) { 48 | this.instance = instance; 49 | this.log = log; 50 | if (abis) { 51 | this.abis = abis; 52 | } 53 | } 54 | 55 | init(): Promise { 56 | throw new Error('init method was not defined'); 57 | } 58 | 59 | formatAddresses(addresses: string[]): string[] { 60 | throw new Error( 61 | `formatAddresses method was not defined when formatting ${addresses.length} addresses` 62 | ); 63 | } 64 | 65 | getNetworkIdentifier(): Promise { 66 | throw new Error('getNetworkIdentifier method was not defined'); 67 | } 68 | 69 | getLatestBlockNumber(): Promise { 70 | throw new Error('getLatestBlockNumber method was not defined'); 71 | } 72 | 73 | 
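  // Default implementation throws; concrete providers (EvmProvider, StarknetProvider) override this.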
getBlockHash(blockNumber: number): Promise { 74 | throw new Error( 75 | `getBlockHash method was not defined when getting block hash for block ${blockNumber}` 76 | ); 77 | } 78 | 79 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 80 | processBlock(blockNum: number, parentHash: string | null): Promise { 81 | throw new Error(`processBlock method was not defined when fetching block ${blockNum}`); 82 | } 83 | 84 | processPool(blockNumber: number) { 85 | throw new Error( 86 | `processPool method was not defined when fetching pool for block ${blockNumber}` 87 | ); 88 | } 89 | 90 | async getCheckpointsRange(fromBlock: number, toBlock: number): Promise { 91 | throw new Error( 92 | `getCheckpointsRange method was not defined when fetching events from ${fromBlock} to ${toBlock}` 93 | ); 94 | } 95 | } 96 | 97 | export class BaseIndexer { 98 | protected provider?: BaseProvider; 99 | 100 | // eslint-disable-next-line @typescript-eslint/no-unused-vars 101 | init({ instance, log, abis }: { instance: Instance; log: Logger; abis?: Record }) { 102 | throw new Error('init method was not defined'); 103 | } 104 | 105 | public getProvider() { 106 | if (!this.provider) { 107 | throw new Error('Provider not initialized'); 108 | } 109 | 110 | return this.provider; 111 | } 112 | 113 | public getHandlers(): string[] { 114 | throw new Error('getHandlers method was not defined'); 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/providers/evm/index.ts: -------------------------------------------------------------------------------- 1 | export { EvmProvider } from './provider'; 2 | export { EvmIndexer } from './indexer'; 3 | export * from './types'; 4 | -------------------------------------------------------------------------------- /src/providers/evm/indexer.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from '../../utils/logger'; 2 | import { Instance, BaseIndexer } from '../base'; 3 | import { EvmProvider } from './provider'; 4 | import { Writer } from './types'; 5 | 6 | export class EvmIndexer extends BaseIndexer { 7 | private writers: Record; 8 | 9 | constructor(writers: Record) { 10 | super(); 11 | this.writers = writers; 12 | } 13 | 14 | init({ instance, log, abis }: { instance: Instance; log: Logger; abis?: Record }) { 15 | this.provider = new EvmProvider({ instance, log, abis, writers: this.writers }); 16 | } 17 | 18 | public getHandlers(): string[] { 19 | return Object.keys(this.writers); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/providers/evm/provider.ts: -------------------------------------------------------------------------------- 1 | import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; 2 | import { getAddress } from '@ethersproject/address'; 3 | import { Formatter, Log, Provider, StaticJsonRpcProvider } from '@ethersproject/providers'; 4 | import { Interface, LogDescription } from '@ethersproject/abi'; 5 | import { keccak256 } from '@ethersproject/keccak256'; 6 | import { toUtf8Bytes } from '@ethersproject/strings'; 7 | import { CheckpointRecord } from '../../stores/checkpoints'; 8 | import { Writer } from './types'; 9 | import { ContractSourceConfig } from '../../types'; 10 | import { sleep } from '../../utils/helpers'; 11 | 12 | type BlockWithTransactions = Awaited>; 13 | type Transaction = BlockWithTransactions['transactions'][number]; 14 | type EventsMap = Record; 15 | 16 | type 
GetLogsBlockHashFilter = { 17 | blockHash: string; 18 | }; 19 | 20 | type GetLogsBlockRangeFilter = { 21 | fromBlock: number; 22 | toBlock: number; 23 | }; 24 | 25 | const MAX_BLOCKS_PER_REQUEST = 10000; 26 | 27 | class CustomJsonRpcError extends Error { 28 | constructor(message: string, public code: number, public data: any) { 29 | super(message); 30 | } 31 | } 32 | 33 | export class EvmProvider extends BaseProvider { 34 | private readonly provider: Provider; 35 | /** 36 | * Formatter instance from ethers.js used to format raw responses. 37 | */ 38 | private readonly formatter = new Formatter(); 39 | private readonly writers: Record; 40 | private processedPoolTransactions = new Set(); 41 | private startupLatestBlockNumber: number | undefined; 42 | private sourceHashes = new Map(); 43 | 44 | constructor({ 45 | instance, 46 | log, 47 | abis, 48 | writers 49 | }: ConstructorParameters[0] & { writers: Record }) { 50 | super({ instance, log, abis }); 51 | 52 | this.provider = new StaticJsonRpcProvider(this.instance.config.network_node_url); 53 | this.writers = writers; 54 | } 55 | 56 | formatAddresses(addresses: string[]): string[] { 57 | return addresses.map(address => getAddress(address)); 58 | } 59 | 60 | public async init() { 61 | this.startupLatestBlockNumber = await this.getLatestBlockNumber(); 62 | } 63 | 64 | async getNetworkIdentifier(): Promise { 65 | const result = await this.provider.getNetwork(); 66 | return `evm_${result.chainId}`; 67 | } 68 | 69 | async getLatestBlockNumber(): Promise { 70 | return this.provider.getBlockNumber(); 71 | } 72 | 73 | async getBlockHash(blockNumber: number) { 74 | const block = await this.provider.getBlock(blockNumber); 75 | return block.hash; 76 | } 77 | 78 | async processBlock(blockNum: number, parentHash: string | null) { 79 | let block: BlockWithTransactions | null; 80 | let eventsMap: EventsMap; 81 | try { 82 | block = await this.provider.getBlockWithTransactions(blockNum); 83 | } catch (e) { 84 | this.log.error({ blockNumber: blockNum, err: e }, 'getting block failed... retrying'); 85 | throw e; 86 | } 87 | 88 | if (block === null) { 89 | this.log.info({ blockNumber: blockNum }, 'block not found'); 90 | throw new BlockNotFoundError(); 91 | } 92 | 93 | try { 94 | eventsMap = await this.getEvents(block.hash); 95 | } catch (e: unknown) { 96 | if (e instanceof CustomJsonRpcError && e.code === -32000) { 97 | this.log.info({ blockNumber: blockNum }, 'block events not found'); 98 | throw new BlockNotFoundError(); 99 | } 100 | 101 | this.log.error({ blockNumber: blockNum, err: e }, 'getting events failed... retrying'); 102 | throw e; 103 | } 104 | 105 | if (parentHash && block.parentHash !== parentHash) { 106 | this.log.error({ blockNumber: blockNum }, 'reorg detected'); 107 | throw new ReorgDetectedError(); 108 | } 109 | 110 | await this.handleBlock(block, eventsMap); 111 | 112 | await this.instance.setBlockHash(blockNum, block.hash); 113 | 114 | await this.instance.setLastIndexedBlock(block.number); 115 | 116 | return blockNum + 1; 117 | } 118 | 119 | private async handleBlock(block: BlockWithTransactions, eventsMap: EventsMap) { 120 | this.log.info({ blockNumber: block.number }, 'handling block'); 121 | 122 | const txsToCheck = block.transactions.filter( 123 | tx => !this.processedPoolTransactions.has(tx.hash) 124 | ); 125 | 126 | for (const [i, tx] of txsToCheck.entries()) { 127 | await this.handleTx(block, block.number, i, tx, tx.hash ? 
eventsMap[tx.hash] || [] : []); 128 | } 129 | 130 | this.processedPoolTransactions.clear(); 131 | 132 | this.log.debug({ blockNumber: block.number }, 'handling block done'); 133 | } 134 | 135 | private async handleTx( 136 | block: BlockWithTransactions | null, 137 | blockNumber: number, 138 | txIndex: number, 139 | tx: Transaction, 140 | logs: Log[] 141 | ) { 142 | this.log.debug({ txIndex }, 'handling transaction'); 143 | 144 | const helpers = await this.instance.getWriterHelpers(); 145 | 146 | if (this.instance.config.tx_fn) { 147 | await this.writers[this.instance.config.tx_fn]({ 148 | blockNumber, 149 | block, 150 | tx, 151 | helpers 152 | }); 153 | } 154 | 155 | if (this.instance.config.global_events) { 156 | const globalEventHandlers = this.instance.config.global_events.reduce((handlers, event) => { 157 | handlers[this.getEventHash(event.name)] = { 158 | name: event.name, 159 | fn: event.fn 160 | }; 161 | return handlers; 162 | }, {}); 163 | 164 | for (const [eventIndex, event] of logs.entries()) { 165 | const handler = globalEventHandlers[event.topics[0]]; 166 | if (!handler) continue; 167 | 168 | this.log.info( 169 | { contract: event.address, event: handler.name, handlerFn: handler.fn }, 170 | 'found contract event' 171 | ); 172 | 173 | await this.writers[handler.fn]({ 174 | block, 175 | blockNumber, 176 | tx, 177 | rawEvent: event, 178 | eventIndex, 179 | helpers 180 | }); 181 | } 182 | } 183 | 184 | let lastSources = this.instance.getCurrentSources(blockNumber); 185 | let sourcesQueue = [...lastSources]; 186 | 187 | let source: ContractSourceConfig | undefined; 188 | while ((source = sourcesQueue.shift())) { 189 | let foundContractData = false; 190 | for (const [eventIndex, log] of logs.entries()) { 191 | if (this.compareAddress(source.contract, log.address)) { 192 | for (const sourceEvent of source.events) { 193 | const targetTopic = this.getEventHash(sourceEvent.name); 194 | 195 | if (targetTopic === log.topics[0]) { 196 | foundContractData = true; 197 | this.log.info( 198 | { contract: source.contract, event: sourceEvent.name, handlerFn: sourceEvent.fn }, 199 | 'found contract event' 200 | ); 201 | 202 | let parsedEvent: LogDescription | undefined; 203 | if (source.abi && this.abis?.[source.abi]) { 204 | const iface = new Interface(this.abis[source.abi]); 205 | try { 206 | parsedEvent = iface.parseLog(log); 207 | } catch (err) { 208 | this.log.warn( 209 | { contract: source.contract, txType: tx.type, handlerFn: source.deploy_fn }, 210 | 'failed to parse event' 211 | ); 212 | } 213 | } 214 | 215 | await this.writers[sourceEvent.fn]({ 216 | source, 217 | block, 218 | blockNumber, 219 | tx, 220 | rawEvent: log, 221 | event: parsedEvent, 222 | eventIndex, 223 | helpers 224 | }); 225 | } 226 | } 227 | } 228 | } 229 | 230 | if (foundContractData) { 231 | await this.instance.insertCheckpoints([ 232 | { blockNumber, contractAddress: getAddress(source.contract) } 233 | ]); 234 | 235 | const nextSources = this.instance.getCurrentSources(blockNumber); 236 | sourcesQueue = sourcesQueue.concat(nextSources.slice(lastSources.length)); 237 | lastSources = this.instance.getCurrentSources(blockNumber); 238 | } 239 | } 240 | 241 | this.log.debug({ txIndex }, 'handling transaction done'); 242 | } 243 | 244 | private async getEvents(blockHash: string): Promise { 245 | const events = await this._getLogs({ 246 | blockHash 247 | }); 248 | 249 | return events.reduce((acc, event) => { 250 | if (!acc[event.transactionHash]) acc[event.transactionHash] = []; 251 | 252 | acc[event.transactionHash] = 
acc[event.transactionHash].concat(event); 253 | 254 | return acc; 255 | }, {}); 256 | } 257 | 258 | /** 259 | * This method is simpler implementation of getLogs method. 260 | * This allows using two filters that are not supported in ethers v5: 261 | * - `blockHash` to get logs for a specific block - if node doesn't know about that block it will fail. 262 | * - `address` as a single address or an array of addresses. 263 | * @param filter Logs filter 264 | */ 265 | private async _getLogs( 266 | filter: (GetLogsBlockHashFilter | GetLogsBlockRangeFilter) & { 267 | address?: string | string[]; 268 | } 269 | ): Promise { 270 | const params: { 271 | fromBlock?: string; 272 | toBlock?: string; 273 | blockHash?: string; 274 | address?: string | string[]; 275 | } = {}; 276 | 277 | if ('blockHash' in filter) { 278 | params.blockHash = filter.blockHash; 279 | } 280 | 281 | if ('fromBlock' in filter) { 282 | params.fromBlock = `0x${filter.fromBlock.toString(16)}`; 283 | } 284 | 285 | if ('toBlock' in filter) { 286 | params.toBlock = `0x${filter.toBlock.toString(16)}`; 287 | } 288 | 289 | if ('address' in filter) { 290 | params.address = filter.address; 291 | } 292 | 293 | const res = await fetch(this.instance.config.network_node_url, { 294 | method: 'POST', 295 | headers: { 296 | 'Content-Type': 'application/json' 297 | }, 298 | body: JSON.stringify({ 299 | jsonrpc: '2.0', 300 | id: 1, 301 | method: 'eth_getLogs', 302 | params: [params] 303 | }) 304 | }); 305 | 306 | if (!res.ok) { 307 | throw new Error(`Request failed: ${res.statusText}`); 308 | } 309 | 310 | const json = await res.json(); 311 | 312 | if (json.error) { 313 | throw new CustomJsonRpcError(json.error.message, json.error.code, json.error.data); 314 | } 315 | 316 | return Formatter.arrayOf(this.formatter.filterLog.bind(this.formatter))(json.result); 317 | } 318 | 319 | async getLogs(fromBlock: number, toBlock: number, address: string | string[]) { 320 | const result = [] as Log[]; 321 | 322 | let currentFrom = fromBlock; 323 | let currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); 324 | while (true) { 325 | try { 326 | const logs = await this._getLogs({ 327 | fromBlock: currentFrom, 328 | toBlock: currentTo, 329 | address 330 | }); 331 | 332 | result.push(...logs); 333 | 334 | if (currentTo === toBlock) break; 335 | currentFrom = currentTo + 1; 336 | currentTo = Math.min(toBlock, currentFrom + MAX_BLOCKS_PER_REQUEST); 337 | } catch (e: unknown) { 338 | // Handle Infura response size hint 339 | if (e instanceof CustomJsonRpcError) { 340 | if (e.code === -32005) { 341 | currentFrom = parseInt(e.data.from, 16); 342 | currentTo = Math.min(parseInt(e.data.to, 16), currentFrom + MAX_BLOCKS_PER_REQUEST); 343 | continue; 344 | } 345 | } 346 | 347 | this.log.error( 348 | { fromBlock: currentFrom, toBlock: currentTo, address, err: e }, 349 | 'getLogs failed' 350 | ); 351 | 352 | await sleep(5000); 353 | } 354 | } 355 | 356 | return result.map(log => ({ 357 | blockNumber: log.blockNumber, 358 | contractAddress: log.address 359 | })); 360 | } 361 | 362 | async getCheckpointsRange(fromBlock: number, toBlock: number): Promise { 363 | const sourceAddresses = this.instance 364 | .getCurrentSources(fromBlock) 365 | .map(source => source.contract); 366 | 367 | const chunks: string[][] = []; 368 | for (let i = 0; i < sourceAddresses.length; i += 20) { 369 | chunks.push(sourceAddresses.slice(i, i + 20)); 370 | } 371 | 372 | let events: CheckpointRecord[] = []; 373 | for (const chunk of chunks) { 374 | const chunkEvents = await 
this.getLogs(fromBlock, toBlock, chunk); 375 | events = events.concat(chunkEvents); 376 | } 377 | 378 | return events; 379 | } 380 | 381 | getEventHash(eventName: string) { 382 | if (!this.sourceHashes.has(eventName)) { 383 | this.sourceHashes.set(eventName, keccak256(toUtf8Bytes(eventName))); 384 | } 385 | 386 | return this.sourceHashes.get(eventName) as string; 387 | } 388 | 389 | compareAddress(a: string, b: string) { 390 | return a.toLowerCase() === b.toLowerCase(); 391 | } 392 | } 393 | -------------------------------------------------------------------------------- /src/providers/evm/types.ts: -------------------------------------------------------------------------------- 1 | import { Provider, Log } from '@ethersproject/providers'; 2 | import { LogDescription } from '@ethersproject/abi'; 3 | import { BaseWriterParams } from '../../types'; 4 | 5 | type BlockWithTransactions = Awaited>; 6 | type Transaction = BlockWithTransactions['transactions'][number]; 7 | 8 | export type Writer = ( 9 | args: { 10 | tx: Transaction; 11 | block: BlockWithTransactions | null; 12 | rawEvent?: Log; 13 | event?: LogDescription; 14 | } & BaseWriterParams 15 | ) => Promise; 16 | -------------------------------------------------------------------------------- /src/providers/index.ts: -------------------------------------------------------------------------------- 1 | export * from './base'; 2 | export * as starknet from './starknet'; 3 | export * as evm from './evm'; 4 | -------------------------------------------------------------------------------- /src/providers/starknet/index.ts: -------------------------------------------------------------------------------- 1 | export { StarknetProvider } from './provider'; 2 | export { StarknetIndexer } from './indexer'; 3 | export * from './types'; 4 | -------------------------------------------------------------------------------- /src/providers/starknet/indexer.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from '../../utils/logger'; 2 | import { Instance, BaseIndexer } from '../base'; 3 | import { StarknetProvider } from '.'; 4 | import { Writer } from './types'; 5 | 6 | export class StarknetIndexer extends BaseIndexer { 7 | private writers: Record; 8 | 9 | constructor(writers: Record) { 10 | super(); 11 | this.writers = writers; 12 | } 13 | 14 | init({ instance, log, abis }: { instance: Instance; log: Logger; abis?: Record }) { 15 | this.provider = new StarknetProvider({ instance, log, abis, writers: this.writers }); 16 | } 17 | 18 | public getHandlers(): string[] { 19 | return Object.keys(this.writers); 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/providers/starknet/provider.ts: -------------------------------------------------------------------------------- 1 | import { RpcProvider, hash, validateAndParseAddress } from 'starknet'; 2 | import { BaseProvider, BlockNotFoundError, ReorgDetectedError } from '../base'; 3 | import { parseEvent } from './utils'; 4 | import { CheckpointRecord } from '../../stores/checkpoints'; 5 | import { 6 | Block, 7 | FullBlock, 8 | Transaction, 9 | PendingTransaction, 10 | Event, 11 | EventsMap, 12 | ParsedEvent, 13 | isFullBlock, 14 | isDeployTransaction, 15 | Writer 16 | } from './types'; 17 | import { ContractSourceConfig } from '../../types'; 18 | import { sleep } from '../../utils/helpers'; 19 | 20 | export class StarknetProvider extends BaseProvider { 21 | private readonly provider: RpcProvider; 22 | 
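  // Writer callbacks keyed by handler name; looked up via the tx_fn, deploy_fn and event fn
  // values from the checkpoint config when transactions and events are processed.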
private readonly writers: Record; 23 | private seenPoolTransactions = new Set(); 24 | private processedTransactions = new Set(); 25 | private startupLatestBlockNumber: number | undefined; 26 | private sourceHashes = new Map(); 27 | 28 | constructor({ 29 | instance, 30 | log, 31 | abis, 32 | writers 33 | }: ConstructorParameters[0] & { 34 | writers: Record; 35 | }) { 36 | super({ instance, log, abis }); 37 | 38 | this.provider = new RpcProvider({ 39 | nodeUrl: this.instance.config.network_node_url 40 | }); 41 | this.writers = writers; 42 | } 43 | 44 | public async init() { 45 | this.startupLatestBlockNumber = await this.getLatestBlockNumber(); 46 | } 47 | 48 | formatAddresses(addresses: string[]): string[] { 49 | return addresses.map(address => validateAndParseAddress(address)); 50 | } 51 | 52 | async getNetworkIdentifier(): Promise { 53 | const result = await this.provider.getChainId(); 54 | return `starknet_${result}`; 55 | } 56 | 57 | async getLatestBlockNumber(): Promise { 58 | return this.provider.getBlockNumber(); 59 | } 60 | 61 | async getBlockHash(blockNumber: number) { 62 | const block = await this.provider.getBlock(blockNumber); 63 | return block.block_hash; 64 | } 65 | 66 | async processBlock(blockNum: number, parentHash: string | null) { 67 | let block: Block; 68 | let blockEvents: EventsMap; 69 | try { 70 | [block, blockEvents] = await Promise.all([ 71 | this.provider.getBlockWithTxs(blockNum), 72 | this.getEvents(blockNum) 73 | ]); 74 | } catch (e) { 75 | if ((e as Error).message.includes('Block not found')) { 76 | this.log.info({ blockNumber: blockNum }, 'block not found'); 77 | throw new BlockNotFoundError(); 78 | } 79 | 80 | this.log.error({ blockNumber: blockNum, err: e }, 'getting block failed... retrying'); 81 | throw e; 82 | } 83 | 84 | if (parentHash && block.parent_hash !== parentHash) { 85 | this.log.error({ blockNumber: blockNum }, 'reorg detected'); 86 | throw new ReorgDetectedError(); 87 | } 88 | 89 | if (!isFullBlock(block) || block.block_number !== blockNum) { 90 | this.log.error({ blockNumber: blockNum }, 'invalid block'); 91 | throw new Error('invalid block'); 92 | } 93 | 94 | await this.handleBlock(block, blockEvents); 95 | 96 | if (isFullBlock(block)) { 97 | await this.instance.setBlockHash(blockNum, block.block_hash); 98 | } 99 | 100 | await this.instance.setLastIndexedBlock(block.block_number); 101 | 102 | return blockNum + 1; 103 | } 104 | 105 | async processPool(blockNumber: number) { 106 | const block = await this.provider.getBlockWithTxs('pending'); 107 | const receipts = await Promise.all( 108 | block.transactions.map(async tx => { 109 | if (!tx.transaction_hash || this.seenPoolTransactions.has(tx.transaction_hash)) { 110 | return null; 111 | } 112 | 113 | try { 114 | return await this.provider.getTransactionReceipt(tx.transaction_hash); 115 | } catch (err) { 116 | this.log.warn( 117 | { transactionHash: tx.transaction_hash, err }, 118 | 'getting transaction receipt failed' 119 | ); 120 | return null; 121 | } 122 | }) 123 | ); 124 | 125 | const txsWithReceipts = block.transactions.filter((_, index) => receipts[index] !== null); 126 | const eventsMap = receipts.reduce((acc, receipt) => { 127 | if (receipt === null) return acc; 128 | 129 | acc[receipt.transaction_hash] = receipt.events; 130 | return acc; 131 | }, {}); 132 | 133 | await this.handlePool(txsWithReceipts, eventsMap, blockNumber); 134 | } 135 | 136 | private async handleBlock(block: FullBlock, eventsMap: EventsMap) { 137 | this.log.info({ blockNumber: block.block_number }, 'handling block'); 
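    // Transactions already handled while processing the pending pool are filtered out below;
    // seenPoolTransactions is cleared once the whole block has been handled.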
138 | 139 | const txsToCheck = block.transactions.filter( 140 | tx => !this.seenPoolTransactions.has(tx.transaction_hash) 141 | ); 142 | 143 | for (const [i, tx] of txsToCheck.entries()) { 144 | await this.handleTx( 145 | block, 146 | block.block_number, 147 | i, 148 | tx, 149 | tx.transaction_hash ? eventsMap[tx.transaction_hash] || [] : [] 150 | ); 151 | } 152 | 153 | this.seenPoolTransactions.clear(); 154 | 155 | this.log.debug({ blockNumber: block.block_number }, 'handling block done'); 156 | } 157 | 158 | private async handlePool(txs: PendingTransaction[], eventsMap: EventsMap, blockNumber: number) { 159 | this.log.info('handling pool'); 160 | 161 | for (const [i, tx] of txs.entries()) { 162 | await this.handleTx( 163 | null, 164 | blockNumber, 165 | i, 166 | tx, 167 | tx.transaction_hash ? eventsMap[tx.transaction_hash] || [] : [] 168 | ); 169 | 170 | this.seenPoolTransactions.add(tx.transaction_hash); 171 | } 172 | 173 | this.log.info('handling pool done'); 174 | } 175 | 176 | private async handleTx( 177 | block: FullBlock | null, 178 | blockNumber: number, 179 | txIndex: number, 180 | tx: Transaction, 181 | events: Event[] 182 | ) { 183 | this.log.debug({ txIndex }, 'handling transaction'); 184 | 185 | if (this.processedTransactions.has(tx.transaction_hash)) { 186 | this.log.warn({ hash: tx.transaction_hash }, 'transaction already processed'); 187 | return; 188 | } 189 | 190 | let wasTransactionProcessed = false; 191 | const helpers = await this.instance.getWriterHelpers(); 192 | 193 | if (this.instance.config.tx_fn) { 194 | await this.writers[this.instance.config.tx_fn]({ 195 | blockNumber, 196 | block, 197 | tx, 198 | helpers 199 | }); 200 | 201 | wasTransactionProcessed = true; 202 | } 203 | 204 | if (this.instance.config.global_events) { 205 | const globalEventHandlers = this.instance.config.global_events.reduce((handlers, event) => { 206 | handlers[this.getEventHash(event.name)] = { 207 | name: event.name, 208 | fn: event.fn 209 | }; 210 | return handlers; 211 | }, {}); 212 | 213 | for (const [eventIndex, event] of events.entries()) { 214 | const handler = globalEventHandlers[event.keys[0]]; 215 | if (!handler) continue; 216 | 217 | this.log.info( 218 | { contract: event.from_address, event: handler.name, handlerFn: handler.fn }, 219 | 'found contract event' 220 | ); 221 | 222 | await this.writers[handler.fn]({ 223 | block, 224 | blockNumber, 225 | tx, 226 | rawEvent: event, 227 | eventIndex, 228 | helpers 229 | }); 230 | 231 | wasTransactionProcessed = true; 232 | } 233 | } 234 | 235 | let lastSources = this.instance.getCurrentSources(blockNumber); 236 | let sourcesQueue = [...lastSources]; 237 | 238 | let source: ContractSourceConfig | undefined; 239 | while ((source = sourcesQueue.shift())) { 240 | let foundContractData = false; 241 | const contract = validateAndParseAddress(source.contract); 242 | 243 | if ( 244 | isDeployTransaction(tx) && 245 | source.deploy_fn && 246 | contract === validateAndParseAddress(tx.contract_address) 247 | ) { 248 | this.log.info( 249 | { contract: source.contract, txType: tx.type, handlerFn: source.deploy_fn }, 250 | 'found deployment transaction' 251 | ); 252 | 253 | await this.writers[source.deploy_fn]({ 254 | source, 255 | block, 256 | blockNumber, 257 | tx, 258 | helpers 259 | }); 260 | 261 | wasTransactionProcessed = true; 262 | } 263 | 264 | for (const [eventIndex, event] of events.entries()) { 265 | if (contract === validateAndParseAddress(event.from_address)) { 266 | for (const sourceEvent of source.events) { 267 | if 
(this.getEventHash(sourceEvent.name) === event.keys[0]) { 268 | foundContractData = true; 269 | this.log.info( 270 | { contract: source.contract, event: sourceEvent.name, handlerFn: sourceEvent.fn }, 271 | 'found contract event' 272 | ); 273 | 274 | let parsedEvent: ParsedEvent | undefined; 275 | if (source.abi && this.abis?.[source.abi]) { 276 | try { 277 | parsedEvent = parseEvent(this.abis[source.abi], event); 278 | } catch (err) { 279 | this.log.warn( 280 | { contract: source.contract, txType: tx.type, handlerFn: source.deploy_fn }, 281 | 'failed to parse event' 282 | ); 283 | } 284 | } 285 | 286 | await this.writers[sourceEvent.fn]({ 287 | source, 288 | block, 289 | blockNumber, 290 | tx, 291 | rawEvent: event, 292 | event: parsedEvent, 293 | eventIndex, 294 | helpers 295 | }); 296 | 297 | wasTransactionProcessed = true; 298 | } 299 | } 300 | } 301 | } 302 | 303 | if (wasTransactionProcessed) { 304 | this.processedTransactions.add(tx.transaction_hash); 305 | } 306 | 307 | if (foundContractData) { 308 | await this.instance.insertCheckpoints([ 309 | { blockNumber, contractAddress: validateAndParseAddress(source.contract) } 310 | ]); 311 | 312 | const nextSources = this.instance.getCurrentSources(blockNumber); 313 | sourcesQueue = sourcesQueue.concat(nextSources.slice(lastSources.length)); 314 | lastSources = this.instance.getCurrentSources(blockNumber); 315 | } 316 | } 317 | 318 | this.log.debug({ txIndex }, 'handling transaction done'); 319 | } 320 | 321 | private async getEvents(blockNumber: number): Promise { 322 | let events: Event[] = []; 323 | 324 | let continuationToken: string | undefined; 325 | do { 326 | const result = await this.provider.getEvents({ 327 | from_block: { block_number: blockNumber }, 328 | to_block: { block_number: blockNumber }, 329 | chunk_size: 1000, 330 | continuation_token: continuationToken 331 | }); 332 | 333 | events = events.concat(result.events); 334 | 335 | continuationToken = result.continuation_token; 336 | } while (continuationToken); 337 | 338 | if ( 339 | events.length === 0 && 340 | this.startupLatestBlockNumber && 341 | blockNumber > this.startupLatestBlockNumber 342 | ) { 343 | throw new BlockNotFoundError(); 344 | } 345 | 346 | return events.reduce((acc, event) => { 347 | if (!acc[event.transaction_hash]) acc[event.transaction_hash] = []; 348 | 349 | acc[event.transaction_hash].push(event); 350 | 351 | return acc; 352 | }, {}); 353 | } 354 | 355 | async getCheckpointsRangeForAddress( 356 | fromBlock: number, 357 | toBlock: number, 358 | address: string, 359 | eventNames: string[] 360 | ): Promise { 361 | let events: Event[] = []; 362 | 363 | let continuationToken: string | undefined; 364 | do { 365 | try { 366 | const result = await this.provider.getEvents({ 367 | from_block: { block_number: fromBlock }, 368 | to_block: { block_number: toBlock }, 369 | address: address, 370 | keys: [eventNames.map(name => this.getEventHash(name))], 371 | chunk_size: 1000, 372 | continuation_token: continuationToken 373 | }); 374 | 375 | events = events.concat(result.events); 376 | 377 | continuationToken = result.continuation_token; 378 | } catch (e) { 379 | this.log.error( 380 | { fromBlock, toBlock, continuationToken, address, err: e }, 381 | 'getEvents failed' 382 | ); 383 | 384 | await sleep(5000); 385 | } 386 | } while (continuationToken); 387 | 388 | return events.map(event => ({ 389 | blockNumber: event.block_number, 390 | contractAddress: validateAndParseAddress(event.from_address) 391 | })); 392 | } 393 | 394 | async getCheckpointsRange(fromBlock: 
number, toBlock: number): Promise { 395 | let events: CheckpointRecord[] = []; 396 | 397 | for (const source of this.instance.getCurrentSources(fromBlock)) { 398 | const addressEvents = await this.getCheckpointsRangeForAddress( 399 | fromBlock, 400 | toBlock, 401 | source.contract, 402 | source.events.map(event => event.name) 403 | ); 404 | events = events.concat(addressEvents); 405 | } 406 | 407 | return events; 408 | } 409 | 410 | getEventHash(eventName: string) { 411 | if (!this.sourceHashes.has(eventName)) { 412 | this.sourceHashes.set(eventName, `0x${hash.starknetKeccak(eventName).toString(16)}`); 413 | } 414 | 415 | return this.sourceHashes.get(eventName) as string; 416 | } 417 | } 418 | -------------------------------------------------------------------------------- /src/providers/starknet/types.ts: -------------------------------------------------------------------------------- 1 | import { RPC } from 'starknet'; 2 | import { BaseWriterParams } from '../../types'; 3 | 4 | // Shortcuts to starknet types. 5 | export type Block = RPC.GetBlockWithTxs; 6 | export type Transaction = RPC.GetBlockWithTxs['transactions'][number]; 7 | export type PendingTransaction = RPC.PendingTransactions[number]; 8 | export type Event = RPC.GetEventsResponse['events'][number]; 9 | 10 | // (Partially) narrowed types as real types are not exported from `starknet`. 11 | export type FullBlock = Block & { block_number: number; block_hash: string }; 12 | export type DeployTransaction = Transaction & { contract_address: string }; 13 | 14 | export type EventsMap = { [key: string]: Event[] }; 15 | export type ParsedEvent = Record; 16 | 17 | export type Writer = ( 18 | args: { 19 | tx: Transaction; 20 | block: FullBlock | null; 21 | rawEvent?: Event; 22 | event?: ParsedEvent; 23 | } & BaseWriterParams 24 | ) => Promise; 25 | 26 | export function isFullBlock(block: Block): block is FullBlock { 27 | return 'block_number' in block; 28 | } 29 | 30 | export function isDeployTransaction(tx: Transaction | PendingTransaction): tx is DeployTransaction { 31 | return tx.type === 'DEPLOY'; 32 | } 33 | -------------------------------------------------------------------------------- /src/providers/starknet/utils.ts: -------------------------------------------------------------------------------- 1 | import { Abi, CallData, Event, ParsedEvent, events } from 'starknet'; 2 | 3 | const convertEvent = (input: any) => { 4 | if (typeof input === 'bigint') return `0x${input.toString(16)}`; 5 | if (Array.isArray(input)) return input.map(convertEvent); 6 | if (typeof input === 'object') 7 | return Object.fromEntries(Object.entries(input).map(([k, v]) => [k, convertEvent(v)])); 8 | 9 | return input; 10 | }; 11 | 12 | export const parseEvent = (abi: Abi, event: Event): ParsedEvent => { 13 | const abiEvents = events.getAbiEvents(abi); 14 | const structs = CallData.getAbiStruct(abi); 15 | const enums = CallData.getAbiEnum(abi); 16 | 17 | const parsedEvents = events.parseEvents([event], abiEvents, structs, enums); 18 | if (parsedEvents.length === 0) throw new Error('Failed to parse event'); 19 | 20 | const parsedEvent = parsedEvents[0]; 21 | const key = Object.keys(parsedEvent)[0]; 22 | 23 | return convertEvent(parsedEvent[key]); 24 | }; 25 | -------------------------------------------------------------------------------- /src/register.ts: -------------------------------------------------------------------------------- 1 | import { Knex } from 'knex'; 2 | 3 | function createRegister() { 4 | let knexInstance: Knex | null = null; 5 | const 
currentBlocks = new Map(); 6 | 7 | return { 8 | getCurrentBlock(indexerName: string) { 9 | return currentBlocks.get(indexerName) || 0n; 10 | }, 11 | setCurrentBlock(indexerName: string, block: bigint) { 12 | currentBlocks.set(indexerName, block); 13 | }, 14 | getKnex() { 15 | if (!knexInstance) { 16 | throw new Error('Knex is not initialized yet.'); 17 | } 18 | 19 | return knexInstance; 20 | }, 21 | setKnex(knex: Knex) { 22 | knexInstance = knex; 23 | } 24 | }; 25 | } 26 | 27 | export const register = createRegister(); 28 | -------------------------------------------------------------------------------- /src/schemas.ts: -------------------------------------------------------------------------------- 1 | import { z } from 'zod'; 2 | 3 | export const contractEventConfigSchema = z.object({ 4 | name: z.string(), 5 | fn: z.string() 6 | }); 7 | 8 | export const contractSourceConfigSchema = z.object({ 9 | contract: z.string(), 10 | abi: z.string().optional(), 11 | start: z.number().gte(0), 12 | deploy_fn: z.string().optional(), 13 | events: z.array(contractEventConfigSchema) 14 | }); 15 | 16 | export const contractTemplateSchema = z.object({ 17 | abi: z.string().optional(), 18 | events: z.array(contractEventConfigSchema) 19 | }); 20 | 21 | export const checkpointConfigSchema = z.object({ 22 | network_node_url: z.string().url(), 23 | optimistic_indexing: z.boolean().optional(), 24 | fetch_interval: z.number().optional(), 25 | start: z.number().gte(0).optional(), 26 | tx_fn: z.string().optional(), 27 | global_events: z.array(contractEventConfigSchema).optional(), 28 | sources: z.array(contractSourceConfigSchema).optional(), 29 | templates: z.record(contractTemplateSchema).optional(), 30 | abis: z.record(z.any()).optional() 31 | }); 32 | 33 | export const overridesConfigSchema = z.object({ 34 | /** Decimal types to define for use in your schema. */ 35 | decimal_types: z 36 | .record( 37 | z.object({ 38 | p: z.number(), 39 | d: z.number() 40 | }) 41 | ) 42 | .optional() 43 | }); 44 | -------------------------------------------------------------------------------- /src/stores/checkpoints.ts: -------------------------------------------------------------------------------- 1 | import * as crypto from 'crypto'; 2 | import { Knex } from 'knex'; 3 | import { Logger } from '../utils/logger'; 4 | import { chunk } from '../utils/helpers'; 5 | import { TemplateSource } from '../types'; 6 | 7 | export const Table = { 8 | Blocks: '_blocks', 9 | Checkpoints: '_checkpoints', 10 | Metadata: '_metadatas', // using plural names to conform with standards entities, 11 | TemplateSources: '_template_sources' 12 | }; 13 | 14 | export const Fields = { 15 | Blocks: { 16 | Indexer: 'indexer', 17 | Number: 'block_number', 18 | Hash: 'hash' 19 | }, 20 | Checkpoints: { 21 | Id: 'id', 22 | Indexer: 'indexer', 23 | BlockNumber: 'block_number', 24 | ContractAddress: 'contract_address' 25 | }, 26 | Metadata: { 27 | Id: 'id', 28 | Indexer: 'indexer', 29 | Value: 'value' 30 | }, 31 | TemplateSources: { 32 | Indexer: 'indexer', 33 | ContractAddress: 'contract_address', 34 | StartBlock: 'start_block', 35 | Template: 'template' 36 | } 37 | }; 38 | 39 | type ToString = { 40 | toString: () => string; 41 | }; 42 | 43 | export interface CheckpointRecord { 44 | blockNumber: number; 45 | contractAddress: string; 46 | } 47 | 48 | /** 49 | * Metadata Ids stored in the CheckpointStore. 
50 | * 51 | */ 52 | export enum MetadataId { 53 | LastIndexedBlock = 'last_indexed_block', 54 | NetworkIdentifier = 'network_identifier', 55 | StartBlock = 'start_block', 56 | ConfigChecksum = 'config_checksum', 57 | SchemaVersion = 'schema_version' 58 | } 59 | 60 | export const INTERNAL_TABLES = Object.values(Table); 61 | 62 | const CheckpointIdSize = 10; 63 | 64 | /** 65 | * Generates a unique hex based on the contract address and block number. 66 | * Used when as id for storing checkpoints records. 67 | * 68 | */ 69 | export const getCheckpointId = (contract: string, block: number): string => { 70 | const data = `${contract}${block}`; 71 | return crypto.createHash('sha256').update(data).digest('hex').slice(-CheckpointIdSize); 72 | }; 73 | 74 | /** 75 | * Checkpoints store is a data store class for managing 76 | * checkpoints data schema and records. 77 | * 78 | * It interacts with an underlying database. 79 | */ 80 | export class CheckpointsStore { 81 | private readonly log: Logger; 82 | 83 | constructor(private readonly knex: Knex, log: Logger) { 84 | this.log = log.child({ component: 'checkpoints_store' }); 85 | } 86 | 87 | /** 88 | * Creates the core database tables to make Checkpoint run effectively. 89 | * 90 | * This only creates the tables if they don't exist. 91 | */ 92 | public async createStore(): Promise<{ builder: Knex.SchemaBuilder }> { 93 | this.log.debug('creating checkpoints tables...'); 94 | 95 | const hasBlocksTable = await this.knex.schema.hasTable(Table.Blocks); 96 | const hasCheckpointsTable = await this.knex.schema.hasTable(Table.Checkpoints); 97 | const hasMetadataTable = await this.knex.schema.hasTable(Table.Metadata); 98 | const hasTemplateSourcesTable = await this.knex.schema.hasTable(Table.TemplateSources); 99 | 100 | let builder = this.knex.schema; 101 | 102 | if (!hasBlocksTable) { 103 | builder = builder.createTable(Table.Blocks, t => { 104 | t.string(Fields.Blocks.Indexer).notNullable(); 105 | t.bigint(Fields.Blocks.Number); 106 | t.string(Fields.Blocks.Hash).notNullable(); 107 | t.primary([Fields.Blocks.Indexer, Fields.Blocks.Number]); 108 | }); 109 | } 110 | 111 | if (!hasCheckpointsTable) { 112 | builder = builder.createTable(Table.Checkpoints, t => { 113 | t.string(Fields.Checkpoints.Id, CheckpointIdSize); 114 | t.string(Fields.Checkpoints.Indexer).notNullable(); 115 | t.bigint(Fields.Checkpoints.BlockNumber).notNullable().index(); 116 | t.string(Fields.Checkpoints.ContractAddress, 66).notNullable().index(); 117 | t.primary([Fields.Checkpoints.Id, Fields.Checkpoints.Indexer]); 118 | }); 119 | } 120 | 121 | if (!hasMetadataTable) { 122 | builder = builder.createTable(Table.Metadata, t => { 123 | t.string(Fields.Metadata.Id, 20); 124 | t.string(Fields.Metadata.Indexer).notNullable(); 125 | t.string(Fields.Metadata.Value, 128).notNullable(); 126 | t.primary([Fields.Metadata.Id, Fields.Metadata.Indexer]); 127 | }); 128 | } 129 | 130 | if (!hasTemplateSourcesTable) { 131 | builder = builder.createTable(Table.TemplateSources, t => { 132 | t.string(Fields.TemplateSources.Indexer).notNullable(); 133 | t.string(Fields.TemplateSources.ContractAddress, 66); 134 | t.bigint(Fields.TemplateSources.StartBlock).notNullable(); 135 | t.string(Fields.TemplateSources.Template, 128).notNullable(); 136 | }); 137 | } 138 | 139 | await builder; 140 | 141 | this.log.debug('checkpoints tables created'); 142 | 143 | return { builder }; 144 | } 145 | 146 | /** 147 | * Recreates core database tables. 
148 | * 149 | * Calling it will cause all checkpoints to be deleted and will force 150 | * syncing to start from start. 151 | * 152 | */ 153 | public async resetStore(): Promise { 154 | this.log.debug('truncating checkpoints tables'); 155 | 156 | const hasBlocksTable = await this.knex.schema.hasTable(Table.Blocks); 157 | const hasCheckpointsTable = await this.knex.schema.hasTable(Table.Checkpoints); 158 | const hasMetadataTable = await this.knex.schema.hasTable(Table.Metadata); 159 | const hasTemplateSourcesTable = await this.knex.schema.hasTable(Table.TemplateSources); 160 | 161 | if (hasBlocksTable) { 162 | await this.knex.schema.dropTable(Table.Blocks); 163 | } 164 | 165 | if (hasCheckpointsTable) { 166 | await this.knex.schema.dropTable(Table.Checkpoints); 167 | } 168 | 169 | if (hasMetadataTable) { 170 | await this.knex.schema.dropTable(Table.Metadata); 171 | } 172 | 173 | if (hasTemplateSourcesTable) { 174 | await this.knex.schema.dropTable(Table.TemplateSources); 175 | } 176 | 177 | this.log.debug('checkpoints tables dropped'); 178 | 179 | await this.createStore(); 180 | } 181 | 182 | public async removeFutureData(indexer: string, blockNumber: number): Promise { 183 | return this.knex.transaction(async trx => { 184 | await trx 185 | .table(Table.Metadata) 186 | .insert({ 187 | [Fields.Metadata.Id]: MetadataId.LastIndexedBlock, 188 | [Fields.Metadata.Indexer]: indexer, 189 | [Fields.Metadata.Value]: blockNumber 190 | }) 191 | .onConflict([Fields.Metadata.Id, Fields.Metadata.Indexer]) 192 | .merge(); 193 | 194 | await trx 195 | .table(Table.Checkpoints) 196 | .where(Fields.Checkpoints.Indexer, indexer) 197 | .where(Fields.Checkpoints.BlockNumber, '>', blockNumber) 198 | .del(); 199 | 200 | await trx.table(Table.Blocks).where(Fields.Blocks.Number, '>', blockNumber).del(); 201 | }); 202 | } 203 | 204 | public async setBlockHash(indexer: string, blockNumber: number, hash: string): Promise { 205 | await this.knex.table(Table.Blocks).insert({ 206 | [Fields.Blocks.Indexer]: indexer, 207 | [Fields.Blocks.Number]: blockNumber, 208 | [Fields.Blocks.Hash]: hash 209 | }); 210 | } 211 | 212 | public async getBlockHash(indexer: string, blockNumber: number): Promise { 213 | const blocks = await this.knex 214 | .select(Fields.Blocks.Hash) 215 | .from(Table.Blocks) 216 | .where(Fields.Blocks.Indexer, indexer) 217 | .where(Fields.Blocks.Number, blockNumber) 218 | .limit(1); 219 | 220 | if (blocks.length == 0) { 221 | return null; 222 | } 223 | 224 | return blocks[0][Fields.Blocks.Hash]; 225 | } 226 | 227 | public async removeBlocks(indexer: string): Promise { 228 | return this.knex(Table.Blocks).where(Fields.Blocks.Indexer, indexer).del(); 229 | } 230 | 231 | public async setMetadata(indexer: string, id: string, value: ToString): Promise { 232 | await this.knex 233 | .table(Table.Metadata) 234 | .insert({ 235 | [Fields.Metadata.Id]: id, 236 | [Fields.Metadata.Indexer]: indexer, 237 | [Fields.Metadata.Value]: value 238 | }) 239 | .onConflict([Fields.Metadata.Id, Fields.Metadata.Indexer]) 240 | .merge(); 241 | } 242 | 243 | public async getMetadata(indexer: string, id: string): Promise { 244 | const value = await this.knex 245 | .select(Fields.Metadata.Value) 246 | .from(Table.Metadata) 247 | .where(Fields.Metadata.Id, id) 248 | .where(Fields.Metadata.Indexer, indexer) 249 | .limit(1); 250 | 251 | if (value.length == 0) { 252 | return null; 253 | } 254 | 255 | return value[0][Fields.Metadata.Value]; 256 | } 257 | 258 | public async getMetadataNumber(indexer: string, id: string, base = 10): Promise { 259 
| const strValue = await this.getMetadata(indexer, id);
260 | if (strValue === null) return null;
261 |
262 | return parseInt(strValue, base);
263 | }
264 |
265 | public async insertCheckpoints(indexer: string, checkpoints: CheckpointRecord[]): Promise<void> {
266 | const insert = async (items: CheckpointRecord[]) => {
267 | try {
268 | await this.knex
269 | .table(Table.Checkpoints)
270 | .insert(
271 | items.map(checkpoint => {
272 | const id = getCheckpointId(checkpoint.contractAddress, checkpoint.blockNumber);
273 |
274 | return {
275 | [Fields.Checkpoints.Id]: id,
276 | [Fields.Checkpoints.Indexer]: indexer,
277 | [Fields.Checkpoints.BlockNumber]: checkpoint.blockNumber,
278 | [Fields.Checkpoints.ContractAddress]: checkpoint.contractAddress
279 | };
280 | })
281 | )
282 | .onConflict([Fields.Checkpoints.Id, Fields.Checkpoints.Indexer])
283 | .ignore();
284 | } catch (err: any) {
285 | if (['ER_LOCK_DEADLOCK', '40P01'].includes(err.code)) {
286 | this.log.debug('deadlock detected, retrying...');
287 | return this.insertCheckpoints(indexer, items);
288 | }
289 |
290 | throw err;
291 | }
292 | };
293 |
294 | await Promise.all(chunk(checkpoints, 1000).map(chunk => insert(chunk)));
295 | }
296 |
297 | /**
298 | * Fetches the list of checkpoint blocks greater than or equal to the
299 | * given block number that have events related to the contracts in
300 | * the given list.
301 | *
302 | * By default this returns at most the next 15 blocks. This limit can
303 | * be changed via the `limit` argument.
304 | */
305 | public async getNextCheckpointBlocks(
306 | indexer: string,
307 | block: number,
308 | contracts: string[],
309 | limit = 15
310 | ): Promise<number[]> {
311 | const result = await this.knex
312 | .distinct(Fields.Checkpoints.BlockNumber)
313 | .from(Table.Checkpoints)
314 | .where(Fields.Checkpoints.Indexer, indexer)
315 | .where(Fields.Checkpoints.BlockNumber, '>=', block)
316 | .whereIn(Fields.Checkpoints.ContractAddress, contracts)
317 | .orderBy(Fields.Checkpoints.BlockNumber, 'asc')
318 | .limit(limit);
319 |
320 | this.log.debug({ result, block, contracts }, 'next checkpoint blocks');
321 |
322 | return result.map(value => Number(value[Fields.Checkpoints.BlockNumber]));
323 | }
324 |
325 | public async insertTemplateSource(
326 | indexer: string,
327 | contractAddress: string,
328 | startBlock: number,
329 | template: string
330 | ): Promise {
331 | return this.knex.table(Table.TemplateSources).insert({
332 | [Fields.TemplateSources.Indexer]: indexer,
333 | [Fields.TemplateSources.ContractAddress]: contractAddress,
334 | [Fields.TemplateSources.StartBlock]: startBlock,
335 | [Fields.TemplateSources.Template]: template
336 | });
337 | }
338 |
339 | public async getTemplateSources(indexer: string): Promise<TemplateSource[]> {
340 | const data = await this.knex
341 | .select(
342 | Fields.TemplateSources.ContractAddress,
343 | Fields.TemplateSources.StartBlock,
344 | Fields.TemplateSources.Template
345 | )
346 | .from(Table.TemplateSources)
347 | .where(Fields.TemplateSources.Indexer, indexer);
348 |
349 | return data.map(row => ({
350 | contractAddress: row[Fields.TemplateSources.ContractAddress],
351 | startBlock: row[Fields.TemplateSources.StartBlock],
352 | template: row[Fields.TemplateSources.Template]
353 | }));
354 | }
355 | }
356 | -------------------------------------------------------------------------------- /src/types.ts: --------------------------------------------------------------------------------
1 | import { z } from 'zod';
2 | import { LogLevel } from './utils/logger';
3 | import {
4 |
contractSourceConfigSchema,
5 | contractTemplateSchema,
6 | checkpointConfigSchema,
7 | overridesConfigSchema
8 | } from './schemas';
9 | import { Instance } from './providers';
10 |
11 | export type TemplateSource = {
12 | contractAddress: string;
13 | startBlock: number;
14 | template: string;
15 | };
16 |
17 | export interface CheckpointOptions {
18 | /** Setting this to true will trigger a reset of the database on config changes. */
19 | resetOnConfigChange?: boolean;
20 | /**
21 | * Set the log output level for Checkpoint. Defaults to Error.
22 | * Note that this does not affect log output in writers.
23 | */
24 | logLevel?: LogLevel;
25 | /** Format logs to pretty output. Not recommended for production. */
26 | prettifyLogs?: boolean;
27 | /**
28 | * Optional database connection string. Must be a PostgreSQL connection string.
29 | * If not provided, the connection string will be read from the DATABASE_URL environment variable.
30 | */
31 | dbConnection?: string;
32 | /** Overrides for database types. */
33 | overridesConfig?: OverridesConfig;
34 | }
35 |
36 | export type ContractSourceConfig = z.infer<typeof contractSourceConfigSchema>;
37 | export type ContractTemplate = z.infer<typeof contractTemplateSchema>;
38 | export type CheckpointConfig = z.infer<typeof checkpointConfigSchema>;
39 | export type OverridesConfig = z.infer<typeof overridesConfigSchema>;
40 |
41 | export type BaseWriterParams = {
42 | blockNumber: number;
43 | eventIndex?: number;
44 | source?: ContractSourceConfig;
45 | helpers: ReturnType;
46 | };
47 | -------------------------------------------------------------------------------- /src/utils/checkpoint.ts: --------------------------------------------------------------------------------
1 | import objectHash from 'object-hash';
2 | import { CheckpointConfig, ContractSourceConfig, ContractTemplate } from '../types';
3 |
4 | export const getContractsFromConfig = (config: CheckpointConfig): string[] => {
5 | return (config.sources || []).map(source => source.contract);
6 | };
7 |
8 | const getHashableProperties = (config: ContractTemplate | ContractSourceConfig) => ({
9 | contract: 'contract' in config ? config.contract : undefined,
10 | start: 'start' in config ?
config.start : undefined, 11 | events: (config.events || []).map(event => event.name) 12 | }); 13 | 14 | export const getConfigChecksum = (config: CheckpointConfig): string => { 15 | const { tx_fn, global_events, sources, templates } = config; 16 | 17 | return objectHash( 18 | { 19 | tx_fn, 20 | global_events, 21 | sources: (sources || []).map(source => getHashableProperties(source)), 22 | templates: Object.fromEntries( 23 | Object.entries(templates || {}).map(([key, value]) => [key, getHashableProperties(value)]) 24 | ) 25 | }, 26 | { 27 | unorderedArrays: true 28 | } 29 | ); 30 | }; 31 | -------------------------------------------------------------------------------- /src/utils/database.ts: -------------------------------------------------------------------------------- 1 | import pluralize from 'pluralize'; 2 | import { Knex } from 'knex'; 3 | import { INTERNAL_TABLES } from '../stores/checkpoints'; 4 | 5 | export type QueryFilter = { 6 | block?: number; 7 | indexer?: string; 8 | }; 9 | 10 | export const getTableName = (name: string) => { 11 | if (name === '_metadata') return '_metadatas'; 12 | 13 | return pluralize(name); 14 | }; 15 | 16 | export function applyQueryFilter( 17 | query: Knex.QueryBuilder, 18 | tableName: string, 19 | filters: QueryFilter 20 | ) { 21 | const isInternalTable = INTERNAL_TABLES.includes(tableName); 22 | 23 | let filteredQuery = query; 24 | 25 | if (!isInternalTable) { 26 | filteredQuery = 27 | filters.block !== undefined 28 | ? query.andWhereRaw(`${tableName}.block_range @> int8(??)`, [filters.block]) 29 | : query.andWhereRaw(`upper_inf(${tableName}.block_range)`); 30 | } 31 | 32 | if (filters.indexer !== undefined) { 33 | const columnName = isInternalTable ? 'indexer' : `_indexer`; 34 | 35 | filteredQuery = query.andWhere(`${tableName}.${columnName}`, filters.indexer); 36 | } 37 | 38 | return filteredQuery; 39 | } 40 | 41 | /** 42 | * Applies the default order to the query. 43 | * All entities are by default sorted by block_range in ascending order. 44 | * This function is used to ensure that the order is consistent across all queries. 
45 | * @param query Knex query builder 46 | * @param tableName The name of the table to apply the order on 47 | * @returns The modified query with the default order applied 48 | */ 49 | export function applyDefaultOrder(query: Knex.QueryBuilder, tableName: string) { 50 | const isInternalTable = INTERNAL_TABLES.includes(tableName); 51 | 52 | if (isInternalTable) return query; 53 | 54 | return query.orderBy(`${tableName}.block_range`); 55 | } 56 | -------------------------------------------------------------------------------- /src/utils/graphql.ts: -------------------------------------------------------------------------------- 1 | import { 2 | GraphQLObjectType, 3 | GraphQLNonNull, 4 | isLeafType, 5 | isListType, 6 | GraphQLScalarType, 7 | GraphQLField, 8 | parse, 9 | visit, 10 | print 11 | } from 'graphql'; 12 | import { jsonToGraphQLQuery } from 'json-to-graphql-query'; 13 | import pluralize from 'pluralize'; 14 | 15 | export const extendSchema = (schema: string): string => { 16 | const ast = parse(schema); 17 | 18 | const updatedAst = visit(ast, { 19 | Document(node) { 20 | const directiveDefinition = { 21 | kind: 'DirectiveDefinition', 22 | name: { kind: 'Name', value: 'derivedFrom' }, 23 | arguments: [ 24 | { 25 | kind: 'InputValueDefinition', 26 | name: { kind: 'Name', value: 'field' }, 27 | type: { 28 | kind: 'NonNullType', 29 | type: { kind: 'NamedType', name: { kind: 'Name', value: 'String' } } 30 | } 31 | } 32 | ], 33 | locations: [{ kind: 'Name', value: 'FIELD_DEFINITION' }] 34 | }; 35 | 36 | return { 37 | ...node, 38 | definitions: [directiveDefinition, ...node.definitions] 39 | }; 40 | }, 41 | ObjectTypeDefinition(node) { 42 | const indexerField = { 43 | kind: 'FieldDefinition', 44 | name: { kind: 'Name', value: '_indexer' }, 45 | type: { 46 | kind: 'NonNullType', 47 | type: { kind: 'NamedType', name: { kind: 'Name', value: 'String' } } 48 | } 49 | }; 50 | 51 | return { 52 | ...node, 53 | fields: node.fields ? [...node.fields, indexerField] : [indexerField] 54 | }; 55 | } 56 | }); 57 | 58 | return print(updatedAst); 59 | }; 60 | 61 | /** 62 | * Returns name of query for fetching single entity record 63 | * 64 | */ 65 | export const singleEntityQueryName = (entity: GraphQLObjectType) => entity.name.toLowerCase(); 66 | 67 | /** 68 | * Returns name of query for fetching multiple entity records 69 | * 70 | */ 71 | export const multiEntityQueryName = (entity: GraphQLObjectType) => { 72 | if (entity.name === '_Metadata') return '_metadatas'; 73 | 74 | return pluralize(entity.name.toLowerCase()); 75 | }; 76 | 77 | /** 78 | * Generate sample query string based on entity object fields. 79 | * 80 | */ 81 | export const generateQueryForEntity = (entity: GraphQLObjectType): string => { 82 | // function to recursively build fields map 83 | const getObjectFields = (object: GraphQLObjectType, queryFields = {}): Record => { 84 | const objectFields = object.getFields(); 85 | 86 | Object.keys(objectFields).forEach(fieldName => { 87 | const rawFieldType = objectFields[fieldName].type; 88 | const fieldType = rawFieldType instanceof GraphQLNonNull ? 
rawFieldType.ofType : rawFieldType; 89 | 90 | if (isLeafType(fieldType)) { 91 | queryFields[fieldName] = true; 92 | } else if (isListType(fieldType)) { 93 | if (fieldType.ofType instanceof GraphQLScalarType) { 94 | queryFields[fieldName] = true; 95 | } 96 | } else { 97 | const childObjectFields = {}; 98 | getObjectFields(fieldType as GraphQLObjectType, childObjectFields); 99 | queryFields[fieldName] = childObjectFields; 100 | } 101 | }); 102 | 103 | return queryFields; 104 | }; 105 | 106 | return jsonToGraphQLQuery( 107 | { 108 | query: { 109 | [multiEntityQueryName(entity)]: { 110 | __args: { first: 10 }, 111 | ...getObjectFields(entity) 112 | } 113 | } 114 | }, 115 | { pretty: true } 116 | ); 117 | }; 118 | 119 | export const getNonNullType = (type: T): T => { 120 | if (type instanceof GraphQLNonNull) { 121 | return type.ofType; 122 | } 123 | 124 | return type; 125 | }; 126 | 127 | export const getDerivedFromDirective = (field: GraphQLField) => { 128 | const directives = field.astNode?.directives ?? []; 129 | return directives.find(dir => dir.name.value === 'derivedFrom'); 130 | }; 131 | -------------------------------------------------------------------------------- /src/utils/helpers.ts: -------------------------------------------------------------------------------- 1 | export function sleep(ms: number) { 2 | return new Promise(resolve => setTimeout(resolve, ms)); 3 | } 4 | 5 | export function chunk(array: T[], chunkSize: number) { 6 | const chunks = [] as T[][]; 7 | let index = 0; 8 | 9 | while (index < array.length) { 10 | chunks.push(array.slice(index, chunkSize + index)); 11 | index += chunkSize; 12 | } 13 | 14 | return chunks; 15 | } 16 | -------------------------------------------------------------------------------- /src/utils/logger.ts: -------------------------------------------------------------------------------- 1 | import pino, { Logger as PinoLogger, LoggerOptions } from 'pino'; 2 | 3 | /** The minimum level to log. */ 4 | export enum LogLevel { 5 | /** Disable all logs. */ 6 | Silent = 'silent', 7 | /** Log unrecoverable errors. */ 8 | Fatal = 'fatal', 9 | /** Log general errors. */ 10 | Error = 'error', 11 | /** Log alerts or notices */ 12 | Warn = 'warn', 13 | /** Log useful information. */ 14 | Info = 'info', 15 | /** Log debug and trace information. */ 16 | Debug = 'debug' 17 | } 18 | 19 | type Logger = Omit; 20 | 21 | export const createLogger = (opts: LoggerOptions = {}): Logger => { 22 | return pino( 23 | opts, 24 | pino.destination({ 25 | sync: true 26 | }) 27 | ); 28 | }; 29 | 30 | // re-export types as it is. 
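// Illustrative usage of createLogger above (a hedged sketch, not part of this file):
// LogLevel values map directly to pino levels, and log lines use pino's
// (mergeObject, message) call form seen throughout the indexer and stores.
//
//   const log = createLogger({ level: LogLevel.Debug });
//   log.info({ blockNumber: 123 }, 'processing block');
//   log.child({ component: 'checkpoints_store' }).debug('creating checkpoints tables...');
//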
31 | export { Logger, LoggerOptions }; 32 | -------------------------------------------------------------------------------- /test/fixtures/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/checkpoint-labs/checkpoint/588be7744abb8ebf116061392f6aedfc7d8f72b9/test/fixtures/.keep -------------------------------------------------------------------------------- /test/fixtures/checkpointConfig.fixture.ts: -------------------------------------------------------------------------------- 1 | export const validCheckpointConfig = { 2 | network_node_url: 'https://starknet-goerli.infura.io/v3/SOME_KEY', 3 | sources: [ 4 | { 5 | contract: '0x0625dc1290b6e936be5f1a3e963cf629326b1f4dfd5a56738dea98e1ad31b7f3', 6 | start: 112319, 7 | deploy_fn: 'handleDeploy', 8 | events: [ 9 | { 10 | name: 'proposal_created', 11 | fn: 'handlePropose' 12 | }, 13 | { 14 | name: 'vote_created', 15 | fn: 'handleVote' 16 | } 17 | ] 18 | } 19 | ] 20 | }; 21 | -------------------------------------------------------------------------------- /test/unit/__snapshots__/codegen.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`codegen should generate javascript code 1`] = ` 4 | "import { Model } from '@snapshot-labs/checkpoint'; 5 | 6 | export class Space extends Model { 7 | static tableName = 'spaces'; 8 | 9 | constructor(id, indexerName) { 10 | super(Space.tableName, indexerName); 11 | 12 | this.initialSet('id', id); 13 | this.initialSet('name', null); 14 | this.initialSet('about', null); 15 | this.initialSet('controller', ""); 16 | this.initialSet('voting_delay', 0); 17 | this.initialSet('proposal_threshold', 0); 18 | this.initialSet('quorum', 0); 19 | this.initialSet('strategies', "[]"); 20 | this.initialSet('strategies_nonnull', "[]"); 21 | this.initialSet('_indexer', ""); 22 | } 23 | 24 | static async loadEntity(id, indexerName) { 25 | const entity = await super._loadEntity(Space.tableName, id, indexerName); 26 | if (!entity) return null; 27 | 28 | const model = new Space(id, indexerName); 29 | model.setExists(); 30 | 31 | for (const key in entity) { 32 | const value = entity[key] !== null && typeof entity[key] === 'object' 33 | ? 
JSON.stringify(entity[key]) 34 | : entity[key]; 35 | model.set(key, value); 36 | } 37 | 38 | return model; 39 | } 40 | 41 | get id() { 42 | return this.get('id'); 43 | } 44 | 45 | set id(value) { 46 | this.set('id', value); 47 | } 48 | 49 | get name() { 50 | return this.get('name'); 51 | } 52 | 53 | set name(value) { 54 | this.set('name', value); 55 | } 56 | 57 | get about() { 58 | return this.get('about'); 59 | } 60 | 61 | set about(value) { 62 | this.set('about', value); 63 | } 64 | 65 | get controller() { 66 | return this.get('controller'); 67 | } 68 | 69 | set controller(value) { 70 | this.set('controller', value); 71 | } 72 | 73 | get voting_delay() { 74 | return this.get('voting_delay'); 75 | } 76 | 77 | set voting_delay(value) { 78 | this.set('voting_delay', value); 79 | } 80 | 81 | get proposal_threshold() { 82 | return this.get('proposal_threshold'); 83 | } 84 | 85 | set proposal_threshold(value) { 86 | this.set('proposal_threshold', value); 87 | } 88 | 89 | get quorum() { 90 | return this.get('quorum'); 91 | } 92 | 93 | set quorum(value) { 94 | this.set('quorum', value); 95 | } 96 | 97 | get strategies() { 98 | return JSON.parse(this.get('strategies')); 99 | } 100 | 101 | set strategies(value) { 102 | this.set('strategies', JSON.stringify(value)); 103 | } 104 | 105 | get strategies_nonnull() { 106 | return JSON.parse(this.get('strategies_nonnull')); 107 | } 108 | 109 | set strategies_nonnull(value) { 110 | this.set('strategies_nonnull', JSON.stringify(value)); 111 | } 112 | 113 | get _indexer() { 114 | return this.get('_indexer'); 115 | } 116 | 117 | set _indexer(value) { 118 | this.set('_indexer', value); 119 | } 120 | } 121 | 122 | export class Proposal extends Model { 123 | static tableName = 'proposals'; 124 | 125 | constructor(id, indexerName) { 126 | super(Proposal.tableName, indexerName); 127 | 128 | this.initialSet('id', id); 129 | this.initialSet('proposal_id', 0); 130 | this.initialSet('space', ""); 131 | this.initialSet('title', ""); 132 | this.initialSet('scores_total', 0); 133 | this.initialSet('active', false); 134 | this.initialSet('progress', "0"); 135 | this.initialSet('_indexer', ""); 136 | } 137 | 138 | static async loadEntity(id, indexerName) { 139 | const entity = await super._loadEntity(Proposal.tableName, id, indexerName); 140 | if (!entity) return null; 141 | 142 | const model = new Proposal(id, indexerName); 143 | model.setExists(); 144 | 145 | for (const key in entity) { 146 | const value = entity[key] !== null && typeof entity[key] === 'object' 147 | ? 
JSON.stringify(entity[key]) 148 | : entity[key]; 149 | model.set(key, value); 150 | } 151 | 152 | return model; 153 | } 154 | 155 | get id() { 156 | return this.get('id'); 157 | } 158 | 159 | set id(value) { 160 | this.set('id', value); 161 | } 162 | 163 | get proposal_id() { 164 | return this.get('proposal_id'); 165 | } 166 | 167 | set proposal_id(value) { 168 | this.set('proposal_id', value); 169 | } 170 | 171 | get space() { 172 | return this.get('space'); 173 | } 174 | 175 | set space(value) { 176 | this.set('space', value); 177 | } 178 | 179 | get title() { 180 | return this.get('title'); 181 | } 182 | 183 | set title(value) { 184 | this.set('title', value); 185 | } 186 | 187 | get scores_total() { 188 | return this.get('scores_total'); 189 | } 190 | 191 | set scores_total(value) { 192 | this.set('scores_total', value); 193 | } 194 | 195 | get active() { 196 | return this.get('active'); 197 | } 198 | 199 | set active(value) { 200 | this.set('active', value); 201 | } 202 | 203 | get progress() { 204 | return this.get('progress'); 205 | } 206 | 207 | set progress(value) { 208 | this.set('progress', value); 209 | } 210 | 211 | get _indexer() { 212 | return this.get('_indexer'); 213 | } 214 | 215 | set _indexer(value) { 216 | this.set('_indexer', value); 217 | } 218 | } 219 | " 220 | `; 221 | 222 | exports[`codegen should generate typescript code 1`] = ` 223 | "import { Model } from '@snapshot-labs/checkpoint'; 224 | 225 | export class Space extends Model { 226 | static tableName = 'spaces'; 227 | 228 | constructor(id: string, indexerName: string) { 229 | super(Space.tableName, indexerName); 230 | 231 | this.initialSet('id', id); 232 | this.initialSet('name', null); 233 | this.initialSet('about', null); 234 | this.initialSet('controller', ""); 235 | this.initialSet('voting_delay', 0); 236 | this.initialSet('proposal_threshold', 0); 237 | this.initialSet('quorum', 0); 238 | this.initialSet('strategies', "[]"); 239 | this.initialSet('strategies_nonnull', "[]"); 240 | this.initialSet('_indexer', ""); 241 | } 242 | 243 | static async loadEntity(id: string, indexerName: string): Promise { 244 | const entity = await super._loadEntity(Space.tableName, id, indexerName); 245 | if (!entity) return null; 246 | 247 | const model = new Space(id, indexerName); 248 | model.setExists(); 249 | 250 | for (const key in entity) { 251 | const value = entity[key] !== null && typeof entity[key] === 'object' 252 | ? 
JSON.stringify(entity[key]) 253 | : entity[key]; 254 | model.set(key, value); 255 | } 256 | 257 | return model; 258 | } 259 | 260 | get id(): string { 261 | return this.get('id'); 262 | } 263 | 264 | set id(value: string) { 265 | this.set('id', value); 266 | } 267 | 268 | get name(): string | null { 269 | return this.get('name'); 270 | } 271 | 272 | set name(value: string | null) { 273 | this.set('name', value); 274 | } 275 | 276 | get about(): string | null { 277 | return this.get('about'); 278 | } 279 | 280 | set about(value: string | null) { 281 | this.set('about', value); 282 | } 283 | 284 | get controller(): string { 285 | return this.get('controller'); 286 | } 287 | 288 | set controller(value: string) { 289 | this.set('controller', value); 290 | } 291 | 292 | get voting_delay(): number { 293 | return this.get('voting_delay'); 294 | } 295 | 296 | set voting_delay(value: number) { 297 | this.set('voting_delay', value); 298 | } 299 | 300 | get proposal_threshold(): bigint { 301 | return this.get('proposal_threshold'); 302 | } 303 | 304 | set proposal_threshold(value: bigint) { 305 | this.set('proposal_threshold', value); 306 | } 307 | 308 | get quorum(): number { 309 | return this.get('quorum'); 310 | } 311 | 312 | set quorum(value: number) { 313 | this.set('quorum', value); 314 | } 315 | 316 | get strategies(): string[] { 317 | return JSON.parse(this.get('strategies')); 318 | } 319 | 320 | set strategies(value: string[]) { 321 | this.set('strategies', JSON.stringify(value)); 322 | } 323 | 324 | get strategies_nonnull(): string[] { 325 | return JSON.parse(this.get('strategies_nonnull')); 326 | } 327 | 328 | set strategies_nonnull(value: string[]) { 329 | this.set('strategies_nonnull', JSON.stringify(value)); 330 | } 331 | 332 | get _indexer(): string { 333 | return this.get('_indexer'); 334 | } 335 | 336 | set _indexer(value: string) { 337 | this.set('_indexer', value); 338 | } 339 | } 340 | 341 | export class Proposal extends Model { 342 | static tableName = 'proposals'; 343 | 344 | constructor(id: string, indexerName: string) { 345 | super(Proposal.tableName, indexerName); 346 | 347 | this.initialSet('id', id); 348 | this.initialSet('proposal_id', 0); 349 | this.initialSet('space', ""); 350 | this.initialSet('title', ""); 351 | this.initialSet('scores_total', 0); 352 | this.initialSet('active', false); 353 | this.initialSet('progress', "0"); 354 | this.initialSet('_indexer', ""); 355 | } 356 | 357 | static async loadEntity(id: string, indexerName: string): Promise { 358 | const entity = await super._loadEntity(Proposal.tableName, id, indexerName); 359 | if (!entity) return null; 360 | 361 | const model = new Proposal(id, indexerName); 362 | model.setExists(); 363 | 364 | for (const key in entity) { 365 | const value = entity[key] !== null && typeof entity[key] === 'object' 366 | ? 
JSON.stringify(entity[key]) 367 | : entity[key]; 368 | model.set(key, value); 369 | } 370 | 371 | return model; 372 | } 373 | 374 | get id(): string { 375 | return this.get('id'); 376 | } 377 | 378 | set id(value: string) { 379 | this.set('id', value); 380 | } 381 | 382 | get proposal_id(): number { 383 | return this.get('proposal_id'); 384 | } 385 | 386 | set proposal_id(value: number) { 387 | this.set('proposal_id', value); 388 | } 389 | 390 | get space(): string { 391 | return this.get('space'); 392 | } 393 | 394 | set space(value: string) { 395 | this.set('space', value); 396 | } 397 | 398 | get title(): string { 399 | return this.get('title'); 400 | } 401 | 402 | set title(value: string) { 403 | this.set('title', value); 404 | } 405 | 406 | get scores_total(): bigint { 407 | return this.get('scores_total'); 408 | } 409 | 410 | set scores_total(value: bigint) { 411 | this.set('scores_total', value); 412 | } 413 | 414 | get active(): boolean { 415 | return this.get('active'); 416 | } 417 | 418 | set active(value: boolean) { 419 | this.set('active', value); 420 | } 421 | 422 | get progress(): string { 423 | return this.get('progress'); 424 | } 425 | 426 | set progress(value: string) { 427 | this.set('progress', value); 428 | } 429 | 430 | get _indexer(): string { 431 | return this.get('_indexer'); 432 | } 433 | 434 | set _indexer(value: string) { 435 | this.set('_indexer', value); 436 | } 437 | } 438 | " 439 | `; 440 | -------------------------------------------------------------------------------- /test/unit/codegen.test.ts: -------------------------------------------------------------------------------- 1 | import { GraphQLObjectType, buildSchema } from 'graphql'; 2 | import { getInitialValue, getBaseType, getJSType, codegen, getTypeInfo } from '../../src/codegen'; 3 | import { GqlEntityController } from '../../src/graphql/controller'; 4 | import { extendSchema } from '../../src/utils/graphql'; 5 | 6 | const SCHEMA_SOURCE = ` 7 | scalar Id 8 | scalar Text 9 | scalar BigInt 10 | scalar BigDecimal 11 | scalar Unknown 12 | 13 | type Space { 14 | id: String! 15 | name: String 16 | about: String 17 | controller: String! 18 | voting_delay: Int! 19 | proposal_threshold: BigInt! 20 | quorum: Float! 21 | strategies: [String]! 22 | strategies_nonnull: [String!]! 23 | proposals: [Proposal]! @derivedFrom(field: "space") 24 | } 25 | 26 | type Proposal { 27 | id: String! 28 | proposal_id: Int! 29 | space: Space! 30 | title: Text! 31 | scores_total: BigInt! 32 | active: Boolean! 33 | progress: BigDecimal! 34 | } 35 | `; 36 | 37 | const schema = buildSchema(extendSchema(SCHEMA_SOURCE)); 38 | const space = schema.getType('Space') as GraphQLObjectType; 39 | const proposal = schema.getType('Proposal') as GraphQLObjectType; 40 | const spaceFields = space.getFields(); 41 | const proposalFields = proposal.getFields(); 42 | 43 | describe('getTypeInfo', () => { 44 | const simpleSchema = `scalar HugeDecimal 45 | type Space { 46 | id: String! 
47 | value: HugeDecimal 48 | } 49 | `; 50 | 51 | const customDecimalTypes = { 52 | HugeDecimal: { 53 | p: 30, 54 | d: 14 55 | } 56 | }; 57 | 58 | const schema = buildSchema(extendSchema(simpleSchema)); 59 | const space = schema.getType('Space') as GraphQLObjectType; 60 | const spaceFields = space.getFields(); 61 | 62 | it('should throw when passed a wrapped type', () => { 63 | expect(() => getTypeInfo(spaceFields['id'].type)).toThrow(); 64 | }); 65 | 66 | it('should throw when passing unknown types', () => { 67 | expect(() => getTypeInfo(spaceFields['value'].type)).toThrow(); 68 | }); 69 | 70 | it('should handle non-default decimalTypes', () => { 71 | expect(getTypeInfo(spaceFields['value'].type, customDecimalTypes)).toEqual({ 72 | type: 'string', 73 | initialValue: '0' 74 | }); 75 | }); 76 | }); 77 | 78 | describe('getInitialValue', () => { 79 | it('should return null for nullable types', () => { 80 | expect(getInitialValue(spaceFields['name'].type)).toBeNull(); 81 | expect(getInitialValue(spaceFields['about'].type)).toBeNull(); 82 | }); 83 | 84 | it('should return 0 for Int/Float/BigInt types', () => { 85 | expect(getInitialValue(spaceFields['voting_delay'].type)).toBe(0); 86 | expect(getInitialValue(spaceFields['proposal_threshold'].type)).toBe(0); 87 | expect(getInitialValue(spaceFields['quorum'].type)).toBe(0); 88 | }); 89 | 90 | it('should return 0 string for BigDecimal types', () => { 91 | expect(getInitialValue(proposalFields['progress'].type)).toBe('0'); 92 | }); 93 | 94 | it('should return empty string for String/Text/Id types', () => { 95 | expect(getInitialValue(spaceFields['id'].type)).toBe(''); 96 | expect(getInitialValue(spaceFields['controller'].type)).toBe(''); 97 | expect(getInitialValue(proposalFields['title'].type)).toBe(''); 98 | }); 99 | 100 | it('should return false for Boolean types', () => { 101 | expect(getInitialValue(proposalFields['active'].type)).toBe(false); 102 | }); 103 | 104 | it('should return stringified empty array for List types', () => { 105 | expect(getInitialValue(spaceFields['strategies'].type)).toEqual('[]'); 106 | expect(getInitialValue(spaceFields['strategies_nonnull'].type)).toEqual('[]'); 107 | }); 108 | 109 | it('should return empty string for object types', () => { 110 | expect(getInitialValue(proposalFields['space'].type)).toBe(''); 111 | }); 112 | 113 | it('should return "0" for BigDecimal types', () => { 114 | expect(getInitialValue(proposalFields['progress'].type)).toBe('0'); 115 | }); 116 | }); 117 | 118 | describe('getBaseType', () => { 119 | it('should return number for Int/Float types', () => { 120 | expect(getBaseType(spaceFields['voting_delay'].type)).toBe('number'); 121 | expect(getBaseType(proposalFields['proposal_id'].type)).toBe('number'); 122 | }); 123 | 124 | it('should return string for String/Text/Id types', () => { 125 | expect(getBaseType(spaceFields['id'].type)).toBe('string'); 126 | expect(getBaseType(spaceFields['name'].type)).toBe('string'); 127 | expect(getBaseType(proposalFields['title'].type)).toBe('string'); 128 | }); 129 | 130 | it('should return string for Object types', () => { 131 | expect(getBaseType(proposalFields['space'].type)).toBe('string'); 132 | }); 133 | 134 | it('should return bigint for BigInt types', () => { 135 | expect(getBaseType(spaceFields['proposal_threshold'].type)).toBe('bigint'); 136 | }); 137 | 138 | it('should return boolean for Boolean types', () => { 139 | expect(getBaseType(proposalFields['active'].type)).toBe('boolean'); 140 | }); 141 | 142 | it('should return string for BigDecimal 
types', () => { 143 | expect(getBaseType(proposalFields['progress'].type)).toBe('string'); 144 | }); 145 | 146 | it('should return array type for List types', () => { 147 | expect(getBaseType(spaceFields['strategies'].type)).toBe('string[]'); 148 | expect(getBaseType(spaceFields['strategies_nonnull'].type)).toBe('string[]'); 149 | }); 150 | 151 | it('should return string for BigDecimal types', () => { 152 | expect(getBaseType(proposalFields['progress'].type)).toBe('string'); 153 | }); 154 | }); 155 | 156 | describe('getJSType', () => { 157 | it('should detect nullable types', () => { 158 | expect(getJSType(spaceFields['name'])).toEqual({ 159 | isNullable: true, 160 | isList: false, 161 | baseType: 'string' 162 | }); 163 | }); 164 | 165 | it('should detect list types', () => { 166 | expect(getJSType(spaceFields['strategies'])).toEqual({ 167 | isNullable: false, 168 | isList: true, 169 | baseType: 'string[]' 170 | }); 171 | }); 172 | }); 173 | 174 | describe('codegen', () => { 175 | const overridesConfig = {}; 176 | const extendedSchema = extendSchema(SCHEMA_SOURCE); 177 | const controller = new GqlEntityController(extendedSchema); 178 | 179 | it('should generate typescript code', () => { 180 | expect(codegen(controller, overridesConfig, 'typescript')).toMatchSnapshot(); 181 | }); 182 | 183 | it('should generate javascript code', () => { 184 | expect(codegen(controller, overridesConfig, 'javascript')).toMatchSnapshot(); 185 | }); 186 | }); 187 | -------------------------------------------------------------------------------- /test/unit/graphql/__snapshots__/controller.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[` 1`] = `"'id' field for type Vote must be non nullable."`; 4 | 5 | exports[` 2`] = `"'id' field for type Vote is not a scalar type."`; 6 | 7 | exports[` 3`] = `"'id' field for type Participant is not a scalar type."`; 8 | 9 | exports[`GqlEntityController createEntityStores should work 1`] = ` 10 | "drop table if exists \`votes\`; 11 | create table \`votes\` (\`uid\` char(36) default (lower(hex(randomblob(4))) || '-' || lower(hex(randomblob(2))) || '-4' || substr(lower(hex(randomblob(2))),2) || '-' || substr('89ab',abs(random()) % 4 + 1, 1) || substr(lower(hex(randomblob(2))),2) || '-' || lower(hex(randomblob(6)))), \`block_range\` int8range not null, \`id\` integer not null, \`name\` varchar(256), \`authenticators\` json, \`big_number\` bigint, \`decimal\` float, \`big_decimal\` float, primary key (\`uid\`)); 12 | create index \`votes_id_index\` on \`votes\` (\`id\`); 13 | create index \`votes_name_index\` on \`votes\` (\`name\`); 14 | create index \`votes_authenticators_index\` on \`votes\` (\`authenticators\`); 15 | create index \`votes_big_number_index\` on \`votes\` (\`big_number\`); 16 | create index \`votes_decimal_index\` on \`votes\` (\`decimal\`); 17 | create index \`votes_big_decimal_index\` on \`votes\` (\`big_decimal\`)" 18 | `; 19 | 20 | exports[`GqlEntityController generateQueryFields should work 1`] = ` 21 | "type Query { 22 | vote(id: Int!, indexer: String, block: Int): Vote 23 | votes(first: Int, skip: Int, orderBy: Vote_orderBy, orderDirection: OrderDirection, indexer: String, block: Int, where: Vote_filter): [Vote!]! 24 | } 25 | 26 | type Vote { 27 | id: Int! 
28 | name: String 29 | authenticators: [String] 30 | } 31 | 32 | enum Vote_orderBy { 33 | id 34 | name 35 | } 36 | 37 | enum OrderDirection { 38 | asc 39 | desc 40 | } 41 | 42 | input Vote_filter { 43 | id_gt: Int 44 | id_gte: Int 45 | id_lt: Int 46 | id_lte: Int 47 | id: Int 48 | id_not: Int 49 | id_in: [Int] 50 | id_not_in: [Int] 51 | name_contains: String 52 | name_not_contains: String 53 | name_contains_nocase: String 54 | name_not_contains_nocase: String 55 | name: String 56 | name_not: String 57 | name_in: [String] 58 | name_not_in: [String] 59 | authenticators: [String] 60 | authenticators_not: [String] 61 | authenticators_contains: [String] 62 | authenticators_not_contains: [String] 63 | }" 64 | `; 65 | 66 | exports[`GqlEntityController generateSampleQuery should generate query schema 1`] = ` 67 | "type Query { 68 | vote(id: Int!, indexer: String, block: Int): Vote 69 | votes(first: Int, skip: Int, orderBy: Vote_orderBy, orderDirection: OrderDirection, indexer: String, block: Int, where: Vote_filter): [Vote!]! 70 | _metadata(id: ID!, indexer: String, block: Int): _Metadata 71 | _metadatas(first: Int, skip: Int, orderBy: _Metadata_orderBy, orderDirection: OrderDirection, indexer: String, block: Int, where: _Metadata_filter): [_Metadata!]! 72 | _checkpoint(id: ID!, indexer: String, block: Int): _Checkpoint 73 | _checkpoints(first: Int, skip: Int, orderBy: _Checkpoint_orderBy, orderDirection: OrderDirection, indexer: String, block: Int, where: _Checkpoint_filter): [_Checkpoint!]! 74 | } 75 | 76 | type Vote { 77 | id: Int! 78 | name: String 79 | authenticators: [String] 80 | } 81 | 82 | enum Vote_orderBy { 83 | id 84 | name 85 | } 86 | 87 | enum OrderDirection { 88 | asc 89 | desc 90 | } 91 | 92 | input Vote_filter { 93 | id_gt: Int 94 | id_gte: Int 95 | id_lt: Int 96 | id_lte: Int 97 | id: Int 98 | id_not: Int 99 | id_in: [Int] 100 | id_not_in: [Int] 101 | name_contains: String 102 | name_not_contains: String 103 | name_contains_nocase: String 104 | name_not_contains_nocase: String 105 | name: String 106 | name_not: String 107 | name_in: [String] 108 | name_not_in: [String] 109 | authenticators: [String] 110 | authenticators_not: [String] 111 | authenticators_contains: [String] 112 | authenticators_not_contains: [String] 113 | } 114 | 115 | """Core metadata values used internally by Checkpoint""" 116 | type _Metadata { 117 | """example: last_indexed_block""" 118 | id: ID! 119 | indexer: String! 120 | value: String 121 | } 122 | 123 | enum _Metadata_orderBy { 124 | id 125 | indexer 126 | value 127 | } 128 | 129 | input _Metadata_filter { 130 | id: ID 131 | id_not: ID 132 | id_in: [ID] 133 | id_not_in: [ID] 134 | indexer_contains: String 135 | indexer_not_contains: String 136 | indexer_contains_nocase: String 137 | indexer_not_contains_nocase: String 138 | indexer: String 139 | indexer_not: String 140 | indexer_in: [String] 141 | indexer_not_in: [String] 142 | value_contains: String 143 | value_not_contains: String 144 | value_contains_nocase: String 145 | value_not_contains_nocase: String 146 | value: String 147 | value_not: String 148 | value_in: [String] 149 | value_not_in: [String] 150 | } 151 | 152 | """Contract and Block where its event is found.""" 153 | type _Checkpoint { 154 | """id computed as last 5 bytes of sha256(contract+block)""" 155 | id: ID! 156 | indexer: String! 157 | block_number: Int! 158 | contract_address: String! 
159 | } 160 | 161 | enum _Checkpoint_orderBy { 162 | id 163 | indexer 164 | block_number 165 | contract_address 166 | } 167 | 168 | input _Checkpoint_filter { 169 | id: ID 170 | id_not: ID 171 | id_in: [ID] 172 | id_not_in: [ID] 173 | indexer_contains: String 174 | indexer_not_contains: String 175 | indexer_contains_nocase: String 176 | indexer_not_contains_nocase: String 177 | indexer: String 178 | indexer_not: String 179 | indexer_in: [String] 180 | indexer_not_in: [String] 181 | block_number_gt: Int 182 | block_number_gte: Int 183 | block_number_lt: Int 184 | block_number_lte: Int 185 | block_number: Int 186 | block_number_not: Int 187 | block_number_in: [Int] 188 | block_number_not_in: [Int] 189 | contract_address_contains: String 190 | contract_address_not_contains: String 191 | contract_address_contains_nocase: String 192 | contract_address_not_contains_nocase: String 193 | contract_address: String 194 | contract_address_not: String 195 | contract_address_in: [String] 196 | contract_address_not_in: [String] 197 | }" 198 | `; 199 | 200 | exports[`GqlEntityController generateSampleQuery should return correct query sample for first and only entity 1`] = ` 201 | " 202 | # Welcome to Checkpoint. Try running the below example query from 203 | # your defined entity. 204 | 205 | query { 206 | votes (first: 10) { 207 | id 208 | name 209 | created_at 210 | } 211 | }" 212 | `; 213 | 214 | exports[`GqlEntityController generateSampleQuery should return correct query sample for nested objects 1`] = ` 215 | " 216 | # Welcome to Checkpoint. Try running the below example query from 217 | # your defined entity. 218 | 219 | query { 220 | votes (first: 10) { 221 | id 222 | name 223 | poster { 224 | id 225 | name 226 | venue { 227 | id 228 | location 229 | } 230 | } 231 | created_at 232 | } 233 | }" 234 | `; 235 | -------------------------------------------------------------------------------- /test/unit/graphql/controller.test.ts: -------------------------------------------------------------------------------- 1 | import { GraphQLObjectType, GraphQLSchema, printSchema } from 'graphql'; 2 | import knex from 'knex'; 3 | import { GqlEntityController } from '../../../src/graphql/controller'; 4 | 5 | describe('GqlEntityController', () => { 6 | describe('generateQueryFields', () => { 7 | it('should work', () => { 8 | const controller = new GqlEntityController(` 9 | type Vote { 10 | id: Int! 11 | name: String 12 | authenticators: [String] 13 | } 14 | `); 15 | const queryFields = controller.generateQueryFields(); 16 | const querySchema = new GraphQLObjectType({ 17 | name: 'Query', 18 | fields: queryFields 19 | }); 20 | 21 | const schema = printSchema(new GraphQLSchema({ query: querySchema })); 22 | expect(schema).toMatchSnapshot(); 23 | }); 24 | 25 | // list of error table tests 26 | describe.each([ 27 | { 28 | reason: 'non null object id', 29 | schema: `type Vote { id: String }` 30 | }, 31 | { 32 | reason: 'object id is not scalar type', 33 | schema: `type Vote { id: Participant! }\n\n type Participant { id: Int! }` 34 | }, 35 | { 36 | reason: 'object id is not scalar type 2', 37 | schema: `type Participant { id: [Int]! 
}` 38 | } 39 | ])('should fail for $reason', ({ schema }) => { 40 | const controller = new GqlEntityController(schema); 41 | expect(() => controller.generateQueryFields()).toThrowErrorMatchingSnapshot(); 42 | }); 43 | }); 44 | 45 | describe('createEntityStores', () => { 46 | const mockKnex = knex({ 47 | client: 'sqlite3', 48 | connection: { 49 | filename: ':memory:' 50 | }, 51 | useNullAsDefault: true 52 | }); 53 | 54 | afterAll(async () => { 55 | await mockKnex.destroy(); 56 | }); 57 | 58 | it('should work', async () => { 59 | const controller = new GqlEntityController(` 60 | scalar BigInt 61 | scalar Decimal 62 | scalar BigDecimal 63 | 64 | type Vote { 65 | id: Int! 66 | name: String 67 | authenticators: [String] 68 | big_number: BigInt 69 | decimal: Decimal 70 | big_decimal: BigDecimal 71 | } 72 | `); 73 | const { builder } = await controller.createEntityStores(mockKnex); 74 | 75 | expect(builder.toString()).toMatchSnapshot(); 76 | }); 77 | }); 78 | 79 | describe('generateSampleQuery', () => { 80 | it('should return undefined when no entities are defined', () => { 81 | const controller = new GqlEntityController('scalar Text'); 82 | 83 | expect(controller.generateSampleQuery()).toBeUndefined(); 84 | }); 85 | 86 | it.each([ 87 | { 88 | case: 'first and only entity', 89 | schema: ` 90 | type Vote { 91 | id: Int! 92 | name: String 93 | created_at: Int! 94 | } 95 | ` 96 | }, 97 | { 98 | case: 'nested objects', 99 | // Checkpoint doesn't support relationship among entities, 100 | // but this just tests to ensure the sample query works for it. 101 | schema: ` 102 | type Vote { 103 | id: Int! 104 | name: String 105 | poster: Poster 106 | created_at: Int! 107 | } 108 | 109 | type Poster { 110 | id: Int! 111 | name: String! 112 | venue: Venue! 113 | } 114 | 115 | type Venue { 116 | id: Int! 117 | location: String! 118 | } 119 | ` 120 | } 121 | ])('should return correct query sample for $case', ({ schema }) => { 122 | const controller = new GqlEntityController(schema); 123 | 124 | expect(controller.generateSampleQuery()).not.toBeUndefined(); 125 | expect(controller.generateSampleQuery()).toMatchSnapshot(); 126 | }); 127 | 128 | it('should generate query schema', () => { 129 | const schema = ` 130 | type Vote { 131 | id: Int! 
132 | name: String 133 | authenticators: [String] 134 | }`; 135 | 136 | const controller = new GqlEntityController(schema); 137 | const generatedSchema = controller.generateSchema(); 138 | 139 | expect(printSchema(generatedSchema)).toMatchSnapshot(); 140 | }); 141 | }); 142 | }); 143 | -------------------------------------------------------------------------------- /test/unit/knex.test.ts: -------------------------------------------------------------------------------- 1 | import { createKnexConfig } from '../../src/knex'; 2 | 3 | describe('createKnexConfig', () => { 4 | it('should create knex config', () => { 5 | expect(createKnexConfig('postgres://root:default_password@localhost:3306/checkpoint')).toEqual({ 6 | client: 'pg', 7 | connection: { 8 | database: 'checkpoint', 9 | host: 'localhost', 10 | password: 'default_password', 11 | port: 3306, 12 | ssl: undefined, 13 | user: 'root' 14 | } 15 | }); 16 | }); 17 | }); 18 | -------------------------------------------------------------------------------- /test/unit/providers/starknet/__snapshots__/utils.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`utils parseEvent should parse cairo 1 event 1`] = ` 4 | { 5 | "class_hash": "0x7b52be32f53235445c94247942b516cb9f8ace110c82ca8a72af527eb7d44b0", 6 | "space_address": "0x40279a6371314c37464ebc327856a2d332b89022109f55b9ee93179bbbe727a", 7 | } 8 | `; 9 | 10 | exports[`utils parseEvent should parse event 1`] = ` 11 | { 12 | "authenticators": [ 13 | "0x5e1f273ca9a11f78bfb291cbe1b49294cf3c76dd48951e7ab7db6d9fb1e7d62", 14 | "0x64cce9272197eba6353f5bbf060e097e516b411e66e83a9cf5910a08697df14", 15 | ], 16 | "authenticators_len": "0x2", 17 | "controller": "0x6abd599ab530c5b3bc603111bdd20d77890db330402dc870fc9866f50ed6d2a", 18 | "deployer_address": "0x6abd599ab530c5b3bc603111bdd20d77890db330402dc870fc9866f50ed6d2a", 19 | "execution_strategies": [ 20 | "0x4ecc83848a519cc22b0d0ffb70e65ec8dde85d3d13439eff7145d4063cf6b4d", 21 | ], 22 | "execution_strategies_len": "0x1", 23 | "max_voting_duration": "0x15180", 24 | "metadata_uri": [ 25 | "0x697066733a2f2f6261666b726569617978776969726337666e6b7461796d62", 26 | "0x34367363356162767a6f6c6f6864343666717a6d71616b6e6f337467716c36", 27 | "0x32686669", 28 | ], 29 | "metadata_uri_len": "0x3", 30 | "min_voting_duration": "0x0", 31 | "proposal_threshold": { 32 | "high": "0x0", 33 | "low": "0x1", 34 | }, 35 | "quorum": { 36 | "high": "0x0", 37 | "low": "0x1", 38 | }, 39 | "space_address": "0x56ecf84acc36d7d878ab11067c2e9870a38f10d0819c698f8c1f559c40d3a", 40 | "voting_delay": "0x0", 41 | "voting_strategies": [ 42 | "0xd1b81feff3095ca9517fdfc7427e742ce96f7ca8f3b2664a21b2fba552493b", 43 | "0xd1b81feff3095ca9517fdfc7427e742ce96f7ca8f3b2664a21b2fba552493b", 44 | ], 45 | "voting_strategies_len": "0x2", 46 | "voting_strategy_params_flat": [ 47 | "0x2", 48 | "0x0", 49 | "0x2", 50 | "0xb4fbf271143f4fbf7b91a5ded31805e42b2208d6", 51 | "0x3", 52 | "0xb4fbf271143f4fbf7b91a5ded31805e42b2208d6", 53 | "0x3", 54 | ], 55 | "voting_strategy_params_flat_len": "0x7", 56 | } 57 | `; 58 | 59 | exports[`utils parseEvent should parse nested event 1`] = ` 60 | { 61 | "proposal_id": "0x8", 62 | "vote": { 63 | "choice": "0x1", 64 | "voting_power": { 65 | "high": "0x0", 66 | "low": "0x1", 67 | }, 68 | }, 69 | "voter_address": { 70 | "value": "0xef8305e140ac520225daf050e2f71d5fbcc543e7", 71 | }, 72 | } 73 | `; 74 | -------------------------------------------------------------------------------- 
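The parseEvent snapshots above are produced from the raw events and ABIs defined in the fixtures file that follows. The test file itself (test/unit/providers/starknet/utils.test.ts) is not included in this excerpt, so the wiring below is only a hedged sketch of how such a snapshot test could call parseEvent with these fixtures; the relative import paths and the loose casts are assumptions.

import { parseEvent } from '../../../../src/providers/starknet/utils';
import { spaceDeployedEvent, spaceFactoryAbi } from './fixtures';

describe('utils', () => {
  describe('parseEvent', () => {
    it('should parse event', () => {
      // Decodes the flat felt array into named fields using the ABI's
      // `space_deployed` event definition, matching the snapshot above.
      expect(parseEvent(spaceFactoryAbi as any, spaceDeployedEvent as any)).toMatchSnapshot();
    });
  });
});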
/test/unit/providers/starknet/fixtures.ts: -------------------------------------------------------------------------------- 1 | export const spaceDeployedEvent = { 2 | data: [ 3 | '0x6abd599ab530c5b3bc603111bdd20d77890db330402dc870fc9866f50ed6d2a', 4 | '0x56ecf84acc36d7d878ab11067c2e9870a38f10d0819c698f8c1f559c40d3a', 5 | '0x0', 6 | '0x0', 7 | '0x15180', 8 | '0x1', 9 | '0x0', 10 | '0x6abd599ab530c5b3bc603111bdd20d77890db330402dc870fc9866f50ed6d2a', 11 | '0x1', 12 | '0x0', 13 | '0x2', 14 | '0xd1b81feff3095ca9517fdfc7427e742ce96f7ca8f3b2664a21b2fba552493b', 15 | '0xd1b81feff3095ca9517fdfc7427e742ce96f7ca8f3b2664a21b2fba552493b', 16 | '0x7', 17 | '0x2', 18 | '0x0', 19 | '0x2', 20 | '0xb4fbf271143f4fbf7b91a5ded31805e42b2208d6', 21 | '0x3', 22 | '0xb4fbf271143f4fbf7b91a5ded31805e42b2208d6', 23 | '0x3', 24 | '0x2', 25 | '0x5e1f273ca9a11f78bfb291cbe1b49294cf3c76dd48951e7ab7db6d9fb1e7d62', 26 | '0x64cce9272197eba6353f5bbf060e097e516b411e66e83a9cf5910a08697df14', 27 | '0x1', 28 | '0x4ecc83848a519cc22b0d0ffb70e65ec8dde85d3d13439eff7145d4063cf6b4d', 29 | '0x3', 30 | '0x697066733a2f2f6261666b726569617978776969726337666e6b7461796d62', 31 | '0x34367363356162767a6f6c6f6864343666717a6d71616b6e6f337467716c36', 32 | '0x32686669' 33 | ], 34 | from_address: '0xe1e511e496a72791ab3d591ba7d571a32de4261d84e4d183f26b6325970e20', 35 | keys: ['0xfb483ab6758cfd02170a30e08181f6e7397c1a32c2966ce3a8c4702b7ec142'] 36 | }; 37 | 38 | export const voteCreatedEvent = { 39 | data: ['0x8', '0xef8305e140ac520225daf050e2f71d5fbcc543e7', '0x1', '0x1', '0x0'], 40 | from_address: '0x750118894bf8b3ad7fd79763899c4528b2a7a0c17d7185dff78eaddbff2cb0b', 41 | keys: ['0x35a0a3a79d25118031c4960817fe040fe30a9d229c30e63c993a5bfee52d32b'] 42 | }; 43 | 44 | export const spaceDeployedEventCairo1 = { 45 | block_hash: '0x8c069e00a34ae275efe05b4b5f9395d615f9caf003c4a0d0576c225a95d673', 46 | block_number: 114, 47 | data: [ 48 | '0x7b52be32f53235445c94247942b516cb9f8ace110c82ca8a72af527eb7d44b0', 49 | '0x40279a6371314c37464ebc327856a2d332b89022109f55b9ee93179bbbe727a' 50 | ], 51 | from_address: '0x6838e761bc2e07c9563251a03a60b9a013f2d36b73246fc9d850932ac519696', 52 | keys: ['0x2d8cd3e2509f757328c6abf59278c05024d30ae28426655f886b32be3eeaa9f'], 53 | transaction_hash: '0x569e2dac76352ea03888e9ff3135374e87124324ce22ce98ab5d16f922cc33e' 54 | }; 55 | 56 | export const spaceFactoryAbi = [ 57 | { 58 | name: 'Uint256', 59 | size: 2, 60 | type: 'struct', 61 | members: [ 62 | { 63 | name: 'low', 64 | type: 'felt', 65 | offset: 0 66 | }, 67 | { 68 | name: 'high', 69 | type: 'felt', 70 | offset: 1 71 | } 72 | ] 73 | }, 74 | { 75 | data: [ 76 | { 77 | name: 'deployer_address', 78 | type: 'felt' 79 | }, 80 | { 81 | name: 'space_address', 82 | type: 'felt' 83 | }, 84 | { 85 | name: 'voting_delay', 86 | type: 'felt' 87 | }, 88 | { 89 | name: 'min_voting_duration', 90 | type: 'felt' 91 | }, 92 | { 93 | name: 'max_voting_duration', 94 | type: 'felt' 95 | }, 96 | { 97 | name: 'proposal_threshold', 98 | type: 'Uint256' 99 | }, 100 | { 101 | name: 'controller', 102 | type: 'felt' 103 | }, 104 | { 105 | name: 'quorum', 106 | type: 'Uint256' 107 | }, 108 | { 109 | name: 'voting_strategies_len', 110 | type: 'felt' 111 | }, 112 | { 113 | name: 'voting_strategies', 114 | type: 'felt*' 115 | }, 116 | { 117 | name: 'voting_strategy_params_flat_len', 118 | type: 'felt' 119 | }, 120 | { 121 | name: 'voting_strategy_params_flat', 122 | type: 'felt*' 123 | }, 124 | { 125 | name: 'authenticators_len', 126 | type: 'felt' 127 | }, 128 | { 129 | name: 'authenticators', 130 | type: 
'felt*' 131 | }, 132 | { 133 | name: 'execution_strategies_len', 134 | type: 'felt' 135 | }, 136 | { 137 | name: 'execution_strategies', 138 | type: 'felt*' 139 | }, 140 | { 141 | name: 'metadata_uri_len', 142 | type: 'felt' 143 | }, 144 | { 145 | name: 'metadata_uri', 146 | type: 'felt*' 147 | } 148 | ], 149 | keys: [], 150 | name: 'space_deployed', 151 | type: 'event' 152 | }, 153 | { 154 | name: 'constructor', 155 | type: 'constructor', 156 | inputs: [ 157 | { 158 | name: 'space_class_hash', 159 | type: 'felt' 160 | } 161 | ], 162 | outputs: [] 163 | }, 164 | { 165 | name: 'deploySpace', 166 | type: 'function', 167 | inputs: [ 168 | { 169 | name: 'public_key', 170 | type: 'felt' 171 | }, 172 | { 173 | name: 'voting_delay', 174 | type: 'felt' 175 | }, 176 | { 177 | name: 'min_voting_duration', 178 | type: 'felt' 179 | }, 180 | { 181 | name: 'max_voting_duration', 182 | type: 'felt' 183 | }, 184 | { 185 | name: 'proposal_threshold', 186 | type: 'Uint256' 187 | }, 188 | { 189 | name: 'controller', 190 | type: 'felt' 191 | }, 192 | { 193 | name: 'quorum', 194 | type: 'Uint256' 195 | }, 196 | { 197 | name: 'voting_strategies_len', 198 | type: 'felt' 199 | }, 200 | { 201 | name: 'voting_strategies', 202 | type: 'felt*' 203 | }, 204 | { 205 | name: 'voting_strategy_params_flat_len', 206 | type: 'felt' 207 | }, 208 | { 209 | name: 'voting_strategy_params_flat', 210 | type: 'felt*' 211 | }, 212 | { 213 | name: 'authenticators_len', 214 | type: 'felt' 215 | }, 216 | { 217 | name: 'authenticators', 218 | type: 'felt*' 219 | }, 220 | { 221 | name: 'execution_strategies_len', 222 | type: 'felt' 223 | }, 224 | { 225 | name: 'execution_strategies', 226 | type: 'felt*' 227 | }, 228 | { 229 | name: 'metadata_uri_len', 230 | type: 'felt' 231 | }, 232 | { 233 | name: 'metadata_uri', 234 | type: 'felt*' 235 | } 236 | ], 237 | outputs: [] 238 | } 239 | ]; 240 | 241 | export const spaceAbi = [ 242 | { 243 | name: 'Address', 244 | size: 1, 245 | type: 'struct', 246 | members: [ 247 | { 248 | name: 'value', 249 | type: 'felt', 250 | offset: 0 251 | } 252 | ] 253 | }, 254 | { 255 | name: 'Proposal', 256 | size: 5, 257 | type: 'struct', 258 | members: [ 259 | { 260 | name: 'quorum', 261 | type: 'Uint256', 262 | offset: 0 263 | }, 264 | { 265 | name: 'timestamps', 266 | type: 'felt', 267 | offset: 2 268 | }, 269 | { 270 | name: 'execution_strategy', 271 | type: 'felt', 272 | offset: 3 273 | }, 274 | { 275 | name: 'execution_hash', 276 | type: 'felt', 277 | offset: 4 278 | } 279 | ] 280 | }, 281 | { 282 | name: 'Uint256', 283 | size: 2, 284 | type: 'struct', 285 | members: [ 286 | { 287 | name: 'low', 288 | type: 'felt', 289 | offset: 0 290 | }, 291 | { 292 | name: 'high', 293 | type: 'felt', 294 | offset: 1 295 | } 296 | ] 297 | }, 298 | { 299 | name: 'Vote', 300 | size: 3, 301 | type: 'struct', 302 | members: [ 303 | { 304 | name: 'choice', 305 | type: 'felt', 306 | offset: 0 307 | }, 308 | { 309 | name: 'voting_power', 310 | type: 'Uint256', 311 | offset: 1 312 | } 313 | ] 314 | }, 315 | { 316 | name: 'AccountCallArray', 317 | size: 4, 318 | type: 'struct', 319 | members: [ 320 | { 321 | name: 'to', 322 | type: 'felt', 323 | offset: 0 324 | }, 325 | { 326 | name: 'selector', 327 | type: 'felt', 328 | offset: 1 329 | }, 330 | { 331 | name: 'data_offset', 332 | type: 'felt', 333 | offset: 2 334 | }, 335 | { 336 | name: 'data_len', 337 | type: 'felt', 338 | offset: 3 339 | } 340 | ] 341 | }, 342 | { 343 | name: 'ProposalInfo', 344 | size: 11, 345 | type: 'struct', 346 | members: [ 347 | { 348 | name: 'proposal', 
349 | type: 'Proposal', 350 | offset: 0 351 | }, 352 | { 353 | name: 'power_for', 354 | type: 'Uint256', 355 | offset: 5 356 | }, 357 | { 358 | name: 'power_against', 359 | type: 'Uint256', 360 | offset: 7 361 | }, 362 | { 363 | name: 'power_abstain', 364 | type: 'Uint256', 365 | offset: 9 366 | } 367 | ] 368 | }, 369 | { 370 | data: [ 371 | { 372 | name: 'previousOwner', 373 | type: 'felt' 374 | }, 375 | { 376 | name: 'newOwner', 377 | type: 'felt' 378 | } 379 | ], 380 | keys: [], 381 | name: 'OwnershipTransferred', 382 | type: 'event' 383 | }, 384 | { 385 | data: [ 386 | { 387 | name: 'proposal_id', 388 | type: 'felt' 389 | }, 390 | { 391 | name: 'proposer_address', 392 | type: 'Address' 393 | }, 394 | { 395 | name: 'proposal', 396 | type: 'Proposal' 397 | }, 398 | { 399 | name: 'metadata_uri_len', 400 | type: 'felt' 401 | }, 402 | { 403 | name: 'metadata_uri', 404 | type: 'felt*' 405 | }, 406 | { 407 | name: 'execution_params_len', 408 | type: 'felt' 409 | }, 410 | { 411 | name: 'execution_params', 412 | type: 'felt*' 413 | } 414 | ], 415 | keys: [], 416 | name: 'proposal_created', 417 | type: 'event' 418 | }, 419 | { 420 | data: [ 421 | { 422 | name: 'proposal_id', 423 | type: 'felt' 424 | }, 425 | { 426 | name: 'voter_address', 427 | type: 'Address' 428 | }, 429 | { 430 | name: 'vote', 431 | type: 'Vote' 432 | } 433 | ], 434 | keys: [], 435 | name: 'vote_created', 436 | type: 'event' 437 | }, 438 | { 439 | data: [ 440 | { 441 | name: 'previous', 442 | type: 'Uint256' 443 | }, 444 | { 445 | name: 'new_quorum', 446 | type: 'Uint256' 447 | } 448 | ], 449 | keys: [], 450 | name: 'quorum_updated', 451 | type: 'event' 452 | }, 453 | { 454 | data: [ 455 | { 456 | name: 'previous', 457 | type: 'felt' 458 | }, 459 | { 460 | name: 'new_voting_delay', 461 | type: 'felt' 462 | } 463 | ], 464 | keys: [], 465 | name: 'voting_delay_updated', 466 | type: 'event' 467 | }, 468 | { 469 | data: [ 470 | { 471 | name: 'previous', 472 | type: 'felt' 473 | }, 474 | { 475 | name: 'new_voting_duration', 476 | type: 'felt' 477 | } 478 | ], 479 | keys: [], 480 | name: 'min_voting_duration_updated', 481 | type: 'event' 482 | }, 483 | { 484 | data: [ 485 | { 486 | name: 'previous', 487 | type: 'felt' 488 | }, 489 | { 490 | name: 'new_voting_duration', 491 | type: 'felt' 492 | } 493 | ], 494 | keys: [], 495 | name: 'max_voting_duration_updated', 496 | type: 'event' 497 | }, 498 | { 499 | data: [ 500 | { 501 | name: 'previous', 502 | type: 'Uint256' 503 | }, 504 | { 505 | name: 'new_proposal_threshold', 506 | type: 'Uint256' 507 | } 508 | ], 509 | keys: [], 510 | name: 'proposal_threshold_updated', 511 | type: 'event' 512 | }, 513 | { 514 | data: [ 515 | { 516 | name: 'new_metadata_uri_len', 517 | type: 'felt' 518 | }, 519 | { 520 | name: 'new_metadata_uri', 521 | type: 'felt*' 522 | } 523 | ], 524 | keys: [], 525 | name: 'metadata_uri_updated', 526 | type: 'event' 527 | }, 528 | { 529 | data: [ 530 | { 531 | name: 'added_len', 532 | type: 'felt' 533 | }, 534 | { 535 | name: 'added', 536 | type: 'felt*' 537 | } 538 | ], 539 | keys: [], 540 | name: 'authenticators_added', 541 | type: 'event' 542 | }, 543 | { 544 | data: [ 545 | { 546 | name: 'removed_len', 547 | type: 'felt' 548 | }, 549 | { 550 | name: 'removed', 551 | type: 'felt*' 552 | } 553 | ], 554 | keys: [], 555 | name: 'authenticators_removed', 556 | type: 'event' 557 | }, 558 | { 559 | data: [ 560 | { 561 | name: 'added_len', 562 | type: 'felt' 563 | }, 564 | { 565 | name: 'added', 566 | type: 'felt*' 567 | } 568 | ], 569 | keys: [], 570 | name: 
'execution_strategies_added', 571 | type: 'event' 572 | }, 573 | { 574 | data: [ 575 | { 576 | name: 'removed_len', 577 | type: 'felt' 578 | }, 579 | { 580 | name: 'removed', 581 | type: 'felt*' 582 | } 583 | ], 584 | keys: [], 585 | name: 'execution_strategies_removed', 586 | type: 'event' 587 | }, 588 | { 589 | data: [ 590 | { 591 | name: 'added_len', 592 | type: 'felt' 593 | }, 594 | { 595 | name: 'added', 596 | type: 'felt*' 597 | } 598 | ], 599 | keys: [], 600 | name: 'voting_strategies_added', 601 | type: 'event' 602 | }, 603 | { 604 | data: [ 605 | { 606 | name: 'removed_len', 607 | type: 'felt' 608 | }, 609 | { 610 | name: 'removed', 611 | type: 'felt*' 612 | } 613 | ], 614 | keys: [], 615 | name: 'voting_strategies_removed', 616 | type: 'event' 617 | }, 618 | { 619 | name: 'constructor', 620 | type: 'constructor', 621 | inputs: [ 622 | { 623 | name: 'public_key', 624 | type: 'felt' 625 | }, 626 | { 627 | name: 'voting_delay', 628 | type: 'felt' 629 | }, 630 | { 631 | name: 'min_voting_duration', 632 | type: 'felt' 633 | }, 634 | { 635 | name: 'max_voting_duration', 636 | type: 'felt' 637 | }, 638 | { 639 | name: 'proposal_threshold', 640 | type: 'Uint256' 641 | }, 642 | { 643 | name: 'controller', 644 | type: 'felt' 645 | }, 646 | { 647 | name: 'quorum', 648 | type: 'Uint256' 649 | }, 650 | { 651 | name: 'voting_strategies_len', 652 | type: 'felt' 653 | }, 654 | { 655 | name: 'voting_strategies', 656 | type: 'felt*' 657 | }, 658 | { 659 | name: 'voting_strategy_params_flat_len', 660 | type: 'felt' 661 | }, 662 | { 663 | name: 'voting_strategy_params_flat', 664 | type: 'felt*' 665 | }, 666 | { 667 | name: 'authenticators_len', 668 | type: 'felt' 669 | }, 670 | { 671 | name: 'authenticators', 672 | type: 'felt*' 673 | }, 674 | { 675 | name: 'execution_strategies_len', 676 | type: 'felt' 677 | }, 678 | { 679 | name: 'execution_strategies', 680 | type: 'felt*' 681 | } 682 | ], 683 | outputs: [] 684 | }, 685 | { 686 | name: 'getPublicKey', 687 | type: 'function', 688 | inputs: [], 689 | outputs: [ 690 | { 691 | name: 'publicKey', 692 | type: 'felt' 693 | } 694 | ], 695 | stateMutability: 'view' 696 | }, 697 | { 698 | name: 'supportsInterface', 699 | type: 'function', 700 | inputs: [ 701 | { 702 | name: 'interfaceId', 703 | type: 'felt' 704 | } 705 | ], 706 | outputs: [ 707 | { 708 | name: 'success', 709 | type: 'felt' 710 | } 711 | ], 712 | stateMutability: 'view' 713 | }, 714 | { 715 | name: 'setPublicKey', 716 | type: 'function', 717 | inputs: [ 718 | { 719 | name: 'newPublicKey', 720 | type: 'felt' 721 | } 722 | ], 723 | outputs: [] 724 | }, 725 | { 726 | name: 'isValidSignature', 727 | type: 'function', 728 | inputs: [ 729 | { 730 | name: 'hash', 731 | type: 'felt' 732 | }, 733 | { 734 | name: 'signature_len', 735 | type: 'felt' 736 | }, 737 | { 738 | name: 'signature', 739 | type: 'felt*' 740 | } 741 | ], 742 | outputs: [ 743 | { 744 | name: 'isValid', 745 | type: 'felt' 746 | } 747 | ], 748 | stateMutability: 'view' 749 | }, 750 | { 751 | name: '__validate__', 752 | type: 'function', 753 | inputs: [ 754 | { 755 | name: 'call_array_len', 756 | type: 'felt' 757 | }, 758 | { 759 | name: 'call_array', 760 | type: 'AccountCallArray*' 761 | }, 762 | { 763 | name: 'calldata_len', 764 | type: 'felt' 765 | }, 766 | { 767 | name: 'calldata', 768 | type: 'felt*' 769 | } 770 | ], 771 | outputs: [] 772 | }, 773 | { 774 | name: '__validate_declare__', 775 | type: 'function', 776 | inputs: [ 777 | { 778 | name: 'class_hash', 779 | type: 'felt' 780 | } 781 | ], 782 | outputs: [] 783 | }, 784 | { 
785 | name: '__validate_deploy__', 786 | type: 'function', 787 | inputs: [ 788 | { 789 | name: 'class_hash', 790 | type: 'felt' 791 | }, 792 | { 793 | name: 'salt', 794 | type: 'felt' 795 | }, 796 | { 797 | name: 'publicKey', 798 | type: 'felt' 799 | } 800 | ], 801 | outputs: [] 802 | }, 803 | { 804 | name: '__execute__', 805 | type: 'function', 806 | inputs: [ 807 | { 808 | name: 'call_array_len', 809 | type: 'felt' 810 | }, 811 | { 812 | name: 'call_array', 813 | type: 'AccountCallArray*' 814 | }, 815 | { 816 | name: 'calldata_len', 817 | type: 'felt' 818 | }, 819 | { 820 | name: 'calldata', 821 | type: 'felt*' 822 | } 823 | ], 824 | outputs: [ 825 | { 826 | name: 'response_len', 827 | type: 'felt' 828 | }, 829 | { 830 | name: 'response', 831 | type: 'felt*' 832 | } 833 | ] 834 | }, 835 | { 836 | name: 'propose', 837 | type: 'function', 838 | inputs: [ 839 | { 840 | name: 'proposer_address', 841 | type: 'Address' 842 | }, 843 | { 844 | name: 'metadata_uri_string_len', 845 | type: 'felt' 846 | }, 847 | { 848 | name: 'metadata_uri_len', 849 | type: 'felt' 850 | }, 851 | { 852 | name: 'metadata_uri', 853 | type: 'felt*' 854 | }, 855 | { 856 | name: 'execution_strategy', 857 | type: 'felt' 858 | }, 859 | { 860 | name: 'used_voting_strategies_len', 861 | type: 'felt' 862 | }, 863 | { 864 | name: 'used_voting_strategies', 865 | type: 'felt*' 866 | }, 867 | { 868 | name: 'user_voting_strategy_params_flat_len', 869 | type: 'felt' 870 | }, 871 | { 872 | name: 'user_voting_strategy_params_flat', 873 | type: 'felt*' 874 | }, 875 | { 876 | name: 'execution_params_len', 877 | type: 'felt' 878 | }, 879 | { 880 | name: 'execution_params', 881 | type: 'felt*' 882 | } 883 | ], 884 | outputs: [] 885 | }, 886 | { 887 | name: 'vote', 888 | type: 'function', 889 | inputs: [ 890 | { 891 | name: 'voter_address', 892 | type: 'Address' 893 | }, 894 | { 895 | name: 'proposal_id', 896 | type: 'felt' 897 | }, 898 | { 899 | name: 'choice', 900 | type: 'felt' 901 | }, 902 | { 903 | name: 'used_voting_strategies_len', 904 | type: 'felt' 905 | }, 906 | { 907 | name: 'used_voting_strategies', 908 | type: 'felt*' 909 | }, 910 | { 911 | name: 'user_voting_strategy_params_flat_len', 912 | type: 'felt' 913 | }, 914 | { 915 | name: 'user_voting_strategy_params_flat', 916 | type: 'felt*' 917 | } 918 | ], 919 | outputs: [] 920 | }, 921 | { 922 | name: 'finalizeProposal', 923 | type: 'function', 924 | inputs: [ 925 | { 926 | name: 'proposal_id', 927 | type: 'felt' 928 | }, 929 | { 930 | name: 'execution_params_len', 931 | type: 'felt' 932 | }, 933 | { 934 | name: 'execution_params', 935 | type: 'felt*' 936 | } 937 | ], 938 | outputs: [] 939 | }, 940 | { 941 | name: 'cancelProposal', 942 | type: 'function', 943 | inputs: [ 944 | { 945 | name: 'proposal_id', 946 | type: 'felt' 947 | }, 948 | { 949 | name: 'execution_params_len', 950 | type: 'felt' 951 | }, 952 | { 953 | name: 'execution_params', 954 | type: 'felt*' 955 | } 956 | ], 957 | outputs: [] 958 | }, 959 | { 960 | name: 'hasVoted', 961 | type: 'function', 962 | inputs: [ 963 | { 964 | name: 'proposal_id', 965 | type: 'felt' 966 | }, 967 | { 968 | name: 'voter_address', 969 | type: 'Address' 970 | } 971 | ], 972 | outputs: [ 973 | { 974 | name: 'voted', 975 | type: 'felt' 976 | } 977 | ], 978 | stateMutability: 'view' 979 | }, 980 | { 981 | name: 'getProposalInfo', 982 | type: 'function', 983 | inputs: [ 984 | { 985 | name: 'proposal_id', 986 | type: 'felt' 987 | } 988 | ], 989 | outputs: [ 990 | { 991 | name: 'proposal_info', 992 | type: 'ProposalInfo' 993 | } 994 | ], 
995 | stateMutability: 'view' 996 | }, 997 | { 998 | name: 'setController', 999 | type: 'function', 1000 | inputs: [ 1001 | { 1002 | name: 'new_controller', 1003 | type: 'felt' 1004 | } 1005 | ], 1006 | outputs: [] 1007 | }, 1008 | { 1009 | name: 'setQuorum', 1010 | type: 'function', 1011 | inputs: [ 1012 | { 1013 | name: 'new_quorum', 1014 | type: 'Uint256' 1015 | } 1016 | ], 1017 | outputs: [] 1018 | }, 1019 | { 1020 | name: 'setVotingDelay', 1021 | type: 'function', 1022 | inputs: [ 1023 | { 1024 | name: 'new_delay', 1025 | type: 'felt' 1026 | } 1027 | ], 1028 | outputs: [] 1029 | }, 1030 | { 1031 | name: 'setMinVotingDuration', 1032 | type: 'function', 1033 | inputs: [ 1034 | { 1035 | name: 'new_min_voting_duration', 1036 | type: 'felt' 1037 | } 1038 | ], 1039 | outputs: [] 1040 | }, 1041 | { 1042 | name: 'setMaxVotingDuration', 1043 | type: 'function', 1044 | inputs: [ 1045 | { 1046 | name: 'new_max_voting_duration', 1047 | type: 'felt' 1048 | } 1049 | ], 1050 | outputs: [] 1051 | }, 1052 | { 1053 | name: 'setProposalThreshold', 1054 | type: 'function', 1055 | inputs: [ 1056 | { 1057 | name: 'new_proposal_threshold', 1058 | type: 'Uint256' 1059 | } 1060 | ], 1061 | outputs: [] 1062 | }, 1063 | { 1064 | name: 'setMetadataUri', 1065 | type: 'function', 1066 | inputs: [ 1067 | { 1068 | name: 'new_metadata_uri_len', 1069 | type: 'felt' 1070 | }, 1071 | { 1072 | name: 'new_metadata_uri', 1073 | type: 'felt*' 1074 | } 1075 | ], 1076 | outputs: [] 1077 | }, 1078 | { 1079 | name: 'addExecutionStrategies', 1080 | type: 'function', 1081 | inputs: [ 1082 | { 1083 | name: 'addresses_len', 1084 | type: 'felt' 1085 | }, 1086 | { 1087 | name: 'addresses', 1088 | type: 'felt*' 1089 | } 1090 | ], 1091 | outputs: [] 1092 | }, 1093 | { 1094 | name: 'removeExecutionStrategies', 1095 | type: 'function', 1096 | inputs: [ 1097 | { 1098 | name: 'addresses_len', 1099 | type: 'felt' 1100 | }, 1101 | { 1102 | name: 'addresses', 1103 | type: 'felt*' 1104 | } 1105 | ], 1106 | outputs: [] 1107 | }, 1108 | { 1109 | name: 'addVotingStrategies', 1110 | type: 'function', 1111 | inputs: [ 1112 | { 1113 | name: 'addresses_len', 1114 | type: 'felt' 1115 | }, 1116 | { 1117 | name: 'addresses', 1118 | type: 'felt*' 1119 | }, 1120 | { 1121 | name: 'params_flat_len', 1122 | type: 'felt' 1123 | }, 1124 | { 1125 | name: 'params_flat', 1126 | type: 'felt*' 1127 | } 1128 | ], 1129 | outputs: [] 1130 | }, 1131 | { 1132 | name: 'removeVotingStrategies', 1133 | type: 'function', 1134 | inputs: [ 1135 | { 1136 | name: 'indexes_len', 1137 | type: 'felt' 1138 | }, 1139 | { 1140 | name: 'indexes', 1141 | type: 'felt*' 1142 | } 1143 | ], 1144 | outputs: [] 1145 | }, 1146 | { 1147 | name: 'addAuthenticators', 1148 | type: 'function', 1149 | inputs: [ 1150 | { 1151 | name: 'addresses_len', 1152 | type: 'felt' 1153 | }, 1154 | { 1155 | name: 'addresses', 1156 | type: 'felt*' 1157 | } 1158 | ], 1159 | outputs: [] 1160 | }, 1161 | { 1162 | name: 'removeAuthenticators', 1163 | type: 'function', 1164 | inputs: [ 1165 | { 1166 | name: 'addresses_len', 1167 | type: 'felt' 1168 | }, 1169 | { 1170 | name: 'addresses', 1171 | type: 'felt*' 1172 | } 1173 | ], 1174 | outputs: [] 1175 | } 1176 | ]; 1177 | 1178 | export const factoryAbiCairo1 = [ 1179 | { 1180 | type: 'impl', 1181 | name: 'Factory', 1182 | interface_name: 'sx::factory::factory::IFactory' 1183 | }, 1184 | { 1185 | type: 'struct', 1186 | name: 'core::array::Span::<core::felt252>', 1187 | members: [ 1188 | { 1189 | name: 'snapshot', 1190 | type: '@core::array::Array::<core::felt252>' 1191 | } 1192 | ] 1193 | }, 1194 |
{ 1195 | type: 'interface', 1196 | name: 'sx::factory::factory::IFactory', 1197 | items: [ 1198 | { 1199 | type: 'function', 1200 | name: 'deploy', 1201 | inputs: [ 1202 | { 1203 | name: 'class_hash', 1204 | type: 'core::starknet::class_hash::ClassHash' 1205 | }, 1206 | { 1207 | name: 'contract_address_salt', 1208 | type: 'core::felt252' 1209 | }, 1210 | { 1211 | name: 'initialize_calldata', 1212 | type: 'core::array::Span::<core::felt252>' 1213 | } 1214 | ], 1215 | outputs: [ 1216 | { 1217 | type: 'core::starknet::contract_address::ContractAddress' 1218 | } 1219 | ], 1220 | state_mutability: 'external' 1221 | } 1222 | ] 1223 | }, 1224 | { 1225 | type: 'event', 1226 | name: 'sx::factory::factory::Factory::SpaceDeployed', 1227 | kind: 'struct', 1228 | members: [ 1229 | { 1230 | name: 'class_hash', 1231 | type: 'core::starknet::class_hash::ClassHash', 1232 | kind: 'data' 1233 | }, 1234 | { 1235 | name: 'space_address', 1236 | type: 'core::starknet::contract_address::ContractAddress', 1237 | kind: 'data' 1238 | } 1239 | ] 1240 | }, 1241 | { 1242 | type: 'event', 1243 | name: 'sx::factory::factory::Factory::Event', 1244 | kind: 'enum', 1245 | variants: [ 1246 | { 1247 | name: 'SpaceDeployed', 1248 | type: 'sx::factory::factory::Factory::SpaceDeployed', 1249 | kind: 'nested' 1250 | } 1251 | ] 1252 | } 1253 | ]; 1254 | -------------------------------------------------------------------------------- /test/unit/providers/starknet/utils.test.ts: -------------------------------------------------------------------------------- 1 | import { parseEvent } from '../../../../src/providers/starknet/utils'; 2 | import { 3 | spaceFactoryAbi, 4 | spaceAbi, 5 | factoryAbiCairo1, 6 | spaceDeployedEvent, 7 | voteCreatedEvent, 8 | spaceDeployedEventCairo1 9 | } from './fixtures'; 10 | 11 | describe('utils', () => { 12 | describe('parseEvent', () => { 13 | it('should parse event', () => { 14 | const output = parseEvent(spaceFactoryAbi, spaceDeployedEvent); 15 | 16 | expect(output).toMatchSnapshot(); 17 | }); 18 | 19 | it('should parse nested event', () => { 20 | const output = parseEvent(spaceAbi, voteCreatedEvent); 21 | 22 | expect(output).toMatchSnapshot(); 23 | }); 24 | 25 | it('should parse cairo 1 event', () => { 26 | const output = parseEvent(factoryAbiCairo1, spaceDeployedEventCairo1); 27 | 28 | expect(output).toMatchSnapshot(); 29 | }); 30 | }); 31 | }); 32 | -------------------------------------------------------------------------------- /test/unit/stores/__snapshots__/checkpoints.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`CheckpointsStore blocks should remove blocks 1`] = ` 4 | [ 5 | { 6 | "block_number": 5000, 7 | "hash": "0x0", 8 | "indexer": "default", 9 | }, 10 | { 11 | "block_number": 5001, 12 | "hash": "0x1", 13 | "indexer": "default", 14 | }, 15 | ] 16 | `; 17 | 18 | exports[`CheckpointsStore blocks should set block hash 1`] = ` 19 | [ 20 | { 21 | "block_number": 5000, 22 | "hash": "0x0", 23 | "indexer": "default", 24 | }, 25 | { 26 | "block_number": 5001, 27 | "hash": "0x1", 28 | "indexer": "default", 29 | }, 30 | { 31 | "block_number": 5000, 32 | "hash": "0xa", 33 | "indexer": "OTHER", 34 | }, 35 | ] 36 | `; 37 | 38 | exports[`CheckpointsStore checkpoints should insert checkpoints 1`] = ` 39 | [ 40 | { 41 | "block_number": 5000, 42 | "contract_address": "0x01", 43 | "id": "6f1246bdea", 44 | "indexer": "default", 45 | }, 46 | { 47 | "block_number": 9000, 48 | "contract_address": "0x02", 49 | "id":
"92df4d22e3", 50 | "indexer": "default", 51 | }, 52 | { 53 | "block_number": 11000, 54 | "contract_address": "0x01", 55 | "id": "24a44a0b0b", 56 | "indexer": "default", 57 | }, 58 | ] 59 | `; 60 | 61 | exports[`CheckpointsStore createStore should execute correct query 1`] = ` 62 | "create table \`_blocks\` (\`indexer\` varchar(255) not null, \`block_number\` bigint, \`hash\` varchar(255) not null, primary key (\`indexer\`, \`block_number\`)); 63 | create table \`_checkpoints\` (\`id\` varchar(10), \`indexer\` varchar(255) not null, \`block_number\` bigint not null, \`contract_address\` varchar(66) not null, primary key (\`id\`, \`indexer\`)); 64 | create index \`_checkpoints_block_number_index\` on \`_checkpoints\` (\`block_number\`); 65 | create index \`_checkpoints_contract_address_index\` on \`_checkpoints\` (\`contract_address\`); 66 | create table \`_metadatas\` (\`id\` varchar(20), \`indexer\` varchar(255) not null, \`value\` varchar(128) not null, primary key (\`id\`, \`indexer\`)); 67 | create table \`_template_sources\` (\`indexer\` varchar(255) not null, \`contract_address\` varchar(66), \`start_block\` bigint not null, \`template\` varchar(128) not null)" 68 | `; 69 | 70 | exports[`CheckpointsStore metadata should set metadata 1`] = ` 71 | [ 72 | { 73 | "id": "key", 74 | "indexer": "default", 75 | "value": "default_value", 76 | }, 77 | { 78 | "id": "number_key", 79 | "indexer": "default", 80 | "value": "1111", 81 | }, 82 | { 83 | "id": "key", 84 | "indexer": "OTHER", 85 | "value": "other_value", 86 | }, 87 | ] 88 | `; 89 | 90 | exports[`CheckpointsStore removeFutureData should remove future data 1`] = ` 91 | [ 92 | { 93 | "block_number": 5000, 94 | "contract_address": "0x01", 95 | "id": "6f1246bdea", 96 | "indexer": "default", 97 | }, 98 | { 99 | "block_number": 9000, 100 | "contract_address": "0x02", 101 | "id": "92df4d22e3", 102 | "indexer": "default", 103 | }, 104 | { 105 | "block_number": 11000, 106 | "contract_address": "0x01", 107 | "id": "24a44a0b0b", 108 | "indexer": "OTHER", 109 | }, 110 | ] 111 | `; 112 | 113 | exports[`CheckpointsStore template sources should insert template sources 1`] = ` 114 | [ 115 | { 116 | "contract_address": "0x01", 117 | "indexer": "default", 118 | "start_block": 1000, 119 | "template": "Template1", 120 | }, 121 | { 122 | "contract_address": "0x01", 123 | "indexer": "default", 124 | "start_block": 2000, 125 | "template": "Template1", 126 | }, 127 | { 128 | "contract_address": "0x02", 129 | "indexer": "default", 130 | "start_block": 2100, 131 | "template": "Template3", 132 | }, 133 | { 134 | "contract_address": "0x01", 135 | "indexer": "OTHER", 136 | "start_block": 50, 137 | "template": "Template1", 138 | }, 139 | ] 140 | `; 141 | 142 | exports[`CheckpointsStore template sources should retrieve template sources 1`] = ` 143 | [ 144 | { 145 | "contractAddress": "0x01", 146 | "startBlock": 1000, 147 | "template": "Template1", 148 | }, 149 | { 150 | "contractAddress": "0x01", 151 | "startBlock": 2000, 152 | "template": "Template1", 153 | }, 154 | { 155 | "contractAddress": "0x02", 156 | "startBlock": 2100, 157 | "template": "Template3", 158 | }, 159 | ] 160 | `; 161 | -------------------------------------------------------------------------------- /test/unit/stores/checkpoints.test.ts: -------------------------------------------------------------------------------- 1 | import knex from 'knex'; 2 | import { mockDeep } from 'jest-mock-extended'; 3 | import { CheckpointsStore, MetadataId, Table } from '../../../src/stores/checkpoints'; 4 | import { 
Logger } from '../../../src/utils/logger'; 5 | 6 | function createMockLogger() { 7 | return mockDeep<Logger>({ 8 | child: () => createMockLogger() 9 | }); 10 | } 11 | 12 | describe('CheckpointsStore', () => { 13 | const INDEXER = 'default'; 14 | 15 | const mockKnex = knex({ 16 | client: 'sqlite3', 17 | connection: { 18 | filename: ':memory:' 19 | }, 20 | useNullAsDefault: true 21 | }); 22 | 23 | const logger = createMockLogger(); 24 | const store = new CheckpointsStore(mockKnex, logger); 25 | 26 | afterAll(async () => { 27 | await mockKnex.destroy(); 28 | }); 29 | 30 | describe('createStore', () => { 31 | it('should execute correct query', async () => { 32 | const { builder } = await store.createStore(); 33 | 34 | expect(builder.toString()).toMatchSnapshot(); 35 | }); 36 | }); 37 | 38 | describe('removeFutureData', () => { 39 | afterAll(async () => { 40 | await store.resetStore(); 41 | }); 42 | 43 | it('should remove future data', async () => { 44 | await store.setMetadata(INDEXER, MetadataId.LastIndexedBlock, 11001); 45 | await store.setMetadata('OTHER', MetadataId.LastIndexedBlock, 11001); 46 | 47 | await store.insertCheckpoints(INDEXER, [ 48 | { 49 | contractAddress: '0x01', 50 | blockNumber: 5000 51 | }, 52 | { 53 | contractAddress: '0x02', 54 | blockNumber: 9000 55 | }, 56 | { 57 | contractAddress: '0x01', 58 | blockNumber: 11000 59 | } 60 | ]); 61 | await store.insertCheckpoints('OTHER', [ 62 | { 63 | contractAddress: '0x01', 64 | blockNumber: 11000 65 | } 66 | ]); 67 | 68 | await store.removeFutureData(INDEXER, 10000); 69 | 70 | const defaultLastIndexedBlock = await store.getMetadataNumber( 71 | INDEXER, 72 | MetadataId.LastIndexedBlock 73 | ); 74 | expect(defaultLastIndexedBlock).toEqual(10000); 75 | 76 | const otherLastIndexedBlock = await store.getMetadataNumber( 77 | 'OTHER', 78 | MetadataId.LastIndexedBlock 79 | ); 80 | expect(otherLastIndexedBlock).toEqual(11001); 81 | 82 | const result = await mockKnex.select('*').from(Table.Checkpoints); 83 | expect(result).toMatchSnapshot(); 84 | }); 85 | }); 86 | 87 | describe('blocks', () => { 88 | afterAll(async () => { 89 | await store.resetStore(); 90 | }); 91 | 92 | it('should set block hash', async () => { 93 | await store.setBlockHash(INDEXER, 5000, '0x0'); 94 | await store.setBlockHash(INDEXER, 5001, '0x1'); 95 | await store.setBlockHash('OTHER', 5000, '0xa'); 96 | 97 | const result = await mockKnex.select('*').from(Table.Blocks); 98 | expect(result).toMatchSnapshot(); 99 | }); 100 | 101 | it('should retrieve block hash', async () => { 102 | const result = await store.getBlockHash(INDEXER, 5000); 103 | expect(result).toEqual('0x0'); 104 | }); 105 | 106 | it('should return null if retrieving non-existent block hash', async () => { 107 | const result = await store.getBlockHash(INDEXER, 6000); 108 | expect(result).toBeNull(); 109 | }); 110 | 111 | it('should remove blocks', async () => { 112 | await store.removeBlocks('OTHER'); 113 | 114 | const result = await mockKnex.select('*').from(Table.Blocks); 115 | expect(result).toMatchSnapshot(); 116 | }); 117 | }); 118 | 119 | describe('metadata', () => { 120 | afterAll(async () => { 121 | await store.resetStore(); 122 | }); 123 | 124 | it('should set metadata', async () => { 125 | await store.setMetadata(INDEXER, 'key', 'default_value'); 126 | await store.setMetadata(INDEXER, 'number_key', 1111); 127 | await store.setMetadata('OTHER', 'key', 'other_value'); 128 | 129 | const result = await mockKnex.select('*').from(Table.Metadata); 130 | expect(result).toMatchSnapshot(); 131 | }); 132 | 133 |
it('should retrieve metadata', async () => { 134 | const result = await store.getMetadata(INDEXER, 'key'); 135 | expect(result).toEqual('default_value'); 136 | }); 137 | 138 | it('should retrieve metadata as number', async () => { 139 | const result = await store.getMetadataNumber(INDEXER, 'number_key'); 140 | expect(result).toEqual(1111); 141 | }); 142 | 143 | it('should return null if retrieving non-existent metadata value', async () => { 144 | const result = await store.getMetadata(INDEXER, 'non_existent_key'); 145 | expect(result).toBeNull(); 146 | }); 147 | 148 | it('should return null if retrieving non-existent metadata value as number', async () => { 149 | const result = await store.getMetadataNumber(INDEXER, 'non_existent_key'); 150 | expect(result).toBeNull(); 151 | }); 152 | 153 | it('should update metadata', async () => { 154 | await store.setMetadata(INDEXER, 'key', 'new_value'); 155 | const result = await store.getMetadata(INDEXER, 'key'); 156 | expect(result).toEqual('new_value'); 157 | }); 158 | }); 159 | 160 | describe('checkpoints', () => { 161 | const CONTRACT_A = '0x01'; 162 | const CONTRACT_B = '0x02'; 163 | 164 | afterAll(async () => { 165 | await store.resetStore(); 166 | }); 167 | 168 | it('should insert checkpoints', async () => { 169 | const checkpoints = [ 170 | { 171 | contractAddress: CONTRACT_A, 172 | blockNumber: 5000 173 | }, 174 | { 175 | contractAddress: CONTRACT_B, 176 | blockNumber: 9000 177 | }, 178 | { 179 | contractAddress: CONTRACT_A, 180 | blockNumber: 11000 181 | } 182 | ]; 183 | 184 | await store.insertCheckpoints(INDEXER, checkpoints); 185 | 186 | const result = await mockKnex.select('*').from(Table.Checkpoints); 187 | expect(result).toMatchSnapshot(); 188 | }); 189 | 190 | it('should return next checkpoint blocks', async () => { 191 | let result = await store.getNextCheckpointBlocks(INDEXER, 4000, [CONTRACT_A, CONTRACT_B]); 192 | expect(result).toEqual([5000, 9000, 11000]); 193 | 194 | result = await store.getNextCheckpointBlocks(INDEXER, 7000, [CONTRACT_A, CONTRACT_B]); 195 | expect(result).toEqual([9000, 11000]); 196 | 197 | result = await store.getNextCheckpointBlocks(INDEXER, 4000, [CONTRACT_B]); 198 | expect(result).toEqual([9000]); 199 | }); 200 | }); 201 | 202 | describe('template sources', () => { 203 | const CONTRACT_A = '0x01'; 204 | const CONTRACT_B = '0x02'; 205 | 206 | afterAll(async () => { 207 | await store.resetStore(); 208 | }); 209 | 210 | it('should insert template sources', async () => { 211 | await store.insertTemplateSource(INDEXER, CONTRACT_A, 1000, 'Template1'); 212 | await store.insertTemplateSource(INDEXER, CONTRACT_A, 2000, 'Template1'); 213 | await store.insertTemplateSource(INDEXER, CONTRACT_B, 2100, 'Template3'); 214 | await store.insertTemplateSource('OTHER', CONTRACT_A, 50, 'Template1'); 215 | 216 | const result = await mockKnex.select('*').from(Table.TemplateSources); 217 | expect(result).toMatchSnapshot(); 218 | }); 219 | 220 | it('should retrieve template sources', async () => { 221 | const result = await store.getTemplateSources(INDEXER); 222 | expect(result).toMatchSnapshot(); 223 | }); 224 | }); 225 | }); 226 | -------------------------------------------------------------------------------- /test/unit/utils/__snapshots__/checkpoint.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`getContractsFromConfig should work 1`] = ` 4 | [ 5 | "0x0625dc1290b6e936be5f1a3e963cf629326b1f4dfd5a56738dea98e1ad31b7f3", 
6 | ] 7 | ` 8 | `; 9 | -------------------------------------------------------------------------------- /test/unit/utils/checkpoint.test.ts: -------------------------------------------------------------------------------- 1 | import { getContractsFromConfig } from '../../../src/utils/checkpoint'; 2 | import { validCheckpointConfig } from '../../fixtures/checkpointConfig.fixture'; 3 | 4 | describe('getContractsFromConfig', () => { 5 | it('should work', () => { 6 | expect(getContractsFromConfig(validCheckpointConfig)).toMatchSnapshot(); 7 | }); 8 | }); 9 | -------------------------------------------------------------------------------- /test/unit/utils/database.test.ts: -------------------------------------------------------------------------------- 1 | import knex from 'knex'; 2 | import { getTableName, applyQueryFilter } from '../../../src/utils/database'; 3 | 4 | const mockKnex = knex({ 5 | client: 'sqlite3', 6 | connection: { 7 | filename: ':memory:' 8 | }, 9 | useNullAsDefault: true 10 | }); 11 | 12 | afterAll(async () => { 13 | await mockKnex.destroy(); 14 | }); 15 | 16 | describe('getTableName', () => { 17 | it.each([ 18 | ['table', 'tables'], 19 | ['user', 'users'], 20 | ['post', 'posts'], 21 | ['space', 'spaces'], 22 | ['vote', 'votes'], 23 | ['comment', 'comments'] 24 | ])('should return pluralized table name', (name, expected) => { 25 | expect(getTableName(name)).toEqual(expected); 26 | }); 27 | 28 | it('should return hardcoded table name for metadata', () => { 29 | expect(getTableName('_metadata')).toEqual('_metadatas'); 30 | }); 31 | }); 32 | 33 | describe('applyQueryFilter', () => { 34 | it('should not apply block filter for internal tables', () => { 35 | const query = mockKnex.select('*').from('_metadatas'); 36 | 37 | const result = applyQueryFilter(query, '_metadatas', { block: 123, indexer: 'indexer' }); 38 | 39 | expect(result.toString()).toBe( 40 | "select * from `_metadatas` where `_metadatas`.`indexer` = 'indexer'" 41 | ); 42 | }); 43 | 44 | it('should apply capped block filter if block is provided', () => { 45 | const query = mockKnex.select('*').from('posts'); 46 | 47 | const result = applyQueryFilter(query, 'posts', { block: 123, indexer: 'indexer' }); 48 | 49 | expect(result.toString()).toBe( 50 | "select * from `posts` where posts.block_range @> int8(123) and `posts`.`_indexer` = 'indexer'" 51 | ); 52 | }); 53 | 54 | it('should apply upper_inf block filter if block is not provided', () => { 55 | const query = mockKnex.select('*').from('posts'); 56 | 57 | const result = applyQueryFilter(query, 'posts', { indexer: 'indexer' }); 58 | 59 | expect(result.toString()).toBe( 60 | "select * from `posts` where upper_inf(posts.block_range) and `posts`.`_indexer` = 'indexer'" 61 | ); 62 | }); 63 | 64 | it('should not apply indexer filter if not provided', () => { 65 | const query = mockKnex.select('*').from('posts'); 66 | 67 | const result = applyQueryFilter(query, 'posts', {}); 68 | 69 | expect(result.toString()).toBe('select * from `posts` where upper_inf(posts.block_range)'); 70 | }); 71 | }); 72 | -------------------------------------------------------------------------------- /test/unit/utils/helpers.test.ts: -------------------------------------------------------------------------------- 1 | import { chunk } from '../../../src/utils/helpers'; 2 | 3 | describe('chunk', () => { 4 | it('should chunk array', () => { 5 | const array = [1, 2, 3, 4, 5, 6, 7, 8]; 6 | const chunked = chunk(array, 3); 7 | 8 | expect(chunked).toEqual([ 9 | [1, 2, 3], 10 | [4, 5, 6], 11 | [7, 8] 12
| ]); 13 | }); 14 | }); 15 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "esnext", 4 | "module": "commonjs", 5 | "rootDir": "./", 6 | "outDir": "./dist", 7 | "esModuleInterop": true, 8 | "strict": true, 9 | "noImplicitAny": false, 10 | "resolveJsonModule": true, 11 | "moduleResolution": "Node", 12 | "declaration": true 13 | }, 14 | "include": ["src/**/*"], 15 | "exclude": ["node_modules"] 16 | } 17 | --------------------------------------------------------------------------------