├── .eslintrc.yaml ├── .github ├── CODEOWNERS ├── pull_request_templates │ ├── ISSUE_TEMPLATE.md │ └── PULL_REQUEST_TEMPLATE.md └── workflows │ ├── ci.yml │ ├── security-build.yml │ └── tests.yml ├── .gitignore ├── CHANGELOG.md ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── REVIEW.md ├── SECURITY.md ├── babel.config.json ├── examples ├── README.md ├── dev-env │ ├── docker-compose.yml │ └── getting-started.properties ├── matic_transfer │ ├── README.md │ ├── consumer │ │ ├── .env.example │ │ ├── .eslintrc.js │ │ ├── README.md │ │ ├── babel.config.json │ │ ├── jest.config.ts │ │ ├── package-lock.json │ │ ├── package.json │ │ ├── src │ │ │ ├── consumer.ts │ │ │ ├── index.ts │ │ │ ├── interfaces │ │ │ │ ├── matic_transfer_tx.ts │ │ │ │ ├── transfer.ts │ │ │ │ └── transfer_methods.ts │ │ │ ├── mapper │ │ │ │ └── transfer.ts │ │ │ ├── models │ │ │ │ └── transfer.ts │ │ │ ├── schemas │ │ │ │ └── matic_transfer.proto │ │ │ └── services │ │ │ │ └── transfer.ts │ │ ├── tests │ │ │ ├── mock_data │ │ │ │ └── transfer_message.js │ │ │ └── services │ │ │ │ └── transfer.test.ts │ │ └── tsconfig.json │ ├── producer │ │ ├── .env.example │ │ ├── .eslintrc.js │ │ ├── .gitkeep │ │ ├── README.md │ │ ├── package-lock.json │ │ ├── package.json │ │ ├── src │ │ │ └── index.ts │ │ └── tsconfig.json │ └── transformer │ │ ├── .env.example │ │ ├── README.md │ │ ├── babel.config.json │ │ ├── jest.config.ts │ │ ├── package-lock.json │ │ ├── package.json │ │ ├── src │ │ ├── index.ts │ │ ├── interfaces │ │ │ └── matic_transfer_tx.ts │ │ ├── mappers │ │ │ └── matic_transfer_mapper.ts │ │ ├── matic_transfer_data_transformer.ts │ │ └── schemas │ │ │ └── matic_transfer.proto │ │ ├── tests │ │ ├── mappers │ │ │ └── matic_transfer_mapper.test.ts │ │ └── mock_data │ │ │ ├── ethereum_full_block.json │ │ │ └── matic_transfer.json │ │ └── tsconfig.json └── nft_balancer │ ├── README.md │ ├── consumer │ ├── .env.example │ ├── .eslintrc.js │ ├── README.md │ ├── babel.config.json │ ├── 
jest.config.ts │ ├── package-lock.json │ ├── package.json │ ├── src │ │ ├── consumer.ts │ │ ├── index.ts │ │ ├── interfaces │ │ │ ├── nft_transfer_tx.ts │ │ │ ├── token.ts │ │ │ └── token_methods.ts │ │ ├── mapper │ │ │ └── transfer_token.ts │ │ ├── models │ │ │ └── token.ts │ │ ├── schemas │ │ │ └── nft_transfer.proto │ │ └── services │ │ │ └── transfer_token.ts │ ├── tests │ │ ├── mock_data │ │ │ └── transfer_message.js │ │ └── services │ │ │ └── transfer.test.ts │ └── tsconfig.json │ ├── producer │ ├── .env.example │ ├── .eslintrc.js │ ├── README.md │ ├── package-lock.json │ ├── package.json │ ├── src │ │ └── index.ts │ └── tsconfig.json │ └── transformer │ ├── .env.example │ ├── README.md │ ├── babel.config.json │ ├── jest.config.ts │ ├── package-lock.json │ ├── package.json │ ├── src │ ├── index.ts │ ├── interfaces │ │ └── nft_transfer_tx.ts │ ├── mappers │ │ └── nft_transfer_mapper.ts │ ├── nft_transfer_data_transformer.ts │ └── schemas │ │ └── nft_transfer.proto │ ├── tests │ ├── mappers │ │ └── nft_transfer_mapper.test.ts │ └── mock_data │ │ ├── ethereum_full_block.json │ │ └── nft_transfer.json │ └── tsconfig.json ├── internal ├── block_getters │ ├── block_getter.ts │ ├── block_getter_worker.ts │ ├── erigon_block_getter.ts │ ├── erigon_block_getter_worker.ts │ ├── quicknode_block_getter.ts │ └── quicknode_block_getter_worker.ts ├── block_producers │ ├── block_producer.ts │ └── produced_blocks_model.ts ├── block_subscription │ ├── abstract_block_subscription.ts │ ├── block_polling.ts │ └── block_subscription.ts ├── coder │ ├── abi_coder.ts │ └── protobuf_coder.ts ├── data_transformation │ └── abstract_data_transformer.ts ├── enums │ ├── bridgetype.ts │ └── tokentype.ts ├── errors │ ├── api_error.ts │ ├── base_error.ts │ ├── block_producer_error.ts │ ├── coder_error.ts │ ├── create_error_object.ts │ ├── error_codes.ts │ ├── event_consumer_error.ts │ ├── get_error_message.ts │ ├── is_base_error.ts │ ├── is_librdkafka_error.ts │ └── kafka_error.ts ├── 
event_consumer │ └── abstract_event_consumer.ts ├── filter │ └── bloom_filter.ts ├── formatters │ └── block_formatter.ts ├── interfaces │ ├── async_observer.ts │ ├── block.ts │ ├── block_getter.ts │ ├── block_getter_worker_promise.ts │ ├── block_header.ts │ ├── block_producer_config.ts │ ├── block_subscription.ts │ ├── block_worker_message.ts │ ├── coder.ts │ ├── coder_config.ts │ ├── common_kafka_events.ts │ ├── config.ts │ ├── consumer_config.ts │ ├── consumer_queue_object.ts │ ├── deposit.ts │ ├── deserialised_kafka_message.ts │ ├── event_log.ts │ ├── index.ts │ ├── kafka_coder_config.ts │ ├── logger_config.ts │ ├── mapper.ts │ ├── new_heads_subscriber.ts │ ├── observer.ts │ ├── producer_config.ts │ ├── quicknode_response.ts │ ├── raw_block.ts │ ├── raw_receipt.ts │ ├── raw_transaction.ts │ ├── rpc_payload.ts │ ├── sequential_consumer_config.ts │ ├── stream_api_block.ts │ ├── synchronous_producer.ts │ ├── transaction.ts │ ├── transaction_receipt.ts │ ├── transformed_block.ts │ ├── web3_transaction.ts │ └── web3_transaction_receipt.ts ├── kafka │ ├── consumer │ │ ├── abstract_consumer.ts │ │ ├── asynchronous_consumer.ts │ │ └── synchronous_consumer.ts │ └── producer │ │ ├── abstract_producer.ts │ │ ├── asynchronous_producer.ts │ │ └── synchronous_producer.ts ├── logger │ └── logger.ts ├── mongo │ └── database.ts ├── queue │ └── queue.ts └── rpc │ └── json_rpc_client.ts ├── jest.config.ts ├── package-lock.json ├── package.json ├── public ├── block_producers │ ├── block_polling_producer.ts │ ├── block_producer.ts │ ├── erigon_block_producer.ts │ └── quicknode_block_producer.ts ├── coder │ └── abi_coder.ts ├── data_transformation │ ├── asynchronous_data_transformer.ts │ ├── synchronous_data_transformer.ts │ └── transform.ts ├── enums │ ├── bridgetype.ts │ └── tokentype.ts ├── errors │ ├── api_error.ts │ ├── base_error.ts │ ├── block_producer_error.ts │ ├── coder_error.ts │ ├── create_error_object.ts │ ├── error_codes.ts │ ├── event_consumer_error.ts │ ├── 
get_error_message.ts │ ├── index.ts │ ├── is_base_error.ts │ ├── is_librdkafka_error.ts │ └── kafka_error.ts ├── event_consumer │ └── abstract_event_consumer.ts ├── filter │ └── bloom_filter.ts ├── index.ts ├── interfaces │ ├── async_observer.ts │ ├── block.ts │ ├── block_getter.ts │ ├── block_getter_worker_promise.ts │ ├── block_header.ts │ ├── block_producer_config.ts │ ├── block_subscription.ts │ ├── block_worker_message.ts │ ├── coder.ts │ ├── coder_config.ts │ ├── common_kafka_events.ts │ ├── config.ts │ ├── consumer_config.ts │ ├── consumer_queue_object.ts │ ├── deposit.ts │ ├── deserialised_kafka_message.ts │ ├── event_log.ts │ ├── event_producer.ts │ ├── event_transformer.ts │ ├── index.ts │ ├── kafka_coder_config.ts │ ├── logger_config.ts │ ├── mapper.ts │ ├── new_heads_subscriber.ts │ ├── observer.ts │ ├── producer_config.ts │ ├── quicknode_response.ts │ ├── raw_block.ts │ ├── raw_receipt.ts │ ├── raw_transaction.ts │ ├── rpc_payload.ts │ ├── sequential_consumer_config.ts │ ├── stream_api_block.ts │ ├── synchronous_producer.ts │ ├── transaction.ts │ ├── transaction_receipt.ts │ ├── transformed_block.ts │ ├── transformer_config.ts │ ├── web3_transaction.ts │ └── web3_transaction_receipt.ts ├── kafka │ ├── consumer │ │ ├── asynchronous_consumer.ts │ │ ├── consume.ts │ │ └── synchronous_consumer.ts │ └── producer │ │ ├── asynchronous_producer.ts │ │ ├── produce.ts │ │ └── synchronous_producer.ts ├── logger │ └── logger.ts ├── mongo │ └── database.ts └── rpc │ └── json_rpc_client.ts ├── schemas ├── block.proto ├── bridge_assets.proto ├── burnblock.proto ├── checkpointblock.proto ├── claim_assets.proto ├── depositblock.proto ├── eventlog.proto ├── global_exit_root.proto ├── global_exit_root_l2.proto ├── mappings.proto ├── new_batch.proto ├── posmapping.proto ├── statesync.proto ├── test.proto ├── transaction.proto └── withdrawblock.proto ├── sonar-project.properties ├── tests ├── __mocks__ │ ├── coder.js │ └── observer.js ├── block_getters │ ├── 
block_getter.test.ts │ ├── block_getter_worker.test.ts │ ├── erigon_block_getter.test.ts │ └── quicknode_block_getter.test.ts ├── block_producer │ ├── block_producer.test.ts │ └── quicknode_block_producer.test.ts ├── block_subscription │ ├── abstract_block_subscription.test.ts │ ├── block_polling.test.ts │ └── block_subscription.test.ts ├── coder │ ├── abi_coder.test.ts │ └── protobuf_coder.test.ts ├── errors │ ├── api_error.test.ts │ ├── base_error.test.ts │ ├── block_producer_error.test.ts │ ├── coder_error.test.ts │ ├── create_error_object.test.ts │ ├── event_consumer_error.test.ts │ ├── get_error_message.test.ts │ └── kafka_error.test.ts ├── event_consumer │ └── abstract_event_consumer.test.ts ├── formatters │ └── block_formatter.test.ts ├── kafka │ ├── consumer │ │ ├── abstract_consumer.test.ts │ │ ├── asynchronous_consumer.test.ts │ │ └── synchronous_consumer.test.ts │ └── producer │ │ ├── abstract_producer.test.ts │ │ ├── asynchronous_producer.test.ts │ │ └── synchronous_producer.test.ts ├── logger │ └── logger.test.ts ├── mock_data │ ├── block.js │ ├── connect_error.json │ ├── disconnected_error.json │ ├── ethereum_block.json │ ├── ethereum_full_block.json │ ├── ethereum_transaction_receipts.json │ ├── ethereum_transaction_receipts_array.json │ ├── log.json │ ├── metadata_mock.json │ ├── mock_message.json │ ├── polygon_block.json │ ├── raw_ethereum_block.json │ └── zkevm_block.js └── queue │ └── queue.test.ts └── tsconfig.json /.eslintrc.yaml: -------------------------------------------------------------------------------- 1 | { "root": true, 2 | "extends": [ 3 | "eslint:recommended", 4 | "plugin:@typescript-eslint/recommended" 5 | ], 6 | "parser": "@typescript-eslint/parser", 7 | "parserOptions": { "project": ["./tsconfig.json"] }, 8 | "plugins": [ 9 | "@typescript-eslint" 10 | ], 11 | "rules": { 12 | "semi": ["error", "always"], 13 | "quotes": ["error", "double"], 14 | "@typescript-eslint/no-inferrable-types": "off", 15 | "linebreak-style": ["error", 
"unix"], 16 |         "@typescript-eslint/ban-ts-comment": "off", 17 |         "@typescript-eslint/no-explicit-any": "off", 18 |         "@typescript-eslint/no-unused-vars": "off", 19 |         "no-async-promise-executor": "off", 20 |         "no-empty": "off", 21 |         "@typescript-eslint/no-non-null-assertion": "off", 22 |         "@typescript-eslint/no-empty-function": "off", 23 |         "@typescript-eslint/ban-types": "off", 24 |         "@typescript-eslint/no-this-alias": "off" 25 |     } 26 | } 27 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | *       @0xPolygon/product-applications 2 | -------------------------------------------------------------------------------- /.github/pull_request_templates/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 |  10 |  11 | #### Expected behavior 12 |  13 | #### Actual behavior 14 |  15 | #### Steps to reproduce the behavior 16 |  17 | 1. [First step] 18 | 2. [Second step] 19 | 3. [and so on...] 20 |  21 | #### Logs 22 |  23 |  24 |  25 | #### Environment 26 |  27 | -------------------------------------------------------------------------------- /.github/pull_request_templates/PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | ## Description 2 |  3 | Please include a summary of the changes and be sure to follow our [Contribution Guidelines](../../CONTRIBUTING.md). 4 |  5 |  9 |  10 | ## Type of change 11 |  12 |  13 |  14 | - [ ] Bug fix (non-breaking change which fixes an issue) 15 | - [ ] New feature (non-breaking change which adds functionality) 16 | - [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected) 17 |  18 | ## Checklist: 19 |  20 | - [ ] I have selected the correct base branch. 21 | - [ ] I have performed a self-review of my own code. 22 | - [ ] I have commented my code, particularly in hard-to-understand areas. 
23 | - [ ] I have made corresponding changes to the documentation. 24 | - [ ] My changes generate no new warnings. 25 | - [ ] Any dependent changes have been merged and published in downstream modules. 26 | - [ ] I ran `npm run lint` with success and extended the tests and types if necessary. 27 | - [ ] I ran `npm run tests` with success. 28 | - [ ] I ran `npm run build` with success. 29 | - [ ] I have tested my code. 30 | - [ ] I have linked Issue(s) with this PR in "Linked Issues" menu. -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 |  3 | on: 4 |   push: 5 |     branches: [main] 6 |   pull_request: 7 |     branches: [main] 8 |  9 | jobs: 10 |   build: 11 |     runs-on: ubuntu-latest 12 |     steps: 13 |       - name: CHECK-OUT GIT REPOSITORY 14 |         uses: actions/checkout@v3 15 |       - name: Use Node.js (v18) 16 |         uses: actions/setup-node@v3 17 |         with: 18 |           node-version: '18' 19 |       - name: Install dependencies 20 |         run: npm ci 21 |       - name: Check lint 22 |         run: npm run lint 23 |       - name: Build 24 |         run: npm run build 25 | -------------------------------------------------------------------------------- /.github/workflows/security-build.yml: -------------------------------------------------------------------------------- 1 | name: Security Build 2 | on: 3 |   push: 4 |     branches: 5 |       - main # or the name of your main and preferred branches 6 |       - dev 7 |       - staging # or the name of your main and preferred branches 8 |   workflow_dispatch: {} 9 |   pull_request: 10 |     types: [opened, synchronize, reopened] 11 |  12 | jobs: 13 |   sonarcloud: 14 |     name: SonarCloud 15 |     runs-on: ubuntu-latest 16 |     steps: 17 |       - uses: actions/checkout@v3 18 |         with: 19 |           fetch-depth: 0  # Shallow clones should be disabled for a better relevancy of analysis 20 |       - name: SonarCloud Scan 21 |         uses: SonarSource/sonarcloud-github-action@master 22 |         env: 23 |           GITHUB_TOKEN: ${{ 
secrets.GITHUB_TOKEN }} # Needed to get PR information, if any 24 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} 25 | -------------------------------------------------------------------------------- /.github/workflows/tests.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: 4 | push: 5 | branches: 6 | - '**' 7 | 8 | jobs: 9 | Tests: 10 | name: Tests 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: CHECK-OUT GIT REPOSITORY 14 | uses: actions/checkout@v3 15 | - name: Use Node.js (v18) 16 | uses: actions/setup-node@v3 17 | with: 18 | node-version: '18' 19 | - name: Check ubuntu version 20 | run: lsb_release -a 21 | - name: Install dependencies 22 | run: npm ci 23 | - name: Build Packages 24 | run: npm run build 25 | - name: Run Unit Tests 26 | run: npm run tests 27 | - name: Run Integration Tests 28 | run: npm run tests:integration 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | build/ 3 | dist/ 4 | coverage/ 5 | .env 6 | logs/ 7 | *.log 8 | .DS_Store 9 | *.rdb 10 | .idea/ 11 | .vscode 12 | .env.local 13 | *.key 14 | *.cert 15 | pids/ 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 PT Services DMCC 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice 
and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 |  15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /REVIEW.md: -------------------------------------------------------------------------------- 1 | # Review Guidelines 2 |  3 | Our review guidelines are intended to provide clear steps for PR proposers and reviewers. 4 |  5 | Only published PRs will be considered for review. Draft PRs will be considered in-progress and not yet ready for review. 6 |  7 | ## Rules 8 |  9 | - [ ] PR follows the provided [template](.github/pull_request_templates/PULL_REQUEST_TEMPLATE.md). 10 | - [ ] PR doesn't contain unnecessary changes. 11 | - [ ] The changed code preserves the conventions and stylistic consistency of the project. 12 | - [ ] PR uses labels accordingly. (new labels may be suggested) 13 | - [ ] PR includes unit and e2e tests if related to any logic changes. 14 | - [ ] The code coverage rate did not decrease. 15 | - [ ] The error case is always tested. 16 | - [ ] The description of the test case is self-describing. 17 | - [ ] Test cases are grouped for clarity. 18 | - [ ] A PR may only be merged if the following conditions are fulfilled: 19 |   - [ ] The correct base branch is selected. 20 |   - [ ] Any new files contain the web3.js file header. 21 |   - [ ] The documentation was updated (if applicable). 22 |   - [ ] The CI with QA passes successfully. 
23 | - [ ] The CI logs were manually checked to ensure false positives were not reported. 24 | - [ ] All comments have been addressed. 25 | - [ ] Doesn't add undue maintenance burden. 26 | - [ ] Doesn't increase the bundle size or is clearly explained why. -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Polygon Technology Security Information 2 | 3 | ## Link to vulnerability disclosure details (Bug Bounty). 4 | - Websites and Applications: https://hackerone.com/polygon-technology 5 | - Smart Contracts: https://immunefi.com/bounty/polygon 6 | 7 | ## Languages that our team speaks and understands. 8 | Preferred-Languages: en 9 | 10 | ## Security-related job openings at Polygon. 11 | https://polygon.technology/careers 12 | 13 | ## Polygon security contact details. 14 | security@polygon.technology 15 | 16 | ## The URL for accessing the security.txt file. 17 | Canonical: https://polygon.technology/security.txt 18 | -------------------------------------------------------------------------------- /babel.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | [ 4 | "@babel/preset-env", 5 | { 6 | "targets": { 7 | "node": "current" 8 | } 9 | } 10 | ], 11 | "@babel/preset-typescript" 12 | ], 13 | "plugins": [ 14 | "babel-plugin-transform-import-meta" 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /examples/dev-env/docker-compose.yml: -------------------------------------------------------------------------------- 1 | --- 2 | version: '3' 3 | services: 4 | zookeeper: 5 | image: confluentinc/cp-zookeeper:7.0.0 6 | hostname: zookeeper 7 | container_name: zookeeper 8 | environment: 9 | ZOOKEEPER_CLIENT_PORT: 2181 10 | ZOOKEEPER_TICK_TIME: 2000 11 | 12 | broker: 13 | image: confluentinc/cp-kafka:7.0.0 14 | container_name: broker 15 | 
ports: 16 | - "9092:9092" 17 | depends_on: 18 | - zookeeper 19 | environment: 20 | KAFKA_BROKER_ID: 1 21 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 22 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_INTERNAL:PLAINTEXT 23 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://localhost:9092,PLAINTEXT_INTERNAL://broker:29092 24 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 25 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 26 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 27 | -------------------------------------------------------------------------------- /examples/dev-env/getting-started.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=localhost:9092 -------------------------------------------------------------------------------- /examples/matic_transfer/README.md: -------------------------------------------------------------------------------- 1 | # MATIC_TRANSFER 2 | 3 | Matic-transfer is an exemplary project that showcases the usage of Chain Indexer Framework to index all MATIC transfers occurring on the Ethereum blockchain. 4 | 5 | ## Overview 6 | The project consists of three distinct packages, each serving a specific function: 7 | 8 | 1. **Producers**: This package acts as the initial entry point for the indexer service. It collects all ethereum blockchain data from a starting block as per env and streams it into Kafka without any discrimination. 9 | 10 | 2. **Transformers**: The Transformers package consumes the blockchain data from the Kafka stream generated by the producers. It then filters out all MATIC transfers and reproduces these events to a dedicated topic within the Kafka stream. 11 | 12 | 3. **Consumers**: Consumers are responsible for subscribing to the event-specific Kafka stream and persisting the data into a database. Additionally, they expose endpoints that allow clients to retrieve the data according to their specific requirements. 
13 | 14 | This serves as a useful reference for understanding how Chain Indexer Framework can be effectively utilized to manage and process blockchain data efficiently along with reorg handling. 15 | 16 | 17 | ## How to Build 18 | 19 | - Make sure you run kafka inside your docker and mongoDB. steps can be found [here](../README.md) 20 | 21 | - Run [Producer](./producer/README.md) 22 | 23 | - Run [Transformer](./transformer/README.md) 24 | 25 | - Run [Consumer](./consumer/README.md) 26 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/.env.example: -------------------------------------------------------------------------------- 1 | # COMMON 2 | MONGO_URL= 3 | 4 | #CONSUMER 5 | KAFKA_CONNECTION_URL= 6 | CONSUMER_GROUP_ID=matic.transfer.consumer 7 | 8 | # MAPPING_TOPIC 9 | TRANSFER_TOPIC=apps.1.matic.transfer 10 | 11 | # LOGGER_ENV 12 | SENTRY_DSN= 13 | SENTRY_ENVIRONMENT= 14 | DATADOG_API_KEY= 15 | DATADOG_APP_KEY= 16 | 17 | # START_CONSUMER and START_API_ENDPOINTS can be true and false based on if there will 18 | # be different server for both or same for both. 
-------------------------------------------------------------------------------- /examples/matic_transfer/consumer/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | es2021: true, 4 | node: true, 5 | }, 6 | extends: [ 7 | 'airbnb-base', 8 | ], 9 | parser: '@typescript-eslint/parser', 10 | parserOptions: { 11 | ecmaVersion: 'latest', 12 | sourceType: 'module', 13 | }, 14 | plugins: [ 15 | '@typescript-eslint', 16 | ], 17 | rules: { 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/README.md: -------------------------------------------------------------------------------- 1 | # Consumer 2 | 3 | This package consumes the events from the Kafka topic, which were previously produced and filtered in the Transformer layer. The Consumer then performs two primary functions: 4 | 5 | 1. **Data Storage**: The package saves the consumed data into the database. It efficiently stores the relevant events, ensuring seamless access to the data for further processing. 6 | 7 | 2. **Endpoint Exposition**: The Consumer exposes endpoints to clients, enabling them to retrieve the stored data from the database. These endpoints provide a convenient and structured way for clients to access the indexed blockchain data related to MATIC token transfers. 8 | 9 | ## How to Use 10 | Note: Make sure you are inside the examples/matic_transfer/consumer folder. 11 | 12 | ### 1. Set Configuration 13 | Begin by configuring your environment variables. Copy the `.env.example` file and rename it to `.env`. Then, provide appropriate values for the keys mentioned in the `.env` file. 14 | 15 | ### 2. Install Packages 16 | Install the required packages by running the following command: 17 | 18 | ``` 19 | npm i 20 | ``` 21 | 22 | ### 3. 
Build the Package 23 | Build the package by executing the following command: 24 | 25 | ``` 26 | npm run build 27 | ``` 28 | 29 | ### 4. Run the Package 30 | Run the producer service using the following command: 31 | 32 | ``` 33 | npm run start 34 | ``` 35 | 36 | ## Running the Example Using Source Code 37 | 38 | This section guides you on running the example code using the current source code, typically for debugging purposes. 39 | 40 | ### 1. Build & Link the Source Code 41 | Run the following command at the root of this project: 42 | 43 | ``` 44 | npm run build:link 45 | ``` 46 | 47 | If you encounter permission issues, run the command using `sudo`. 48 | 49 | 50 | ### 2. Link the Library 51 | 52 | - Navigate to the examples/matic_transfer/consumer folder: 53 | 54 | ``` 55 | cd examples/matic_transfer/consumer 56 | ``` 57 | 58 | - Execute the link command: 59 | 60 | ``` 61 | npm run link:lib 62 | ``` 63 | 64 | This documentation clarifies the setup and usage of the Consumer package in the Chain Indexer Framework project, making it easier for developers to integrate the package into their applications or utilize it for debugging and testing purposes. 
65 | 66 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/babel.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | [ 4 | "@babel/preset-env", 5 | { 6 | "targets": { 7 | "node": "current" 8 | } 9 | } 10 | ], 11 | "@babel/preset-typescript" 12 | ], 13 | "plugins": [ 14 | "babel-plugin-transform-import-meta" 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/jest.config.ts: -------------------------------------------------------------------------------- 1 | import type { JestConfigWithTsJest } from "ts-jest"; 2 | 3 | const jestConfig: JestConfigWithTsJest = { 4 | "transform": { 5 | "^.+\\.(j|t)s?$": "babel-jest" 6 | }, 7 | extensionsToTreatAsEsm: ['.ts'], 8 | clearMocks: true 9 | } 10 | 11 | export default jestConfig 12 | 13 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "matic-transfer-consumer", 3 | "version": "1.0.0", 4 | "description": "consumers for Matic transfers", 5 | "exports": "./dist/", 6 | "main": "dist/index.js", 7 | "type": "module", 8 | "scripts": { 9 | "tests": "jest --coverage", 10 | "tests:integration": "echo 'Hello integration tests'", 11 | "lint": "eslint src/**/*.ts", 12 | "lint:fix": "eslint --fix ./src/**/*.ts", 13 | "build": "tsc && npm run copy-proto", 14 | "copy-proto": "cp -R ./src/schemas/ ./dist/schemas", 15 | "start": "node --experimental-import-meta-resolve --trace-warnings dist/index.js", 16 | "link:lib": "npm link @maticnetwork/chain-indexer-framework", 17 | "link:lib:build": "npm run link:lib && npm run build" 18 | }, 19 | "author": "nitinmittal23", 20 | "license": "UNLICENSED", 21 | "dependencies": { 22 | 
"@maticnetwork/chain-indexer-framework": "^1.0.0", 23 | "dotenv": "^16.0.1", 24 | "long": "^5.2.0", 25 | "mongoose": "^6.5.2", 26 | "path": "^0.12.7", 27 | "web3-utils": "^1.8.1" 28 | }, 29 | "devDependencies": { 30 | "@typescript-eslint/eslint-plugin": "^5.31.0", 31 | "@typescript-eslint/parser": "^5.31.0", 32 | "eslint": "^8.20.0", 33 | "eslint-config-airbnb-base": "^15.0.0", 34 | "eslint-plugin-import": "^2.26.0", 35 | "ts-jest": "^29.0.3", 36 | "tsup": "^6.2.3", 37 | "typescript": "^4.8.2" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/consumer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { DeserialisedMessage } from "@maticnetwork/chain-indexer-framework/interfaces/deserialised_kafka_message"; 3 | import { consume } from "@maticnetwork/chain-indexer-framework/kafka/consumer/consume"; 4 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 5 | 6 | import TransferService from "./services/transfer.js"; 7 | import TransferMapper from "./mapper/transfer.js"; 8 | import IMaticTransferTx from "./interfaces/matic_transfer_tx.js"; 9 | 10 | import dotenv from 'dotenv'; 11 | import path from "path"; 12 | 13 | dotenv.config() 14 | 15 | /** 16 | * startConsuming function which starts consuming the events from kafka and then save the data to 17 | * database. it also handles the reorg internally in the save function. 
18 | * 19 | * @function startConsume 20 | * 21 | * @param {TransferService} transferService - The transfer service class 22 | * @param {TransferMapper} transferMapper - the transfer Mapper class 23 | * 24 | * @returns {Promise} 25 | */ 26 | export default async function startConsuming(transferService: TransferService, transferMapper: TransferMapper): Promise { 27 | try { 28 | consume({ 29 | "metadata.broker.list": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 30 | "group.id": process.env.CONSUMER_GROUP_ID ?? "matic.transfer.consumer", 31 | "security.protocol": "plaintext", 32 | topic: process.env.TRANSFER_TOPIC ?? "apps.1.matic.transfer", 33 | coders: { 34 | fileName: "matic_transfer", 35 | packageName: "matictransferpackage", 36 | messageType: "MaticTransferBlock", 37 | fileDirectory: path.resolve("dist", "./schemas") 38 | }, 39 | type: 'synchronous' 40 | }, { 41 | next: async (message: DeserialisedMessage) => { 42 | const transformedBlock = message.value as ITransformedBlock; 43 | const transfers: IMaticTransferTx[] = transformedBlock.data as IMaticTransferTx[]; 44 | 45 | if (transfers && transfers.length > 0) { 46 | await transferService.save( 47 | transferMapper.map(transformedBlock) 48 | ); 49 | } 50 | }, 51 | error(err: Error) { 52 | console.error('something wrong occurred: ' + err); 53 | }, 54 | closed: () => { 55 | Logger.info(`subscription is ended.`); 56 | throw new Error("Consumer stopped"); 57 | }, 58 | }); 59 | } catch (error) { 60 | Logger.error(`Consumer instance is exiting due to error: ${error}`); 61 | process.exit(1); 62 | 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/index.ts: -------------------------------------------------------------------------------- 1 | import { Database } from "@maticnetwork/chain-indexer-framework/mongo/database"; 2 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 3 | 4 | import TransferMapper from 
"./mapper/transfer.js"; 5 | import TransferService from "./services/transfer.js"; 6 | import { TransferModel } from "./models/transfer.js"; 7 | 8 | import startConsuming from "./consumer.js"; 9 | 10 | async function start(): Promise { 11 | try { 12 | Logger.create({ 13 | sentry: { 14 | dsn: process.env.SENTRY_DSN, 15 | level: 'error' 16 | }, 17 | datadog: { 18 | api_key: process.env.DATADOG_API_KEY, 19 | service_name: process.env.DATADOG_APP_KEY 20 | }, 21 | console: { 22 | level: "debug" 23 | } 24 | }); 25 | 26 | const database = new Database(process.env.MONGO_URL ?? 'mongodb://localhost:27017/chain-indexer'); 27 | await database.connect(); 28 | 29 | const transferService = new TransferService( 30 | await TransferModel.new(database), 31 | ); 32 | 33 | await startConsuming(transferService, new TransferMapper()); 34 | 35 | } catch (error) { 36 | Logger.error(`Error when starting consumer service: ${(error as Error).message}`); 37 | } 38 | } 39 | 40 | start(); 41 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/interfaces/matic_transfer_tx.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export default interface IMaticTransferTx { 4 | transactionIndex: Long, 5 | transactionHash: string, 6 | transactionInitiator: string, 7 | tokenAddress: string, 8 | senderAddress: string, 9 | receiverAddress: string, 10 | amount: string, 11 | } 12 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/interfaces/transfer.ts: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * Interface for token transfers. it has attributes which is present in mongodb and the way it will 4 | * be saved there for all transfers. 
5 | */ 6 | export interface ITransfer { 7 | transactionIndex: number, 8 | transactionHash: string, 9 | transactionInitiator: string, 10 | tokenAddress: string, 11 | senderAddress: string, 12 | receiverAddress: string, 13 | amount: string, 14 | timestamp: Date, 15 | blockNumber: number 16 | } 17 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/interfaces/transfer_methods.ts: -------------------------------------------------------------------------------- 1 | import { ITransfer } from "./transfer.js"; 2 | 3 | /** 4 | * this class contains methods to interact with the database methods 5 | * 6 | * @returns implementation of all the transfer model method 7 | */ 8 | const statics = { 9 | 10 | /** 11 | * Get the last block number in the transfer collection for matic transfers 12 | * 13 | * @returns {Promise} 14 | */ 15 | async getLastTransferBlock(): Promise { 16 | //@ts-ignore 17 | const tx = await this.findOne().sort({ timestamp: -1 }).exec(); 18 | 19 | return tx?.blockNumber ?? 
0; 20 | }, 21 | 22 | /** 23 | * Inserts multiple documents for matic transfers into transfer collection 24 | * 25 | * @param {ITransfer[]} data 26 | * 27 | * @returns {Promise} 28 | */ 29 | async addAllTransfers(data: ITransfer[]): Promise { 30 | for (let transfer of data) { 31 | //@ts-ignore 32 | await this.create([transfer], { rawResult: false }); 33 | } 34 | return; 35 | }, 36 | 37 | /** 38 | * Deletes all the transactions for reorg 39 | * 40 | * @param {number} blockNumber 41 | * 42 | * @returns {Promise} 43 | */ 44 | async deleteTxsForReorg(blockNumber: number): Promise { 45 | 46 | let deletedCount = ( 47 | //@ts-ignore 48 | await this.deleteMany({ blockNumber: { $gte: blockNumber } }) 49 | ).deletedCount; 50 | 51 | return deletedCount; 52 | } 53 | } 54 | 55 | export default statics; 56 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/mapper/transfer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 3 | import IMaticTransferTx from "../interfaces/matic_transfer_tx.js"; 4 | import { ITransfer } from "../interfaces/transfer.js"; 5 | 6 | /** 7 | * TransferMapper class is a mapper class which has function to map the data according to all matic transfers and 8 | * these functions are not async as there is only data transformation according to the way it will be saved in mongodb. 9 | * 10 | * @class TransferMapper 11 | */ 12 | export default class TransferMapper { 13 | 14 | /** 15 | * this is a public function which takes data from the kafka consumer and return in the form 16 | * where it will be saved in db for matic transfer transactions. it will be used when user want 17 | * to have data for all matic transfers. 
18 | * 19 | * @param {ITransformedBlock} transformedBlock - data from the kafka consumer 20 | 21 | * @returns {ITransfer[]} 22 | */ 23 | public map(transformedBlock: ITransformedBlock): ITransfer[] { 24 | let transfers: ITransfer[] = []; 25 | 26 | for (const transfer of transformedBlock.data) { 27 | transfers.push({ 28 | transactionIndex: transfer.transactionIndex.toNumber(), 29 | transactionHash: transfer.transactionHash, 30 | transactionInitiator: transfer.transactionInitiator, 31 | tokenAddress: transfer.tokenAddress, 32 | senderAddress: transfer.senderAddress, 33 | receiverAddress: transfer.receiverAddress, 34 | amount: transfer.amount, 35 | timestamp: new Date(parseInt(transformedBlock.timestamp.toString())), 36 | blockNumber: transformedBlock.blockNumber.toNumber() 37 | }); 38 | } 39 | 40 | //Remove below when app is stable 41 | Logger.debug({ 42 | location: "mapper: transfers", 43 | function: "mapTransfers", 44 | status: "function completed", 45 | }) 46 | return transfers; 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/models/transfer.ts: -------------------------------------------------------------------------------- 1 | import { Database } from "@maticnetwork/chain-indexer-framework/mongo/database"; 2 | import { Model, Schema } from "mongoose"; 3 | import { ITransfer } from "../interfaces/transfer.js"; 4 | import statics from "../interfaces/transfer_methods.js"; 5 | 6 | const TransferSchema = new Schema({ 7 | transactionIndex: { 8 | type: Number 9 | }, 10 | transactionHash: { 11 | type: String 12 | }, 13 | blockNumber: { 14 | type: Number 15 | }, 16 | timestamp: { 17 | type: Date 18 | }, 19 | transactionInitiator: { 20 | type: String 21 | }, 22 | amount: { 23 | type: String, 24 | }, 25 | tokenAddress: { 26 | type: String, 27 | }, 28 | senderAddress: { 29 | type: String 30 | }, 31 | receiverAddress: { 32 | type: String 33 | } 34 | }, 35 | { 36 | versionKey: false, 37 
| statics: statics 38 | } 39 | ); 40 | /** 41 | * This class represents Transfer Model 42 | * 43 | * @class 44 | */ 45 | export class TransferModel { 46 | /** 47 | * Get the transfer model defined on this mongoose database instance 48 | * 49 | * @param {Database} database 50 | * 51 | */ 52 | public static async new(database: Database) { 53 | const model = database.model>( 54 | "transfer", 55 | TransferSchema 56 | ); 57 | await model.createCollection(); 58 | 59 | return model; 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/schemas/matic_transfer.proto: -------------------------------------------------------------------------------- 1 | package matictransferpackage; 2 | syntax = "proto3"; 3 | 4 | message MaticTransferBlock { 5 | message MaticTransferEvent { 6 | required uint64 transactionIndex = 1; 7 | required string transactionHash = 2; 8 | required string transactionInitiator = 3; 9 | required string tokenAddress = 4; 10 | required string amount = 5; 11 | required string senderAddress = 6; 12 | required string receiverAddress = 7; 13 | } 14 | 15 | required uint64 blockNumber = 1; 16 | required uint64 timestamp = 2; 17 | repeated MaticTransferEvent data = 3; 18 | } 19 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/src/services/transfer.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 2 | import { Model } from "mongoose"; 3 | import { ITransfer } from "../interfaces/transfer.js"; 4 | 5 | /** 6 | * TransferService class has all the exposed functions to fetch transfer data from db so that API service can create an 7 | * instance of this class and can call these function and get data as per the requirement. 
8 | */ 9 | export default class TransferService { 10 | /** 11 | * @constructor 12 | * 13 | * @param {Model} transferModel 14 | */ 15 | constructor( 16 | private transferModel: Model, 17 | ) { } 18 | 19 | /** 20 | * this is a public function which takes an array of matic transfer events and save it in mongodb. 21 | * parallely it also handles the reorg part where it checks for the last saved blocknumber and acts 22 | * accordingly and returns a boolean value. 23 | * 24 | * @param {ITransfer[]} data - data to be saved in mongo 25 | * 26 | * @returns {Promise} 27 | */ 28 | public async save(data: ITransfer[]): Promise { 29 | Logger.debug({ 30 | location: "transfer_service", 31 | function: "saveTransfers", 32 | status: "function call", 33 | data: { 34 | length: data.length 35 | } 36 | }); 37 | 38 | 39 | if (data && data.length) { 40 | //@ts-ignore 41 | const latestTransferBlockNumber = await this.transferModel.getLastTransferBlock(); 42 | 43 | if (latestTransferBlockNumber >= data[0].blockNumber) { 44 | //@ts-ignore 45 | await this.transferModel.deleteTxsForReorg(data[0].blockNumber); 46 | } 47 | //@ts-ignore 48 | await this.transferModel.addAllTransfers(data); 49 | } 50 | 51 | Logger.debug({ 52 | location: "transfer_service", 53 | function: "saveTransfers", 54 | status: "function completed" 55 | }); 56 | 57 | return true; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/tests/mock_data/transfer_message.js: -------------------------------------------------------------------------------- 1 | export const transferTransactionMessage = { 2 | "key": "1234", 3 | "topic": "apps.1.matic.transfer", 4 | "value": { 5 | "blockNumber": { 6 | "high": 0, 7 | "low": 0, 8 | "unsigned": true, 9 | }, 10 | "timestamp": 12340, 11 | "data": [ 12 | { 13 | transactionIndex: { 14 | "high": 1, 15 | "low": 0, 16 | "unsigned": true 17 | }, 18 | transactionHash: 
"0x0bbd76664f215b0a74d4ee773c85c19cc649dcb504963678db568dca6912f0aa", 19 | transactionInitiator: "0x65a8f07bd9a8598e1b5b6c0a88f4779dbc077675", 20 | tokenAddress: "0x8839e639f210b80ffea73aedf51baed8dac04499", 21 | senderAddress: "0xe95b7d229cfaed717600d64b0d938a36fd5d5060", 22 | receiverAddress: "0xab6395382798ee6ea6e9a97cdfd18557f34adc87", 23 | amount: "7954515646169844787673", 24 | } 25 | ] 26 | } 27 | }; 28 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/tests/services/transfer.test.ts: -------------------------------------------------------------------------------- 1 | import TransferService from "../../dist/services/transfer.js"; 2 | import { Model } from "mongoose"; 3 | import { ITransfer } from "../../dist/interfaces/transfer.js"; 4 | 5 | describe("transfer service", () => { 6 | let transferService: jest.Mocked, 7 | mockTransferModel: jest.MockedObject> 8 | 9 | const data: ITransfer[] = [{ 10 | "transactionIndex": 1, 11 | "transactionHash": "0x0bbd76664f215b0a74d4ee773c85c19cc649dcb504963678db568dca6912f0aa", 12 | "blockNumber": 1234, 13 | "timestamp": new Date(12340), 14 | "transactionInitiator": "0x65a8f07bd9a8598e1b5b6c0a88f4779dbc077675", 15 | "amount": "7954515646169844787673", 16 | "tokenAddress": "0x8839e639f210b80ffea73aedf51baed8dac04499", 17 | "senderAddress": "0xe95b7d229cfaed717600d64b0d938a36fd5d5060", 18 | "receiverAddress": "0xab6395382798ee6ea6e9a97cdfd18557f34adc87" 19 | }]; 20 | 21 | const mockSession = { 22 | startTransaction: jest.fn(), 23 | commitTransaction: jest.fn(), 24 | endSession: jest.fn() 25 | }; 26 | 27 | beforeEach(() => { 28 | mockTransferModel = Object.assign({ 29 | add: jest.fn(), 30 | getAll: jest.fn(), 31 | getLastTransferBlock: jest.fn(), 32 | addAllTransfers: jest.fn(), 33 | startSession: jest.fn().mockResolvedValue(mockSession), 34 | session: mockSession, 35 | getTransactionCount: jest.fn(), 36 | deleteTxsForReorg: jest.fn(), 37 | }) as 
jest.MockedObject>; 38 | 39 | transferService = new TransferService(mockTransferModel as unknown as Model) as jest.Mocked; 40 | 41 | }); 42 | 43 | test("test the save method without reorg", async () => { 44 | //@ts-ignore 45 | mockTransferModel.getLastTransferBlock.mockResolvedValueOnce(1233); 46 | 47 | await transferService.save(data); 48 | //@ts-ignore 49 | expect(mockTransferModel.addAllTransfers).toBeCalledWith(data, mockSession); 50 | }) 51 | 52 | test("test the save method with reorg", async () => { 53 | //@ts-ignore 54 | mockTransferModel.getLastTransferBlock.mockResolvedValueOnce(1235); 55 | 56 | await transferService.save(data); 57 | //@ts-ignore 58 | expect(mockTransferModel.deleteTxsForReorg).toBeCalledWith(data[0].blockNumber, mockSession); 59 | //@ts-ignore 60 | expect(mockTransferModel.addAllTransfers).toBeCalledWith(data, mockSession); 61 | }) 62 | }) 63 | -------------------------------------------------------------------------------- /examples/matic_transfer/consumer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "sourceMap": true, 9 | "outDir": "dist", 10 | "declaration": true, 11 | "alwaysStrict": true, 12 | "strict": true, 13 | }, 14 | "include": [ 15 | "./src/**/*" 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /examples/matic_transfer/producer/.env.example: -------------------------------------------------------------------------------- 1 | RPC_WS_ENDPOINT_URL_LIST= 2 | START_BLOCK= 3 | KAFKA_CONNECTION_URL= 4 | MONGO_URL= 5 | PRODUCER_TOPIC=polygon.1.blocks 6 | 7 | #LOGGER_ENV 8 | SENTRY_DSN= 9 | SENTRY_ENVIRONMENT= 10 | DATADOG_API_KEY= 11 | DATADOG_APP_KEY= 12 | 13 | -------------------------------------------------------------------------------- 
/examples/matic_transfer/producer/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | env: { 3 | es2021: true, 4 | node: true, 5 | }, 6 | extends: [ 7 | 'airbnb-base', 8 | ], 9 | parser: '@typescript-eslint/parser', 10 | parserOptions: { 11 | ecmaVersion: 'latest', 12 | sourceType: 'module', 13 | }, 14 | plugins: [ 15 | '@typescript-eslint', 16 | ], 17 | rules: { 18 | }, 19 | }; 20 | -------------------------------------------------------------------------------- /examples/matic_transfer/producer/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0xPolygon/chain-indexer-framework/350e0d402eab6aedb4ebe82d3c4cf14c760322ab/examples/matic_transfer/producer/.gitkeep -------------------------------------------------------------------------------- /examples/matic_transfer/producer/README.md: -------------------------------------------------------------------------------- 1 | # Producer 2 | The Producer package exemplifies the producer layer of Chain Indexer Framework. It fetches blockchain data from Ethereum and produces it to a Kafka topic. 3 | 4 | ## How to Use 5 | Note: Make sure you are inside the example/matic_transfer/producer folder. 6 | 7 | ### 1. Set Configuration 8 | Begin by configuring your environment variables. Copy the `.env.example` file and rename it to `.env`. Then, provide appropriate values for the keys mentioned in the `.env` file. 9 | 10 | ### 2. Install Packages 11 | Install the required packages by running the following command: 12 | 13 | ``` 14 | npm i 15 | ``` 16 | 17 | ### 3. Build the Package 18 | Build the package by executing the following command: 19 | 20 | ``` 21 | npm run build 22 | ``` 23 | 24 | ### 4. 
Run the Package 25 | Run the producer service using the following command: 26 | 27 | ``` 28 | npm run start 29 | ``` 30 | 31 | ## Running the Example Using Source Code 32 | 33 | This section guides you on running the example code using the current source code, typically for debugging purposes. 34 | 35 | ### 1. Build & Link the Source Code 36 | Run the following command at the root of this project: 37 | 38 | ``` 39 | npm run build:link 40 | ``` 41 | 42 | If you encounter permission issues, run the command using `sudo`. 43 | 44 | 45 | ### 2. Link the Library 46 | 47 | - Navigate to the examples/matic_transfer/producer folder: 48 | 49 | ``` 50 | cd examples/matic_transfer/producer 51 | ``` 52 | 53 | - Execute the link command: 54 | 55 | ``` 56 | npm run link:lib 57 | ``` 58 | 59 | This documentation clarifies the setup and usage of the Producer package in the Chain Indexer Framework project, making it easier for developers to integrate the package into their applications or utilize it for debugging and testing purposes. 
60 | -------------------------------------------------------------------------------- /examples/matic_transfer/producer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ethereum-producer", 3 | "version": "1.0.0", 4 | "description": "", 5 | "exports": "./dist/", 6 | "type": "module", 7 | "scripts": { 8 | "tests": "echo here will be Unit test run", 9 | "tests:integration": "echo 'Hello integration tests'", 10 | "lint": "eslint src/**/*.ts", 11 | "lint:fix": "eslint --fix ./src/**/*.ts", 12 | "build": "tsc", 13 | "start": "node --experimental-import-meta-resolve --trace-warnings dist/index.js", 14 | "link:lib": "npm link @maticnetwork/chain-indexer-framework", 15 | "link:lib:build": "npm run link:lib && npm run build" 16 | }, 17 | "author": "nitinmittal23", 18 | "license": "UNLICENSED", 19 | "dependencies": { 20 | "@maticnetwork/chain-indexer-framework": "^1.0.0", 21 | "dotenv": "^16.0.1" 22 | }, 23 | "devDependencies": { 24 | "@typescript-eslint/eslint-plugin": "^5.31.0", 25 | "@typescript-eslint/parser": "^5.31.0", 26 | "eslint": "^8.20.0", 27 | "eslint-config-airbnb-base": "^15.0.0", 28 | "eslint-plugin-import": "^2.26.0", 29 | "tsup": "^6.2.3", 30 | "typescript": "^4.8.2" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /examples/matic_transfer/producer/src/index.ts: -------------------------------------------------------------------------------- 1 | import { produce } from "@maticnetwork/chain-indexer-framework/kafka/producer/produce"; 2 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 3 | import dotenv from 'dotenv'; 4 | import { ErigonBlockProducer } from "@maticnetwork/chain-indexer-framework/block_producers/erigon_block_producer"; 5 | 6 | dotenv.config(); 7 | Logger.create({ 8 | sentry: { 9 | dsn: process.env.SENTRY_DSN, 10 | level: 'error' 11 | }, 12 | datadog: { 13 | api_key: process.env.DATADOG_API_KEY, 14 | 
service_name: process.env.DATADOG_APP_KEY 15 | }, 16 | console: { 17 | level: "debug" 18 | } 19 | }); 20 | 21 | const producer = produce({ 22 | startBlock: parseInt(process.env.START_BLOCK as string), 23 | rpcWsEndpoints: process.env.RPC_WS_ENDPOINT_URL_LIST?.split(','), 24 | topic: process.env.PRODUCER_TOPIC ?? "polygon.1.blocks", 25 | maxReOrgDepth: 96, 26 | maxRetries: 5, 27 | mongoUrl: process.env.MONGO_URL ?? 'mongodb://localhost:27017/chain-indexer', 28 | blockSubscriptionTimeout: 120000, 29 | "bootstrap.servers": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 30 | "security.protocol": "plaintext", 31 | type: "blocks:erigon" 32 | }); 33 | 34 | producer.on("blockProducer.fatalError", (error: any) => { 35 | Logger.error(`Block producer exited. ${error.message}`); 36 | 37 | process.exit(1); //Exiting process on fatal error. Process manager needs to restart the process. 38 | }); 39 | -------------------------------------------------------------------------------- /examples/matic_transfer/producer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "sourceMap": true, 9 | "outDir": "dist", 10 | "declaration": true, 11 | "alwaysStrict": true, 12 | "strict": true 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/.env.example: -------------------------------------------------------------------------------- 1 | # COMMON_ENV 2 | KAFKA_CONNECTION_URL=localhost:9092 3 | 4 | # CONSUMER_ENV 5 | CONSUMER_TOPIC=polygon.1.blocks 6 | 7 | # PRODUCER_ENV 8 | PRODUCER_TOPIC=apps.1.matic.transfer 9 | 10 | # LOGGER_ENV 11 | SENTRY_DSN= 12 | SENTRY_ENVIRONMENT= 13 | DATADOG_API_KEY= 14 | DATADOG_APP_KEY= 15 | 
-------------------------------------------------------------------------------- /examples/matic_transfer/transformer/README.md: -------------------------------------------------------------------------------- 1 | # Transformer 2 | 3 | This package provides an example implementation of the transformer layer for Chain Indexer Framework. This layer consumes events from the Kafka topic produced by the Producer and performs filtering based on MATIC token transfers. It then re-produces the filtered events to specific Kafka topics corresponding to each event. 4 | 5 | ## How to Use 6 | Note: Make sure you are inside the example/matic_transfer/transformer folder. 7 | 8 | ### 1. Set Configuration 9 | Begin by configuring your environment variables. Copy the `.env.example` file and rename it to `.env`. Then, provide appropriate values for the keys mentioned in the `.env` file. 10 | 11 | ### 2. Install Packages 12 | Install the required packages by running the following command: 13 | 14 | ``` 15 | npm i 16 | ``` 17 | 18 | ### 3. Build the Package 19 | Build the package by executing the following command: 20 | 21 | ``` 22 | npm run build 23 | ``` 24 | 25 | ### 4. Run the Package 26 | Run the producer service using the following command: 27 | 28 | ``` 29 | npm run start 30 | ``` 31 | 32 | ## Running the Example Using Source Code 33 | 34 | This section guides you on running the example code using the current source code, typically for debugging purposes. 35 | 36 | ### 1. Build & Link the Source Code 37 | Run the following command at the root of this project: 38 | 39 | ``` 40 | npm run build:link 41 | ``` 42 | 43 | If you encounter permission issues, run the command using `sudo`. 44 | 45 | 46 | ### 2. 
Link the Library 47 | 48 | - Navigate to the examples/matic_transfer/transformer folder: 49 | 50 | ``` 51 | cd examples/matic_transfer/transformer 52 | ``` 53 | 54 | - Execute the link command: 55 | 56 | ``` 57 | npm run link:lib 58 | ``` 59 | 60 | This documentation clarifies the setup and usage of the Transformer package in the Chain Indexer Framework project, making it easier for developers to integrate the package into their applications or utilize it for debugging and testing purposes. 61 | 62 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/babel.config.json: -------------------------------------------------------------------------------- 1 | { 2 | "presets": [ 3 | [ 4 | "@babel/preset-env", 5 | { 6 | "targets": { 7 | "node": "current" 8 | } 9 | } 10 | ], 11 | "@babel/preset-typescript" 12 | ], 13 | "plugins": [ 14 | "babel-plugin-transform-import-meta" 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/jest.config.ts: -------------------------------------------------------------------------------- 1 | const jestConfig = { 2 | transform: { 3 | "^.+\\.(j|t)s?$": "babel-jest" 4 | }, 5 | extensionsToTreatAsEsm: ['.ts'], 6 | clearMocks: true 7 | }; 8 | 9 | export default jestConfig; 10 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "matic-transfer-transformer", 3 | "version": "1.0.0", 4 | "description": "", 5 | "exports": "./dist/", 6 | "main": "dist/index.js", 7 | "type": "module", 8 | "scripts": { 9 | "tests": "jest --coverage", 10 | "tests:integration": "echo 'Hello integration tests'", 11 | "lint": "eslint src/**/*.ts", 12 | "lint:fix": "eslint --fix ./src/**/*.ts", 13 | "build": "tsc && npm run copy-proto", 14 | 
"copy-proto": "cp -R ./src/schemas/ ./dist/schemas", 15 | "start": "node --experimental-import-meta-resolve --trace-warnings dist/index.js", 16 | "link:lib": "npm link @maticnetwork/chain-indexer-framework", 17 | "link:lib:build": "npm run link:lib && npm run build" 18 | }, 19 | "author": "nitinmittal23", 20 | "license": "UNLICENSED", 21 | "dependencies": { 22 | "@maticnetwork/chain-indexer-framework": "^1.0.0", 23 | "dotenv": "^16.0.3", 24 | "long": "^5.2.1", 25 | "path": "^0.12.7", 26 | "web3-utils": "^1.8.2" 27 | }, 28 | "devDependencies": { 29 | "@babel/preset-env": "^7.19.1", 30 | "@babel/preset-typescript": "^7.18.6", 31 | "@types/jest": "^29.0.3", 32 | "babel-plugin-transform-import-meta": "^2.2.0", 33 | "jest": "^29.0.3", 34 | "rollup": "^2.77.0", 35 | "ts-jest": "^29.0.3", 36 | "ts-node": "^10.9.1", 37 | "tsup": "^6.2.3", 38 | "typescript": "^4.8.2" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/src/index.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 2 | import { BlockProducerError } from "@maticnetwork/chain-indexer-framework/errors/block_producer_error"; 3 | import startTransforming from "./matic_transfer_data_transformer.js"; 4 | import { MaticTransferMapper } from "./mappers/matic_transfer_mapper.js"; 5 | import dotenv from 'dotenv'; 6 | import path from "path"; 7 | 8 | dotenv.config(); 9 | 10 | Logger.create({ 11 | sentry: { 12 | dsn: process.env.SENTRY_DSN, 13 | level: 'error' 14 | }, 15 | datadog: { 16 | api_key: process.env.DATADOG_API_KEY, 17 | service_name: process.env.DATADOG_APP_KEY, 18 | } 19 | }); 20 | 21 | /** 22 | * Initialise the transform service with producer topic, proto file names, 23 | * producer config, consumer topic and consumer proto files 24 | */ 25 | try { 26 | startTransforming( 27 | { 28 | "bootstrap.servers": 
process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 29 | "group.id": "matic.transfer.transformer", 30 | "security.protocol": "plaintext", 31 | "message.max.bytes": 26214400, 32 | "fetch.message.max.bytes": 26214400, 33 | coders: { 34 | fileName: "block", 35 | packageName: "blockpackage", 36 | messageType: "Block" 37 | }, 38 | topic: process.env.CONSUMER_TOPIC ?? "polygon.1.blocks", 39 | }, 40 | { 41 | topic: process.env.PRODUCER_TOPIC ?? "apps.1.matic.transfer", 42 | "bootstrap.servers": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 43 | "security.protocol": "plaintext", 44 | "message.max.bytes": 26214400, 45 | coder: { 46 | fileName: "matic_transfer", 47 | packageName: "matictransferpackage", 48 | messageType: "MaticTransferBlock", 49 | fileDirectory: path.resolve("dist", "./schemas/") 50 | } 51 | }, 52 | new MaticTransferMapper() 53 | ); 54 | } catch (e) { 55 | Logger.error(BlockProducerError.createUnknown(e)); 56 | } 57 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/src/interfaces/matic_transfer_tx.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export default interface IMaticTransferTx { 4 | transactionIndex: Long, 5 | transactionHash: string, 6 | transactionInitiator: string, 7 | tokenAddress: string, 8 | senderAddress: string, 9 | receiverAddress: string, 10 | amount: string, 11 | } 12 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/src/matic_transfer_data_transformer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { ITransaction } from "@maticnetwork/chain-indexer-framework/interfaces/transaction"; 3 | import { IBlock } from 
"@maticnetwork/chain-indexer-framework/interfaces/block"; 4 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 5 | import { IConsumerConfig } from "@maticnetwork/chain-indexer-framework/interfaces/consumer_config"; 6 | import { IProducerConfig } from "@maticnetwork/chain-indexer-framework/interfaces/producer_config"; 7 | import { transform } from "@maticnetwork/chain-indexer-framework/data_transformation/transform"; 8 | import IMaticTransferTx from "./interfaces/matic_transfer_tx.js"; 9 | import { MaticTransferMapper } from "./mappers/matic_transfer_mapper.js"; 10 | 11 | /** 12 | * startTransforming function which starts consuming events from the consumer and then transforming it 13 | * and then finally producing the trasnformed data to new kafka topic 14 | * 15 | * @function startTransforming 16 | * 17 | * @param {IConsumerConfig} consumerConfig - consumer config 18 | * @param {IProducerConfig} producerConfig - producer config 19 | * @param {MaticTransferMapper} maticTransferMapper - transfer mapper class instance 20 | * 21 | * @returns {Promise} 22 | */ 23 | export default async function startTransforming( 24 | consumerConfig: IConsumerConfig, 25 | producerConfig: IProducerConfig, 26 | maticTransferMapper: MaticTransferMapper 27 | ): Promise { 28 | try { 29 | transform({ 30 | consumerConfig, 31 | producerConfig, 32 | type: 'asynchronous' 33 | }, { 34 | transform: async (block: IBlock): Promise> => { 35 | let transfers: IMaticTransferTx[] = []; 36 | 37 | block.transactions.forEach((transaction: ITransaction) => { 38 | transfers = transfers.concat(maticTransferMapper.map(transaction)); 39 | }); 40 | 41 | return { 42 | blockNumber: block.number, 43 | timestamp: block.timestamp, 44 | data: transfers 45 | }; 46 | }, 47 | error(err: Error) { 48 | console.error('something wrong occurred: ' + err); 49 | }, 50 | }) 51 | } catch (error) { 52 | Logger.error(`Transformer instance is exiting due to error: ${error}`); 53 | process.exit(1); 54 | 55 | } 56 
| } 57 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/src/schemas/matic_transfer.proto: -------------------------------------------------------------------------------- 1 | package matictransferpackage; 2 | syntax = "proto3"; 3 | 4 | message MaticTransferBlock { 5 | message MaticTransferEvent { 6 | required uint64 transactionIndex = 1; 7 | required string transactionHash = 2; 8 | required string transactionInitiator = 3; 9 | required string tokenAddress = 4; 10 | required string amount = 5; 11 | required string senderAddress = 6; 12 | required string receiverAddress = 7; 13 | } 14 | 15 | required uint64 blockNumber = 1; 16 | required uint64 timestamp = 2; 17 | repeated MaticTransferEvent data = 3; 18 | } 19 | -------------------------------------------------------------------------------- /examples/matic_transfer/transformer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "resolveJsonModule": true, 9 | "sourceMap": true, 10 | "outDir": "dist", 11 | "declaration": true, 12 | "alwaysStrict": true, 13 | "strict": true, 14 | }, 15 | "include": [ 16 | "./src/**/*" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /examples/nft_balancer/README.md: -------------------------------------------------------------------------------- 1 | # NFT_BALANCER 2 | 3 | NFT_Balancer is an exemplary project that showcases the usage of Chain Indexer Framework to maintain the NFT detail along with owner on polygon zkEVM Testnet (blueberry) chain 4 | 5 | ## Overview 6 | The project consists of three distinct packages, each serving a specific function: 7 | 8 | 1. 
**Producers**: This package acts as the initial entry point for the indexer service. It collects all blueberry blockchain data from a starting block as per env and streams it into Kafka without any discrimination. 9 | 10 | 2. **Transformers**: The Transformers package consumes the blockchain data from the Kafka stream generated by the producers. It then filters out all NFT transfers and reproduces these events to a dedicated topic within the Kafka stream. 11 | 12 | 3. **Consumers**: Consumers are responsible for subscribing to the event-specific Kafka stream and persisting the data into a database. Additionally, they expose endpoints that allow clients to retrieve the data according to their specific requirements. 13 | 14 | This serves as a useful reference for understanding how Chain Indexer Framework can be effectively utilized to manage and process blockchain data efficiently. 15 | 16 | 17 | ## How to Build 18 | 19 | - Make sure you run kafka inside your docker and mongoDB. steps can be found [here](../README.md) 20 | 21 | - Run [Producer](./producer/README.md) 22 | 23 | - Run [Transformer](./transformer/README.md) 24 | 25 | - Run [Consumer](./consumer/README.md) 26 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/.env.example: -------------------------------------------------------------------------------- 1 | # COMMON 2 | MONGO_URL= 3 | 4 | #CONSUMER 5 | KAFKA_CONNECTION_URL= 6 | CONSUMER_GROUP_ID=nft.token.consumer 7 | 8 | # MAPPING_TOPIC 9 | TRANSFER_TOPIC=apps.1442.nft.transfer 10 | 11 | # LOGGER_ENV 12 | SENTRY_DSN= 13 | SENTRY_ENVIRONMENT= 14 | DATADOG_API_KEY= 15 | DATADOG_APP_KEY= 16 | 17 | # START_CONSUMER and START_API_ENDPOINTS can be true and false based on if there will 18 | # be different server for both or same for both. 
// ESLint configuration for the consumer package.
// Extends the Airbnb base style guide and wires in the TypeScript
// parser/plugin so .ts sources can be linted; no rule overrides yet.
module.exports = {
  env: {
    es2021: true, // allow ES2021 globals and syntax
    node: true,   // Node.js globals (process, module, ...)
  },
  extends: [
    'airbnb-base',
  ],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaVersion: 'latest',
    sourceType: 'module',
  },
  plugins: [
    '@typescript-eslint',
  ],
  rules: {
  },
};
### 4. Run the Package
Run the consumer service using the following command:
import type { JestConfigWithTsJest } from "ts-jest";

// Jest configuration for the consumer package.
// Transpiles .js/.ts test sources with babel-jest (see babel.config.json)
// and treats .ts files as native ES modules.
const jestConfig: JestConfigWithTsJest = {
    "transform": {
        "^.+\\.(j|t)s?$": "babel-jest"
    },
    extensionsToTreatAsEsm: ['.ts'],
    // reset mock state between tests automatically
    clearMocks: true
}

export default jestConfig
"^1.0.0", 23 | "dotenv": "^16.0.1", 24 | "long": "^5.2.0", 25 | "mongoose": "^6.5.2", 26 | "path": "^0.12.7", 27 | "web3-utils": "^1.8.1" 28 | }, 29 | "devDependencies": { 30 | "@typescript-eslint/eslint-plugin": "^5.31.0", 31 | "@typescript-eslint/parser": "^5.31.0", 32 | "eslint": "^8.20.0", 33 | "eslint-config-airbnb-base": "^15.0.0", 34 | "eslint-plugin-import": "^2.26.0", 35 | "ts-jest": "^29.0.3", 36 | "tsup": "^6.2.3", 37 | "typescript": "^4.8.2" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/consumer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { DeserialisedMessage } from "@maticnetwork/chain-indexer-framework/interfaces/deserialised_kafka_message"; 3 | import { consume } from "@maticnetwork/chain-indexer-framework/kafka/consumer/consume"; 4 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 5 | 6 | import TransferTokenService from "./services/transfer_token.js"; 7 | import TransferTokenMapper from "./mapper/transfer_token.js"; 8 | import INFTTransferTx from "./interfaces/nft_transfer_tx.js"; 9 | 10 | import dotenv from 'dotenv'; 11 | import path from "path"; 12 | 13 | dotenv.config() 14 | 15 | /** 16 | * startConsuming function which starts consuming the events from kafka and then save the data to 17 | * database. it also handles the reorg internally in the save function. 
18 | * 19 | * @function startConsume 20 | * 21 | * @param {TransferTokenService} transferTokenService - The transfer token service class 22 | * @param {TransferTokenMapper} transferTokenMapper - the transfer token Mapper class 23 | * 24 | * @returns {Promise} 25 | */ 26 | export default async function startConsuming(transferTokenService: TransferTokenService, transferTokenMapper: TransferTokenMapper): Promise { 27 | try { 28 | consume({ 29 | "metadata.broker.list": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 30 | "group.id": process.env.CONSUMER_GROUP_ID ?? "matic.transfer.consumer", 31 | "security.protocol": "plaintext", 32 | topic: process.env.TRANSFER_TOPIC ?? "apps.1.matic.transfer", 33 | coders: { 34 | fileName: "nft_transfer", 35 | packageName: "nfttransferpackage", 36 | messageType: "NFTTransferBlock", 37 | fileDirectory: path.resolve("dist", "./schemas") 38 | }, 39 | type: 'synchronous' 40 | }, { 41 | next: async (message: DeserialisedMessage) => { 42 | const transformedBlock = message.value as ITransformedBlock; 43 | const transfers: INFTTransferTx[] = transformedBlock.data as INFTTransferTx[]; 44 | 45 | if (transfers && transfers.length > 0) { 46 | await transferTokenService.save( 47 | transferTokenMapper.map(transformedBlock) 48 | ); 49 | } 50 | }, 51 | error(err: Error) { 52 | console.error('something wrong occurred: ' + err); 53 | }, 54 | closed: () => { 55 | Logger.info(`subscription is ended.`); 56 | throw new Error("Consumer stopped"); 57 | }, 58 | }); 59 | } catch (error) { 60 | Logger.error(`Consumer instance is exiting due to error: ${error}`); 61 | process.exit(1); 62 | 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/index.ts: -------------------------------------------------------------------------------- 1 | import { Database } from "@maticnetwork/chain-indexer-framework/mongo/database"; 2 | import { Logger } from 
"@maticnetwork/chain-indexer-framework/logger"; 3 | 4 | import TransferTokenMapper from "./mapper/transfer_token.js"; 5 | import TransferTokenService from "./services/transfer_token.js"; 6 | import { TokenModel } from "./models/token.js"; 7 | 8 | import startConsuming from "./consumer.js"; 9 | 10 | async function start(): Promise { 11 | try { 12 | Logger.create({ 13 | sentry: { 14 | dsn: process.env.SENTRY_DSN, 15 | level: 'error' 16 | }, 17 | datadog: { 18 | api_key: process.env.DATADOG_API_KEY, 19 | service_name: process.env.DATADOG_APP_KEY 20 | }, 21 | console: { 22 | level: "debug" 23 | } 24 | }); 25 | 26 | const database = new Database(process.env.MONGO_URL ?? 'mongodb://localhost:27017/chain-indexer'); 27 | await database.connect(); 28 | 29 | const transferService = new TransferTokenService( 30 | await TokenModel.new(database), 31 | ); 32 | 33 | await startConsuming(transferService, new TransferTokenMapper()); 34 | 35 | } catch (error) { 36 | Logger.error(`Error when starting consumer service: ${(error as Error).message}`); 37 | } 38 | } 39 | 40 | start(); 41 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/interfaces/nft_transfer_tx.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export default interface INFTTransferTx { 4 | transactionIndex: Long, 5 | transactionHash: string, 6 | transactionInitiator: string, 7 | tokenAddress: string, 8 | senderAddress: string, 9 | receiverAddress: string, 10 | tokenId: number, 11 | } 12 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/interfaces/token.ts: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * Interface for token details and its owner 4 | */ 5 | export interface IToken { 6 | tokenId: number, 7 | owner: string 8 | } 9 | 
-------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/interfaces/token_methods.ts: -------------------------------------------------------------------------------- 1 | import { IToken } from "./token.js"; 2 | 3 | /** 4 | * this class contains methods to interact with the database methods 5 | * 6 | * @returns implementation of all the token model method 7 | */ 8 | const statics = { 9 | 10 | /** 11 | * Inserts multiple documents for NFT ownership into token collection 12 | * 13 | * @param {IToken[]} data 14 | * @param {ClientSession} session 15 | * 16 | * @returns {Promise} 17 | */ 18 | async updateTokens(data: IToken[]): Promise { 19 | for (let transfer of data) { 20 | //@ts-ignore 21 | await this.updateOne({ tokenId: transfer.tokenId }, { owner: transfer.owner }, { upsert: true }); 22 | } 23 | return; 24 | } 25 | } 26 | 27 | export default statics; 28 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/mapper/transfer_token.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 3 | import INFTTransferTx from "../interfaces/nft_transfer_tx.js"; 4 | import { IToken } from "../interfaces/token.js"; 5 | 6 | /** 7 | * TransferTokenMapper class is a mapper class which has function to map the data according to all NFT transfers and 8 | * these functions are not async as there is only data transformation according to the way it will be saved in mongodb. 9 | * 10 | * @class TransferTokenMapper 11 | */ 12 | export default class TransferTokenMapper { 13 | 14 | /** 15 | * this is a public function which takes data from the kafka consumer and return in the form 16 | * where it will be saved in db for NFT token ownership. 
it will be used when user want 17 | * to have data for all balances. 18 | * 19 | * @param {ITransformedBlock} transformedBlock - data from the kafka consumer 20 | 21 | * @returns {IToken[]} 22 | */ 23 | public map(transformedBlock: ITransformedBlock): IToken[] { 24 | let tokens: IToken[] = []; 25 | 26 | for (const transfer of transformedBlock.data) { 27 | tokens.push({ 28 | owner: transfer.receiverAddress.toLowerCase(), 29 | tokenId: transfer.tokenId 30 | }); 31 | } 32 | 33 | //Remove below when app is stable 34 | Logger.debug({ 35 | location: "mapper: tokens", 36 | function: "mapTokens", 37 | status: "function completed", 38 | }) 39 | return tokens; 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/models/token.ts: -------------------------------------------------------------------------------- 1 | import { Database } from "@maticnetwork/chain-indexer-framework/mongo/database"; 2 | import { Model, Schema } from "mongoose"; 3 | import { IToken } from "../interfaces/token.js"; 4 | import statics from "../interfaces/token_methods.js"; 5 | 6 | const TokenSchema = new Schema({ 7 | tokenId: { 8 | type: Number, 9 | }, 10 | owner: { 11 | type: String, 12 | } 13 | }, 14 | { 15 | versionKey: false, 16 | statics: statics 17 | } 18 | ); 19 | /** 20 | * This class represents Token Model 21 | * 22 | * @class 23 | */ 24 | export class TokenModel { 25 | /** 26 | * Get the token model defined on this mongoose database instance 27 | * 28 | * @param {Database} database 29 | * 30 | */ 31 | public static async new(database: Database) { 32 | const model = database.model>( 33 | "token", 34 | TokenSchema 35 | ); 36 | await model.createCollection(); 37 | 38 | return model; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/schemas/nft_transfer.proto: 
-------------------------------------------------------------------------------- 1 | package nfttransferpackage; 2 | syntax = "proto3"; 3 | 4 | message NFTTransferBlock { 5 | message NFTTransferEvent { 6 | required uint64 transactionIndex = 1; 7 | required string transactionHash = 2; 8 | required string transactionInitiator = 3; 9 | required string tokenAddress = 4; 10 | required uint64 tokenId = 5; 11 | required string senderAddress = 6; 12 | required string receiverAddress = 7; 13 | } 14 | 15 | required uint64 blockNumber = 1; 16 | required uint64 timestamp = 2; 17 | repeated NFTTransferEvent data = 3; 18 | } 19 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/src/services/transfer_token.ts: -------------------------------------------------------------------------------- 1 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 2 | import { Model } from "mongoose"; 3 | import { IToken } from "../interfaces/token.js"; 4 | 5 | /** 6 | * TransferTokenService class has all the exposed functions to fetch transfer data from db so that API service can create an 7 | * instance of this class and can call these function and get data as per the requirement. 8 | */ 9 | export default class TransferTokenService { 10 | /** 11 | * @constructor 12 | * 13 | * @param {Model} transferModel 14 | */ 15 | constructor( 16 | private transferModel: Model, 17 | ) { } 18 | 19 | /** 20 | * this is a public function which takes an array of NFT transfer events and save it in mongodb. 
21 | * 22 | * @param {IToken[]} data - data to be saved in mongo 23 | * 24 | * @returns {Promise} 25 | */ 26 | public async save(data: IToken[]): Promise { 27 | Logger.debug({ 28 | location: "transfer_token_service", 29 | function: "saveTokenTransfers", 30 | status: "function call", 31 | data: { 32 | length: data.length 33 | } 34 | }); 35 | 36 | 37 | if (data && data.length) { 38 | //@ts-ignore 39 | await this.transferModel.updateTokens(data); 40 | } 41 | 42 | Logger.debug({ 43 | location: "transfer_token_service", 44 | function: "saveTokenTransfers", 45 | status: "function completed" 46 | }); 47 | 48 | return true; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/tests/mock_data/transfer_message.js: -------------------------------------------------------------------------------- 1 | export const transferTransactionMessage = { 2 | "key": "1234", 3 | "topic": "apps.1442.nft.transfer", 4 | "value": { 5 | "blockNumber": { 6 | "high": 0, 7 | "low": 0, 8 | "unsigned": true, 9 | }, 10 | "timestamp": 12340, 11 | "data": [ 12 | { 13 | transactionIndex: { 14 | "high": 1, 15 | "low": 0, 16 | "unsigned": true 17 | }, 18 | transactionHash: "0x0bbd76664f215b0a74d4ee773c85c19cc649dcb504963678db568dca6912f0aa", 19 | transactionInitiator: "0x65a8f07bd9a8598e1b5b6c0a88f4779dbc077675", 20 | tokenAddress: "0x8839e639f210b80ffea73aedf51baed8dac04499", 21 | senderAddress: "0xe95b7d229cfaed717600d64b0d938a36fd5d5060", 22 | receiverAddress: "0xab6395382798ee6ea6e9a97cdfd18557f34adc87", 23 | tokenId: "7954515646169844787673", 24 | } 25 | ] 26 | } 27 | }; 28 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/tests/services/transfer.test.ts: -------------------------------------------------------------------------------- 1 | import TransferTokenService from "../../dist/services/transfer_token"; 2 | import { Model } from "mongoose"; 3 | import { 
IToken } from "../../dist/interfaces/token"; 4 | 5 | describe("transfer token service", () => { 6 | let transferTokenService: jest.Mocked, 7 | mockTransferModel: jest.MockedObject> 8 | 9 | const data: IToken[] = [{ 10 | "tokenId": 7954515646169844787673, 11 | "owner": "0xab6395382798ee6ea6e9a97cdfd18557f34adc87" 12 | }]; 13 | 14 | beforeEach(() => { 15 | mockTransferModel = Object.assign({ 16 | add: jest.fn(), 17 | getAll: jest.fn(), 18 | updateTokens: jest.fn(), 19 | }) as jest.MockedObject>; 20 | 21 | transferTokenService = new TransferTokenService(mockTransferModel as unknown as Model) as jest.Mocked; 22 | 23 | }); 24 | 25 | test("test the save method", async () => { 26 | 27 | await transferTokenService.save(data); 28 | //@ts-ignore 29 | expect(mockTransferModel.updateTokens).toBeCalledWith(data); 30 | }) 31 | }) 32 | -------------------------------------------------------------------------------- /examples/nft_balancer/consumer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "sourceMap": true, 9 | "outDir": "dist", 10 | "declaration": true, 11 | "alwaysStrict": true, 12 | "strict": true, 13 | }, 14 | "include": [ 15 | "./src/**/*" 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /examples/nft_balancer/producer/.env.example: -------------------------------------------------------------------------------- 1 | HTTP_PROVIDER= 2 | START_BLOCK=2600000 3 | BLOCK_POLLING_TIMEOUT=20000 4 | KAFKA_CONNECTION_URL= 5 | MONGO_URL= 6 | PRODUCER_TOPIC=polygon.1442.blocks 7 | 8 | #LOGGER_ENV 9 | SENTRY_DSN= 10 | SENTRY_ENVIRONMENT= 11 | DATADOG_API_KEY= 12 | DATADOG_APP_KEY= 13 | 14 | -------------------------------------------------------------------------------- 
// ESLint configuration for the producer package.
// Extends the Airbnb base style guide and wires in the TypeScript
// parser/plugin so .ts sources can be linted; no rule overrides yet.
module.exports = {
  env: {
    es2021: true, // allow ES2021 globals and syntax
    node: true,   // Node.js globals (process, module, ...)
  },
  extends: [
    'airbnb-base',
  ],
  parser: '@typescript-eslint/parser',
  parserOptions: {
    ecmaVersion: 'latest',
    sourceType: 'module',
  },
  plugins: [
    '@typescript-eslint',
  ],
  rules: {
  },
};
43 | 44 | 45 | ### 2. Link the Library 46 | 47 | - Navigate to the examples/nft_balancer/producer folder: 48 | 49 | ``` 50 | cd examples/nft_balancer/producer 51 | ``` 52 | 53 | - Execute the link command: 54 | 55 | ``` 56 | npm run link:lib 57 | ``` 58 | 59 | This documentation clarifies the setup and usage of the Producer package in the Chain Indexer Framework project, making it easier for developers to integrate the package into their applications or utilize it for debugging and testing purposes. 60 | -------------------------------------------------------------------------------- /examples/nft_balancer/producer/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "blueberry-producer", 3 | "version": "1.0.0", 4 | "description": "", 5 | "exports": "./dist/", 6 | "type": "module", 7 | "scripts": { 8 | "tests": "echo here will be Unit test run", 9 | "tests:integration": "echo 'Hello integration tests'", 10 | "lint": "eslint src/**/*.ts", 11 | "lint:fix": "eslint --fix ./src/**/*.ts", 12 | "build": "tsc", 13 | "start": "node --experimental-import-meta-resolve --trace-warnings dist/index.js", 14 | "link:lib": "npm link @maticnetwork/chain-indexer-framework", 15 | "link:lib:build": "npm run link:lib && npm run build" 16 | }, 17 | "author": "nitinmittal23", 18 | "license": "UNLICENSED", 19 | "dependencies": { 20 | "@maticnetwork/chain-indexer-framework": "^1.0.0", 21 | "dotenv": "^16.0.1" 22 | }, 23 | "devDependencies": { 24 | "@typescript-eslint/eslint-plugin": "^5.31.0", 25 | "@typescript-eslint/parser": "^5.31.0", 26 | "eslint": "^8.20.0", 27 | "eslint-config-airbnb-base": "^15.0.0", 28 | "eslint-plugin-import": "^2.26.0", 29 | "tsup": "^6.2.3", 30 | "typescript": "^4.8.2" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /examples/nft_balancer/producer/src/index.ts: -------------------------------------------------------------------------------- 1 | import 
{ produce } from "@maticnetwork/chain-indexer-framework/kafka/producer/produce"; 2 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 3 | import dotenv from 'dotenv'; 4 | import { BlockPollerProducer } from "@maticnetwork/chain-indexer-framework/block_producers/block_polling_producer"; 5 | 6 | dotenv.config(); 7 | Logger.create({ 8 | sentry: { 9 | dsn: process.env.SENTRY_DSN, 10 | level: 'error' 11 | }, 12 | datadog: { 13 | api_key: process.env.DATADOG_API_KEY, 14 | service_name: process.env.DATADOG_APP_KEY 15 | }, 16 | console: { 17 | level: "debug" 18 | } 19 | }); 20 | 21 | const producer = produce({ 22 | startBlock: parseInt(process.env.START_BLOCK as string), 23 | rpcWsEndpoints: process.env.HTTP_PROVIDER ? [process.env.HTTP_PROVIDER] : undefined, 24 | blockPollingTimeout: parseInt(process.env.BLOCK_POLLING_TIMEOUT as string), 25 | topic: process.env.PRODUCER_TOPIC ?? "polygon.1442.blocks", 26 | maxReOrgDepth: 0, 27 | maxRetries: 5, 28 | mongoUrl: process.env.MONGO_URL ?? 'mongodb://localhost:27017/chain-indexer', 29 | // blockSubscriptionTimeout: 120000, 30 | "bootstrap.servers": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092", 31 | "security.protocol": "plaintext", 32 | type: "blocks:polling" 33 | }); 34 | 35 | producer.on("blockProducer.fatalError", (error: any) => { 36 | Logger.error(`Block producer exited. ${error.message}`); 37 | 38 | process.exit(1); //Exiting process on fatal error. Process manager needs to restart the process. 
39 | }); 40 | -------------------------------------------------------------------------------- /examples/nft_balancer/producer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "sourceMap": true, 9 | "outDir": "dist", 10 | "declaration": true, 11 | "alwaysStrict": true, 12 | "strict": true 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/.env.example: -------------------------------------------------------------------------------- 1 | # COMMON_ENV 2 | KAFKA_CONNECTION_URL=localhost:9092 3 | 4 | # CONSUMER_ENV 5 | CONSUMER_TOPIC=polygon.1442.blocks 6 | 7 | # PRODUCER_ENV 8 | PRODUCER_TOPIC=apps.1442.nft.transfer 9 | 10 | NFT_CONTRACT="0xb58f5110855fbef7a715d325d60543e7d4c18143" 11 | 12 | # LOGGER_ENV 13 | SENTRY_DSN= 14 | SENTRY_ENVIRONMENT= 15 | DATADOG_API_KEY= 16 | DATADOG_APP_KEY= 17 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/README.md: -------------------------------------------------------------------------------- 1 | # Transformer 2 | 3 | This package provides an example implementation of the transformer layer for Chain Indexer Framework. This layer consumes events from the Kafka topic produced by the Producer and performs filtering based on NFT token transfers. It then re-produces the filtered events to specific Kafka topics corresponding to each event. 4 | 5 | ## How to Use 6 | Note: Make sure you are inside the example/nft_balancer/transformer folder. 7 | 8 | ### 1. Set Configuration 9 | Begin by configuring your environment variables. Copy the `.env.example` file and rename it to `.env`. Then, provide appropriate values for the keys mentioned in the `.env` file. 
### 4. Run the Package
Run the transformer service using the following command:
// Jest configuration for the transformer package.
// Transpiles .js/.ts test sources with babel-jest (see babel.config.json)
// and treats .ts files as native ES modules.
const jestConfig = {
    transform: {
        "^.+\\.(j|t)s?$": "babel-jest"
    },
    extensionsToTreatAsEsm: ['.ts'],
    // reset mock state between tests automatically
    clearMocks: true
};

export default jestConfig;
import { Logger } from "@maticnetwork/chain-indexer-framework/logger";
import { BlockProducerError } from "@maticnetwork/chain-indexer-framework/errors/block_producer_error";
import startTransforming from "./nft_transfer_data_transformer.js";
import { NFTTransferMapper } from "./mappers/nft_transfer_mapper.js";
import dotenv from 'dotenv';
import path from "path";

dotenv.config();

// Central logger: errors go to Sentry, metrics to Datadog.
Logger.create({
    sentry: {
        dsn: process.env.SENTRY_DSN,
        level: 'error'
    },
    datadog: {
        api_key: process.env.DATADOG_API_KEY,
        // NOTE(review): DATADOG_APP_KEY is passed as the service name — confirm intended.
        service_name: process.env.DATADOG_APP_KEY,
    }
});

/**
 * Initialise the transform service with producer topic, proto file names,
 * producer config, consumer topic and consumer proto files
 */
try {
    startTransforming(
        {
            "bootstrap.servers": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092",
            // NOTE(review): group id and fallback topics still carry "matic.transfer"
            // values copied from the matic_transfer example — confirm intended for NFT.
            "group.id": "matic.transfer.transformer",
            "security.protocol": "plaintext",
            "message.max.bytes": 26214400,
            "fetch.message.max.bytes": 26214400,
            // decode raw blocks with the framework's built-in block schema
            coders: {
                fileName: "block",
                packageName: "blockpackage",
                messageType: "Block"
            },
            topic: process.env.CONSUMER_TOPIC ?? "polygon.1.blocks",
        },
        {
            topic: process.env.PRODUCER_TOPIC ?? "apps.1.matic.transfer",
            "bootstrap.servers": process.env.KAFKA_CONNECTION_URL ?? "localhost:9092",
            "security.protocol": "plaintext",
            "message.max.bytes": 26214400,
            // encode produced messages with the NFT transfer schema copied
            // to dist/schemas at build time (package.json "copy-proto")
            coder: {
                fileName: "nft_transfer",
                packageName: "nfttransferpackage",
                messageType: "NFTTransferBlock",
                fileDirectory: path.resolve("dist", "./schemas/")
            }
        },
        new NFTTransferMapper()
    );
} catch (e) {
    Logger.error(BlockProducerError.createUnknown(e));
}
"localhost:9092", 43 | "security.protocol": "plaintext", 44 | "message.max.bytes": 26214400, 45 | coder: { 46 | fileName: "nft_transfer", 47 | packageName: "nfttransferpackage", 48 | messageType: "NFTTransferBlock", 49 | fileDirectory: path.resolve("dist", "./schemas/") 50 | } 51 | }, 52 | new NFTTransferMapper() 53 | ); 54 | } catch (e) { 55 | Logger.error(BlockProducerError.createUnknown(e)); 56 | } 57 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/src/interfaces/nft_transfer_tx.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export default interface INFTTransferTx { 4 | transactionIndex: Long, 5 | transactionHash: string, 6 | transactionInitiator: string, 7 | tokenAddress: string, 8 | senderAddress: string, 9 | receiverAddress: string, 10 | tokenId: number, 11 | } 12 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/src/nft_transfer_data_transformer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@maticnetwork/chain-indexer-framework/interfaces/transformed_block"; 2 | import { ITransaction } from "@maticnetwork/chain-indexer-framework/interfaces/transaction"; 3 | import { IBlock } from "@maticnetwork/chain-indexer-framework/interfaces/block"; 4 | import { Logger } from "@maticnetwork/chain-indexer-framework/logger"; 5 | import { IConsumerConfig } from "@maticnetwork/chain-indexer-framework/interfaces/consumer_config"; 6 | import { IProducerConfig } from "@maticnetwork/chain-indexer-framework/interfaces/producer_config"; 7 | import { transform } from "@maticnetwork/chain-indexer-framework/data_transformation/transform"; 8 | import INFTTransferTx from "./interfaces/nft_transfer_tx.js"; 9 | import { NFTTransferMapper } from "./mappers/nft_transfer_mapper.js"; 10 | 11 | /** 12 | * 
startTransforming function which starts consuming events from the consumer and then transforming it 13 | * and then finally producing the trasnformed data to new kafka topic 14 | * 15 | * @function startTransforming 16 | * 17 | * @param {IConsumerConfig} consumerConfig - consumer config 18 | * @param {IProducerConfig} producerConfig - producer config 19 | * @param {NFTTransferMapper} nftTransferMapper - transfer mapper class instance 20 | * 21 | * @returns {Promise} 22 | */ 23 | export default async function startTransforming( 24 | consumerConfig: IConsumerConfig, 25 | producerConfig: IProducerConfig, 26 | nftTransferMapper: NFTTransferMapper 27 | ): Promise { 28 | try { 29 | transform({ 30 | consumerConfig, 31 | producerConfig, 32 | type: 'asynchronous' 33 | }, { 34 | transform: async (block: IBlock): Promise> => { 35 | let transfers: INFTTransferTx[] = []; 36 | 37 | block.transactions.forEach((transaction: ITransaction) => { 38 | transfers = transfers.concat(nftTransferMapper.map(transaction)); 39 | }); 40 | 41 | return { 42 | blockNumber: block.number, 43 | timestamp: block.timestamp, 44 | data: transfers 45 | }; 46 | }, 47 | error(err: Error) { 48 | console.error('something wrong occurred: ' + err); 49 | }, 50 | }) 51 | } catch (error) { 52 | Logger.error(`Transformer instance is exiting due to error: ${error}`); 53 | process.exit(1); 54 | 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/src/schemas/nft_transfer.proto: -------------------------------------------------------------------------------- 1 | package nfttransferpackage; 2 | syntax = "proto3"; 3 | 4 | message NFTTransferBlock { 5 | message NFTTransferEvent { 6 | required uint64 transactionIndex = 1; 7 | required string transactionHash = 2; 8 | required string transactionInitiator = 3; 9 | required string tokenAddress = 4; 10 | required uint64 tokenId = 5; 11 | required string senderAddress = 6; 12 | required string 
receiverAddress = 7; 13 | } 14 | 15 | required uint64 blockNumber = 1; 16 | required uint64 timestamp = 2; 17 | repeated NFTTransferEvent data = 3; 18 | } 19 | -------------------------------------------------------------------------------- /examples/nft_balancer/transformer/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "esModuleInterop": true, 5 | "allowSyntheticDefaultImports": true, 6 | "target": "esnext", 7 | "moduleResolution": "nodenext", 8 | "resolveJsonModule": true, 9 | "sourceMap": true, 10 | "outDir": "dist", 11 | "declaration": true, 12 | "alwaysStrict": true, 13 | "strict": true, 14 | }, 15 | "include": [ 16 | "./src/**/*" 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /internal/block_getters/block_getter_worker.ts: -------------------------------------------------------------------------------- 1 | import { IBlockWorkerMessage } from "../interfaces/block_worker_message.js"; 2 | import { parentPort, workerData } from "worker_threads"; 3 | import { BlockGetter } from "./block_getter.js"; 4 | import EthClass from "web3-eth"; 5 | 6 | if (!workerData || !parentPort) { 7 | process.exit(1); 8 | } 9 | 10 | const blockGetter = new BlockGetter( 11 | //@ts-ignore 12 | new EthClass( 13 | //@ts-ignore 14 | new EthClass.providers.WebsocketProvider( 15 | workerData.endpoint, 16 | { 17 | reconnect: { 18 | auto: true 19 | }, 20 | clientConfig: { 21 | maxReceivedFrameSize: 1000000000, 22 | maxReceivedMessageSize: 1000000000, 23 | }, 24 | timeout: 45000 25 | } 26 | ) 27 | ), 28 | workerData.maxRetries 29 | ); 30 | 31 | parentPort.on("message", async (message: { 32 | blockNumber: number, 33 | callBackId: number 34 | }) => { 35 | try { 36 | parentPort?.postMessage( 37 | { 38 | callBackId: message.callBackId, 39 | error: null, 40 | block: await blockGetter.getBlockWithTransactionReceipts(message.blockNumber) 41 
import { IBlockWorkerMessage } from "../interfaces/block_worker_message.js";
import { parentPort, workerData } from "worker_threads";
import { ErigonBlockGetter } from "./erigon_block_getter.js";
import EthClass from "web3-eth";

// Worker-thread entry point: requires workerData ({ endpoint, maxRetries })
// and a parentPort; exits immediately if spawned outside a worker context.
if (!workerData || !parentPort) {
    process.exit(1);
}

// Erigon-specific block getter backed by an auto-reconnecting websocket
// provider pointed at the endpoint supplied by the spawning thread.
const blockGetter = new ErigonBlockGetter(
    //@ts-ignore
    new EthClass(
        //@ts-ignore
        new EthClass.providers.WebsocketProvider(
            workerData.endpoint,
            {
                reconnect: {
                    auto: true
                },
                clientConfig: {
                    // Raised frame/message limits so large blocks are not truncated.
                    maxReceivedFrameSize: 1000000000,
                    maxReceivedMessageSize: 1000000000,
                },
                timeout: 45000
            }
        )
    ),
    workerData.maxRetries
);

// On each request, fetch the block (with receipts) and reply on parentPort;
// callBackId lets the parent correlate replies with requests.
parentPort.on("message", async (message: {
    blockNumber: number,
    callBackId: number
}) => {
    try {
        parentPort?.postMessage(
            {
                callBackId: message.callBackId,
                error: null,
                block: await blockGetter.getBlockWithTransactionReceipts(message.blockNumber)
            } as IBlockWorkerMessage
        );
    } catch (error) {
        // NOTE(review): this error reply omits `block` even though
        // IBlockWorkerMessage declares it required — the `as` cast hides that.
        parentPort?.postMessage(
            {
                callBackId: message.callBackId,
                error: error
            } as IBlockWorkerMessage
        );
    }
});
QuickNodeBlockGetter } from "./quicknode_block_getter.js"; 3 | import { parentPort, workerData } from "worker_threads"; 4 | import EthClass from "web3-eth"; 5 | 6 | if(!workerData || !parentPort) { 7 | process.exit(1); 8 | } 9 | 10 | const blockGetter = new QuickNodeBlockGetter( 11 | //@ts-ignore 12 | new EthClass( 13 | //@ts-ignore 14 | new EthClass.providers.WebsocketProvider( 15 | workerData.endpoint, 16 | { 17 | reconnect: { 18 | auto: true 19 | }, 20 | clientConfig: { 21 | maxReceivedFrameSize: 1000000000, 22 | maxReceivedMessageSize: 1000000000, 23 | }, 24 | timeout: 45000 25 | } 26 | ) 27 | ), 28 | workerData.maxRetries, 29 | //@ts-ignore 30 | new EthClass( 31 | //@ts-ignore 32 | new EthClass.providers.WebsocketProvider( 33 | workerData.alternateEndpoint, 34 | { 35 | reconnect: { 36 | auto: true 37 | }, 38 | clientConfig: { 39 | maxReceivedFrameSize: 1000000000, 40 | maxReceivedMessageSize: 1000000000, 41 | }, 42 | timeout: 45000 43 | } 44 | ) 45 | ), 46 | workerData.rpcTimeout 47 | ); 48 | 49 | parentPort.on("message", async (message: { 50 | blockNumber: number, 51 | callBackId: number 52 | }) => { 53 | try { 54 | parentPort?.postMessage( 55 | { 56 | callBackId: message.callBackId, 57 | error: null, 58 | block: await blockGetter.getBlockWithTransactionReceipts(message.blockNumber) 59 | } as IBlockWorkerMessage 60 | ); 61 | } catch (error) { 62 | parentPort?.postMessage( 63 | { 64 | callBackId: message.callBackId, 65 | error: error 66 | } as IBlockWorkerMessage 67 | ); 68 | } 69 | }); 70 | -------------------------------------------------------------------------------- /internal/block_producers/produced_blocks_model.ts: -------------------------------------------------------------------------------- 1 | import { Schema, Model } from "mongoose"; 2 | 3 | export interface IProducedBlock { 4 | number: number 5 | hash: string 6 | } 7 | 8 | export interface IProducedBlocksModel extends Model { 9 | get(blockNumber?: number): Promise; 10 | add(block: T, 
maxReOrgDepth?: number): Promise; 11 | } 12 | 13 | export const ProducedBlocksModel = new Schema>( 14 | { 15 | number: { 16 | type: Number, 17 | required: true, 18 | unique: true 19 | }, 20 | hash: { 21 | type: String, 22 | required: true 23 | }, 24 | }, 25 | { 26 | versionKey: false, 27 | statics: { 28 | async get(blockNumber?: number): Promise { 29 | const query = blockNumber ? { number: blockNumber } : {}; 30 | 31 | return (await this.find(query, null).sort({ number: -1 }).limit(1).exec())[0]; 32 | }, 33 | 34 | async add(block: IProducedBlock, maxReOrgDepth: number = 0): Promise { 35 | await this.create(block); 36 | await this.deleteMany( 37 | { 38 | $or: [ 39 | { number: { $lt: block.number - maxReOrgDepth } }, 40 | { number: { $gt: block.number } } 41 | ] 42 | } 43 | ); 44 | } 45 | } 46 | } 47 | ); 48 | -------------------------------------------------------------------------------- /internal/coder/abi_coder.ts: -------------------------------------------------------------------------------- 1 | import AbiCoder from "web3-eth-abi"; 2 | 3 | /** 4 | * web3 helper class to access any web3 js related functionalities, use this to define any web3 helper functions 5 | */ 6 | export class ABICoder { 7 | /** 8 | * @param type {any} - RLP type as eg address 9 | * @param hex {string} - The bytes string given 10 | * 11 | * @returns {any} - Can return arrays, numbers, objects, etc. depends on the RLP type 12 | */ 13 | public static decodeParameter(type: any, hex: string): any { 14 | return (AbiCoder as any).decodeParameter(type, hex); 15 | } 16 | 17 | /** 18 | * @param types {any[]} - RLP types 19 | * @param hex {string} - The bytes string given 20 | * 21 | * @returns {any} - Can return an object of arrays, numbers, objects, etc. 
depends on the RLP type 22 | */ 23 | public static decodeParameters(types: any[], hex: string): { [key: string]: any } { 24 | return (AbiCoder as any).decodeParameters(types, hex); 25 | } 26 | 27 | /** 28 | * @param types {any[]} - RLP types 29 | * @param values {string[]} - The array of values 30 | * 31 | * @returns {any} - return hex string 32 | */ 33 | public static encodeParameters(types: any[], values: string[]): string { 34 | return (AbiCoder as any).encodeParameters(types, values); 35 | } 36 | 37 | /** 38 | * // TODO: Overtake private type from web3.js or submit PR 39 | * 40 | * @param inputs {AbiInput[]} - ABI objects 41 | * @param hex {string} - bytes given from log.data 42 | * @param topics {string[]} - Indexed topics 43 | * 44 | * @returns 45 | */ 46 | public static decodeLog(inputs: any[], hex: string, topics: string[]): { [key: string]: string } { 47 | return (AbiCoder as any).decodeLog(inputs, hex, topics); 48 | } 49 | 50 | /** 51 | * decode method 52 | * 53 | * @param {any[]} types - function name 54 | * @param {string} data - input Data 55 | * 56 | * @returns {{ [key: string]: any }} 57 | */ 58 | public static decodeMethod(types: any[], data: string): { [key: string]: any } { 59 | return ABICoder.decodeParameters(types, "0x" + data.slice(10)); 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /internal/enums/bridgetype.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Enum for BridgeType. It contains the 3 values PLASMA, POS, FX 3 | */ 4 | export enum BridgeType { 5 | PLASMA = "PLASMA", 6 | POS = "POS", 7 | FX = "FX" 8 | } 9 | -------------------------------------------------------------------------------- /internal/enums/tokentype.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Enum for TokenType. 
import { createErrorObject } from "./create_error_object.js";
import { isBaseError } from "./is_base_error.js";
import { BaseError } from "./base_error.js";
import { codes } from "./error_codes.js";

/**
 * ApiError object used within common. Defaults to an HTTP 500 "Internal
 * server error" and merges the API-specific codes over the base codes.
 * (Previous doc wrongly said "BlockProducerError" — copy-paste artifact.)
 */
export class ApiError extends BaseError {
    public static codes = {
        ...BaseError.codes,
        ...codes.api
    };

    /**
     * @param name {string} - The error name
     * @param code {number} - The error code
     * @param isFatal {boolean} - Flag to know if it is a fatal error
     * @param message {string} - The actual error message
     * @param origin {string} - The point this error originated
     * @param stack {string} - The stack trace
     */
    constructor(
        name: string = "Internal server error",
        code: number = ApiError.codes.SERVER_ERROR,
        isFatal: boolean = false,
        message?: string,
        origin: string = "local",
        stack?: string,
    ) {
        super(name, code, isFatal, message, origin, stack);
    }

    /**
     * Static method that converts any error that is not an instance of
     * BaseError into an ApiError (always marked fatal, code SERVER_ERROR).
     *
     * @param {any} error - Error that needs to be checked and converted.
     * @param {boolean} isLocal - Whether the error originated locally ("local") or not ("remote").
     *
     * @returns {ApiError|BaseError} - Returns either an ApiError or the original BaseError instance.
     */
    public static createUnknown(error: any, isLocal: boolean = true): ApiError | BaseError {
        if (!isBaseError(error)) {
            const errorObject = createErrorObject(error);

            return new ApiError(
                "Internal server error",
                ApiError.codes.SERVER_ERROR,
                true,
                errorObject.message,
                isLocal ? "local" : "remote",
                errorObject.stack
            );
        }

        return error;
    }
}
"./is_base_error.js"; 3 | import { BaseError } from "./base_error.js"; 4 | import { codes } from "./error_codes.js"; 5 | 6 | /** 7 | * BlockProducerError object used within common. 8 | */ 9 | export class BlockProducerError extends BaseError { 10 | /** 11 | * @param name {string} - The error name 12 | * @param code {number} - The error code 13 | * @param isFatal {boolean} - Flag to know if it is a fatal error 14 | * @param message {string} - The actual error message 15 | * @param origin {string} - The point this error originated 16 | * @param stack {string} - The stack trace 17 | */ 18 | constructor( 19 | name: string = "Block producer error", 20 | code: number = BlockProducerError.codes.UNKNOWN_ERR, 21 | isFatal: boolean = false, 22 | message?: string, 23 | origin: string = "local", 24 | stack?: string, 25 | ) { 26 | super(name, code, isFatal, message, origin, stack); 27 | } 28 | 29 | public static codes = { 30 | ...BaseError.codes, 31 | ...codes.blockProducer 32 | }; 33 | 34 | /** 35 | * Static method that converts any error that is not an instance of BaseError into BlockProducerError 36 | * 37 | * @param {any} error - Error that needs to be checked and converted. 38 | * 39 | * @returns {BlockProducerError|BaseError} - Returns either BlockProducer or any instance of BaseError 40 | */ 41 | public static createUnknown(error: any, isLocal: boolean = true): BlockProducerError | BaseError { 42 | if (!isBaseError(error)) { 43 | const errorObject = createErrorObject(error); 44 | 45 | return new BlockProducerError( 46 | "Block producer error", 47 | BlockProducerError.codes.UNKNOWN_ERR, 48 | true, 49 | errorObject.message, 50 | isLocal ? 
"local" : "remote", 51 | errorObject.stack 52 | ); 53 | } 54 | 55 | return error; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /internal/errors/coder_error.ts: -------------------------------------------------------------------------------- 1 | import { BaseError } from "./base_error.js"; 2 | import { codes } from "./error_codes.js"; 3 | 4 | /** 5 | * Error type specific to coder errors 6 | */ 7 | export class CoderError extends BaseError { 8 | public static codes = { 9 | ...BaseError.codes, 10 | ...codes.coder 11 | }; 12 | 13 | /** 14 | * @param name {string} - The error name 15 | * @param code {number} - The error code 16 | * @param isFatal {boolean} - Flag to know if it is a fatal error 17 | * @param message {string} - The actual error message 18 | * @param stack {string} - The stack trace 19 | */ 20 | constructor( 21 | name: string = "Coder Error", 22 | code: number = CoderError.codes.UNKNOWN_CODER_ERR, 23 | isFatal: boolean = false, 24 | message?: string, 25 | stack?: string 26 | ) { 27 | super(name, code, isFatal, message, "local", stack); 28 | } 29 | } -------------------------------------------------------------------------------- /internal/errors/create_error_object.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Small helper we have added for now. 3 | * 4 | * @param error {unknown} 5 | * 6 | * @returns {Error|TypeError} 7 | */ 8 | export function createErrorObject(error: unknown): Error|TypeError { 9 | if (error instanceof Error || error instanceof TypeError) { 10 | return error; 11 | } 12 | 13 | return new Error(String(error)); 14 | } 15 | -------------------------------------------------------------------------------- /internal/errors/error_codes.ts: -------------------------------------------------------------------------------- 1 | export const codes = { 2 | // Base error identifier 3 | base: { BASE_ERROR: 100 }, 4 | 5 | // Coder related errors. 
6 | coder: { 7 | UNKNOWN_CODER_ERR: 1000, 8 | INVALID_PATH_PROTO: 1001, 9 | INVALID_PATH_TYPE: 1002, 10 | DECODING_ERROR: 1003, 11 | ENCODING_VERIFICATION_FAILED: 1004 12 | }, 13 | 14 | // Kafka Consumer error codes. 15 | kafkaclient: { 16 | UNKNOWN_CONSUMER_ERR: 2000, 17 | CONSUMER_OBSERVER_INVALID: 2001, 18 | INVALID_CODER_CONFIG: 2002, 19 | UNKNOWN_PRODUCER_ERR: 3000, 20 | DELIVERY_TIMED_OUT: 3001 21 | }, 22 | 23 | // Block producer error codes. 24 | blockProducer: { 25 | UNKNOWN_ERR: 4000, 26 | RECEIPT_NOT_FOUND: 4001, 27 | OBSERVER_NOT_SET: 4002, 28 | REORG_ENCOUNTERED: 4003, 29 | RPC_ERR: 4004 30 | }, 31 | 32 | // Event consumer error codes 33 | eventConsumer: { 34 | UNKNOWN_ERR: 5000, 35 | SAVE_EXECUTE_ERROR: 5001, 36 | MAPPING_ERROR: 5002, 37 | COMMAND_EXECUTE_ERROR: 5003, 38 | INVALID_PARAMS_VALIDATION: 5004 39 | }, 40 | 41 | // API Errors 42 | api: { 43 | BAD_REQUEST: 400, 44 | NOT_FOUND: 404, 45 | SERVER_ERROR: 500 46 | } 47 | }; 48 | -------------------------------------------------------------------------------- /internal/errors/event_consumer_error.ts: -------------------------------------------------------------------------------- 1 | import { createErrorObject } from "./create_error_object.js"; 2 | import { isBaseError } from "./is_base_error.js"; 3 | import { BaseError } from "./base_error.js"; 4 | import { codes } from "./error_codes.js"; 5 | 6 | /** 7 | * Error type specific to event consumer errors 8 | */ 9 | export class EventConsumerError extends BaseError { 10 | public static codes = { 11 | ...BaseError.codes, 12 | ...codes.eventConsumer 13 | }; 14 | 15 | /** 16 | * @param name {string} - The error name 17 | * @param code {number} - The error code 18 | * @param isFatal {boolean} - Flag to know if it is a fatal error 19 | * @param message {string} - The actual error message 20 | * @param origin {string} - The point this error originated 21 | * @param stack {string} - The stack trace 22 | */ 23 | constructor( 24 | name: string = "Event Consumer 
Error", 25 | code: number = EventConsumerError.codes.UNKNOWN_ERR, 26 | isFatal: boolean = false, 27 | message?: string, 28 | origin: string = "local", 29 | stack?: string 30 | ) { 31 | super(name, code, isFatal, message, origin, stack); 32 | } 33 | 34 | /** 35 | * Static method that converts any error that is not an instance of BaseError into BlockProducerError 36 | * 37 | * @param {any} error - Error that needs to be checked and converted. 38 | * 39 | * @returns {BlockProducerError|BaseError} - Returns either BlockProducer or any instance of BaseError 40 | */ 41 | public static createUnknown(error: any, isLocal: boolean = true): EventConsumerError | BaseError { 42 | if (!isBaseError(error)) { 43 | const errorObject = createErrorObject(error); 44 | 45 | return new EventConsumerError( 46 | "Event Consumer Error", 47 | EventConsumerError.codes.UNKNOWN_ERR, 48 | true, 49 | errorObject.message, 50 | isLocal ? "local" : "remote", 51 | errorObject.stack 52 | ); 53 | } 54 | 55 | return error; 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /internal/errors/get_error_message.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Smaller helper we have added for now. 3 | * 4 | * @param error {unkown} 5 | * 6 | * @returns {String} 7 | */ 8 | export function getErrorMessage(error: unknown): string { 9 | if (!error) { 10 | return "Unknown error"; 11 | } 12 | 13 | if (error instanceof Error) return error.message; 14 | return typeof error === "object" ? JSON.stringify(error) : String(error); 15 | } 16 | -------------------------------------------------------------------------------- /internal/errors/is_base_error.ts: -------------------------------------------------------------------------------- 1 | import { BaseError } from "./base_error.js"; 2 | 3 | /** 4 | * Check if the given error is BaseError. Small helper we might will remove later on. 
5 | * 6 | * @param error {unknown} 7 | * 8 | * @returns {boolean} 9 | */ 10 | export function isBaseError(error: unknown): boolean { 11 | return ( 12 | //@ts-ignore 13 | typeof error === "object" && error !== null && "identifier" in error && error.identifier === BaseError.codes.BASE_ERROR 14 | ); 15 | } 16 | -------------------------------------------------------------------------------- /internal/errors/is_librdkafka_error.ts: -------------------------------------------------------------------------------- 1 | 2 | /** 3 | * Checks if given error is a LibrdKafkaError. Small helper we might will remove later on. 4 | * 5 | * @param error {unknown} 6 | * 7 | * @returns {boolean} 8 | */ 9 | export function isLibrdKafkaError(error: unknown): boolean { 10 | return ( 11 | typeof error === "object" && 12 | error !== null && 13 | "message" in error && 14 | "code" in error && 15 | "isFatal" in error && 16 | "origin" in error && 17 | "stack" in error 18 | ); 19 | } 20 | -------------------------------------------------------------------------------- /internal/errors/kafka_error.ts: -------------------------------------------------------------------------------- 1 | import { isLibrdKafkaError } from "./is_librdkafka_error.js"; 2 | import { createErrorObject } from "./create_error_object.js"; 3 | import { isBaseError } from "./is_base_error.js"; 4 | import { LibrdKafkaError } from "node-rdkafka"; 5 | import { BaseError } from "./base_error.js"; 6 | import { codes } from "./error_codes.js"; 7 | 8 | /** 9 | * KafkaError object used within common. 
10 | */ 11 | export class KafkaError extends BaseError { 12 | /** 13 | * @param name {string} - The error name 14 | * @param code {number} - The error code 15 | * @param isFatal {boolean} - Flag to know if it is a fatal error 16 | * @param message {string} - The actual error message 17 | * @param origin {string} - The point this error originated 18 | * @param stack {string} - The stack trace 19 | */ 20 | constructor( 21 | name: string = "Kafka Error", 22 | code: number, 23 | isFatal: boolean = false, 24 | message?: string, 25 | origin: string = "local", 26 | stack?: string, 27 | ) { 28 | super(name, code, isFatal, message, origin, stack); 29 | } 30 | 31 | public static codes = { 32 | ...BaseError.codes, 33 | ...codes.kafkaclient 34 | }; 35 | 36 | /** 37 | * Internal method to convert LibKafkaError to KafkaError 38 | * 39 | * @param {LibrdKafkaError} error - The error object to be converted. 40 | * 41 | * @returns {KafkaError} - Returns the kafka error created from the error passed. 42 | */ 43 | public static convertLibError(error: LibrdKafkaError, isProducer: boolean = false): KafkaError { 44 | return new KafkaError( 45 | isProducer ? "Kafka producer error" : "Kafka consumer error", 46 | error.code || (isProducer ? KafkaError.codes.UNKNOWN_PRODUCER_ERR : KafkaError.codes.UNKNOWN_CONSUMER_ERR), 47 | error.isFatal, 48 | error.message, 49 | error.origin, 50 | error.stack 51 | ); 52 | } 53 | 54 | /** 55 | * Static method that converts any error that is not an instance of BaseError into KafkaError 56 | * 57 | * @param {any} error - Error that needs to be checked and converted. 
58 | * 59 | * @returns {KafkaError|BaseError} - Returns either KafkaError or any instance of BaseError 60 | */ 61 | public static createUnknown(error: any, isProducer: boolean = false): KafkaError | BaseError { 62 | if (!isBaseError(error)) { 63 | if (isLibrdKafkaError(error)) { 64 | return KafkaError.convertLibError(error, isProducer); 65 | } 66 | 67 | const errorObject = createErrorObject(error); 68 | return new KafkaError( 69 | isProducer ? "Kafka producer error" : "Kafka consumer error", 70 | isProducer ? KafkaError.codes.UNKNOWN_PRODUCER_ERR : KafkaError.codes.UNKNOWN_CONSUMER_ERR, 71 | true, 72 | errorObject.message, 73 | "local", 74 | errorObject.stack 75 | ); 76 | } 77 | 78 | return error as KafkaError | BaseError; 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /internal/event_consumer/abstract_event_consumer.ts: -------------------------------------------------------------------------------- 1 | import { DeserialisedMessage } from "../interfaces/deserialised_kafka_message.js"; 2 | import { SynchronousConsumer } from "../kafka/consumer/synchronous_consumer.js"; 3 | import { EventConsumerError } from "../errors/event_consumer_error.js"; 4 | import { Logger } from "../logger/logger.js"; 5 | 6 | /** 7 | * This class will start consuming the events and has functions to call what to do with the data for each event. 8 | */ 9 | export abstract class AbstractEventConsumer extends SynchronousConsumer { 10 | /** 11 | * Public execute function has no parameters. this function will start consuming the events and will call 12 | * the callback function when an event is consumed. only need to call this event when you want to start 13 | * consuming the event. 
14 | * 15 | * @returns {Promise} 16 | */ 17 | public async execute(): Promise { 18 | await this.start({ 19 | next: async (event) => { 20 | try { 21 | await this.onEvent(event); 22 | } catch (error) { 23 | throw EventConsumerError.createUnknown(error); 24 | } 25 | }, 26 | error: (err: Error) => { 27 | Logger.error(err); 28 | }, 29 | closed: () => { 30 | Logger.info(`Consumer stopped for topics ${this.topics}`); 31 | }, 32 | }); 33 | } 34 | 35 | /** 36 | * Private internal callback method for the consumer to call when any deposit data is received. this function will 37 | * filter data based on its token type and execute the command based on which tokentype is present. 38 | * 39 | * @param {DeserialisedMessage} data - Data that is received from kafka consumer 40 | * 41 | * @returns {Promise} 42 | */ 43 | protected abstract onEvent(data: DeserialisedMessage): Promise; 44 | } 45 | -------------------------------------------------------------------------------- /internal/filter/bloom_filter.ts: -------------------------------------------------------------------------------- 1 | import { isContractAddressInBloom, isTopicInBloom } from "ethereum-bloom-filters"; 2 | 3 | /** 4 | * Bloomfilter class which extends the Bloom filter package, all methods to be implemented here. 
5 | */ 6 | export class BloomFilter { 7 | /** 8 | * @param {string} bloom - The bloom filter passed 9 | * @param {string} contractAddress - The address you are looking for 10 | * 11 | * @returns {boolean} 12 | */ 13 | public static isContractAddressInBloom(bloom: string, contractAddress: string): boolean { 14 | return isContractAddressInBloom(bloom, contractAddress); 15 | } 16 | 17 | /** 18 | * @param {string} bloom - The bloom filter passed 19 | * @param {string} topic - The topic signature you are looking for 20 | * 21 | * @returns {boolean} 22 | */ 23 | public static isTopicInBloom(bloom: string, topic: string): boolean { 24 | return isTopicInBloom(bloom, topic); 25 | } 26 | } -------------------------------------------------------------------------------- /internal/interfaces/async_observer.ts: -------------------------------------------------------------------------------- 1 | import { Observer } from "rxjs"; 2 | 3 | export interface AsyncObserver extends Observer { 4 | next(): Promise 5 | error(error: unknown): void 6 | complete(): void 7 | } 8 | -------------------------------------------------------------------------------- /internal/interfaces/block.ts: -------------------------------------------------------------------------------- 1 | import { ITransaction } from "./transaction.js"; 2 | import Long from "long"; 3 | 4 | export interface IBlock { 5 | difficulty?: string; 6 | totalDifficulty?: string; 7 | number: Long; 8 | gasLimit: Long; 9 | baseFeePerGas?: string; 10 | gasUsed: Long; 11 | logsBloom: string; 12 | hash: string; 13 | parentHash: string; 14 | receiptsRoot: string; 15 | sha3Uncles: string; 16 | size: string; 17 | stateRoot: string; 18 | timestamp: Long; 19 | transactionsRoot: string; 20 | miner: string; 21 | nonce: Long; 22 | extraData: string; 23 | transactions: ITransaction[]; 24 | //TODO - store uncles. 
25 | } 26 | -------------------------------------------------------------------------------- /internal/interfaces/block_getter.ts: -------------------------------------------------------------------------------- 1 | import { Block } from "web3-eth"; 2 | import { IBlock } from "./block.js"; 3 | 4 | export interface IBlockGetter { 5 | getBlock(blockNumber: number|string): Promise; 6 | getBlockWithTransactionReceipts(blockNumber: number): Promise; 7 | getLatestBlockNumber(): Promise; 8 | } 9 | -------------------------------------------------------------------------------- /internal/interfaces/block_getter_worker_promise.ts: -------------------------------------------------------------------------------- 1 | import { IBlock } from "./block.js"; 2 | 3 | export interface IBlockGetterWorkerPromise { 4 | block: IBlock, error: Error | null 5 | } 6 | -------------------------------------------------------------------------------- /internal/interfaces/block_header.ts: -------------------------------------------------------------------------------- 1 | export interface IBlockHeader { 2 | number: number; 3 | hash: string; 4 | gasLimit: number; 5 | gasUsed: number; 6 | logsBloom: string; 7 | parentHash: string; 8 | receiptsRoot: string; 9 | sha3Uncles: string; 10 | stateRoot: string; 11 | timestamp: number|string; 12 | transactionsRoot: string; 13 | miner: string; 14 | nonce: string; 15 | extraData?: string; 16 | } 17 | -------------------------------------------------------------------------------- /internal/interfaces/block_producer_config.ts: -------------------------------------------------------------------------------- 1 | import { IProducerConfig } from "./producer_config.js"; 2 | 3 | export interface IBlockProducerConfig extends IProducerConfig { 4 | startBlock?: number, 5 | rpcWsEndpoints?: string[], 6 | mongoUrl?: string, 7 | dbCollection?: string, 8 | maxReOrgDepth?: number, 9 | maxRetries?: number, 10 | blockPollingTimeout?: number, 11 | blockSubscriptionTimeout?: 
number, 12 | blockDelay?: number, 13 | alternateEndpoint?: string, 14 | rpcTimeout?: number 15 | } 16 | -------------------------------------------------------------------------------- /internal/interfaces/block_subscription.ts: -------------------------------------------------------------------------------- 1 | import { IObserver } from "./observer.js"; 2 | 3 | export interface IBlockSubscription { 4 | subscribe(observer: IObserver, startBlock: number): Promise | void, 5 | unsubscribe(): Promise 6 | } 7 | -------------------------------------------------------------------------------- /internal/interfaces/block_worker_message.ts: -------------------------------------------------------------------------------- 1 | import { IBlock } from "./block.js"; 2 | 3 | export interface IBlockWorkerMessage { 4 | callBackId: number; 5 | error: null | Error; 6 | block: IBlock 7 | } 8 | -------------------------------------------------------------------------------- /internal/interfaces/coder.ts: -------------------------------------------------------------------------------- 1 | export interface ICoder { 2 | deserialize: ( 3 | buffer: Buffer, 4 | messageType?: string, 5 | packageName?: string 6 | ) => Promise, 7 | serialize: ( 8 | messageObject: object, 9 | messageType?: string, 10 | packageName?: string 11 | ) => Promise 12 | } 13 | -------------------------------------------------------------------------------- /internal/interfaces/coder_config.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface ICoderConfig { 3 | fileName: string, 4 | packageName: string, 5 | messageType: string, 6 | fileDirectory?: string 7 | } 8 | -------------------------------------------------------------------------------- /internal/interfaces/common_kafka_events.ts: -------------------------------------------------------------------------------- 1 | import { ClientMetrics, LibrdKafkaError, DeliveryReport, ReadyInfo, Metadata, Message, EofEvent, 
TopicPartition, SubscribeTopicList, TopicPartitionOffset } from "node-rdkafka"; 2 | // 3 | type KafkaClientEvents = "disconnected" | "ready" | "connection.failure" | "event.error" | "event.stats" | "event.log" | "event.event" | "event.throttle"; 4 | 5 | type EventListenerMap = { 6 | // ### Client 7 | // connectivity events 8 | "disconnected": (metrics: ClientMetrics) => void, 9 | "ready": (info: ReadyInfo, metadata: Metadata) => void, 10 | "connection.failure": (error: LibrdKafkaError, metrics: ClientMetrics) => void, 11 | // event messages 12 | "event.error": (error: LibrdKafkaError) => void, 13 | "event.stats": (eventData: any) => void, 14 | "event.log": (eventData: any) => void, 15 | "event.event": (eventData: any) => void, 16 | "event.throttle": (eventData: any) => void, 17 | // ### Consumer only 18 | // domain events 19 | "data": (arg: Message) => void, 20 | "partition.eof": (arg: EofEvent) => void, 21 | "rebalance": (err: LibrdKafkaError, assignments: TopicPartition[]) => void, 22 | "rebalance.error": (err: Error) => void, 23 | // connectivity events 24 | "subscribed": (topics: SubscribeTopicList) => void, 25 | "unsubscribe": () => void, 26 | "unsubscribed": () => void, 27 | // offsets 28 | "offset.commit": (error: LibrdKafkaError, topicPartitions: TopicPartitionOffset[]) => void, 29 | // ### Producer only 30 | // delivery 31 | "delivery-report": (error: LibrdKafkaError, report: DeliveryReport) => void, 32 | } 33 | 34 | export type KafkaConsumerEvents = "data" | "partition.eof" | "rebalance" | "rebalance.error" | "subscribed" | "unsubscribed" | "unsubscribe" | "offset.commit" | KafkaClientEvents; 35 | export type KafkaProducerEvents = "delivery-report" | KafkaClientEvents; 36 | export type EventListener = K extends keyof EventListenerMap ? 
EventListenerMap[K] : never; 37 | -------------------------------------------------------------------------------- /internal/interfaces/config.ts: -------------------------------------------------------------------------------- 1 | export interface IConfig { 2 | [key: string]: string, 3 | 4 | POS_DEPOSIT_TO_ADDRESS: string, 5 | FX_ROOT_TUNNEL: string, 6 | PLASMA_DEPOSIT_TO_ADDRESS: string, 7 | POS_ERC20_PREDICATE_PROXY: string, 8 | POS_ERC721_PREDICATE_PROXY: string, 9 | POS_ERC1155_PREDICATE_PROXY: string, 10 | POS_ERC20_MINTABLE_PREDICATE_PROXY: string, 11 | POS_ERC721_MINTABLE_PREDICATE_PROXY: string, 12 | POS_ERC1155_MINTABLE_PREDICATE_PROXY: string, 13 | POS_ETHER_PREDICATE_PROXY: string, 14 | POS_ROOT_CHAIN_MANAGER_PROXY: string, 15 | STATE_SENDER: string, 16 | STATE_RECEIVER: string, 17 | GOVERNANCE_PROXY: string, 18 | ROOT_CHAIN_PROXY: string, 19 | 20 | LOCKED_ERC20_TOPIC: string, 21 | LOCKED_ERC721_TOPIC: string, 22 | LOCKED_ERC1155_TOPIC: string, 23 | LOCKED_ETHER_TOPIC: string, 24 | LOCKED_ERC20_MINTABLE_TOPIC: string, 25 | LOCKED_ERC721_MINTABLE_TOPIC: string, 26 | LOCKED_ERC1155_MINTABLE_TOPIC: string, 27 | 28 | EXITED_ETHER_TOPIC: string, 29 | FX_ERC20_WITHDRAW_TOPIC: string, 30 | FX_ERC721_WITHDRAW_TOPIC: string, 31 | FX_ERC20_MINTABLE_WITHDRAW_TOPIC: string, 32 | FX_ERC721_MINTABLE_WITHDRAW_TOPIC: string, 33 | 34 | FX_ERC20_ROOT_TUNNEL: string, 35 | FX_ERC721_ROOT_TUNNEL: string, 36 | FX_ERC1155_ROOT_TUNNEL: string, 37 | 38 | FX_ERC20_TOPIC: string, 39 | FX_ERC721_TOPIC: string, 40 | FX_ERC1155_TOPIC: string, 41 | 42 | PLASMA_TOPIC: string, 43 | PLASMA_CONFIRM_WITHDRAW_TOPIC: string, 44 | PLASMA_EXIT_TOPIC: string, 45 | PLASMA_ERC20_PREDICATE: string, 46 | 47 | STATE_SYNCED_TOPIC: string, 48 | 49 | ERC20_ERC721_TRANSFER_TOPIC: string, 50 | NULL_ADDRESS_TOPIC: string, 51 | NULL_ADDRESS: string, 52 | ETH_ADDRESS: string, 53 | 54 | CUSTOM_WITHDRAW_TOPIC: string, 55 | 56 | PLASMA_BURN_TOPIC: string, 57 | 58 | CUSTOM_MAPPING_TOKEN_ADDRESS_1: string, 59 | 
CUSTOM_MAPPING_TOKEN_ADDRESS_2: string, 60 | CUSTOM_MAPPING_TOKEN_ADDRESS_3: string, 61 | 62 | CUSTOM_MAPPING_PREDICATE_ADDRESS: string, 63 | 64 | PLASMA_MAPPING_TOPIC: string, 65 | POS_MAPPING_TOPIC: string, 66 | FX_ERC20_MAPPING_TOPIC: string, 67 | FX_ERC721_MAPPING_TOPIC: string, 68 | FX_ERC1155_MAPPING_TOPIC: string, 69 | 70 | ERC20_TOKEN_TYPE: string, 71 | ERC721_TOKEN_TYPE: string, 72 | ERC1155_TOKEN_TYPE: string, 73 | MINTABLE_ERC20_TOKEN_TYPE: string, 74 | MINTABLE_ERC721_TOKEN_TYPE: string, 75 | MINTABLE_ERC1155_TOKEN_TYPE: string, 76 | CUSTOM_TOKEN_TYPE: string, 77 | 78 | CHECKPOINT_TOPIC: string 79 | } 80 | -------------------------------------------------------------------------------- /internal/interfaces/consumer_config.ts: -------------------------------------------------------------------------------- 1 | import { ConsumerGlobalConfig, ConsumerTopicConfig } from "node-rdkafka"; 2 | import { ICoderConfig } from "./coder_config.js"; 3 | import { IKafkaCoderConfig } from "./kafka_coder_config.js"; 4 | 5 | export interface IConsumerConfig extends ConsumerGlobalConfig { 6 | maxBufferLength?: number, 7 | maxRetries?: number, 8 | connectionTimeout?: number, 9 | topicConfig?: ConsumerTopicConfig, 10 | startOffsets?: { 11 | [topic: string]: number 12 | }, 13 | topic?: string | string[], 14 | coders?: ICoderConfig | ICoderConfig[] | IKafkaCoderConfig, 15 | type?: string, 16 | } 17 | -------------------------------------------------------------------------------- /internal/interfaces/consumer_queue_object.ts: -------------------------------------------------------------------------------- 1 | export interface IConsumerQueueObject { 2 | message: T, 3 | promise?: Promise 4 | } 5 | -------------------------------------------------------------------------------- /internal/interfaces/deposit.ts: -------------------------------------------------------------------------------- 1 | import { TokenType } from "../enums/tokentype.js"; 2 | import { BridgeType } from 
"../enums/bridgetype.js"; 3 | 4 | export interface IDeposit { 5 | tokenType: TokenType, 6 | bridgeType: BridgeType; 7 | transactionHash: string, 8 | depositor: string, 9 | depositReceiver: string, 10 | rootToken: string, 11 | amounts?: string[], 12 | tokenIds?: string[], 13 | timestamp?: number, 14 | rootTunnelAddress?: string 15 | refuel?: boolean, 16 | nonce?: number 17 | } 18 | -------------------------------------------------------------------------------- /internal/interfaces/deserialised_kafka_message.ts: -------------------------------------------------------------------------------- 1 | import { Message } from "node-rdkafka"; 2 | 3 | export interface DeserialisedMessage extends Omit { 4 | value?: object 5 | } 6 | -------------------------------------------------------------------------------- /internal/interfaces/event_log.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export interface IEventLog { 4 | address: string; 5 | data: string; 6 | logIndex: Long; 7 | topics: string[]; 8 | transactionHash: string; 9 | transactionIndex: Long; 10 | blockHash: string; 11 | blockNumber: Long; 12 | removed?: boolean; 13 | } 14 | -------------------------------------------------------------------------------- /internal/interfaces/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./async_observer.js"; 2 | export * from "./block_getter_worker_promise.js"; 3 | export * from "./block_getter.js"; 4 | export * from "./block_header.js"; 5 | export * from "./block_producer_config.js"; 6 | export * from "./block_subscription.js"; 7 | export * from "./block_worker_message.js"; 8 | export * from "./block.js"; 9 | export * from "./coder_config.js"; 10 | export * from "./coder.js"; 11 | export * from "./common_kafka_events.js"; 12 | export * from "./config.js"; 13 | export * from "./consumer_config.js"; 14 | export * from "./consumer_queue_object.js"; 
15 | export * from "./deposit.js"; 16 | export * from "./deserialised_kafka_message.js"; 17 | export * from "./event_log.js"; 18 | export * from "./kafka_coder_config.js"; 19 | export * from "./logger_config.js"; 20 | export * from "./mapper.js"; 21 | export * from "./new_heads_subscriber.js"; 22 | export * from "./observer.js"; 23 | export * from "./producer_config.js"; 24 | export * from "./quicknode_response.js"; 25 | export * from "./raw_block.js"; 26 | export * from "./raw_receipt.js"; 27 | export * from "./raw_transaction.js"; 28 | export * from "./rpc_payload.js"; 29 | export * from "./sequential_consumer_config.js"; 30 | export * from "./stream_api_block.js"; 31 | export * from "./synchronous_producer.js"; 32 | export * from "./transaction_receipt.js"; 33 | export * from "./transaction.js"; 34 | export * from "./transformed_block.js"; 35 | export * from "./web3_transaction_receipt.js"; 36 | export * from "./web3_transaction.js"; 37 | -------------------------------------------------------------------------------- /internal/interfaces/kafka_coder_config.ts: -------------------------------------------------------------------------------- 1 | import { ICoder } from "./coder.js"; 2 | 3 | export interface IKafkaCoderConfig { 4 | [topic: string]: ICoder 5 | } 6 | -------------------------------------------------------------------------------- /internal/interfaces/logger_config.ts: -------------------------------------------------------------------------------- 1 | import winston from "winston"; 2 | 3 | export interface LoggerConfig { 4 | sentry?: { 5 | dsn?: string, 6 | level?: string, 7 | environment?: string 8 | } 9 | datadog?: { 10 | service_name?: string, 11 | api_key?: string 12 | } 13 | console?: { 14 | level?: string 15 | } 16 | winston?: winston.LoggerOptions; 17 | } 18 | -------------------------------------------------------------------------------- /internal/interfaces/mapper.ts: 
-------------------------------------------------------------------------------- 1 | 2 | export interface IMapper { 3 | map(data: T): G[]| Promise; 4 | } 5 | -------------------------------------------------------------------------------- /internal/interfaces/new_heads_subscriber.ts: -------------------------------------------------------------------------------- 1 | import {Observer, Subscription } from "rxjs"; 2 | import {IBlockHeader} from "./block_header.js"; 3 | 4 | export interface INewHeadsSubscriber { 5 | createBlockSubscription(subscriber: Observer): Subscription 6 | } 7 | -------------------------------------------------------------------------------- /internal/interfaces/observer.ts: -------------------------------------------------------------------------------- 1 | export interface IObserver { 2 | next: (value: T) => Promise | void 3 | error: (value: E) => void 4 | closed: () => void 5 | } 6 | -------------------------------------------------------------------------------- /internal/interfaces/producer_config.ts: -------------------------------------------------------------------------------- 1 | import { ProducerGlobalConfig } from "node-rdkafka"; 2 | import { ICoder } from "../interfaces/coder.js"; 3 | import { ICoderConfig } from "./coder_config.js"; 4 | 5 | export interface IProducerConfig extends ProducerGlobalConfig { 6 | topic: string; 7 | pollInterval?: number, 8 | connectionTimeout?: number, 9 | flushTimeout?: number, 10 | deliveryTimeout?: number, 11 | type?: string, 12 | coder?: ICoder | ICoderConfig 13 | } 14 | -------------------------------------------------------------------------------- /internal/interfaces/quicknode_response.ts: -------------------------------------------------------------------------------- 1 | import { IRawReceipt } from "./raw_receipt.js"; 2 | import { IRawBlock } from "./raw_block.js"; 3 | 4 | export interface IQuickNodeResponse { 5 | block: IRawBlock, 6 | receipts: IRawReceipt[] 7 | } 8 | 
-------------------------------------------------------------------------------- /internal/interfaces/raw_block.ts: -------------------------------------------------------------------------------- 1 | import { IRawTransaction } from "./raw_transaction.js"; 2 | 3 | export interface IRawBlock { 4 | difficulty: string; 5 | totalDifficulty: string; 6 | number: string; 7 | gasLimit: string; 8 | baseFeePerGas?: string; 9 | gasUsed: string; 10 | logsBloom: string; 11 | hash: string; 12 | parentHash: string; 13 | receiptsRoot: string; 14 | sha3Uncles: string; 15 | size: string; 16 | stateRoot: string; 17 | timestamp: string; 18 | transactionsRoot: string; 19 | miner: string; 20 | nonce: string; 21 | extraData: string; 22 | transactions: IRawTransaction[]; 23 | uncles?: string[]; 24 | } 25 | -------------------------------------------------------------------------------- /internal/interfaces/raw_receipt.ts: -------------------------------------------------------------------------------- 1 | export interface IRawEventLog { 2 | address: string; 3 | data: string; 4 | logIndex: string; 5 | topics: string[]; 6 | transactionHash: string; 7 | transactionIndex: string; 8 | blockHash: string; 9 | blockNumber: string; 10 | removed?: boolean; 11 | } 12 | 13 | export interface IRawReceipt { 14 | transactionHash: string; 15 | transactionIndex: string; 16 | from: string; 17 | to: string 18 | blockNumber: string; 19 | blockHash: string; 20 | contractAddress?: string; 21 | gasUsed: string; 22 | cumulativeGasUsed: string; 23 | logs: IRawEventLog[]; 24 | logsBloom: string; 25 | status: string; 26 | effectiveGasPrice?: string; 27 | } 28 | -------------------------------------------------------------------------------- /internal/interfaces/raw_transaction.ts: -------------------------------------------------------------------------------- 1 | export interface IRawTransaction { 2 | hash: string, 3 | nonce: string, 4 | blockHash: string | null, 5 | blockNumber: string, 6 | transactionIndex?: 
string, 7 | from: string, 8 | to: string | null, 9 | value: string, 10 | gasPrice: string, 11 | gas: string, 12 | input: string, 13 | maxFeePerGas?: string, 14 | maxPriorityFeePerGas?: string, 15 | } 16 | -------------------------------------------------------------------------------- /internal/interfaces/rpc_payload.ts: -------------------------------------------------------------------------------- 1 | export interface IRPCPayload { 2 | method: string, 3 | params?: string[] 4 | } 5 | -------------------------------------------------------------------------------- /internal/interfaces/sequential_consumer_config.ts: -------------------------------------------------------------------------------- 1 | 2 | export interface ISequentialConsumerConfig { 3 | maxBufferLength?: number, 4 | maxRetries?: number, 5 | } 6 | -------------------------------------------------------------------------------- /internal/interfaces/stream_api_block.ts: -------------------------------------------------------------------------------- 1 | type Log = { 2 | lid: string; 3 | address: string; 4 | topics: string[]; 5 | data: string; 6 | blockNumber: string; 7 | transactionIndex: string; 8 | logIndex: string; 9 | removed: boolean; 10 | } 11 | 12 | type Account = { 13 | "@type": "Account"; 14 | address: string; 15 | } 16 | 17 | type Transaction = { 18 | hash: string; 19 | blockNumber: string 20 | from: Account 21 | gas: string 22 | gasPrice: string 23 | maxFeePerGas: string 24 | maxPriorityFeePerGas: string 25 | input: string 26 | nonce: string 27 | to: Account 28 | transactionIndex: string 29 | value: string 30 | fee: string 31 | type: string 32 | chainId: string 33 | v: string 34 | r: string 35 | s: string 36 | logsBloom: string 37 | root: string 38 | contractAddress: string 39 | cumulativeGasUsed: string 40 | gasUsed: string 41 | status: string 42 | logs: Log[] 43 | } 44 | 45 | export interface IStreamApiBlock { 46 | hash: string; 47 | number: string; 48 | baseFeePerGas: string; 49 | 
difficulty: string; 50 | extraData: string; 51 | gasLimit: string; 52 | gasUsed: string; 53 | logsBloom: string; 54 | miner: Account; 55 | mixHash: string; 56 | nonce: string; 57 | parentHash: string; 58 | receiptsRoot: string; 59 | sha3Uncles: string; 60 | size: string 61 | stateRoot: string 62 | timestamp: string 63 | totalDifficulty: string 64 | transactions: Transaction[] 65 | transactionsRoot: string 66 | ommerCount: string 67 | logs: Log[] 68 | } 69 | -------------------------------------------------------------------------------- /internal/interfaces/synchronous_producer.ts: -------------------------------------------------------------------------------- 1 | import { DeliveryReport, LibrdKafkaError } from "node-rdkafka"; 2 | import { KafkaProducerEvents, EventListener } from "./common_kafka_events.js"; 3 | 4 | export interface ISynchronousProducer { 5 | produceEvent( 6 | topic: string, 7 | key: string, 8 | message: object, 9 | protobufMessageType: string, 10 | partition?: number, 11 | timestamp?: number, 12 | ): Promise 13 | 14 | once(event: E, listener: EventListener): this; 15 | on(event: E, listener: EventListener): this; 16 | } 17 | -------------------------------------------------------------------------------- /internal/interfaces/transaction.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | import { ITransactionReceipt } from "./transaction_receipt.js"; 3 | 4 | export interface ITransaction { 5 | hash: string, 6 | nonce: Long, 7 | blockHash: string | null, 8 | blockNumber: Long | null, 9 | transactionIndex: Long | null, 10 | from: string, 11 | to: string | null, 12 | value: string, 13 | gasPrice: string, 14 | gas: Long, 15 | input: string, 16 | maxFeePerGas?: string, 17 | maxPriorityFeePerGas?: string, 18 | chainId: string, 19 | v: string, 20 | r: string, 21 | s: string, 22 | type: number, 23 | receipt: ITransactionReceipt 24 | } 25 | 
-------------------------------------------------------------------------------- /internal/interfaces/transaction_receipt.ts: -------------------------------------------------------------------------------- 1 | import { IEventLog } from "./event_log.js"; 2 | import Long from "long"; 3 | 4 | export interface ITransactionReceipt { 5 | transactionHash: string; 6 | transactionIndex: Long; 7 | from: string; 8 | to: string 9 | blockNumber: Long; 10 | blockHash: string; 11 | contractAddress?: string | null; //Have to add null as web3.js types are incorrect. 12 | gasUsed: Long; 13 | cumulativeGasUsed: Long; 14 | logs: IEventLog[]; 15 | logsBloom: string; 16 | status: boolean; 17 | effectiveGasPrice?: string; 18 | } 19 | -------------------------------------------------------------------------------- /internal/interfaces/transformed_block.ts: -------------------------------------------------------------------------------- 1 | import Long from "long"; 2 | 3 | export interface ITransformedBlock { 4 | blockNumber: Long, 5 | timestamp: Long, 6 | data: T[] 7 | } 8 | -------------------------------------------------------------------------------- /internal/interfaces/web3_transaction.ts: -------------------------------------------------------------------------------- 1 | import { Transaction } from "web3-core"; 2 | 3 | export interface IWeb3Transaction extends Transaction { 4 | chainId: string; 5 | v: string; 6 | r: string; 7 | s: string; 8 | type: number; 9 | } 10 | -------------------------------------------------------------------------------- /internal/interfaces/web3_transaction_receipt.ts: -------------------------------------------------------------------------------- 1 | import { TransactionReceipt } from "web3-core"; 2 | 3 | export interface IWeb3TransactionReceipt extends Omit { 4 | effectiveGasPrice?: string | number; 5 | } 6 | -------------------------------------------------------------------------------- /internal/kafka/consumer/asynchronous_consumer.ts: 
-------------------------------------------------------------------------------- 1 | import { BaseError } from "../../errors/base_error.js"; 2 | import { AbstractConsumer } from "./abstract_consumer.js"; 3 | import { DeserialisedMessage } from "../../interfaces/deserialised_kafka_message.js"; 4 | import { IConsumerQueueObject } from "../../interfaces/consumer_queue_object.js"; 5 | import { isBaseError } from "../../errors/is_base_error.js"; 6 | import { KafkaError } from "../../errors/kafka_error.js"; 7 | 8 | /** 9 | * The AsynchronousConsumer extends AbstractConsumer class to provide guarantee of 10 | * ordered committing of offsets. The messages are processed concurrently using the event loop, 11 | * but offsets are committed in order. If an earlier promise in the queue fails, later offsets will 12 | * not be comitted even if successful. 13 | * @extends AbstractConsumer 14 | */ 15 | export class AsynchronousConsumer extends AbstractConsumer { 16 | /** 17 | * Private method to be used as a wrapper to retry the next promise internally upto max retries by using recursive calls. 18 | * 19 | * @param {DeserialisedMessage} message - The Deserialised message to be passed to the next promise. 20 | * 21 | * @param {number} errorCount - This param should not be set externally and is used by recursive calls to track the number of times next promise failed. 22 | * 23 | * @returns {Promise} 24 | */ 25 | private async retryPromise(message: DeserialisedMessage, errorCount: number = 0): Promise { 26 | try { 27 | await this.observer?.next(message); 28 | } catch (error) { 29 | if (errorCount >= this.maxRetries || (isBaseError(error) && (error as BaseError).isFatal)) { 30 | throw KafkaError.createUnknown(error); 31 | } 32 | 33 | return this.retryPromise(message, errorCount + 1); 34 | } 35 | } 36 | 37 | /** 38 | * Implementation of the abstract enqueue method. 
This implementation adds queue object with the wrapped retryPromise to the queue 39 | * 40 | * @param {DeserialisedMessage} message - The message of which queue object needs to be added to the internal queue. 41 | * 42 | * @returns {IConsumerQueueObject} - Returns consumer queue object with the observer.next wrapped in next promise. 43 | */ 44 | protected enqueue(message: DeserialisedMessage): IConsumerQueueObject { 45 | return { 46 | message, 47 | promise: this.retryPromise(message) 48 | }; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /internal/kafka/consumer/synchronous_consumer.ts: -------------------------------------------------------------------------------- 1 | import { AbstractConsumer } from "./abstract_consumer.js"; 2 | import { DeserialisedMessage } from "../../interfaces/deserialised_kafka_message.js"; 3 | import { IConsumerQueueObject } from "../../interfaces/consumer_queue_object.js"; 4 | 5 | /** 6 | * The SynchronousConsumer extends AbstractConsumer class to provide guarantee of 7 | * synchronous one at a time processing of events and committing of offsets. The class maintains the new events in an 8 | * internal buffer and only moves to the next event after successful processing of the previous one. In case of exception, 9 | * The queue is cleared, on a maximum number of retries that can be set via config. The class also internally handles 10 | * back pressure by pausing the consumer if buffer exceeds the maximum limit. 11 | * @extends AbstractConsumer 12 | */ 13 | export class SynchronousConsumer extends AbstractConsumer { 14 | /** 15 | * Implementation of the abstract enqueue method. This implementation adds queue object with the wrapped retryPromise to the queue 16 | * 17 | * @param {DeserialisedMessage} message - The message of which queue object needs to be added to the internal queue. 
18 | * 19 | * @returns {IConsumerQueueObject} - Returns consumer queue object without the observer.next promise. 20 | */ 21 | protected enqueue(message: DeserialisedMessage): IConsumerQueueObject { 22 | return {message}; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /internal/mongo/database.ts: -------------------------------------------------------------------------------- 1 | import { Mongoose, Schema, CompileModelOptions } from "mongoose"; 2 | 3 | let database: Database | null = null; 4 | 5 | /** 6 | * Database class is a singleton class that provides simple straightforward method to connect and disconnect to database 7 | * with a particular collection. it has one inconsistency on not extending this class from mongoose due to mongoose implementation. 8 | * that is why mongoose is initialized in the constructor. 9 | */ 10 | export class Database { 11 | private database!: Mongoose; 12 | 13 | /** 14 | * @param {string} url - The url for database that needs to be connected 15 | * this constructor will create instance of database and initialize it with database URL 16 | */ 17 | constructor(private url: string) { 18 | if (!database) { 19 | this.database = new Mongoose(); 20 | database = this; 21 | } 22 | 23 | return database; 24 | } 25 | 26 | /** 27 | * The method connect will connect to database and will return if already connnected to db. It is an async function 28 | * to handle errors in better ways. It has a void return type. this function should only be called once when 29 | * the class is initialized. 
there is no harm in calling the function again as it will return if its already 30 | * connected to database 31 | * 32 | * @returns {Promis} 33 | */ 34 | public async connect(): Promise { 35 | if (!(this.database.connection.readyState === 1 || this.database.connection.readyState === 2)) { 36 | await this.database.connect(this.url); 37 | } 38 | 39 | return true; 40 | } 41 | 42 | /** 43 | * it will disconnect from database if database is connected. and has a void return type. It is internally 44 | * calling disconnect function of Mongoose. this function should only be called when database needs to be disconnected. 45 | * 46 | * @returns {Promise} 47 | */ 48 | public async disconnect(): Promise { 49 | if (!(this.database.connection.readyState === 0 || this.database.connection.readyState === 3)) { 50 | await this.database.disconnect(); 51 | } 52 | 53 | return true; 54 | } 55 | 56 | /** 57 | * Defines a model or retrieves it. 58 | * Models defined on this mongoose instance are available to all connection created by the same mongoose instance. 59 | * 60 | * @param {string} name - Name of the model. 61 | * @param {Schema} schema - Schema for which model is to be created. 62 | * @param {string} collection - Collection name of the model. 63 | * @param {CompileModelOptions} options - Optional object for mongoose options while creating the collection. 64 | * 65 | * @returns {U} - The mongoose model. 
66 | */ 67 | public model( 68 | name: string, 69 | schema?: Schema, 70 | collection?: string, 71 | options?: CompileModelOptions 72 | ): U { 73 | return this.database.model(name, schema, collection, options); 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /internal/rpc/json_rpc_client.ts: -------------------------------------------------------------------------------- 1 | import { IRPCPayload } from "../interfaces/rpc_payload.js"; 2 | import axios from "axios"; 3 | 4 | /** 5 | * A utility class to make RPC calls to the given node URL. 6 | */ 7 | export class JSONRPCClient { 8 | 9 | /** 10 | * @constructor 11 | * 12 | * @param {string} url - The url of the node to make RPC call. 13 | */ 14 | constructor( 15 | private url: string 16 | ) {} 17 | 18 | /** 19 | * Method to make an rpc call 20 | * 21 | * @param {IRPCPayload} payload 22 | * 23 | * @returns {Promise} 24 | */ 25 | public async call(payload: IRPCPayload): Promise { 26 | const response = await axios.post( 27 | this.url, 28 | { 29 | jsonrpc: "2.0", 30 | id: new Date().getTime(), 31 | method: payload.method, 32 | params: payload.params ?? 
[] 33 | } 34 | ); 35 | 36 | return response.data.result; 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /jest.config.ts: -------------------------------------------------------------------------------- 1 | import { JestConfigWithTsJest } from "ts-jest"; 2 | 3 | const jestConfig: JestConfigWithTsJest = { 4 | "transform": { 5 | "^.+\\.(j|t)s?$": "babel-jest" 6 | }, 7 | extensionsToTreatAsEsm: ['.ts'], 8 | clearMocks: true, 9 | coverageThreshold: { 10 | global: { 11 | "branches": 80, 12 | "functions": 80, 13 | "lines": 80, 14 | "statements": 80 15 | }, 16 | }, 17 | roots: [ 18 | "./tests/", 19 | ], 20 | } 21 | 22 | export default jestConfig 23 | -------------------------------------------------------------------------------- /public/block_producers/block_polling_producer.ts: -------------------------------------------------------------------------------- 1 | import { IProducedBlock, ProducedBlocksModel, IProducedBlocksModel } from "@internal/block_producers/produced_blocks_model.js"; 2 | import { IBlockProducerConfig } from "@internal/interfaces/block_producer_config.js"; 3 | import { IProducerConfig } from "@internal/interfaces/producer_config.js"; 4 | import { BlockGetter } from "@internal/block_getters/block_getter.js"; 5 | import { Coder } from "@internal/coder/protobuf_coder.js"; 6 | import { BlockPoller } from "@internal/block_subscription/block_polling.js"; 7 | import { Database } from "@internal/mongo/database.js"; 8 | import { BlockProducer } from "@internal/block_producers/block_producer.js"; 9 | import Eth from "web3-eth"; 10 | 11 | /** 12 | * Block Poller producer class which retrieves block from polling every block 13 | * for producing to kafka. 
14 | * 15 | */ 16 | export class BlockPollerProducer extends BlockProducer { 17 | /** 18 | * @constructor 19 | * 20 | * @param {IBlockProducerConfig} config 21 | * 22 | * @returns {BlockPollerProducer} 23 | */ 24 | constructor(config: IBlockProducerConfig) { 25 | const endpoint = config.rpcWsEndpoints?.[0] ?? ""; 26 | const startBlock = config.startBlock ?? 0; 27 | const mongoUrl = config.mongoUrl ?? "mongodb://localhost:27017/chain-indexer"; 28 | const dbCollection = config.dbCollection ?? "producedblocks"; 29 | const blockPollingTimeout = config.blockPollingTimeout ?? 2000; 30 | const maxRetries = config.maxRetries ?? 0; 31 | const maxReOrgDepth = config.maxReOrgDepth ?? 0; 32 | 33 | delete config.rpcWsEndpoints; 34 | delete config.startBlock; 35 | delete config.mongoUrl; 36 | delete config.dbCollection; 37 | delete config.maxReOrgDepth; 38 | delete config.maxRetries; 39 | delete config.blockPollingTimeout; 40 | 41 | const database = new Database(mongoUrl); 42 | 43 | const blockGetter = new BlockGetter( 44 | //@ts-ignore 45 | new Eth(endpoint), 46 | maxRetries 47 | ); 48 | 49 | super( 50 | new Coder( 51 | "block", 52 | "blockpackage", 53 | "Block" 54 | ), 55 | config as IProducerConfig, 56 | new BlockPoller( 57 | blockGetter, 58 | blockPollingTimeout 59 | ), 60 | blockGetter, 61 | database, 62 | database.model>( 63 | "ProducedBlocks", 64 | ProducedBlocksModel, 65 | dbCollection 66 | ), 67 | startBlock, 68 | maxReOrgDepth 69 | ); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /public/coder/abi_coder.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/coder/abi_coder.js"; 2 | -------------------------------------------------------------------------------- /public/data_transformation/asynchronous_data_transformer.ts: -------------------------------------------------------------------------------- 1 | import { AsynchronousConsumer } from 
"../kafka/consumer/asynchronous_consumer.js";
import { AsynchronousProducer } from "../kafka/producer/asynchronous_producer.js";
import { AbstractDataTransformer } from "@internal/data_transformation/abstract_data_transformer.js";
import { IConsumerConfig } from "@internal/interfaces/consumer_config.js";
import { IProducerConfig } from "@internal/interfaces/producer_config.js";


/**
 * AsynchronousDataTransformer transforms the data concurrently and does not wait for
 * the transformed data to be produced before transforming further data. Services need
 * to implement their own transform method.
 */
export abstract class AsynchronousDataTransformer extends AbstractDataTransformer {
    /**
     * @param {IConsumerConfig} consumerConfig - consumer config used to create the internal AsynchronousConsumer
     * @param {IProducerConfig} producerConfig - producer config used to create the internal AsynchronousProducer
     */
    constructor(consumerConfig: IConsumerConfig, producerConfig: IProducerConfig) {
        super(
            new AsynchronousConsumer(consumerConfig),
            new AsynchronousProducer(producerConfig)
        );
    }
}
--------------------------------------------------------------------------------
/public/data_transformation/synchronous_data_transformer.ts:
--------------------------------------------------------------------------------
import { SynchronousConsumer } from "../kafka/consumer/synchronous_consumer.js";
import { AsynchronousProducer } from "../kafka/producer/asynchronous_producer.js";
import { AbstractDataTransformer } from "@internal/data_transformation/abstract_data_transformer.js";
import { IConsumerConfig } from "@internal/interfaces/consumer_config.js";
import { IProducerConfig } from "@internal/interfaces/producer_config.js";

/**
 * SynchronousDataTransformer transforms the data sequentially and waits for the transformed data to produce before
 * transforming data further.
 * Services need to implement their own transform method.
 */
export abstract class SynchronousDataTransformer extends AbstractDataTransformer {
    /**
     * @param {IConsumerConfig} consumerConfig - consumer config used to create the internal SynchronousConsumer
     * @param {IProducerConfig} producerConfig - producer config used to create the internal AsynchronousProducer
     */
    constructor(consumerConfig: IConsumerConfig, producerConfig: IProducerConfig) {
        super(
            new SynchronousConsumer(consumerConfig),
            new AsynchronousProducer(producerConfig)
        );
    }
}
--------------------------------------------------------------------------------
/public/data_transformation/transform.ts:
--------------------------------------------------------------------------------
import { AsynchronousDataTransformer } from "./asynchronous_data_transformer.js";
import { SynchronousDataTransformer } from "./synchronous_data_transformer.js";
import { ITransformerConfig } from "../interfaces/transformer_config.js";
import { IEventTransformer } from "../interfaces/event_transformer.js";
import { KafkaError } from "@internal/errors/kafka_error.js";

/**
 * Function to be used as the functional implementation of the transformer classes for asynchronous
 * and synchronous data transformers. This function creates the transformer and then starts
 * transforming the events. A transform function needs to be passed.
11 | * 12 | * @param {ITransformerConfig} config - consumer config 13 | * @param {IEventTransformer} eventTransformer - event transformer containing and transform and error function 14 | * 15 | * @returns {AsynchronousDataTransformer | SynchronousDataTransformer} 16 | */ 17 | export function transform( 18 | config: ITransformerConfig, eventTransformer: IEventTransformer 19 | ): AsynchronousDataTransformer | SynchronousDataTransformer { 20 | const type = config.type; 21 | const consumerConfig = config.consumerConfig; 22 | const producerConfig = config.producerConfig; 23 | 24 | let transformer: AsynchronousDataTransformer | SynchronousDataTransformer; 25 | 26 | switch (type) { 27 | case "asynchronous": { 28 | //@ts-ignore 29 | transformer = new AsynchronousDataTransformer(consumerConfig, producerConfig); 30 | break; 31 | } 32 | 33 | case "synchronous": { 34 | //@ts-ignore 35 | transformer = new SynchronousDataTransformer(consumerConfig, producerConfig); 36 | break; 37 | } 38 | 39 | default: { 40 | throw new Error("Invalid type"); 41 | } 42 | } 43 | 44 | eventTransformer.error.bind(transformer); 45 | eventTransformer.transform.bind(transformer); 46 | 47 | //@ts-ignore 48 | transformer.transform = eventTransformer.transform; 49 | 50 | transformer.on("dataTransformer.fatalError", eventTransformer.error); 51 | 52 | transformer.start(); 53 | 54 | return transformer; 55 | } 56 | -------------------------------------------------------------------------------- /public/enums/bridgetype.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/enums/bridgetype.js"; 2 | -------------------------------------------------------------------------------- /public/enums/tokentype.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/enums/tokentype.js"; 2 | -------------------------------------------------------------------------------- /public/errors/api_error.ts: 
-------------------------------------------------------------------------------- 1 | export * from "@internal/errors/api_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/base_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/base_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/block_producer_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/block_producer_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/coder_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/coder_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/create_error_object.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/create_error_object.js"; 2 | -------------------------------------------------------------------------------- /public/errors/error_codes.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/error_codes.js"; 2 | -------------------------------------------------------------------------------- /public/errors/event_consumer_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/event_consumer_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/get_error_message.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/get_error_message.js"; 2 | 
-------------------------------------------------------------------------------- /public/errors/index.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/block_producer_error.js"; 2 | export * from "@internal/errors/coder_error.js"; 3 | export * from "@internal/errors/create_error_object.js"; 4 | export * from "@internal/errors/error_codes.js"; 5 | export * from "@internal/errors/event_consumer_error.js"; 6 | export * from "@internal/errors/get_error_message.js"; 7 | export * from "@internal/errors/is_base_error.js"; 8 | export * from "@internal/errors/is_librdkafka_error.js"; 9 | export * from "@internal/errors/kafka_error.js"; 10 | -------------------------------------------------------------------------------- /public/errors/is_base_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/is_base_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/is_librdkafka_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/is_librdkafka_error.js"; 2 | -------------------------------------------------------------------------------- /public/errors/kafka_error.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/errors/kafka_error.js"; 2 | -------------------------------------------------------------------------------- /public/event_consumer/abstract_event_consumer.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/event_consumer/abstract_event_consumer.js"; 2 | -------------------------------------------------------------------------------- /public/filter/bloom_filter.ts: -------------------------------------------------------------------------------- 1 | export * from 
"@internal/filter/bloom_filter.js"; 2 | -------------------------------------------------------------------------------- /public/index.ts: -------------------------------------------------------------------------------- 1 | // block_producer 2 | export * from "./block_producers/block_polling_producer.js"; 3 | export * from "./block_producers/erigon_block_producer.js"; 4 | export * from "./block_producers/quicknode_block_producer.js"; 5 | export * from "./block_producers/block_producer.js"; 6 | 7 | // coder 8 | export * from "./coder/abi_coder.js"; 9 | 10 | // data transformation 11 | export * from "./data_transformation/asynchronous_data_transformer.js"; 12 | export * from "./data_transformation/synchronous_data_transformer.js"; 13 | export * from "./data_transformation/transform.js"; 14 | 15 | // Enums 16 | export * from "./enums/bridgetype.js"; 17 | export * from "./enums/tokentype.js"; 18 | 19 | // Errors 20 | export * from "./errors/api_error.js"; 21 | export * from "./errors/base_error.js"; 22 | export * from "./errors/block_producer_error.js"; 23 | export * from "./errors/coder_error.js"; 24 | export * from "./errors/create_error_object.js"; 25 | export * from "./errors/error_codes.js"; 26 | export * from "./errors/event_consumer_error.js"; 27 | export * from "./errors/get_error_message.js"; 28 | export * from "./errors/is_base_error.js"; 29 | export * from "./errors/is_librdkafka_error.js"; 30 | export * from "./errors/kafka_error.js"; 31 | 32 | //Event Consumer 33 | export * from "./event_consumer/abstract_event_consumer.js"; 34 | 35 | // Bloom Filter 36 | export * from "./filter/bloom_filter.js"; 37 | 38 | // Interfaces 39 | export * from "./interfaces/index.js"; 40 | 41 | // kafka 42 | export * from "./kafka/consumer/consume.js"; 43 | export * from "./kafka/consumer/asynchronous_consumer.js"; 44 | export * from "./kafka/consumer/synchronous_consumer.js"; 45 | export * from "./kafka/producer/produce.js"; 46 | export * from 
"./kafka/producer/asynchronous_producer.js"; 47 | export * from "./kafka/producer/synchronous_producer.js"; 48 | 49 | // logger 50 | export * from "./logger/logger.js"; 51 | 52 | // MongoDB 53 | export * from "./mongo/database.js"; 54 | 55 | // rpc 56 | export * from "./rpc/json_rpc_client.js"; 57 | -------------------------------------------------------------------------------- /public/interfaces/async_observer.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/async_observer.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block_getter.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_getter.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block_getter_worker_promise.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_getter_worker_promise.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block_header.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_header.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block_producer_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_producer_config.js"; 2 | -------------------------------------------------------------------------------- 
/public/interfaces/block_subscription.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_subscription.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/block_worker_message.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/block_worker_message.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/coder.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/coder.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/coder_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/coder_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/common_kafka_events.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/common_kafka_events.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/consumer_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/consumer_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/consumer_queue_object.ts: -------------------------------------------------------------------------------- 1 | export * from 
"@internal/interfaces/consumer_queue_object.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/deposit.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/deposit.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/deserialised_kafka_message.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/deserialised_kafka_message.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/event_log.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/event_log.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/event_producer.ts: -------------------------------------------------------------------------------- 1 | export interface IEventProducer { 2 | emitter: () => Promise | void 3 | error: (value: E) => void 4 | closed: () => void 5 | } 6 | -------------------------------------------------------------------------------- /public/interfaces/event_transformer.ts: -------------------------------------------------------------------------------- 1 | import { ITransformedBlock } from "@internal/interfaces/transformed_block.js"; 2 | 3 | export interface IEventTransformer { 4 | transform: (value: G) => Promise> 5 | error: (error: E) => void 6 | } 7 | -------------------------------------------------------------------------------- /public/interfaces/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./async_observer.js"; 2 | export * from "./block_getter_worker_promise.js"; 3 | export * from "./block_getter.js"; 4 | export * from "./block_header.js"; 5 | export * from 
"./block_producer_config.js"; 6 | export * from "./block_subscription.js"; 7 | export * from "./block_worker_message.js"; 8 | export * from "./block.js"; 9 | export * from "./coder_config.js"; 10 | export * from "./coder.js"; 11 | export * from "./common_kafka_events.js"; 12 | export * from "./config.js"; 13 | export * from "./consumer_config.js"; 14 | export * from "./consumer_queue_object.js"; 15 | export * from "./deposit.js"; 16 | export * from "./deserialised_kafka_message.js"; 17 | export * from "./event_log.js"; 18 | export * from "./event_producer.js"; 19 | export * from "./event_transformer.js"; 20 | export * from "./kafka_coder_config.js"; 21 | export * from "./logger_config.js"; 22 | export * from "./mapper.js"; 23 | export * from "./new_heads_subscriber.js"; 24 | export * from "./observer.js"; 25 | export * from "./producer_config.js"; 26 | export * from "./quicknode_response.js"; 27 | export * from "./raw_block.js"; 28 | export * from "./raw_receipt.js"; 29 | export * from "./raw_transaction.js"; 30 | export * from "./rpc_payload.js"; 31 | export * from "./sequential_consumer_config.js"; 32 | export * from "./stream_api_block.js"; 33 | export * from "./synchronous_producer.js"; 34 | export * from "./transaction_receipt.js"; 35 | export * from "./transaction.js"; 36 | export * from "./transformed_block.js"; 37 | export * from "./transformer_config.js"; 38 | export * from "./web3_transaction_receipt.js"; 39 | export * from "./web3_transaction.js"; 40 | -------------------------------------------------------------------------------- /public/interfaces/kafka_coder_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/kafka_coder_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/logger_config.ts: -------------------------------------------------------------------------------- 1 | export * from 
"@internal/interfaces/logger_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/mapper.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/mapper.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/new_heads_subscriber.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/new_heads_subscriber.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/observer.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/observer.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/producer_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/producer_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/quicknode_response.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/quicknode_response.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/raw_block.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/raw_block.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/raw_receipt.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/raw_receipt.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/raw_transaction.ts: 
-------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/raw_transaction.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/rpc_payload.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/rpc_payload.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/sequential_consumer_config.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/sequential_consumer_config.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/stream_api_block.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/stream_api_block.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/synchronous_producer.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/synchronous_producer.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/transaction.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/transaction.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/transaction_receipt.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/transaction_receipt.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/transformed_block.ts: -------------------------------------------------------------------------------- 1 | export * from 
"@internal/interfaces/transformed_block.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/transformer_config.ts: -------------------------------------------------------------------------------- 1 | import { IConsumerConfig } from "@internal/interfaces/consumer_config.js"; 2 | import { IProducerConfig } from "@internal/interfaces/producer_config.js"; 3 | 4 | export interface ITransformerConfig { 5 | consumerConfig: IConsumerConfig, 6 | producerConfig: IProducerConfig 7 | type: string, 8 | } 9 | -------------------------------------------------------------------------------- /public/interfaces/web3_transaction.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/web3_transaction.js"; 2 | -------------------------------------------------------------------------------- /public/interfaces/web3_transaction_receipt.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/interfaces/web3_transaction_receipt.js"; 2 | -------------------------------------------------------------------------------- /public/kafka/consumer/asynchronous_consumer.ts: -------------------------------------------------------------------------------- 1 | import { AsynchronousConsumer as InternalAsynchronousConsumer } from "@internal/kafka/consumer/asynchronous_consumer.js"; 2 | import { IConsumerConfig } from "@internal/interfaces/consumer_config.js"; 3 | import { Coder } from "@internal/coder/protobuf_coder.js"; 4 | import { ICoderConfig } from "@internal/interfaces/coder_config.js"; 5 | 6 | /** 7 | * The AsynchronousConsumer extends InternalAsynchronousConsumer class to provide the abstraction of the coder class. 8 | * coders can be passed optionally if another type of serialising/deserialising is required. 
9 | * 10 | * @extends AsynchronousConsumer 11 | */ 12 | export class AsynchronousConsumer extends InternalAsynchronousConsumer { 13 | 14 | /** 15 | * @constructor 16 | * 17 | * @param {IConsumerConfig} config - Key value pairs to override the default config of the consumer client. 18 | */ 19 | constructor( 20 | config: IConsumerConfig, 21 | ) { 22 | let coders = config.coders; 23 | const topic = config.topic; 24 | delete config.topic; 25 | delete config.coders; 26 | 27 | if (!topic) { 28 | throw new Error("Please provide topic"); 29 | } 30 | 31 | if (!coders) { 32 | throw new Error("Please provide coders"); 33 | } 34 | 35 | if (Array.isArray(coders) || "fileName" in coders) { 36 | const coderConfig = coders; 37 | coders = {}; 38 | if (Array.isArray(topic) && Array.isArray(coderConfig)) { 39 | for (let topicIndex = 0; topicIndex < topic.length; topicIndex++) { 40 | coders[topic[topicIndex]] = new Coder( 41 | coderConfig[topicIndex].fileName, 42 | coderConfig[topicIndex].packageName, 43 | coderConfig[topicIndex].messageType 44 | ); 45 | } 46 | } else if (!Array.isArray(topic) && !Array.isArray(coderConfig)) { 47 | coders[topic] = new Coder( 48 | (coderConfig as ICoderConfig).fileName, 49 | (coderConfig as ICoderConfig).packageName, 50 | (coderConfig as ICoderConfig).messageType, 51 | (coderConfig as ICoderConfig).fileDirectory, 52 | ); 53 | } else { 54 | throw new Error("Please provide valid coder config or topic"); 55 | } 56 | } 57 | 58 | super( 59 | topic, 60 | coders, 61 | config 62 | ); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /public/kafka/consumer/consume.ts: -------------------------------------------------------------------------------- 1 | import { IConsumerConfig } from "@internal/interfaces/consumer_config.js"; 2 | import { SynchronousConsumer } from "./synchronous_consumer.js"; 3 | import { AsynchronousConsumer } from "./asynchronous_consumer.js"; 4 | import { IObserver } from 
"@internal/interfaces/observer.js";
import { DeserialisedMessage } from "public/index.js";
import { BaseError } from "@internal/errors/base_error.js";

/**
 * Functional implementation for the consumer classes for asynchronous and
 * synchronous consumption. This function creates the coder class internally if a
 * protobuf coder is required; type and coder can be passed if a coder other than
 * the protobuf coder is needed.
 *
 * @param {IConsumerConfig} config - consumer config
 * @param {IObserver} observer - observer class for next, error, closed events
 *
 * @returns {AsynchronousConsumer | SynchronousConsumer}
 */
export function consume(
    config: IConsumerConfig, observer: IObserver
): AsynchronousConsumer | SynchronousConsumer {
    // The type key is consumed here and must not leak into the kafka config.
    const consumerType = config.type;
    delete config.type;

    let consumer: AsynchronousConsumer | SynchronousConsumer;

    if (consumerType === "asynchronous") {
        consumer = new AsynchronousConsumer(config);
    } else if (consumerType === "synchronous") {
        consumer = new SynchronousConsumer(config);
    } else {
        throw new Error("Invalid type");
    }

    consumer.start(observer);

    return consumer;
}
--------------------------------------------------------------------------------
/public/kafka/consumer/synchronous_consumer.ts:
--------------------------------------------------------------------------------
import { SynchronousConsumer as InternalSynchronousConsumer } from "@internal/kafka/consumer/synchronous_consumer.js";
import { IConsumerConfig } from "@internal/interfaces/consumer_config.js";
import { Coder } from "@internal/coder/protobuf_coder.js";
import { ICoderConfig } from "@internal/interfaces/coder_config.js";

/**
 * The SynchronousConsumer extends InternalSynchronousConsumer class to provide the abstraction of the coder class.
8 | * coders can be passed optionally if another type of serialising/deserialising is required. 9 | * 10 | * @extends SynchronousConsumer 11 | */ 12 | export class SynchronousConsumer extends InternalSynchronousConsumer { 13 | 14 | /** 15 | * @constructor 16 | * 17 | * @param {IConsumerConfig} config - Key value pairs to override the default config of the consumer client. 18 | */ 19 | constructor( 20 | config: IConsumerConfig, 21 | ) { 22 | let coders = config.coders; 23 | const topic = config.topic; 24 | delete config.topic; 25 | delete config.coders; 26 | 27 | if (!topic) { 28 | throw new Error("Please provide topic"); 29 | } 30 | 31 | if (!coders) { 32 | throw new Error("Please provide coders"); 33 | } 34 | 35 | if (Array.isArray(coders) || "fileName" in coders) { 36 | const coderConfig = coders; 37 | coders = {}; 38 | if (Array.isArray(topic) && Array.isArray(coderConfig)) { 39 | for (let topicIndex = 0; topicIndex < topic.length; topicIndex++) { 40 | coders[topic[topicIndex]] = new Coder( 41 | coderConfig[topicIndex].fileName, 42 | coderConfig[topicIndex].packageName, 43 | coderConfig[topicIndex].messageType 44 | ); 45 | } 46 | } else if (!Array.isArray(topic) && !Array.isArray(coderConfig)) { 47 | coders[topic] = new Coder( 48 | (coderConfig as ICoderConfig).fileName, 49 | (coderConfig as ICoderConfig).packageName, 50 | (coderConfig as ICoderConfig).messageType, 51 | (coderConfig as ICoderConfig).fileDirectory, 52 | ); 53 | } else { 54 | throw new Error("Please provide valid coder config or topic"); 55 | } 56 | } 57 | 58 | super( 59 | topic, 60 | coders, 61 | config 62 | ); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /public/kafka/producer/asynchronous_producer.ts: -------------------------------------------------------------------------------- 1 | import { AsynchronousProducer as InternalAsynchronousProducer } from "@internal/kafka/producer/asynchronous_producer.js"; 2 | import { IProducerConfig } from 
"@internal/interfaces/producer_config.js"; 3 | import { Coder } from "@internal/coder/protobuf_coder.js"; 4 | 5 | /** 6 | * AsynchronousProducer class entends InternalAsynchronousProducer which creates an instance of AsynchronousProducer 7 | * it abstracts the usage of coder class internally. serialiser can be passed optionally if another type of 8 | * serialising/deserialising is required. 9 | */ 10 | export class AsynchronousProducer extends InternalAsynchronousProducer { 11 | /** 12 | * 13 | * @param {IProducerConfig} config - key value pairs to override the default config of the producer client. 14 | */ 15 | constructor( 16 | config: IProducerConfig 17 | ) { 18 | let coder = config.coder; 19 | delete config.coder; 20 | 21 | if (!coder) { 22 | throw new Error("Please provide coder"); 23 | } 24 | 25 | if ("fileName" in coder) { 26 | coder = new Coder( 27 | coder.fileName, 28 | coder.packageName, 29 | coder.messageType, 30 | coder.fileDirectory, 31 | ); 32 | } 33 | 34 | super( 35 | coder, 36 | config 37 | ); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /public/kafka/producer/synchronous_producer.ts: -------------------------------------------------------------------------------- 1 | import { SynchronousProducer as InternalSynchronousProducer } from "@internal/kafka/producer/synchronous_producer.js"; 2 | import { IProducerConfig } from "@internal/interfaces/producer_config.js"; 3 | import { Coder } from "@internal/coder/protobuf_coder.js"; 4 | 5 | /** 6 | * SynchronousProducer class entends InternalSynchronousProducer which creates an instance of SynchronousProducer 7 | * it abstracts the usage of coder class internally. serialiser can be passed optionally if another type of 8 | * serialising/deserialising is required. 
9 | */ 10 | export class SynchronousProducer extends InternalSynchronousProducer { 11 | /** 12 | * 13 | * @param {IProducerConfig} config - key value pairs to override the default config of the producer client. 14 | */ 15 | constructor( 16 | config: IProducerConfig, 17 | ) { 18 | let coder = config.coder; 19 | delete config.coder; 20 | 21 | if (!coder) { 22 | throw new Error("Please provide coder"); 23 | } 24 | 25 | if ("fileName" in coder) { 26 | coder = new Coder( 27 | coder.fileName, 28 | coder.packageName, 29 | coder.messageType, 30 | coder.fileDirectory, 31 | ); 32 | } 33 | 34 | super( 35 | coder, 36 | config 37 | ); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /public/logger/logger.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/logger/logger.js"; 2 | -------------------------------------------------------------------------------- /public/mongo/database.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/mongo/database.js"; 2 | -------------------------------------------------------------------------------- /public/rpc/json_rpc_client.ts: -------------------------------------------------------------------------------- 1 | export * from "@internal/rpc/json_rpc_client.js"; 2 | -------------------------------------------------------------------------------- /schemas/block.proto: -------------------------------------------------------------------------------- 1 | package blockpackage; 2 | syntax = "proto3"; 3 | 4 | import "transaction.proto"; 5 | 6 | 7 | message Block { 8 | required uint64 number = 1; 9 | required string hash = 2; 10 | required string logsBloom = 3; 11 | required string parentHash = 4; 12 | required string receiptsRoot = 5; 13 | required string stateRoot = 6; 14 | required string transactionsRoot = 7; 15 | required uint64 timestamp = 8; 16 | required uint64 nonce = 
9; 17 | required uint64 gasLimit = 10; 18 | required uint64 gasUsed = 11; 19 | string baseFeePerGas = 12; 20 | string difficulty = 13; 21 | string totalDifficulty = 14; 22 | string extraData = 15; 23 | string sha3Uncles = 16; 24 | string miner = 17; 25 | string size = 18; 26 | repeated Transaction transactions = 19; 27 | } 28 | -------------------------------------------------------------------------------- /schemas/bridge_assets.proto: -------------------------------------------------------------------------------- 1 | package assetspackage; 2 | syntax = "proto3"; 3 | 4 | message Networks { 5 | uint32 origin = 1; 6 | uint32 destination = 2; 7 | } 8 | 9 | message RollUpMetaData { 10 | string bridgeContractAddress = 1; 11 | string globalExitRootManager = 2; 12 | Networks networks = 3; 13 | } 14 | 15 | message TokenInfo { 16 | string tokenType = 1; 17 | uint32 originTokenNetwork = 2; 18 | string originTokenAddress = 3; 19 | string wrappedTokenAddress = 4; 20 | } 21 | 22 | message ExitRoots { 23 | string mainnet = 1; 24 | string rollUp = 2; 25 | } 26 | 27 | message BridgeAssets { 28 | message BridgeEvent { 29 | RollUpMetaData rollUpMetaData = 1; 30 | string transactionHash = 2; 31 | TokenInfo tokenInfo = 3; 32 | string receiver = 4; 33 | string amount = 5; 34 | string metadata = 6; 35 | uint64 depositCount = 7; 36 | ExitRoots exitRoots = 8; 37 | string depositor = 9; 38 | uint32 leafType = 10; 39 | bool refuel = 11; 40 | string leaf = 12; 41 | string refuelAmount = 13; 42 | string refuelType = 14; 43 | uint32 refuelTrackingNumber = 15; 44 | } 45 | 46 | uint64 blockNumber = 1; 47 | uint64 timestamp = 2; 48 | repeated BridgeEvent data = 3; 49 | } 50 | -------------------------------------------------------------------------------- /schemas/burnblock.proto: -------------------------------------------------------------------------------- 1 | package burnblockpackage; 2 | syntax = "proto3"; 3 | 4 | message BurnBlock { 5 | message BurnEvent { 6 | required string tokenType = 
// NOTE(review): the `required` label is proto2-only; protoc rejects it under
// `syntax = "proto3"`. This presumably only parses because the project's
// protobuf loader (protobufjs) is lenient — confirm the intended syntax
// version for these schemas.
// NOTE(review): the spec expects the `syntax` statement to be the first
// non-comment line of the file, before `package` — confirm the toolchain
// tolerates this ordering.
package checkpointblockpackage;
syntax = "proto3";

// A block's checkpoint submission events.
message CheckpointBlock {
    // One checkpoint proposed within the block.
    message CheckpointEvent {
        required string transactionHash = 1;
        required string proposer = 2;
        required uint64 start = 3;
        required uint64 end = 4;
        required uint64 checkpointNumber = 5;
        required uint64 transactionIndex = 6;
        required string root = 7;
        required uint64 headerBlockId = 8;
        required string reward = 9;
        required string input = 10;
    }

    required uint64 blockNumber = 1;
    required uint64 timestamp = 2;
    repeated CheckpointEvent data = 3;
}
-------------------------------------------------------------------------------- 1 | package depositblockpackage; 2 | syntax = "proto3"; 3 | 4 | message DepositBlock { 5 | message DepositEvent { 6 | required string tokenType = 1; 7 | required string bridgeType = 2; 8 | required string transactionHash = 3; 9 | required string depositor = 4; 10 | required string depositReceiver = 5; 11 | required string rootToken = 6; 12 | repeated string amounts = 7; 13 | repeated string tokenIds = 8; 14 | string rootTunnelAddress = 9; 15 | bool refuel = 10; 16 | string nonce = 11; 17 | } 18 | 19 | required uint64 blockNumber = 1; 20 | required uint64 timestamp = 2; 21 | repeated DepositEvent data = 3; 22 | } 23 | -------------------------------------------------------------------------------- /schemas/eventlog.proto: -------------------------------------------------------------------------------- 1 | package blockpackage; 2 | syntax = "proto3"; 3 | 4 | message EventLog { 5 | required string address = 1; 6 | required string data = 2; 7 | required uint64 logIndex = 3; 8 | repeated string topics = 4; 9 | required string transactionHash = 5; 10 | required uint64 transactionIndex = 6; 11 | required uint64 blockNumber = 7; 12 | required string blockHash = 8; 13 | bool removed = 9; 14 | } 15 | -------------------------------------------------------------------------------- /schemas/global_exit_root.proto: -------------------------------------------------------------------------------- 1 | package globalexitrootpackage; 2 | syntax = "proto3"; 3 | 4 | message ExitRoots { 5 | string mainnet = 1; 6 | string rollUp = 2; 7 | } 8 | 9 | message GlobalExitRoot { 10 | message GlobalExitRootEvent { 11 | uint64 rollUpId = 1; 12 | uint64 batchNumber = 2; 13 | uint64 depositCount = 3; 14 | string aggregator = 4; 15 | string rollUpExitRoot = 5; 16 | string globalExitRoot = 6; 17 | ExitRoots exitRoots = 7; 18 | } 19 | 20 | uint64 blockNumber = 1; 21 | uint64 timestamp = 2; 22 | repeated 
GlobalExitRootEvent data = 3; 23 | } 24 | -------------------------------------------------------------------------------- /schemas/global_exit_root_l2.proto: -------------------------------------------------------------------------------- 1 | package globalexitrootl2package; 2 | syntax = "proto3"; 3 | 4 | message GlobalExitRootL2 { 5 | required string globalExitRootL2 = 1; 6 | bool removed = 2; 7 | } 8 | -------------------------------------------------------------------------------- /schemas/mappings.proto: -------------------------------------------------------------------------------- 1 | package assetspackage; 2 | syntax = "proto3"; 3 | import public "bridge_assets.proto"; 4 | 5 | message Mappings { 6 | message Metadata { 7 | string name = 1; 8 | string symbol = 2; 9 | uint32 decimals = 3; 10 | } 11 | 12 | message MappingEvent { 13 | string transactionHash = 1; 14 | TokenInfo tokenInfo = 2; 15 | uint32 wrappedTokenNetwork = 3; 16 | Metadata metadata = 4; 17 | } 18 | 19 | uint64 blockNumber = 1; 20 | uint64 timestamp = 2; 21 | repeated MappingEvent data = 3; 22 | } 23 | -------------------------------------------------------------------------------- /schemas/new_batch.proto: -------------------------------------------------------------------------------- 1 | package batchpackage; 2 | syntax = "proto3"; 3 | 4 | message Batch { 5 | required uint64 number = 1; 6 | required string globalExitRoot = 2; 7 | required string mainnetExitRoot = 3; 8 | required string rollupExitRoot = 4; 9 | } 10 | -------------------------------------------------------------------------------- /schemas/posmapping.proto: -------------------------------------------------------------------------------- 1 | package posmappingpackage; 2 | syntax = "proto3"; 3 | 4 | message Mappings { 5 | message MappingEvent { 6 | string transactionHash = 1; 7 | string rootToken = 2; 8 | string childToken = 3; 9 | string tokenType = 4; 10 | string bridgeType = 5; 11 | string tokenTypeHash = 6; 12 | string 
// NOTE(review): the `required` label is proto2-only; protoc rejects it under
// `syntax = "proto3"`, and the spec expects `syntax` before `package`. This
// presumably relies on a lenient parser (protobufjs) — confirm.
package testpackage;
syntax = "proto3";


// Minimal message used by the test suite.
message Test {
    required uint64 number = 1;
    required string string = 2;
}
string to = 7; 30 | required string value = 8; 31 | required string gasPrice = 9; 32 | required uint64 gas = 10; 33 | required string input = 11; 34 | string maxFeePerGas = 12; 35 | string maxPriorityFeePerGas = 13; 36 | string chainId = 14; 37 | string v = 15; 38 | string r = 16; 39 | string s = 17; 40 | uint32 type = 18; 41 | required TransactionReceipt receipt = 19; 42 | } 43 | -------------------------------------------------------------------------------- /schemas/withdrawblock.proto: -------------------------------------------------------------------------------- 1 | package withdrawblockpackage; 2 | syntax = "proto3"; 3 | 4 | message WithdrawBlock { 5 | message WithdrawEvent { 6 | string tokenType = 1; 7 | required string bridgeType = 2; 8 | required string transactionHash = 3; 9 | required string withdrawReceiver = 4; 10 | uint64 burnTransactionIndex = 5; 11 | uint64 burnTransactionBlockNumber = 6; 12 | string transactionType = 7; 13 | repeated string amounts = 8; 14 | repeated string tokenIds = 9; 15 | required bool isDecoded = 10; 16 | required string rootToken = 11; 17 | string rootTunnel = 12; 18 | string exitId = 13; 19 | 20 | } 21 | 22 | required uint64 blockNumber = 1; 23 | required uint64 timestamp = 2; 24 | repeated WithdrawEvent data = 3; 25 | } 26 | -------------------------------------------------------------------------------- /sonar-project.properties: -------------------------------------------------------------------------------- 1 | sonar.projectKey=0xPolygon_chain-indexer-framework 2 | sonar.organization=0xpolygon 3 | -------------------------------------------------------------------------------- /tests/__mocks__/coder.js: -------------------------------------------------------------------------------- 1 | export const coder = { 2 | serialize: jest.fn().mockReturnValue("demo"), 3 | deserialize: jest.fn().mockReturnValue("demo") 4 | }; 5 | -------------------------------------------------------------------------------- 
/tests/__mocks__/observer.js: -------------------------------------------------------------------------------- 1 | export const observer = { 2 | next: jest.fn(), 3 | error: jest.fn(), 4 | closed: jest.fn() 5 | } 6 | -------------------------------------------------------------------------------- /tests/coder/abi_coder.test.ts: -------------------------------------------------------------------------------- 1 | import { ABICoder } from "../../dist/internal/coder/abi_coder"; 2 | import AbiCoder from "web3-eth-abi"; 3 | 4 | jest.mock("web3-eth-abi"); 5 | 6 | describe("abi_coder", () => { 7 | let abiCoderObject: jest.MockedObject 8 | 9 | beforeEach(() => { 10 | abiCoderObject = AbiCoder as jest.MockedObject; 11 | }); 12 | 13 | describe("ABICoder", () => { 14 | test("decodeParameter", () => { 15 | //@ts-ignore 16 | abiCoderObject.decodeParameter.mockReturnValueOnce("mocked_result") 17 | expect( 18 | ABICoder.decodeParameter( 19 | "mocked_type", 20 | "mocked_input" 21 | ) 22 | ).toEqual("mocked_result") 23 | }); 24 | 25 | test("decodeParameters", () => { 26 | //@ts-ignore 27 | abiCoderObject.decodeParameters.mockReturnValueOnce(["mocked_result"]) 28 | expect( 29 | ABICoder.decodeParameters( 30 | ["mocked_type"], 31 | "mocked_input" 32 | ) 33 | ).toEqual(["mocked_result"]) 34 | }); 35 | 36 | test("encodeParameters", () => { 37 | //@ts-ignore 38 | abiCoderObject.encodeParameters.mockReturnValueOnce("mocked_result") 39 | expect( 40 | ABICoder.encodeParameters( 41 | ["mocked_type"], 42 | ["mocked_value"] 43 | ) 44 | ).toEqual("mocked_result") 45 | }); 46 | 47 | test("decodeLog", () => { 48 | //@ts-ignore 49 | abiCoderObject.decodeLog.mockReturnValueOnce(["mocked_result"]) 50 | expect( 51 | ABICoder.decodeLog( 52 | ["mocked_input"], 53 | "mocked_hex", 54 | ["mocked_topics"] 55 | ) 56 | ).toEqual(["mocked_result"]) 57 | }); 58 | 59 | test("decodeMethod", () => { 60 | //@ts-ignore 61 | abiCoderObject.decodeParameters.mockReturnValueOnce(["mocked_result"]) 62 | expect( 63 | 
ABICoder.decodeMethod( 64 | ['bytes'], 65 | "mocked_input_data" 66 | ) 67 | ).toEqual(["mocked_result"]) 68 | }); 69 | }); 70 | }); 71 | -------------------------------------------------------------------------------- /tests/errors/base_error.test.ts: -------------------------------------------------------------------------------- 1 | import { BaseError } from "../../dist/internal/errors/base_error"; 2 | 3 | describe("BaseError", () => { 4 | let baseErrorCodes: { BASE_ERROR: number}; 5 | 6 | beforeEach(() => { 7 | baseErrorCodes = { BASE_ERROR: 100 }; 8 | }); 9 | 10 | test("BaseError.message must be name of the error if the message is not passed.", 11 | () => { 12 | expect(new BaseError("mock", 123).message).toBe("mock"); 13 | } 14 | ); 15 | 16 | test("BaseError must have static property codes with error codes related to BaseError", 17 | () => { 18 | expect(BaseError.codes).toEqual(baseErrorCodes); 19 | } 20 | ); 21 | 22 | test("BaseError must have unique identifier to identify all base errors", 23 | () => { 24 | expect(new BaseError("mock", 123).identifier).toEqual(baseErrorCodes.BASE_ERROR); 25 | } 26 | ); 27 | 28 | test("BaseError.message must be message passed to constructor", 29 | () => { 30 | expect(new BaseError("mock", 123, false, "mock message").message).toBe("mock message"); 31 | } 32 | ); 33 | 34 | test("Base error stack must be the stack passed to the constructor if passed", 35 | () => { 36 | expect( 37 | new BaseError( 38 | "mock", 39 | 123, 40 | false, 41 | "mock message", 42 | "local", 43 | "mock error stack" 44 | ).stack 45 | ).toBe("mock error stack"); 46 | } 47 | ); 48 | }); 49 | -------------------------------------------------------------------------------- /tests/errors/coder_error.test.ts: -------------------------------------------------------------------------------- 1 | import { CoderError } from "../../dist/internal/errors/coder_error"; 2 | import { BaseError } from "../../dist/internal/errors/base_error"; 3 | 4 | 
jest.mock("../../dist/internal/errors/base_error"); 5 | jest.mock("../../dist/internal/errors/is_base_error"); 6 | jest.mock("../../dist/internal/errors/create_error_object"); 7 | 8 | describe("Coder Error", () => { 9 | let coderError: CoderError; 10 | let mockedBaseErrorClass: jest.MockedClass; 11 | 12 | 13 | beforeEach(() => { 14 | coderError = new CoderError(); 15 | mockedBaseErrorClass = BaseError as jest.MockedClass; 16 | }); 17 | 18 | test("coder error by default must have 'Coder Error' as name set via super()", 19 | () => { 20 | expect(mockedBaseErrorClass).toBeCalledWith( 21 | "Coder Error", 22 | expect.anything(), 23 | expect.anything(), 24 | undefined, 25 | "local", 26 | undefined 27 | ); 28 | } 29 | ); 30 | 31 | test("coder error by default must have 1000 as error code set via super()", 32 | () => { 33 | expect(mockedBaseErrorClass).toBeCalledWith( 34 | expect.anything(), 35 | 1000, 36 | expect.anything(), 37 | undefined, 38 | expect.anything(), 39 | undefined 40 | ); 41 | } 42 | ); 43 | 44 | test("coder error must always set origin to 'local'", 45 | () => { 46 | expect(mockedBaseErrorClass).toBeCalledWith( 47 | expect.anything(), 48 | expect.anything(), 49 | expect.anything(), 50 | undefined, 51 | "local", 52 | undefined 53 | ); 54 | } 55 | ); 56 | 57 | test("Block producer must have a static property codes which is an object of all block producer codes", 58 | () => { 59 | expect(CoderError.codes).toEqual({ 60 | BASE_ERROR: 100, 61 | UNKNOWN_CODER_ERR: 1000, 62 | INVALID_PATH_PROTO: 1001, 63 | INVALID_PATH_TYPE: 1002, 64 | DECODING_ERROR: 1003, 65 | ENCODING_VERIFICATION_FAILED: 1004 66 | }); 67 | } 68 | ); 69 | }); 70 | -------------------------------------------------------------------------------- /tests/errors/create_error_object.test.ts: -------------------------------------------------------------------------------- 1 | import { createErrorObject } from "../../dist/internal/errors/create_error_object"; 2 | 3 | describe("Errors - create error 
object", () => { 4 | const tests = [ 5 | 0, 6 | "string", 7 | { 8 | string: "string", 9 | number: 0 10 | } 11 | ]; 12 | 13 | test("Must return object without modification, if an instance of Error class", 14 | () => { 15 | const error: Error = new Error("demo"); 16 | expect(createErrorObject(error)).toBe(error); 17 | } 18 | ); 19 | 20 | test("Must return object without modification, if an instance of TypeError class", 21 | () => { 22 | const error: TypeError = new TypeError("demo"); 23 | expect(createErrorObject(error)).toBe(error); 24 | } 25 | ); 26 | 27 | tests.forEach((item) => test("Must return error object", () => { 28 | expect(createErrorObject(item)).toBeInstanceOf(Error); 29 | })); 30 | }); 31 | -------------------------------------------------------------------------------- /tests/errors/get_error_message.test.ts: -------------------------------------------------------------------------------- 1 | import { getErrorMessage } from "../../dist/internal/errors/get_error_message"; 2 | 3 | describe("Errors - get error message", () => { 4 | test("Must return error message when error object is passed", 5 | () => { 6 | expect(getErrorMessage(new Error("demo"))).toBe("demo"); 7 | } 8 | ); 9 | 10 | test("Must return string as error message if passed variable is not instance of Error", 11 | () => { 12 | expect(getErrorMessage("error")).toBe("error"); 13 | expect(getErrorMessage(123)).toBe("123"); 14 | expect(getErrorMessage({ 15 | code: 123 16 | })).toBe("{\"code\":123}"); 17 | } 18 | ); 19 | }); 20 | -------------------------------------------------------------------------------- /tests/kafka/consumer/synchronous_consumer.test.ts: -------------------------------------------------------------------------------- 1 | jest.mock("../../../dist/internal/kafka/consumer/abstract_consumer"); 2 | import { IConsumerQueueObject } from "../../../dist/internal/interfaces/consumer_queue_object"; 3 | import { DeserialisedMessage } from 
"../../../dist/internal/interfaces/deserialised_kafka_message"; 4 | import { SynchronousConsumer } from "../../../dist/internal/kafka/consumer/synchronous_consumer"; 5 | import { Coder } from "../../../dist/internal/coder/protobuf_coder"; 6 | import mockMessage from "../../mock_data/mock_message.json"; 7 | //@ts-ignore 8 | import { observer } from "../../__mocks__/observer"; 9 | //@ts-ignore 10 | import { coder } from "../../__mocks__/coder"; 11 | 12 | jest.mock("node-rdkafka"); 13 | 14 | 15 | describe("Kafka - Synchronous Consumer", () => { 16 | class MockClass extends SynchronousConsumer { 17 | public enqueueMock(message: DeserialisedMessage): IConsumerQueueObject { 18 | this.observer = observer; 19 | return this.enqueue(message); 20 | } 21 | } 22 | 23 | let consumer: MockClass; 24 | 25 | beforeEach(() => { 26 | consumer = new MockClass("chainId-137", {"chainId-137": coder as unknown as Coder}); 27 | }); 28 | 29 | test("Enqueue method must return ConsumerQueueObject without promise", () => { 30 | expect( 31 | consumer.enqueueMock( 32 | mockMessage as unknown as DeserialisedMessage 33 | ).promise 34 | ).toBeFalsy(); 35 | }); 36 | 37 | test("Enqueue method must return ConsumerQueueObject without modifying kafka message", () => { 38 | expect( 39 | consumer.enqueueMock( 40 | mockMessage as unknown as DeserialisedMessage 41 | ).message 42 | ).toBe(mockMessage); 43 | }); 44 | 45 | test("On enqueue observer.next must not be called", async () => { 46 | consumer.enqueueMock( 47 | mockMessage as unknown as DeserialisedMessage 48 | ); 49 | 50 | expect(observer.next).not.toBeCalled(); 51 | }); 52 | }); 53 | -------------------------------------------------------------------------------- /tests/mock_data/connect_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "message": "Local: Broker transport failure", 3 | "code": -195, 4 | "errno": -195, 5 | "origin": "kafka", 6 | "isFatal": true, 7 | "stack": "mock stack string" 8 | } 
9 | -------------------------------------------------------------------------------- /tests/mock_data/disconnected_error.json: -------------------------------------------------------------------------------- 1 | { 2 | "message": "KafkaConsumer is disconnected", 3 | "origin": "kafka", 4 | "code": -172, 5 | "errno": -172, 6 | "isFatal": true, 7 | "stack": "demo" 8 | } 9 | -------------------------------------------------------------------------------- /tests/mock_data/log.json: -------------------------------------------------------------------------------- 1 | { 2 | "address": "0x68a0F33CcEB2F7d3635e04DcC5FcA2b176108a07", 3 | "topics": [ 4 | "0xc12d213b215ecee7106a8e55cd2ee2f1157e4d6d787ad8fe68b09469cc1fc577", 5 | "0x000000000000000000000000f2017ed6f046d4cbb293db0b3aba9b8fe10ca6df", 6 | "0x000000000000000000000000000000000000000000000000000000000000365b" 7 | ], 8 | "data": "0x0000000000000000000000000000000000000000000000000000000000000012000000000000000000000000000000000000000000000000000000000000005100000000000000000000000000000000000000000000032d26d12e980b60000000000000000000000000000094b62040d663fa870d399e2fd044d40fbc0b79ec", 9 | "blockNumber": 0, 10 | "transactionHash": "0x738826631c0e38ab6f08284c1a0c9e75c80de15d3c11239b12739cbee250bec8", 11 | "transactionIndex": 96, 12 | "blockHash": "0x439350975cd7921db29dddd8bf6d0983258b52afdb2feac779d49f32ba81ffa9", 13 | "logIndex": 536, 14 | "removed": false, 15 | "id": "log_8d14e3ee" 16 | } 17 | -------------------------------------------------------------------------------- /tests/mock_data/metadata_mock.json: -------------------------------------------------------------------------------- 1 | { 2 | "orig_broker_id": 1, 3 | "orig_broker_name": "localhost:9092/1", 4 | "topics": [ 5 | { 6 | "name": "chainId-137", 7 | "partitions": [ 8 | { 9 | "id": 0, 10 | "leader": 1, 11 | "replicas": [ 12 | 1 13 | ], 14 | "isrs": [ 15 | 1 16 | ] 17 | } 18 | ] 19 | } 20 | ], 21 | "brokers": [ 22 | { 23 | "id": 1, 24 | "host": "localhost", 
25 | "port": 9092 26 | } 27 | ] 28 | } 29 | -------------------------------------------------------------------------------- /tests/mock_data/mock_message.json: -------------------------------------------------------------------------------- 1 | { 2 | "key": 123, 3 | "value": "demo", 4 | "topic": "chainId-137" 5 | } 6 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "NodeNext", 4 | "allowSyntheticDefaultImports": true, 5 | "moduleResolution": "nodenext", 6 | "resolveJsonModule": true, 7 | "esModuleInterop": true, 8 | "target": "esnext", 9 | "sourceMap": true, 10 | "outDir": "dist/", 11 | "declaration": true, 12 | "strict": true, 13 | "alwaysStrict": true, 14 | "baseUrl": "./", 15 | "paths": { 16 | "@internal/*": [ 17 | "internal/*" 18 | ] 19 | }, 20 | }, 21 | "include": [ 22 | "./internal/**/*", 23 | "./public/**/*" 24 | ] 25 | } 26 | --------------------------------------------------------------------------------