├── .dockerignore ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── epic-template-.md │ ├── feature-request-.md │ └── secondary-focus-area-.md └── workflows │ ├── block-streamer-ci.yml │ ├── close-completed-issues.yml │ ├── coordinator-ci.yml │ ├── deploy-dev-widgets.yml │ ├── deploy-prod-widgets.yml │ ├── deploy-widgets.yml │ ├── frontend-ci.yml │ └── runner-ci.yml ├── .gitignore ├── README.md ├── block-server ├── .gitignore ├── handler.js └── serverless.yml ├── block-streamer ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── README.md ├── build.rs ├── data │ ├── 000093085141 │ │ ├── block.json │ │ ├── shard_0.json │ │ ├── shard_1.json │ │ ├── shard_2.json │ │ └── shard_3.json │ ├── 000107503704 │ │ ├── block.json │ │ ├── list_objects.xml │ │ ├── shard_0.json │ │ ├── shard_1.json │ │ ├── shard_2.json │ │ └── shard_3.json │ ├── 000107503705 │ │ ├── block.json │ │ ├── list_objects.xml │ │ ├── shard_0.json │ │ ├── shard_1.json │ │ ├── shard_2.json │ │ └── shard_3.json │ └── invalid │ │ ├── block.json │ │ └── list_objects.xml ├── examples │ ├── list_streams.rs │ ├── start_stream.rs │ └── stop_stream.rs ├── graphql │ └── dataplatform_near │ │ ├── get_bitmaps_exact.graphql │ │ ├── get_bitmaps_wildcard.graphql │ │ └── schema.graphql ├── proto │ └── block_streamer.proto └── src │ ├── block_stream.rs │ ├── graphql │ ├── client.rs │ └── mod.rs │ ├── indexer_config.rs │ ├── lake_s3_client.rs │ ├── lib.rs │ ├── main.rs │ ├── metrics.rs │ ├── receiver_blocks │ ├── bitmap.rs │ ├── mod.rs │ └── receiver_blocks_processor.rs │ ├── redis.rs │ ├── rules │ ├── matcher.rs │ ├── mod.rs │ ├── outcomes_reducer.rs │ └── types.rs │ ├── s3_client.rs │ ├── server │ ├── block_streamer_service.rs │ └── mod.rs │ ├── test_utils.rs │ └── utils.rs ├── coordinator ├── Cargo.lock ├── Cargo.toml ├── Dockerfile ├── build.rs ├── proto │ └── indexer_manager.proto └── src │ ├── handlers │ ├── block_streams.rs │ ├── data_layer.rs │ ├── executors.rs │ └── mod.rs │ ├── indexer_config.rs │ ├── indexer_state.rs │ ├── lifecycle.rs │ ├── main.rs │ ├── redis.rs │ ├── registry.rs │ ├── server │ ├── indexer_manager_service.rs │ └── mod.rs │ ├── synchroniser.rs │ └── utils.rs ├── core-indexers ├── .eslintrc.js ├── jest.config.js ├── package-lock.json ├── package.json ├── receiver-blocks │ ├── indexer.js │ ├── schema.sql │ └── unit.test.ts └── tsconfig.json ├── docker-compose.yml ├── docs ├── README.md ├── best-practices.md ├── context.md ├── feed-indexer.md ├── how-works.md ├── hype-indexer.md ├── index-function.md ├── indexers.md ├── intro.md ├── migrate.md ├── nft-indexer.md ├── posts-indexer.md ├── query-data.md └── queryapi │ ├── autocomp-error.png │ ├── autocomp-types.png │ ├── autocomp1.jpg │ ├── autocomp2.jpg │ ├── autocomp3.jpg │ ├── autocomp4.jpg │ └── autocomp5.jpg ├── frontend ├── .editorconfig ├── .eslintrc.js ├── .gitignore ├── .prettierrc.js ├── Dockerfile ├── README.md ├── jest.config.js ├── next.config.js ├── package-lock.json ├── package.json ├── postcss.config.js ├── replacement.dev.json ├── replacement.local.json ├── replacement.mainnet.json ├── src │ ├── classes │ │ └── ValidationError.ts │ ├── components │ │ ├── Common │ │ │ ├── Alert.tsx │ │ │ ├── CustomTooltip.tsx │ │ │ ├── Icons │ │ │ │ ├── AlertSquareIcon.js │ │ │ │ ├── CheckMarkIcon.js │ │ │ │ ├── CheckMarkSquareIcon.js │ │ │ │ └── ClearIcon.js │ │ │ └── LatestBlock.tsx │ │ ├── CreateNewIndexer │ │ │ ├── CreateNewIndexer.js │ │ │ └── index.js │ │ ├── Editor │ │ │ ├── EditorComponents │ │ │ │ ├── Editor.tsx │ │ │ │ ├── FileSwitcher.jsx │ │ │ │ 
├── GlyphContainer.js │ │ │ │ ├── ResizableLayoutEditor.jsx │ │ │ │ ├── custom.d.ts │ │ │ │ ├── index.js │ │ │ │ └── primitives.d.ts │ │ │ ├── EditorView │ │ │ │ ├── BlockPickerView.jsx │ │ │ │ ├── DeveloperToolsView.jsx │ │ │ │ └── EditorMenuView.jsx │ │ │ ├── EditorViewContainer │ │ │ │ ├── BlockPickerContainer.tsx │ │ │ │ ├── DeveloperToolsContainer.tsx │ │ │ │ └── EditorMenuContainer.tsx │ │ │ └── QueryApiStorageManager.tsx │ │ ├── Logs │ │ │ ├── GraphQL │ │ │ │ ├── Query.ts │ │ │ │ └── QueryValidation.ts │ │ │ ├── LogsMenu.tsx │ │ │ ├── LogsView │ │ │ │ ├── ClearButtonView.jsx │ │ │ │ ├── DateSelectorView.jsx │ │ │ │ ├── IndexerLogsView.jsx │ │ │ │ ├── LogFieldCardView.jsx │ │ │ │ ├── LogTypeSelectorView.jsx │ │ │ │ ├── OptionSelectorView.jsx │ │ │ │ └── SeveritySelectorView.jsx │ │ │ └── LogsViewContainer │ │ │ │ ├── ClearButtonContainer.tsx │ │ │ │ ├── DateSelectorContainer.tsx │ │ │ │ ├── IndexerLogsContainer.tsx │ │ │ │ ├── LogTypeSelectorContainer.tsx │ │ │ │ ├── OptionSelectorContainer.tsx │ │ │ │ └── SeveritySelectorContainer.tsx │ │ ├── Modals │ │ │ ├── ForkIndexerModal.jsx │ │ │ ├── ModalsContainer │ │ │ │ └── PublishFormContainer.tsx │ │ │ ├── ModalsView │ │ │ │ └── PublishFormView.jsx │ │ │ ├── PublishModal.jsx │ │ │ └── ResetChangesModal.jsx │ │ └── Playground │ │ │ ├── graphiql.jsx │ │ │ └── index.js │ ├── constants │ │ ├── DurationMap.ts │ │ ├── RegexExp.js │ │ └── Strings.js │ ├── contexts │ │ ├── IndexerDetailsContext.tsx │ │ └── ModalContext.js │ ├── core │ │ └── InfoModal.jsx │ ├── pages │ │ ├── _app.jsx │ │ ├── _document.jsx │ │ ├── api │ │ │ ├── WizardCodeGenerator.ts │ │ │ └── generateCode.ts │ │ ├── create-new-indexer │ │ │ └── index.js │ │ ├── global.css │ │ ├── index.jsx │ │ └── query-api-editor │ │ │ └── index.js │ ├── test │ │ └── api │ │ │ └── generateCode.test.ts │ └── utils │ │ ├── calculateBlockTimeDifference.ts │ │ ├── calculateTimestamp.ts │ │ ├── debounce.js │ │ ├── fetchBlock.js │ │ ├── formatTimestamp.ts │ │ ├── formatters.js │ │ ├── formatters.test.js │ │ ├── getLatestBlockHeight.js │ │ ├── helpers.ts │ │ ├── indexerRunner.js │ │ ├── pgSchemaTypeGen.js │ │ ├── queryIndexerFunction.js │ │ ├── validators.test.ts │ │ └── validators.ts ├── tailwind.config.js ├── tsconfig.json └── widgets │ ├── examples │ └── feed │ │ └── src │ │ ├── QueryApi.Examples.Feed.Comment.jsx │ │ ├── QueryApi.Examples.Feed.LikeButton.jsx │ │ ├── QueryApi.Examples.Feed.Post.jsx │ │ ├── QueryApi.Examples.Feed.PostPage.jsx │ │ ├── QueryApi.Examples.Feed.Posts.jsx │ │ ├── QueryApi.Examples.Feed.jsx │ │ ├── QueryApi.Feed.ActivityPage.jsx │ │ ├── QueryApi.Feed.jsx │ │ └── QueryApi.dev.Feed.jsx │ └── src │ ├── NearQueryApi.jsx │ ├── NearQueryApi.metadata.json │ ├── QueryApi.App.jsx │ ├── QueryApi.App.metadata.json │ ├── QueryApi.Dashboard.jsx │ ├── QueryApi.Dashboard.metadata.json │ ├── QueryApi.Editor.jsx │ ├── QueryApi.Editor.metadata.json │ ├── QueryApi.IndexerCard.jsx │ ├── QueryApi.IndexerExplorer.jsx │ ├── QueryApi.IndexerStatus.jsx │ ├── QueryApi.IndexerStatus.metadata.json │ ├── QueryApi.Launchpad.jsx │ ├── QueryApi.NotFound.jsx │ ├── components │ └── toggle.jsx │ └── props.json ├── hasura-authentication-service ├── Cargo.lock ├── Cargo.toml ├── Dockerfile └── src │ ├── main.rs │ └── services.rs ├── postgres ├── Dockerfile └── init.sql ├── prometheus.yml ├── registry ├── contract │ ├── Cargo.lock │ ├── Cargo.toml │ ├── README.md │ ├── build.sh │ ├── deploy-dev.sh │ ├── deploy-local-testing.sh │ ├── deploy-prod.sh │ └── src │ │ └── lib.rs └── types │ ├── Cargo.lock │ ├── 
Cargo.toml │ └── src │ └── lib.rs ├── runner-client ├── Cargo.lock ├── Cargo.toml ├── build.rs ├── examples │ ├── check_provisioning_task_status.rs │ ├── list_executors.rs │ ├── start_executor.rs │ ├── start_provisioning_task.rs │ └── stop_executor.rs ├── proto │ ├── data-layer.proto │ └── runner.proto └── src │ └── lib.rs ├── runner ├── .eslintrc.js ├── .gitignore ├── Dockerfile ├── examples │ ├── list-executors.ts │ ├── start-executor.ts │ └── stop-executor.ts ├── jest.config.js ├── package-lock.json ├── package.json ├── protos │ ├── data-layer.proto │ └── runner.proto ├── scripts │ └── suspend-indexer.ts ├── src │ ├── globals.d.ts │ ├── index.ts │ ├── indexer-config │ │ ├── index.ts │ │ ├── indexer-config.test.ts │ │ └── indexer-config.ts │ ├── indexer-meta │ │ ├── index.ts │ │ ├── indexer-meta.test.ts │ │ ├── indexer-meta.ts │ │ ├── log-entry.test.ts │ │ ├── log-entry.ts │ │ └── no-op-indexer-meta.ts │ ├── indexer │ │ ├── context-builder │ │ │ ├── __snapshots__ │ │ │ │ └── context-builder.test.ts.snap │ │ │ ├── context-builder.test.ts │ │ │ ├── context-builder.ts │ │ │ └── index.ts │ │ ├── dml-handler │ │ │ ├── dml-handler.test.ts │ │ │ ├── dml-handler.ts │ │ │ ├── in-memory-dml-handler.test.ts │ │ │ ├── in-memory-dml-handler.ts │ │ │ └── index.ts │ │ ├── index.ts │ │ ├── indexer.test.ts │ │ ├── indexer.ts │ │ └── local-indexer.ts │ ├── instrumentation │ │ ├── index.ts │ │ └── tracer.ts │ ├── lake-client │ │ ├── index.ts │ │ ├── lake-client.test.ts │ │ └── lake-client.ts │ ├── logger.ts │ ├── metrics.ts │ ├── pg-client.ts │ ├── provisioner │ │ ├── __snapshots__ │ │ │ └── provisioner.test.ts.snap │ │ ├── hasura-client │ │ │ ├── __snapshots__ │ │ │ │ └── hasura-client.test.ts.snap │ │ │ ├── hasura-client.test.ts │ │ │ ├── hasura-client.ts │ │ │ └── index.ts │ │ ├── index.ts │ │ ├── provisioner.test.ts │ │ ├── provisioner.ts │ │ ├── provisioning-state │ │ │ ├── __snapshots__ │ │ │ │ └── provisioning-state.test.ts.snap │ │ │ ├── index.ts │ │ │ ├── provisioning-state.test.ts │ │ │ └── provisioning-state.ts │ │ └── schemas │ │ │ ├── logs-table.ts │ │ │ └── metadata-table.ts │ ├── server │ │ ├── index.ts │ │ └── services │ │ │ ├── data-layer │ │ │ ├── data-layer-service.test.ts │ │ │ ├── data-layer-service.ts │ │ │ └── index.ts │ │ │ └── runner │ │ │ ├── index.ts │ │ │ ├── runner-client.ts │ │ │ ├── runner-service.test.ts │ │ │ └── runner-service.ts │ ├── stream-handler │ │ ├── index.ts │ │ ├── redis-client │ │ │ ├── index.ts │ │ │ ├── redis-client.test.ts │ │ │ └── redis-client.ts │ │ ├── stream-handler.ts │ │ └── worker.ts │ └── utility.ts ├── tests │ ├── blocks │ │ ├── 00115185108 │ │ │ └── streamer_message.json │ │ └── 00115185109 │ │ │ └── streamer_message.json │ ├── integration.test.ts │ └── testcontainers │ │ ├── hasura.ts │ │ ├── postgres.ts │ │ └── utils.ts ├── tsconfig.build.json └── tsconfig.json ├── scripts └── wipe-database.sh └── terraform ├── pagoda-data-stack-dev ├── .terraform.lock.hcl ├── block-streamer.tf ├── coordinator-v1.tf ├── coordinator.tf ├── frontend.tf ├── hasura.tf ├── iam │ ├── .terraform.lock.hcl │ ├── main.tf │ └── resources.tf ├── postgres.tf ├── redis.tf ├── resources.tf ├── runner.tf ├── secrets.tf └── triggers.tf └── pagoda-data-stack-prod ├── .terraform.lock.hcl ├── block-streamer.tf ├── coordinator.tf ├── frontend.tf ├── hasura.tf ├── iam ├── .terraform.lock.hcl ├── main.tf └── resources.tf ├── postgres.tf ├── redis.tf ├── resources.tf ├── runner.tf ├── secrets.tf └── triggers.tf /.dockerignore: 
-------------------------------------------------------------------------------- 1 | **/target 2 | 3 | -------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @near/queryapi-core 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: 'bug' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | **Screenshots** 24 | If applicable, add screenshots to help explain your problem. 25 | 26 | **Desktop (please complete the following information):** 27 | - OS: [e.g. iOS] 28 | - Browser [e.g. chrome, safari] 29 | - Version [e.g. 22] 30 | 31 | **Smartphone (please complete the following information):** 32 | - Device: [e.g. iPhone6] 33 | - OS: [e.g. iOS8.1] 34 | - Browser [e.g. stock browser, safari] 35 | - Version [e.g. 22] 36 | 37 | **Additional context** 38 | Add any other context about the problem here. 39 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/epic-template-.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Epic Template ' 3 | about: 'Epics are milestones or groups of alike issues ' 4 | title: "\U0001F537 [Epic] New Epic " 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | ### Description 11 | (Overview of milestone or function governed by this epic) 12 | ### Success Criteria 13 | (Evaluate how this epic could be considered as complete and success) 14 | ### Resources 15 | (Relevant documentation, Figma links, and other reference material) 16 | Item 1 17 | Item 2 18 | Item 3 19 | ```[tasklist] 20 | ### Child Issues 21 | [ ] https://github.com/near/github-project-test/issues/1 22 | [ ] https://github.com/near/github-project-test/issues/2 23 | [ ] https://github.com/near/github-project-test/issues/3 24 | ``` 25 | ```[tasklist] 26 | ### dependencies/blocked 27 | [ ] https://github.com/near/github-project-test/issues/1 28 | [ ] https://github.com/near/github-project-test/issues/2 29 | [ ] https://github.com/near/github-project-test/issues/3 30 | ``` 31 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature-request-.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Feature Request ' 3 | about: Suggest an idea for this project. If this doesn't look right 4 | title: '' 5 | labels: '' 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 
18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/secondary-focus-area-.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: 'Secondary Focus Area ' 3 | about: This issue serves to help us propose and organize support for impactful work, 4 | as a secondary priority to epics & planned roadmap items. If this doesn't look right 5 | title: "\U0001F525 [Secondary Focus Area] " 6 | labels: '' 7 | assignees: '' 8 | 9 | --- 10 | 11 | **Motivation** 12 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 13 | 14 | **Describe the solution you'd like** 15 | A clear and concise description of what you want to happen. 16 | 17 | **Describe alternatives you've considered** 18 | A clear and concise description of any alternative solutions or features you've considered. 19 | 20 | **Additional context** 21 | Add any other context or screenshots about the feature request here. 22 | 23 | **Open questions** 24 | -------------------------------------------------------------------------------- /.github/workflows/block-streamer-ci.yml: -------------------------------------------------------------------------------- 1 | name: Block Streamer 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | paths: 7 | - "block-streamer/**" 8 | pull_request: 9 | paths: 10 | - "block-streamer/**" 11 | 12 | env: 13 | CARGO_TERM_COLOR: always 14 | 15 | jobs: 16 | check: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: Install Protoc 21 | uses: arduino/setup-protoc@v2 22 | with: 23 | repo-token: ${{ secrets.GITHUB_TOKEN }} 24 | - name: Check 25 | working-directory: ./block-streamer 26 | run: cargo check 27 | 28 | test: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - uses: actions/checkout@v3 32 | - name: Install Protoc 33 | uses: arduino/setup-protoc@v2 34 | with: 35 | repo-token: ${{ secrets.GITHUB_TOKEN }} 36 | - name: Test 37 | working-directory: ./block-streamer 38 | run: cargo test 39 | 40 | 41 | format: 42 | runs-on: ubuntu-20.04 43 | steps: 44 | - name: Checkout repository 45 | uses: actions/checkout@v3 46 | - name: Install Protoc 47 | uses: arduino/setup-protoc@v2 48 | with: 49 | repo-token: ${{ secrets.GITHUB_TOKEN }} 50 | - name: Install Rust 51 | uses: actions-rs/toolchain@v1 52 | with: 53 | toolchain: 1.75.0 54 | override: true 55 | profile: minimal 56 | components: rustfmt 57 | - name: Check formatting 58 | working-directory: ./block-streamer 59 | run: | 60 | cargo fmt -- --check 61 | 62 | clippy: 63 | runs-on: ubuntu-20.04 64 | steps: 65 | - name: Checkout repository 66 | uses: actions/checkout@v3 67 | - name: Install Protoc 68 | uses: arduino/setup-protoc@v2 69 | with: 70 | repo-token: ${{ secrets.GITHUB_TOKEN }} 71 | - name: Install Rust 72 | uses: actions-rs/toolchain@v1 73 | with: 74 | toolchain: 1.75.0 75 | override: true 76 | profile: minimal 77 | components: clippy 78 | - name: Clippy check 79 | working-directory: ./block-streamer 80 | run: | 81 | cargo clippy 82 | -------------------------------------------------------------------------------- /.github/workflows/close-completed-issues.yml: -------------------------------------------------------------------------------- 1 | name: 'Issue States' 2 | 3 | on: 4 | project_card: 5 | types: [created, edited, moved] 6 | 7 | permissions: 8 | repository-projects: read 9 | issues: write 
10 | pull-requests: write 11 | 12 | jobs: 13 | action: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: dessant/issue-states@v3 17 | with: 18 | github-token: ${{ github.token }} 19 | open-issue-columns: '' 20 | closed-issue-columns: 'Closed, Done' 21 | log-output: false 22 | -------------------------------------------------------------------------------- /.github/workflows/coordinator-ci.yml: -------------------------------------------------------------------------------- 1 | name: Coordinator 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | paths: 7 | - "coordinator/**" 8 | pull_request: 9 | paths: 10 | - "coordinator/**" 11 | 12 | env: 13 | CARGO_TERM_COLOR: always 14 | 15 | jobs: 16 | check: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v3 20 | - name: Install Protoc 21 | uses: arduino/setup-protoc@v2 22 | with: 23 | repo-token: ${{ secrets.GITHUB_TOKEN }} 24 | - name: Install Rust 25 | uses: actions-rs/toolchain@v1 26 | with: 27 | toolchain: 1.75.0 28 | override: true 29 | profile: minimal 30 | components: rustfmt 31 | - name: Check 32 | working-directory: ./coordinator 33 | run: cargo check 34 | 35 | test: 36 | runs-on: ubuntu-latest 37 | steps: 38 | - uses: actions/checkout@v3 39 | - name: Install Protoc 40 | uses: arduino/setup-protoc@v2 41 | with: 42 | repo-token: ${{ secrets.GITHUB_TOKEN }} 43 | - name: Install Rust 44 | uses: actions-rs/toolchain@v1 45 | with: 46 | toolchain: 1.75.0 47 | override: true 48 | profile: minimal 49 | components: rustfmt 50 | - name: Test 51 | working-directory: ./coordinator 52 | run: cargo test 53 | 54 | 55 | format: 56 | runs-on: ubuntu-20.04 57 | steps: 58 | - name: Checkout repository 59 | uses: actions/checkout@v3 60 | - name: Install Protoc 61 | uses: arduino/setup-protoc@v2 62 | with: 63 | repo-token: ${{ secrets.GITHUB_TOKEN }} 64 | - name: Install Rust 65 | uses: actions-rs/toolchain@v1 66 | with: 67 | toolchain: 1.75.0 68 | override: true 69 | profile: minimal 70 | components: rustfmt 71 | - name: Check formatting 72 | working-directory: ./coordinator 73 | run: | 74 | cargo fmt -- --check 75 | 76 | clippy: 77 | runs-on: ubuntu-20.04 78 | steps: 79 | - name: Checkout repository 80 | uses: actions/checkout@v3 81 | - name: Install Protoc 82 | uses: arduino/setup-protoc@v2 83 | with: 84 | repo-token: ${{ secrets.GITHUB_TOKEN }} 85 | - name: Install Rust 86 | uses: actions-rs/toolchain@v1 87 | with: 88 | toolchain: 1.75.0 89 | override: true 90 | profile: minimal 91 | components: clippy 92 | - name: Clippy check 93 | working-directory: ./coordinator 94 | run: | 95 | cargo clippy 96 | -------------------------------------------------------------------------------- /.github/workflows/deploy-dev-widgets.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Dev Components to Mainnet 2 | on: 3 | push: 4 | branches: [main] 5 | paths: 6 | - "frontend/widgets/**" 7 | jobs: 8 | deploy-mainnet: 9 | uses: ./.github/workflows/deploy-widgets.yml 10 | with: 11 | directory-paths: ${{vars.WIDGETS_DIRECTORY_PATHS}} 12 | deploy-account-address: ${{ vars.DEV_SIGNER_ACCOUNT_ID }} 13 | signer-public-key: ${{ vars.DEV_SIGNER_PUBLIC_KEY }} 14 | environment: dev 15 | secrets: 16 | SIGNER_PRIVATE_KEY: ${{ secrets.DEV_SIGNER_PRIVATE_KEY }} 17 | -------------------------------------------------------------------------------- /.github/workflows/deploy-prod-widgets.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Prod Components to Mainnet 2 | 
on: 3 | push: 4 | branches: [stable] 5 | paths: 6 | - "frontend/widgets/src/**" 7 | jobs: 8 | deploy-mainnet: 9 | uses: ./.github/workflows/deploy-widgets.yml 10 | with: 11 | directory-paths: ${{vars.WIDGETS_DIRECTORY_PATHS}} 12 | deploy-account-address: ${{ vars.PROD_SIGNER_ACCOUNT_ID }} 13 | signer-public-key: ${{ vars.PROD_SIGNER_PUBLIC_KEY }} 14 | environment: mainnet 15 | secrets: 16 | SIGNER_PRIVATE_KEY: ${{ secrets.PROD_SIGNER_PRIVATE_KEY }} 17 | -------------------------------------------------------------------------------- /.github/workflows/deploy-widgets.yml: -------------------------------------------------------------------------------- 1 | name: Deploy Components 2 | on: 3 | workflow_call: 4 | inputs: 5 | cli-version: 6 | required: false 7 | description: "Version of BOS CLI to use for deploy (e.g. 0.3.0)" 8 | type: string 9 | default: "0.3.1" 10 | deploy-account-address: 11 | required: true 12 | description: "Account under which component code should be deployed. Also the Signer" 13 | type: string 14 | signer-public-key: 15 | required: true 16 | description: "Public key for signing transactions in the format: `ed25519:`" 17 | type: string 18 | directory-paths: 19 | required: true 20 | description: "Comma-separated paths to the directories that contain the code to be deployed" 21 | type: string 22 | environment: 23 | required: true 24 | description: "mainnet or dev" 25 | type: string 26 | secrets: 27 | SIGNER_PRIVATE_KEY: 28 | description: "Private key in `ed25519:` format for signing transaction" 29 | required: true 30 | jobs: 31 | deploy-widgets: 32 | runs-on: ubuntu-latest 33 | name: Deploy widgets to social.near 34 | env: 35 | BOS_DEPLOY_ACCOUNT_ID: ${{ inputs.deploy-account-address }} 36 | BOS_SIGNER_PUBLIC_KEY: ${{ inputs.signer-public-key }} 37 | DIRECTORY_PATHS: ${{ inputs.directory-paths }} 38 | ENVIRONMENT: ${{inputs.environment}} 39 | BOS_SIGNER_PRIVATE_KEY: ${{ secrets.SIGNER_PRIVATE_KEY }} 40 | 41 | steps: 42 | - name: Checkout repository 43 | uses: actions/checkout@v2 44 | 45 | - name: Set replacements 46 | id: set_replacements 47 | run: | 48 | cd "frontend/widgets/" 49 | echo "replacements=$(jq -r '[to_entries[] | .["find"] = "${" + .key + "}" | .["replace"] = .value | del(.key, .value)]' ../replacement.${ENVIRONMENT}.json | tr -d "\n\r")" >> $GITHUB_OUTPUT 50 | 51 | - name: Replace placeholders 52 | uses: flcdrg/replace-multiple-action@v1 53 | with: 54 | files: '**/*.jsx' 55 | find: '${{ steps.set_replacements.outputs.replacements }}' 56 | prefix: '(^|.*)' 57 | suffix: '($|.*)' 58 | 59 | - name: Install near-social CLI 60 | run: | 61 | curl --proto '=https' --tlsv1.2 -LsSf https://github.com/FroVolod/bos-cli-rs/releases/download/v${{ inputs.cli-version }}/bos-cli-v${{ inputs.cli-version }}-installer.sh | sh 62 | 63 | - name: Deploy widgets 64 | run: | 65 | for DIR in $(echo $DIRECTORY_PATHS | tr "," "\n") 66 | do 67 | cd "$DIR" 68 | bos components deploy "$BOS_DEPLOY_ACCOUNT_ID" sign-as "$BOS_DEPLOY_ACCOUNT_ID" network-config mainnet sign-with-plaintext-private-key --signer-public-key "$BOS_SIGNER_PUBLIC_KEY" --signer-private-key "$BOS_SIGNER_PRIVATE_KEY" send 69 | cd - 70 | done 71 | -------------------------------------------------------------------------------- /.github/workflows/frontend-ci.yml: -------------------------------------------------------------------------------- 1 | name: frontend CI 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - "frontend/**" 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout repository 
14 | uses: actions/checkout@v3 15 | 16 | - name: Setup Node 17 | uses: actions/setup-node@v3 18 | with: 19 | node-version: '18.x' 20 | 21 | - name: Install Node Dependencies 22 | run: npm install 23 | working-directory: ./frontend 24 | 25 | - name: Test 26 | run: npm test 27 | working-directory: ./frontend 28 | lint: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - name: Checkout repository 32 | uses: actions/checkout@v3 33 | 34 | - name: Setup Node 35 | uses: actions/setup-node@v3 36 | with: 37 | node-version: '18.x' 38 | 39 | - name: Install Node Dependencies 40 | run: npm install 41 | working-directory: ./frontend 42 | 43 | - name: Lint 44 | run: npm run lint 45 | working-directory: ./frontend 46 | type-check: 47 | runs-on: ubuntu-latest 48 | steps: 49 | - name: Checkout repository 50 | uses: actions/checkout@v3 51 | 52 | - name: Setup Node 53 | uses: actions/setup-node@v3 54 | with: 55 | node-version: '18.x' 56 | 57 | - name: Install Node Dependencies 58 | run: npm install 59 | working-directory: ./frontend 60 | 61 | - name: Type Check 62 | run: npm run type-check 63 | working-directory: ./frontend 64 | build: 65 | runs-on: ubuntu-latest 66 | steps: 67 | - name: Checkout repository 68 | uses: actions/checkout@v3 69 | 70 | - name: Setup Node 71 | uses: actions/setup-node@v3 72 | with: 73 | node-version: '18.x' 74 | 75 | - name: Install Node Dependencies 76 | run: npm install 77 | working-directory: ./frontend 78 | 79 | - name: Build 80 | run: npm run build 81 | working-directory: ./frontend 82 | -------------------------------------------------------------------------------- /.github/workflows/runner-ci.yml: -------------------------------------------------------------------------------- 1 | name: Runner CI 2 | 3 | on: 4 | pull_request: 5 | paths: 6 | - "runner/**" 7 | 8 | jobs: 9 | test: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Checkout repository 14 | uses: actions/checkout@v3 15 | 16 | - name: Setup Node 17 | uses: actions/setup-node@v3 18 | with: 19 | node-version: '18.x' 20 | 21 | - name: Install Node Dependencies 22 | run: npm install 23 | working-directory: ./runner 24 | 25 | - name: Test 26 | run: npm test 27 | working-directory: ./runner 28 | lint: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - name: Checkout repository 32 | uses: actions/checkout@v3 33 | 34 | - name: Setup Node 35 | uses: actions/setup-node@v3 36 | with: 37 | node-version: '18.x' 38 | 39 | - name: Install Node Dependencies 40 | run: npm install 41 | working-directory: ./runner 42 | 43 | - name: Lint 44 | run: npm run lint 45 | working-directory: ./runner 46 | build: 47 | runs-on: ubuntu-latest 48 | steps: 49 | - name: Checkout repository 50 | uses: actions/checkout@v3 51 | 52 | - name: Setup Node 53 | uses: actions/setup-node@v3 54 | with: 55 | node-version: '18.x' 56 | 57 | - name: Install Node Dependencies 58 | run: npm install 59 | working-directory: ./runner 60 | 61 | - name: Build 62 | run: npm run build 63 | working-directory: ./runner 64 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | **/target 2 | .env* 3 | redis/ 4 | *.log 5 | /indexer/blocks/ 6 | node_modules/ 7 | .vscode/ 8 | runner/yarn.lock 9 | **/.DS_Store 10 | .terraform 11 | *.tfstate.* 12 | -------------------------------------------------------------------------------- /block-server/.gitignore: -------------------------------------------------------------------------------- 1 | # package 
directories 2 | node_modules 3 | jspm_packages 4 | 5 | # Serverless directories 6 | .serverless -------------------------------------------------------------------------------- /block-server/serverless.yml: -------------------------------------------------------------------------------- 1 | service: block-server 2 | 3 | # You can pin your service to only deploy with a specific Serverless version 4 | # Check out our docs for more details 5 | frameworkVersion: '3' 6 | 7 | provider: 8 | name: aws 9 | runtime: nodejs16.x 10 | region: eu-central-1 11 | 12 | # you can add statements to the Lambda function's IAM Role here 13 | # iam: 14 | # role: 15 | # statements: 16 | # - Effect: "Allow" 17 | # Action: 18 | # - "s3:ListBucket" 19 | # Resource: { "Fn::Join" : ["", ["arn:aws:s3:::", { "Ref" : "ServerlessDeploymentBucket" } ] ] } 20 | # - Effect: "Allow" 21 | # Action: 22 | # - "s3:PutObject" 23 | # Resource: 24 | # Fn::Join: 25 | # - "" 26 | # - - "arn:aws:s3:::" 27 | # - "Ref" : "ServerlessDeploymentBucket" 28 | # - "/*" 29 | 30 | # you can define service wide environment variables here 31 | # environment: 32 | # variable1: value1 33 | 34 | # you can add packaging information here 35 | #package: 36 | # patterns: 37 | # - '!exclude-me.js' 38 | # - '!exclude-me-dir/**' 39 | # - include-me.js 40 | # - include-me-dir/** 41 | 42 | functions: 43 | block: 44 | handler: handler.block 45 | events: 46 | - httpApi: 47 | path: /block/{block_height} 48 | method: get 49 | cors: true 50 | 51 | # Define function environment variables here 52 | # environment: 53 | # variable2: value2 54 | 55 | # you can add CloudFormation resource templates here 56 | #resources: 57 | # Resources: 58 | # NewResource: 59 | # Type: AWS::S3::Bucket 60 | # Properties: 61 | # BucketName: my-new-bucket 62 | # Outputs: 63 | # NewOutput: 64 | # Description: "Description for the output" 65 | # Value: "Some output value" 66 | -------------------------------------------------------------------------------- /block-streamer/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "block-streamer" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | actix-web = "4.5.1" 8 | anyhow = "1.0.57" 9 | async-stream = "0.3.5" 10 | async-trait = "0.1.74" 11 | aws-config = { version = "1.1.3", features = ["behavior-version-latest"] } 12 | aws-sdk-s3 = "1.13.0" 13 | base64 = "0.22.1" 14 | borsh = "0.10.2" 15 | cached = "0.49.3" 16 | chrono = "0.4.25" 17 | futures = "0.3.5" 18 | graphql_client = { version = "0.14.0", features = ["reqwest"] } 19 | lazy_static = "1.4.0" 20 | mockall = "0.11.4" 21 | near-lake-framework = "0.7.8" 22 | pin-project = "1.1.5" 23 | prometheus = "0.13.3" 24 | prost = "0.12.3" 25 | redis = { version = "0.21.5", features = ["tokio-comp", "connection-manager"] } 26 | regex = "1.10.4" 27 | reqwest = { version = "^0.11.0", features = ["json"] } 28 | serde = { version = "1", features = ["derive"] } 29 | serde_json = "1.0.55" 30 | tracing = "0.1.40" 31 | tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } 32 | tracing-stackdriver = "0.10.0" 33 | tokio = { version = "1.28.0", features = ["full", "test-util"]} 34 | tokio-util = "0.7.10" 35 | tokio-stream = "0.1.14" 36 | tonic = "0.10.2" 37 | wildmatch = "2.1.1" 38 | 39 | registry-types = { path = "../registry/types" } 40 | 41 | [build-dependencies] 42 | tonic-build = "0.10" 43 | 44 | [dev-dependencies] 45 | aws-smithy-runtime = { version = "1.0.0", features = ["test-util"] } 46 | 
aws-smithy-types = "1.0.1" 47 | http = "0.2.9" 48 | -------------------------------------------------------------------------------- /block-streamer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.75 AS build 2 | ARG CARGO_BUILD_MODE=release 3 | WORKDIR /tmp/ 4 | COPY block-streamer/ block-streamer/ 5 | COPY registry/types/ registry/types/ 6 | WORKDIR /tmp/block-streamer/ 7 | RUN apt update && apt install -yy protobuf-compiler 8 | RUN if [ "$CARGO_BUILD_MODE" = "debug" ]; then \ 9 | cargo build --package block-streamer; \ 10 | else \ 11 | cargo build --release --package block-streamer; \ 12 | fi 13 | 14 | FROM ubuntu:22.04 15 | ARG CARGO_BUILD_MODE=release 16 | RUN apt update && apt install -yy openssl ca-certificates 17 | USER nobody 18 | COPY --from=build /tmp/block-streamer/target/$CARGO_BUILD_MODE/block-streamer /block-streamer 19 | ENTRYPOINT ["/block-streamer"] 20 | -------------------------------------------------------------------------------- /block-streamer/README.md: -------------------------------------------------------------------------------- 1 | // TODO: Improve README further 2 | 3 | ## GraphQL Code Generation 4 | Querying a GraphQL requires informing Rust of the correct types to deserialize the response data into. In order to do this, the schema of the GraphQL data needs to be introspected. Following that, the query intended to be called needs to be fully defined. With this information, code can be automatically generated using the macro provided in graphql-client. Below are the instructions on how to do so. 5 | 6 | ### Generating schema.graphql 7 | Follow the instructions in the [Hasura Documentation](https://hasura.io/docs/latest/schema/common-patterns/export-graphql-schema/) to introspect the schema and generate the graphql file. Keep in mind that a header for the role needs to be provided. Otherwise, the schemas remain hidden from the public/default user. 8 | 9 | For example: `gq https://my-graphql-engine.com/v1/graphql -H 'X-Hasura-Role: someaccount_near' --introspect > schema.graphql` 10 | 11 | ### Generating Rust types from query 12 | After acquiring the graphql file for the schema, write the queries that need to be called in individual graphql files. Once written, add the following code template to a Rust file and the code will be auto generated using the macro. Assuming there are no problems generating the code, the code will be immediately usable. 
13 | 14 | ``` 15 | #[derive(GraphQLQuery)] 16 | #[graphql( 17 | schema_path = "PATH/TO/schema.graphql", 18 | query_path = "PATH/TO/query.graphql", 19 | response_derives = "Debug", 20 | normalization = "rust" 21 | )] 22 | struct QueryNameInPascalCase; 23 | ``` 24 | -------------------------------------------------------------------------------- /block-streamer/build.rs: -------------------------------------------------------------------------------- 1 | fn main() -> Result<(), Box> { 2 | tonic_build::compile_protos("proto/block_streamer.proto")?; 3 | 4 | Ok(()) 5 | } 6 | -------------------------------------------------------------------------------- /block-streamer/data/000107503704/list_objects.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 000107503704/ 5 | 6 | 7 | -------------------------------------------------------------------------------- /block-streamer/data/000107503705/list_objects.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 000107503705/ 5 | 6 | 7 | -------------------------------------------------------------------------------- /block-streamer/data/000107503705/shard_1.json: -------------------------------------------------------------------------------- 1 | {"chunk":{"author":"epic.poolv1.near","header":{"balance_burnt":"328039759800900000000","chunk_hash":"CBQ7Q5RkW3v3Jozd2esXjqWrtRFjEU9piEoMetJmBiPV","encoded_length":161,"encoded_merkle_root":"BRcT52UdJ6nqfcoKyBQ6Keda4dhGZhhqrXbPnX73XDVW","gas_limit":1000000000000000,"gas_used":3644983855043,"height_created":107503705,"height_included":107503705,"outcome_root":"EeguCoRrCzd8zGoAKzCyJxAAUMQcLSy3Y1FovqpAmvqQ","outgoing_receipts_root":"DN7QhC5dcBjKSz771NhCWpvZyCAnK2YmVcJWyDvkiKy8","prev_block_hash":"3UGi8N6uikSd9ZgoUyz8FjhZhxErjhiqQ24AGDfq4DSf","prev_state_root":"5xhpLgxiNn1vRsNbkVwZ3oFJfJgXywBaqffNmm8aLcH7","rent_paid":"0","shard_id":1,"signature":"ed25519:2BoGwm2mBDt2Dw2s5p1kyFBFDboWZCDR2nyAoQUfF8v8gcW2FCS1wq4TZajdVR4mr6Ru4625mfrefMAEP3EEKPMk","tx_root":"11111111111111111111111111111111","validator_proposals":[],"validator_reward":"0"},"receipts":[{"predecessor_id":"system","receipt":{"Action":{"actions":[{"Transfer":{"deposit":"188666211632363625537704"}}],"gas_price":"0","input_data_ids":[],"output_data_receivers":[],"signer_id":"relay.aurora","signer_public_key":"ed25519:BYcLvGJ8p3LSHkQyazPoRnLt1ktC9apopqe5MFzbqbUr"}},"receipt_id":"73vU6LfRvB4zF3xdLwFjmnMkPXuYkbLzFNifYdMtFp19","receiver_id":"relay.aurora"}],"transactions":[]},"receipt_execution_outcomes":[],"shard_id":1,"state_changes":[]} -------------------------------------------------------------------------------- /block-streamer/data/invalid/block.json: -------------------------------------------------------------------------------- 1 | {} 2 | -------------------------------------------------------------------------------- /block-streamer/data/invalid/list_objects.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 000000000000/ 5 | 6 | 7 | -------------------------------------------------------------------------------- /block-streamer/examples/list_streams.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use block_streamer::block_streamer_client::BlockStreamerClient; 4 | use block_streamer::ListStreamsRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = 
BlockStreamerClient::connect("http://0.0.0.0:8002").await?; 9 | 10 | let response = client 11 | .list_streams(Request::new(ListStreamsRequest {})) 12 | .await?; 13 | 14 | println!("{:#?}", response.into_inner()); 15 | 16 | Ok(()) 17 | } 18 | -------------------------------------------------------------------------------- /block-streamer/examples/start_stream.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use block_streamer::block_streamer_client::BlockStreamerClient; 4 | use block_streamer::{start_stream_request::Rule, ActionAnyRule, StartStreamRequest, Status}; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = BlockStreamerClient::connect("http://0.0.0.0:8002").await?; 9 | 10 | let response = client 11 | .start_stream(Request::new(StartStreamRequest { 12 | start_block_height: 106700000, 13 | account_id: "morgs.near".to_string(), 14 | function_name: "test".to_string(), 15 | version: 0, 16 | redis_stream: "morgs.near/test:block_stream".to_string(), 17 | rule: Some(Rule::ActionAnyRule(ActionAnyRule { 18 | affected_account_id: "social.near".to_string(), 19 | status: Status::Success.into(), 20 | })), 21 | })) 22 | .await?; 23 | 24 | println!("{:#?}", response.into_inner()); 25 | 26 | Ok(()) 27 | } 28 | -------------------------------------------------------------------------------- /block-streamer/examples/stop_stream.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use block_streamer::block_streamer_client::BlockStreamerClient; 4 | use block_streamer::StopStreamRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = BlockStreamerClient::connect("http://0.0.0.0:8002").await?; 9 | 10 | let response = client 11 | .stop_stream(Request::new(StopStreamRequest { 12 | // ID for indexer morgs.near/test 13 | stream_id: "16210176318434468568".to_string(), 14 | })) 15 | .await?; 16 | 17 | println!("{:#?}", response.into_inner()); 18 | 19 | Ok(()) 20 | } 21 | -------------------------------------------------------------------------------- /block-streamer/graphql/dataplatform_near/get_bitmaps_exact.graphql: -------------------------------------------------------------------------------- 1 | query GetBitmapsExact($block_date: date, $receiver_ids: [String!], $limit: Int, $offset: Int) { 2 | dataplatform_near_receiver_blocks_bitmaps(limit: $limit, offset: $offset, where: {block_date: {_eq: $block_date}, receiver: {receiver: {_in: $receiver_ids}}}) { 3 | bitmap 4 | first_block_height 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /block-streamer/graphql/dataplatform_near/get_bitmaps_wildcard.graphql: -------------------------------------------------------------------------------- 1 | query GetBitmapsWildcard($block_date: date, $receiver_ids: String, $limit: Int, $offset: Int) { 2 | dataplatform_near_receiver_blocks_bitmaps(limit: $limit, offset: $offset, where: {block_date: {_eq: $block_date}, receiver: {receiver: {_regex: $receiver_ids}}}) { 3 | bitmap 4 | first_block_height 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /block-streamer/src/graphql/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod client; 2 | -------------------------------------------------------------------------------- /block-streamer/src/indexer_config.rs: 
-------------------------------------------------------------------------------- 1 | use near_lake_framework::near_indexer_primitives::types::AccountId; 2 | use std::collections::hash_map::DefaultHasher; 3 | use std::hash::{Hash, Hasher}; 4 | 5 | use registry_types::Rule; 6 | 7 | #[derive(serde::Serialize, serde::Deserialize, Clone, Debug)] 8 | pub struct IndexerConfig { 9 | pub account_id: AccountId, 10 | pub function_name: String, 11 | pub rule: Rule, 12 | } 13 | 14 | impl IndexerConfig { 15 | pub fn get_full_name(&self) -> String { 16 | format!("{}/{}", self.account_id, self.function_name) 17 | } 18 | 19 | pub fn get_hash_id(&self) -> String { 20 | let mut hasher = DefaultHasher::new(); 21 | self.get_full_name().hash(&mut hasher); 22 | hasher.finish().to_string() 23 | } 24 | 25 | pub fn last_processed_block_key(&self) -> String { 26 | // TODO: rename to `last_processed_block` 27 | format!("{}:last_published_block", self.get_full_name()) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /block-streamer/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod blockstreamer { 2 | tonic::include_proto!("blockstreamer"); 3 | } 4 | 5 | pub use blockstreamer::*; 6 | -------------------------------------------------------------------------------- /block-streamer/src/main.rs: -------------------------------------------------------------------------------- 1 | use tracing_subscriber::prelude::*; 2 | 3 | mod block_stream; 4 | mod graphql; 5 | mod indexer_config; 6 | mod lake_s3_client; 7 | mod metrics; 8 | mod receiver_blocks; 9 | mod redis; 10 | mod rules; 11 | mod s3_client; 12 | mod server; 13 | mod utils; 14 | 15 | #[cfg(test)] 16 | mod test_utils; 17 | 18 | #[tokio::main] 19 | async fn main() -> anyhow::Result<()> { 20 | let subscriber = tracing_subscriber::registry() 21 | .with(metrics::LogCounter) 22 | .with(tracing_subscriber::EnvFilter::from_default_env()); 23 | 24 | if std::env::var("GCP_LOGGING_ENABLED").is_ok() { 25 | subscriber.with(tracing_stackdriver::layer()).init(); 26 | } else { 27 | subscriber 28 | .with(tracing_subscriber::fmt::layer().compact()) 29 | .init(); 30 | } 31 | 32 | let redis_url = std::env::var("REDIS_URL").expect("REDIS_URL is not set"); 33 | let graphql_endpoint = 34 | std::env::var("HASURA_GRAPHQL_ENDPOINT").expect("HASURA_GRAPHQL_ENDPOINT is not set"); 35 | let grpc_port = std::env::var("GRPC_PORT").expect("GRPC_PORT is not set"); 36 | let metrics_port = std::env::var("METRICS_PORT") 37 | .expect("METRICS_PORT is not set") 38 | .parse() 39 | .expect("METRICS_PORT is not a valid number"); 40 | 41 | tracing::info!( 42 | redis_url, 43 | grpc_port, 44 | metrics_port, 45 | graphql_endpoint, 46 | "Starting Block Streamer" 47 | ); 48 | 49 | let redis = std::sync::Arc::new(redis::RedisClient::connect(&redis_url).await?); 50 | 51 | let aws_config = aws_config::from_env().load().await; 52 | let s3_config = aws_sdk_s3::Config::from(&aws_config); 53 | let s3_client = crate::s3_client::S3Client::new(s3_config.clone()); 54 | 55 | let graphql_client = graphql::client::GraphQLClient::new(graphql_endpoint); 56 | let receiver_blocks_processor = std::sync::Arc::new( 57 | crate::receiver_blocks::ReceiverBlocksProcessor::new(graphql_client, s3_client.clone()), 58 | ); 59 | 60 | let lake_s3_client = crate::lake_s3_client::SharedLakeS3Client::from_conf(s3_config); 61 | 62 | tokio::spawn(metrics::init_server(metrics_port).expect("Failed to start metrics server")); 63 | 64 | 
server::init(&grpc_port, redis, receiver_blocks_processor, lake_s3_client).await?; 65 | 66 | Ok(()) 67 | } 68 | -------------------------------------------------------------------------------- /block-streamer/src/receiver_blocks/mod.rs: -------------------------------------------------------------------------------- 1 | mod bitmap; 2 | mod receiver_blocks_processor; 3 | 4 | pub use receiver_blocks_processor::ReceiverBlocksProcessor; 5 | -------------------------------------------------------------------------------- /block-streamer/src/rules/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod matcher; 2 | pub mod outcomes_reducer; 3 | pub mod types; 4 | 5 | use near_lake_framework::near_indexer_primitives::StreamerMessage; 6 | use registry_types::Rule; 7 | 8 | use types::{ChainId, IndexerRuleMatch}; 9 | 10 | pub fn reduce_indexer_rule_matches( 11 | indexer_rule: &Rule, 12 | streamer_message: &StreamerMessage, 13 | chain_id: ChainId, 14 | ) -> Vec { 15 | match &indexer_rule { 16 | Rule::ActionAny { .. } | Rule::ActionFunctionCall { .. } | Rule::Event { .. } => { 17 | outcomes_reducer::reduce_indexer_rule_matches_from_outcomes( 18 | indexer_rule, 19 | streamer_message, 20 | chain_id, 21 | ) 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /block-streamer/src/rules/types.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | 3 | pub type TransactionHashString = String; 4 | pub type ReceiptIdString = String; 5 | pub type BlockHashString = String; 6 | 7 | #[derive( 8 | borsh::BorshSerialize, 9 | borsh::BorshDeserialize, 10 | serde::Serialize, 11 | serde::Deserialize, 12 | Clone, 13 | Debug, 14 | )] 15 | pub struct IndexerRuleMatch { 16 | pub chain_id: ChainId, 17 | pub payload: IndexerRuleMatchPayload, 18 | pub block_height: u64, 19 | } 20 | 21 | #[derive( 22 | borsh::BorshSerialize, 23 | borsh::BorshDeserialize, 24 | serde::Serialize, 25 | serde::Deserialize, 26 | Clone, 27 | Debug, 28 | )] 29 | pub enum IndexerRuleMatchPayload { 30 | Actions { 31 | block_hash: BlockHashString, 32 | receipt_id: ReceiptIdString, 33 | transaction_hash: Option, 34 | }, 35 | Events { 36 | block_hash: BlockHashString, 37 | receipt_id: ReceiptIdString, 38 | transaction_hash: Option, 39 | event: String, 40 | standard: String, 41 | version: String, 42 | data: Option, 43 | }, 44 | StateChanges { 45 | block_hash: BlockHashString, 46 | receipt_id: Option, 47 | transaction_hash: Option, 48 | }, 49 | } 50 | 51 | #[derive( 52 | borsh::BorshSerialize, 53 | borsh::BorshDeserialize, 54 | serde::Serialize, 55 | serde::Deserialize, 56 | Clone, 57 | Debug, 58 | )] 59 | pub enum ChainId { 60 | Mainnet, 61 | Testnet, 62 | } 63 | impl fmt::Display for ChainId { 64 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 65 | match self { 66 | ChainId::Mainnet => write!(f, "mainnet"), 67 | ChainId::Testnet => write!(f, "testnet"), 68 | } 69 | } 70 | } 71 | 72 | #[derive(serde::Serialize, serde::Deserialize, Clone, Debug)] 73 | pub struct Event { 74 | pub event: String, 75 | pub standard: String, 76 | pub version: String, 77 | pub data: Option, 78 | } 79 | 80 | impl Event { 81 | pub fn from_log(log: &str) -> anyhow::Result { 82 | let prefix = "EVENT_JSON:"; 83 | if !log.starts_with(prefix) { 84 | anyhow::bail!("log message doesn't start from required prefix"); 85 | } 86 | 87 | Ok(serde_json::from_str::<'_, Self>( 88 | log[prefix.len()..].trim(), 89 | )?) 
90 | } 91 | } 92 | -------------------------------------------------------------------------------- /block-streamer/src/server/mod.rs: -------------------------------------------------------------------------------- 1 | mod block_streamer_service; 2 | 3 | pub mod blockstreamer { 4 | tonic::include_proto!("blockstreamer"); 5 | } 6 | 7 | pub async fn init( 8 | port: &str, 9 | redis: std::sync::Arc, 10 | receiver_blocks_processor: std::sync::Arc, 11 | lake_s3_client: crate::lake_s3_client::SharedLakeS3Client, 12 | ) -> anyhow::Result<()> { 13 | let addr = format!("0.0.0.0:{}", port).parse()?; 14 | 15 | tracing::info!("Starting gRPC server on {}", addr); 16 | 17 | let block_streamer_service = block_streamer_service::BlockStreamerService::new( 18 | redis, 19 | receiver_blocks_processor, 20 | lake_s3_client, 21 | ); 22 | 23 | let block_streamer_server = 24 | blockstreamer::block_streamer_server::BlockStreamerServer::new(block_streamer_service); 25 | 26 | tonic::transport::Server::builder() 27 | .add_service(block_streamer_server) 28 | .serve(addr) 29 | .await 30 | .map_err(|err| err.into()) 31 | } 32 | -------------------------------------------------------------------------------- /coordinator/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "coordinator" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | anyhow = "1.0.75" 8 | futures-util = "0.3.30" 9 | prost = "0.12.3" 10 | redis = { version = "0.24", features = ["tokio-comp", "connection-manager"] } 11 | tokio = "1.28" 12 | tonic = "0.10.2" 13 | tracing = "0.1.40" 14 | tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } 15 | tracing-stackdriver = "0.10.0" 16 | serde = "1.0.195" 17 | serde_json = "1.0.108" 18 | 19 | block-streamer = { path = "../block-streamer" } 20 | runner = { path = "../runner-client"} 21 | registry-types = { path = "../registry/types" } 22 | 23 | near-jsonrpc-client = "0.8.0" 24 | near-primitives = "0.20.0" 25 | near-jsonrpc-primitives = "0.20.0" 26 | 27 | [build-dependencies] 28 | tonic-build = "0.10" 29 | 30 | [dev-dependencies] 31 | mockall = "0.11.4" 32 | -------------------------------------------------------------------------------- /coordinator/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.75 AS build 2 | ARG CARGO_BUILD_MODE=release 3 | WORKDIR /tmp/ 4 | COPY coordinator/ coordinator/ 5 | COPY registry/types/ registry/types/ 6 | COPY block-streamer/ block-streamer/ 7 | COPY runner-client/ runner-client/ 8 | WORKDIR /tmp/coordinator/ 9 | RUN apt update && apt install -yy protobuf-compiler 10 | RUN if [ "$CARGO_BUILD_MODE" = "debug" ]; then \ 11 | cargo build; \ 12 | else \ 13 | cargo build --release; \ 14 | fi 15 | 16 | 17 | FROM ubuntu:22.04 18 | ARG CARGO_BUILD_MODE=release 19 | RUN apt update && apt install -yy openssl ca-certificates 20 | USER nobody 21 | COPY --from=build /tmp/coordinator/target/$CARGO_BUILD_MODE/coordinator /coordinator 22 | ENTRYPOINT ["/coordinator"] 23 | -------------------------------------------------------------------------------- /coordinator/build.rs: -------------------------------------------------------------------------------- 1 | fn main() -> Result<(), Box> { 2 | tonic_build::compile_protos("proto/indexer_manager.proto")?; 3 | 4 | Ok(()) 5 | } 6 | -------------------------------------------------------------------------------- /coordinator/proto/indexer_manager.proto: 
-------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package indexer; 4 | 5 | // The IndexerManager service provides RPCs to manage Indexer instances 6 | service IndexerManager { 7 | // Re-enable an existing Indexer 8 | rpc Enable (IndexerRequest) returns (EnableIndexerResponse); 9 | 10 | // Disable an Indexer, preventing it from running 11 | rpc Disable (IndexerRequest) returns (DisableIndexerResponse); 12 | 13 | // List all Indexers with their state 14 | rpc List (Empty) returns (ListIndexersResponse); 15 | } 16 | 17 | // Request message for managing Indexers 18 | message IndexerRequest { 19 | // Account ID which the indexer is defined under 20 | string account_id = 1; 21 | // Name of the indexer 22 | string function_name = 2; 23 | } 24 | 25 | // Response message for enabling Indexer 26 | message EnableIndexerResponse { 27 | bool success = 1; 28 | } 29 | 30 | // Response message for disabling Indexer 31 | message DisableIndexerResponse { 32 | bool success = 1; 33 | } 34 | 35 | // Response message for listing Indexers 36 | message ListIndexersResponse { 37 | repeated IndexerState indexers = 1; 38 | } 39 | 40 | // Persisted state relevant to Indexer 41 | message IndexerState { 42 | string account_id = 1; 43 | string function_name = 2; 44 | bool enabled = 3; 45 | } 46 | 47 | message Empty {} 48 | -------------------------------------------------------------------------------- /coordinator/src/handlers/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod block_streams; 2 | pub mod data_layer; 3 | pub mod executors; 4 | -------------------------------------------------------------------------------- /coordinator/src/indexer_config.rs: -------------------------------------------------------------------------------- 1 | use near_primitives::types::AccountId; 2 | use registry_types::{Rule, StartBlock}; 3 | 4 | use crate::redis::KeyProvider; 5 | 6 | #[derive(Debug, Clone, PartialEq)] 7 | pub struct IndexerConfig { 8 | pub account_id: AccountId, 9 | pub function_name: String, 10 | pub code: String, 11 | pub start_block: StartBlock, 12 | pub schema: String, 13 | pub rule: Rule, 14 | pub updated_at_block_height: Option<u64>, 15 | pub created_at_block_height: u64, 16 | pub deleted_at_block_height: Option<u64>, 17 | } 18 | 19 | impl KeyProvider for IndexerConfig { 20 | fn account_id(&self) -> String { 21 | self.account_id.to_string() 22 | } 23 | 24 | fn function_name(&self) -> String { 25 | self.function_name.clone() 26 | } 27 | } 28 | 29 | #[cfg(test)] 30 | impl Default for IndexerConfig { 31 | fn default() -> Self { 32 | Self { 33 | account_id: "morgs.near".parse().unwrap(), 34 | function_name: "test".to_string(), 35 | code: "code".to_string(), 36 | schema: "schema".to_string(), 37 | rule: Rule::ActionAny { 38 | affected_account_id: "queryapi.dataplatform.near".to_string(), 39 | status: registry_types::Status::Any, 40 | }, 41 | created_at_block_height: 1, 42 | updated_at_block_height: Some(2), 43 | deleted_at_block_height: None, 44 | start_block: StartBlock::Height(100), 45 | } 46 | } 47 | } 48 | 49 | impl IndexerConfig { 50 | pub fn get_full_name(&self) -> String { 51 | format!("{}/{}", self.account_id, self.function_name) 52 | } 53 | 54 | pub fn get_registry_version(&self) -> u64 { 55 | self.updated_at_block_height 56 | .unwrap_or(self.created_at_block_height) 57 | } 58 | 59 | pub fn is_deleted(&self) -> bool { 60 | self.deleted_at_block_height.is_some() 61 | } 62 | } 63 |
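The IndexerManager service described in indexer_manager.proto above is served over tonic by the coordinator (see coordinator/src/server/mod.rs below), in the same way the block-streamer examples earlier exercise their gRPC API. As a rough, hypothetical illustration only — the generated-module path, port, and account values here are assumptions, not an actual file in this repo — a client call against this service could look like:

```rust
use tonic::Request;

// Assumed: stubs generated from indexer_manager.proto (`package indexer;`)
// are exposed to the caller, e.g. via `tonic::include_proto!("indexer")`.
use indexer_manager::indexer_manager_client::IndexerManagerClient;
use indexer_manager::{Empty, IndexerRequest};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Port is illustrative -- use whatever GRPC_PORT the coordinator was started with.
    let mut client = IndexerManagerClient::connect("http://0.0.0.0:9003").await?;

    // List every registered indexer along with its persisted state.
    let indexers = client.list(Request::new(Empty {})).await?;
    println!("{:#?}", indexers.into_inner());

    // Disable a single indexer, preventing it from running.
    let response = client
        .disable(Request::new(IndexerRequest {
            account_id: "morgs.near".to_string(),
            function_name: "test".to_string(),
        }))
        .await?;
    println!("disabled: {}", response.into_inner().success);

    Ok(())
}
```

Enable is the mirror image: the same `IndexerRequest` sent to `client.enable`.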
-------------------------------------------------------------------------------- /coordinator/src/server/mod.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | 3 | use crate::indexer_state::IndexerStateManager; 4 | use crate::registry::Registry; 5 | 6 | mod indexer_manager_service; 7 | 8 | pub mod indexer_manager { 9 | tonic::include_proto!("indexer"); 10 | } 11 | 12 | pub async fn init( 13 | port: String, 14 | indexer_state_manager: Arc, 15 | registry: Arc, 16 | ) -> anyhow::Result<()> { 17 | let addr = format!("0.0.0.0:{}", port).parse()?; 18 | 19 | tracing::info!("Starting gRPC server on {}", addr); 20 | 21 | let indexer_manager_service = 22 | indexer_manager_service::IndexerManagerService::new(indexer_state_manager, registry); 23 | 24 | let indexer_manager_server = 25 | indexer_manager::indexer_manager_server::IndexerManagerServer::new(indexer_manager_service); 26 | 27 | tonic::transport::Server::builder() 28 | .add_service(indexer_manager_server) 29 | .serve(addr) 30 | .await 31 | .map_err(Into::into) 32 | } 33 | -------------------------------------------------------------------------------- /coordinator/src/utils.rs: -------------------------------------------------------------------------------- 1 | use std::{ops::Mul, time::Duration}; 2 | 3 | use futures_util::future::Future; 4 | 5 | const INITIAL_DELAY_SECONDS: Duration = Duration::from_secs(1); 6 | const MAXIMUM_DELAY_SECONDS: Duration = Duration::from_secs(30); 7 | 8 | pub async fn exponential_retry(operation: F) -> Result 9 | where 10 | F: Fn() -> Fut, 11 | Fut: Future>, 12 | E: std::fmt::Debug, 13 | { 14 | let mut attempts = 1; 15 | let mut delay = INITIAL_DELAY_SECONDS; 16 | 17 | loop { 18 | match operation().await { 19 | Ok(result) => return Ok(result), 20 | Err(error) => { 21 | if attempts == 1 || attempts % 5 == 0 { 22 | tracing::warn!("Encountered error {attempts} time(s). 
Retrying...\n{error:?}") 23 | } 24 | 25 | tokio::time::sleep(delay).await; 26 | 27 | attempts += 1; 28 | delay = delay.mul(2).min(MAXIMUM_DELAY_SECONDS); 29 | } 30 | } 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /core-indexers/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: '@typescript-eslint/parser', 3 | env: { 4 | es2021: true, 5 | node: true 6 | }, 7 | overrides: [ 8 | { 9 | files: ['.eslintrc.js', 'jest.config.js'], 10 | parser: 'espree', 11 | extends: ['standard'], 12 | rules: { 13 | semi: ['error', 'always'], 14 | quotes: ['error', 'single'], 15 | 'array-callback-return': ['error', { allowImplicit: false }] 16 | } 17 | }, 18 | { 19 | files: ['**/*.ts'], 20 | parserOptions: { 21 | project: './tsconfig.json', 22 | tsconfigRootDir: __dirname 23 | }, 24 | extends: [ 25 | 'standard-with-typescript' 26 | ], 27 | rules: { 28 | '@typescript-eslint/semi': ['error', 'always'], 29 | '@typescript-eslint/comma-dangle': ['error', 'only-multiline'], 30 | '@typescript-eslint/strict-boolean-expressions': 'off', 31 | 'array-callback-return': ['error', { allowImplicit: false }] 32 | } 33 | } 34 | ] 35 | }; 36 | -------------------------------------------------------------------------------- /core-indexers/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | testMatch: ['**/*.test.ts'], 5 | testTimeout: 10000 6 | }; 7 | -------------------------------------------------------------------------------- /core-indexers/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "core-indexers", 3 | "version": "1.0.0", 4 | "description": "core indexers", 5 | "scripts": { 6 | "test": "node --experimental-vm-modules ./node_modules/.bin/jest" 7 | }, 8 | "author": "", 9 | "license": "ISC", 10 | "devDependencies": { 11 | "@eslint/eslintrc": "^3.1.0", 12 | "@eslint/js": "^9.7.0", 13 | "@types/jest": "^29.5.12", 14 | "@types/node": "^20.14.11", 15 | "@typescript-eslint/eslint-plugin": "^5.62.0", 16 | "@typescript-eslint/parser": "^5.62.0", 17 | "eslint": "^8.46.0", 18 | "eslint-config-prettier": "^8.9.0", 19 | "eslint-config-standard-with-typescript": "^37.0.0", 20 | "eslint-plugin-import": "^2.28.0", 21 | "eslint-plugin-n": "^16.0.1", 22 | "eslint-plugin-prettier": "^5.0.0", 23 | "eslint-plugin-promise": "^6.1.1", 24 | "globals": "^15.8.0", 25 | "jest": "^29.7.0", 26 | "ts-jest": "^29.2.3", 27 | "ts-node": "^10.9.2", 28 | "typescript": "^5.5.3" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /core-indexers/receiver-blocks/schema.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE 2 | "receivers" ( 3 | "id" BIGSERIAL NOT NULL PRIMARY KEY, 4 | "receiver" TEXT NOT NULL 5 | ); 6 | 7 | CREATE UNIQUE INDEX idx_receivers_by_receiver ON receivers (receiver); 8 | 9 | CREATE TABLE 10 | "bitmaps" ( 11 | "receiver_id" bigint NOT NULL, 12 | "block_date" date NOT NULL, 13 | "first_block_height" int NOT NULL, 14 | "last_elias_gamma_start_bit" int NOT NULL, 15 | "max_index" int NOT NULL, 16 | "bitmap" TEXT NOT NULL, 17 | PRIMARY KEY ("block_date", "receiver_id"), 18 | CONSTRAINT "bitmaps_receiver_id_fkey" FOREIGN KEY ("receiver_id") REFERENCES "receivers" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION 19 | ); 20 | 21 | 
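The two tables above are linked by `bitmaps.receiver_id -> receivers.id`, with one bitmap row per receiver per day. As a hedged illustration only, and not code from this repository, a lookup of the daily bitmaps for a single receiver could look like the query below; the receiver value reuses the account exercised by the unit test that follows, and the date range is arbitrary.

```sql
-- Illustrative query: fetch the compressed daily block bitmaps for one receiver.
SELECT b.block_date,
       b.first_block_height,
       b.max_index,
       b.bitmap
FROM bitmaps b
JOIN receivers r ON r.id = b.receiver_id
WHERE r.receiver = 'app.nearcrowd.near'
  AND b.block_date BETWEEN DATE '2023-08-01' AND DATE '2023-08-31'
ORDER BY b.block_date;
```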
-------------------------------------------------------------------------------- /core-indexers/receiver-blocks/unit.test.ts: -------------------------------------------------------------------------------- 1 | import fs from 'fs'; 2 | import LocalIndexer from 'queryapi-runner/src/indexer/local-indexer'; 3 | import { LocalIndexerConfig } from 'queryapi-runner/src/indexer-config/indexer-config'; 4 | import { LogLevel } from 'queryapi-runner/src/indexer-meta/log-entry'; 5 | import path from 'path'; 6 | 7 | describe('Receiver Blocks Indexer Tests', () => { 8 | const indexerConfig: LocalIndexerConfig = LocalIndexerConfig.fromObject({ 9 | accountId: 'account.near', 10 | functionName: 'sample_indexer', 11 | code: fs.readFileSync(path.join(__dirname, 'indexer.js'), 'utf8'), 12 | schema: fs.readFileSync(path.join(__dirname, 'schema.sql'), 'utf8'), 13 | logLevel: LogLevel.INFO, 14 | }); 15 | 16 | test('Try executing on a block', async () => { 17 | const localIndexer = new LocalIndexer(indexerConfig); 18 | const context = localIndexer.getContext(); 19 | 20 | // Run on one block to populate receivers table and initial bitmap 21 | await localIndexer.executeOnBlock(100000000); 22 | const receivers = await context.db.Receivers.select({ 23 | receiver: 'app.nearcrowd.near' 24 | }); 25 | const tokenSweatId = receivers[0].id; 26 | 27 | const correctBitmapOne = { 28 | first_block_height: 100000000, 29 | block_date: '2023-08-30', 30 | receiver_id: tokenSweatId, 31 | bitmap: 'wA==', 32 | last_elias_gamma_start_bit: 1, 33 | max_index: 0, 34 | }; 35 | const correctBitmapTwo = { 36 | first_block_height: 100000000, 37 | block_date: '2023-08-30', 38 | receiver_id: tokenSweatId, 39 | bitmap: 'oA==', 40 | last_elias_gamma_start_bit: 1, 41 | max_index: 1, 42 | }; 43 | 44 | let bitmap = await context.db.Bitmaps.select({ 45 | receiver_id: tokenSweatId 46 | }); 47 | expect(bitmap.length).toBe(1); 48 | expect(bitmap[0]).toEqual(correctBitmapOne); 49 | 50 | // Run on second block and verify bitmap update 51 | await localIndexer.executeOnBlock(100000001); 52 | bitmap = await context.db.Bitmaps.select({ 53 | receiver_id: tokenSweatId 54 | }); 55 | expect(bitmap.length).toBe(1); 56 | expect(bitmap[0]).toEqual(correctBitmapTwo); 57 | }); 58 | }); 59 | -------------------------------------------------------------------------------- /core-indexers/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2018", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 4 | "lib": ["es2021"], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 5 | "module": "commonjs", /* Specify what module code is generated. */ 6 | "rootDir": "..", 7 | "paths": { 8 | "@queryapi-runner/*": ["../runner/*"], /* Allow imports from runner using queryapi-runner alias */ 9 | }, 10 | "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 11 | "resolveJsonModule": true, /* Enable importing .json files. */ 12 | "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 13 | "outDir": "dist", /* Specify an output folder for all emitted files. */ 14 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. 
*/ 15 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 16 | "strict": true, /* Enable all strict type-checking options. */ 17 | "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 18 | "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ 19 | "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 20 | "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 21 | "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 22 | "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 23 | "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 24 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 25 | }, 26 | "include": ["**/*"], 27 | "exclude": ["node_modules", "dist"] 28 | } 29 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # QueryAPI Documentation 2 | 3 | Documentation archive from docs.near.org 4 | -------------------------------------------------------------------------------- /docs/queryapi/autocomp-error.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp-error.png -------------------------------------------------------------------------------- /docs/queryapi/autocomp-types.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp-types.png -------------------------------------------------------------------------------- /docs/queryapi/autocomp1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp1.jpg -------------------------------------------------------------------------------- /docs/queryapi/autocomp2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp2.jpg -------------------------------------------------------------------------------- /docs/queryapi/autocomp3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp3.jpg -------------------------------------------------------------------------------- /docs/queryapi/autocomp4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp4.jpg -------------------------------------------------------------------------------- /docs/queryapi/autocomp5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/near/queryapi/7d9546d3b1bab7b27789ba078fef048ddf2bda63/docs/queryapi/autocomp5.jpg -------------------------------------------------------------------------------- 
/frontend/.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | end_of_line = lf 5 | insert_final_newline = true 6 | 7 | [*.{js,json,yml}] 8 | charset = utf-8 9 | indent_style = space 10 | indent_size = 2 11 | -------------------------------------------------------------------------------- /frontend/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: '@typescript-eslint/parser', 3 | extends: [ 4 | "plugin:@typescript-eslint/recommended", 5 | "next/core-web-vitals", // extended set of recommended rules from Next.js 6 | "prettier", 7 | ], 8 | plugins: ["simple-import-sort", "@typescript-eslint"], 9 | root: true, 10 | rules: { 11 | "simple-import-sort/imports": "warn", 12 | "@typescript-eslint/no-explicit-any": "off", // TODO: remove once refactor from JS is complete 13 | "@typescript-eslint/consistent-type-imports": "error", 14 | "@typescript-eslint/no-unused-vars": ['warn', { argsIgnorePattern: "^_", "varsIgnorePattern": "^_" }], 15 | '@typescript-eslint/no-empty-function': ['warn', { allow: ['methods'] }], 16 | "@typescript-eslint/ban-ts-comment": ["error", { 17 | "ts-ignore": "allow-with-description", 18 | "minimumDescriptionLength": 5 19 | }], 20 | } 21 | }; 22 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | node_modules/ 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | .pnpm-debug.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | worktree/* 38 | -------------------------------------------------------------------------------- /frontend/.prettierrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | semi: true, // Add semicolons at the end of statements 3 | singleQuote: true, // Use single quotes instead of double quotes 4 | tabWidth: 2, // Set the tab width to 2 spaces 5 | singleAttributePerLine: false, 6 | printWidth: 120, // Wrap lines that exceed 120 characters 7 | trailingComma: 'all', // Use trailing commas wherever possible (multi-line objects and arrays) 8 | arrowParens: 'always', // Always include parentheses around arrow function parameters 9 | endOfLine: 'lf', // Use LF (line feed) as the line ending 10 | }; 11 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18-alpine as dependencies 2 | WORKDIR ./ 3 | COPY package.json package-lock.json ./ 4 | RUN npm install 5 | 6 | FROM node:18-alpine as builder 7 | # Set build arguments and environment variables 8 | ARG NEXT_PUBLIC_REGISTRY_CONTRACT_ID 9 | ARG NEXT_PUBLIC_HASURA_ENDPOINT 10 | ENV NEXT_PUBLIC_HASURA_ENDPOINT=$NEXT_PUBLIC_HASURA_ENDPOINT 11 | ENV NEXT_PUBLIC_REGISTRY_CONTRACT_ID=$NEXT_PUBLIC_REGISTRY_CONTRACT_ID 12 | 13 | WORKDIR ./ 14 | COPY . . 
15 | COPY --from=dependencies ./node_modules ./node_modules 16 | RUN npm run build 17 | 18 | FROM node:18-alpine as runner 19 | WORKDIR ./ 20 | ENV NODE_ENV production 21 | 22 | COPY --from=builder ./.next ./.next 23 | COPY --from=builder ./node_modules ./node_modules 24 | COPY --from=builder ./package.json ./package.json 25 | 26 | EXPOSE 3000 27 | CMD ["npm", "run", "start"] 28 | -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | ## What is this repo? 2 | 3 | Frontend for Near QueryAPI that allows users to create, manage, and explore indexers stored on-chain. You can visit the app [here](https://near.org/dataplatform.near/widget/QueryApi.App) 4 | 5 | 6 | BOS widgets are stored in the `widgets/` folder while the main NextJS application lives in the root. 7 | 8 | ## Getting Started 9 | 10 | First, download the bos-loader CLI by following this guide [here](https://docs.near.org/bos/dev/bos-loader). 11 | 12 | From the root of the QueryAPI Frontend repo, run the following command: 13 | 14 | ```bash 15 | npm run serve:widgets:local # for running the local environment 16 | npm run serve:widgets:dev # for running the dev environment 17 | npm run serve:widgets:prod # for running the prod environment 18 | ``` 19 | > Near.org or any other BOS gateway queries the blockchain state to pull the latest widget code and renders it. If we would like to test our BOS widgets, we need to override the path at which the gateway (near.org) queries for the widget code. We do this using the bos-loader tool (the underlying CLI tool used in the `npm run serve:widgets:dev` command), which allows us to serve our widgets locally (http://127.0.0.1:3030 by default). **This command replaces all keys found in the `replacement.dev.json` object with their values in the widgets directory when serving the widgets.** At this point, we have served our widgets locally but have not yet told the BOS gateway (near.org) where to load our local widgets from. 20 | 21 | 22 | **Then, head to `dev.near.org/flags` and enter `http://127.0.0.1:3030`** 23 | 24 | > In order to tell our BOS gateway (near.org) where to load the local widgets from, we head to `dev.near.org/flags` and enter the local path we got from running the previous command. If you have not changed any configuration, the default should be `http://127.0.0.1:3030` 25 | 26 | **Finally**, run the following to serve the local NextJS frontend: 27 | ```bash 28 | npm run dev 29 | ``` 30 | 31 | **Now, head to the path where the widgets are served on the BOS.** 32 | 33 | - Prod App: `https://near.org/dataplatform.near/widget/QueryApi.App` 34 | 35 | 36 | ### ENV variables 37 | The React app does not use the replacement files.
Instead, we need to provide an `.env` file to make sure we are accessing the right environment. 38 | 39 | By default, the React app will use the `dev` environment vars: 40 | ``` 41 | Prod: 42 | NEXT_PUBLIC_HASURA_ENDPOINT=https://near-queryapi.api.pagoda.co 43 | NEXT_PUBLIC_REGISTRY_CONTRACT_ID=queryapi.dataplatform.near 44 | ``` 45 | ``` 46 | Dev: 47 | NEXT_PUBLIC_HASURA_ENDPOINT=https://near-queryapi.dev.api.pagoda.co 48 | NEXT_PUBLIC_REGISTRY_CONTRACT_ID=dev-queryapi.dataplatform.near 49 | ``` -------------------------------------------------------------------------------- /frontend/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | transform: { 5 | '^.+\\.[tj]sx?$': 'ts-jest', 6 | }, 7 | moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx'], 8 | testPathIgnorePatterns: [ 9 | "/frontend/src/components/Editor/__tests__/Editor.test.js", 10 | "/frontend/src/utils/formatters.test.js" 11 | ], 12 | }; 13 | -------------------------------------------------------------------------------- /frontend/next.config.js: -------------------------------------------------------------------------------- 1 | const nextConfig = { 2 | async headers() { 3 | return [ 4 | { 5 | source: "/(.*)", // Match all routes 6 | headers: [ 7 | { 8 | key: "Access-Control-Allow-Origin", 9 | value: "*", // Allow requests from any origin 10 | }, 11 | { 12 | key: "Access-Control-Allow-Methods", 13 | value: "GET, POST, PUT, DELETE, OPTIONS", 14 | }, 15 | { 16 | key: "Access-Control-Allow-Headers", 17 | value: "Content-Type, Authorization", 18 | }, 19 | ], 20 | }, 21 | ]; 22 | }, 23 | }; 24 | 25 | module.exports = nextConfig; 26 | -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | } 7 | -------------------------------------------------------------------------------- /frontend/replacement.dev.json: -------------------------------------------------------------------------------- 1 | { 2 | "REPL_ACCOUNT_ID": "dev-queryapi.dataplatform.near", 3 | "REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.dev.api.pagoda.co", 4 | "REPL_EXTERNAL_APP_URL": "https://queryapi-frontend-vcqilefdcq-ew.a.run.app", 5 | "REPL_REGISTRY_CONTRACT_ID": "dev-queryapi.dataplatform.near", 6 | "REPL_QUERY_API_USAGE_URL": "https://storage.googleapis.com/databricks-near-query-runner/output/query-api-usage/indexers_dev.json" 7 | } 8 | -------------------------------------------------------------------------------- /frontend/replacement.local.json: -------------------------------------------------------------------------------- 1 | { 2 | "REPL_ACCOUNT_ID": "dataplatform.near", 3 | "REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.api.pagoda.co", 4 | "REPL_EXTERNAL_APP_URL": "http://localhost:3000", 5 | "REPL_REGISTRY_CONTRACT_ID": "queryapi.dataplatform.near", 6 | "REPL_QUERY_API_USAGE_URL": "https://storage.googleapis.com/databricks-near-query-runner/output/query-api-usage/indexers.json" 7 | } 8 | -------------------------------------------------------------------------------- /frontend/replacement.mainnet.json: -------------------------------------------------------------------------------- 1 | { 2 | "REPL_ACCOUNT_ID": "dataplatform.near", 3 | "REPL_GRAPHQL_ENDPOINT": "https://near-queryapi.api.pagoda.co", 4 | 
"REPL_EXTERNAL_APP_URL": "https://queryapi-frontend-24ktefolwq-ew.a.run.app", 5 | "REPL_REGISTRY_CONTRACT_ID": "queryapi.dataplatform.near", 6 | "REPL_QUERY_API_USAGE_URL": "https://storage.googleapis.com/databricks-near-query-runner/output/query-api-usage/indexers.json" 7 | } 8 | -------------------------------------------------------------------------------- /frontend/src/classes/ValidationError.ts: -------------------------------------------------------------------------------- 1 | export class ValidationError extends Error { 2 | type: string; 3 | location?: { 4 | start: { line: number; column: number }; 5 | end: { line: number; column: number }; 6 | }; 7 | 8 | constructor( 9 | message: string, 10 | type: string, 11 | location?: { start: { line: number; column: number }; end: { line: number; column: number } }, 12 | ) { 13 | super(message); 14 | this.type = type; 15 | this.location = location; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Alert.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | 3 | type AlertProps = { 4 | type: 'success' | 'error' | 'info'; 5 | message: string; 6 | onClose?: () => void; 7 | }; 8 | 9 | const Alert: React.FC = ({ type, message, onClose }) => { 10 | const [closed, setClosed] = useState(false); 11 | 12 | const handleClose = () => { 13 | setClosed(true); 14 | if (onClose) { 15 | onClose(); 16 | } 17 | }; 18 | 19 | if (closed) { 20 | return null; 21 | } 22 | 23 | return ( 24 |
34 | Alert: 35 | {message} 36 | 37 | 44 | Close 45 | 49 | 50 | 51 |
52 | ); 53 | }; 54 | 55 | export default Alert; 56 | -------------------------------------------------------------------------------- /frontend/src/components/Common/CustomTooltip.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | 3 | export enum TooltipDirection { 4 | Top = 'top', 5 | Bottom = 'bottom', 6 | Left = 'left', 7 | Right = 'right', 8 | } 9 | 10 | interface CustomTooltipProps { 11 | message: string; 12 | direction?: TooltipDirection; 13 | children: React.ReactElement; 14 | } 15 | 16 | const CustomTooltip: React.FC = ({ message, direction = TooltipDirection.Top, children }) => { 17 | const [visible, setVisible] = useState(false); 18 | 19 | const showTooltip = (): void => { 20 | setVisible(true); 21 | }; 22 | const hideTooltip = (): void => { 23 | setVisible(false); 24 | }; 25 | 26 | const getTooltipPositionClass = (direction: TooltipDirection | undefined): string => { 27 | switch (direction) { 28 | case TooltipDirection.Top: 29 | return 'bottom-full left-1/2 transform -translate-x-1/2'; 30 | case TooltipDirection.Bottom: 31 | return 'top-full left-1/2 transform -translate-x-1/2'; 32 | case TooltipDirection.Left: 33 | return 'top-1/2 right-full transform translate-y-1/2'; 34 | case TooltipDirection.Right: 35 | return 'top-1/2 left-full transform translate-y-1/2'; 36 | default: 37 | return ''; 38 | } 39 | }; 40 | 41 | const tooltipClasses = [ 42 | getTooltipPositionClass(direction), 43 | 'tooltip-bubble', 44 | 'text-xxs', 45 | 'rounded', 46 | 'shadow-sm', 47 | 'bg-black', 48 | 'text-white', 49 | 'px-2', 50 | 'absolute', 51 | 'text-center', 52 | 'w-max', 53 | ] 54 | .filter(Boolean) 55 | .join(' '); 56 | 57 | return ( 58 |
59 | {children} 60 | {visible && ( 61 |
62 |
{message}
63 |
64 | )} 65 |
66 | ); 67 | }; 68 | 69 | export default CustomTooltip; 70 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Icons/AlertSquareIcon.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | const AlertSquareIcon = () => { 4 | return ( 5 |
6 | ! 7 |
8 | ); 9 | }; 10 | export default AlertSquareIcon; 11 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Icons/CheckMarkIcon.js: -------------------------------------------------------------------------------- 1 | export const CheckmarkIcon = () => ( 2 | 3 | 4 | 5 | 6 | 7 | ); 8 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Icons/CheckMarkSquareIcon.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | const CheckMarkSquareIcon = () => { 4 | return ( 5 |
6 | 7 | 12 | 13 |
14 | ); 15 | }; 16 | 17 | export default CheckMarkSquareIcon; 18 | -------------------------------------------------------------------------------- /frontend/src/components/Common/Icons/ClearIcon.js: -------------------------------------------------------------------------------- 1 | export const ClearIcon = () => { 2 | return ( 3 | 4 | 8 | 9 | ); 10 | }; 11 | -------------------------------------------------------------------------------- /frontend/src/components/Common/LatestBlock.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | 3 | import { calculateBlockTimeDifference } from '@/utils/calculateBlockTimeDifference'; 4 | 5 | interface LatestBlockProps { 6 | indexerBlockHeight?: number; 7 | } 8 | 9 | interface BlockResponse { 10 | result?: { 11 | header?: { 12 | height?: number; 13 | }; 14 | }; 15 | error?: { 16 | message?: string; 17 | }; 18 | } 19 | 20 | const LatestBlock: React.FC = (props) => { 21 | const [latestFinalBlock, setLatestFinalBlock] = useState(null); 22 | const [errors, setErrors] = useState(''); 23 | 24 | useEffect(() => { 25 | const rpcBlock = async (finality: string): Promise => { 26 | try { 27 | const response = await fetch('https://rpc.mainnet.near.org', { 28 | method: 'POST', 29 | headers: { 30 | 'Content-Type': 'application/json', 31 | }, 32 | body: JSON.stringify({ 33 | jsonrpc: '2.0', 34 | id: 'dontcare', 35 | method: 'block', 36 | params: { 37 | finality, 38 | }, 39 | }), 40 | }); 41 | 42 | if (!response.ok) { 43 | throw new Error(`HTTP error! Status: ${response.status}`); 44 | } 45 | 46 | return await response.json(); 47 | } catch (error) { 48 | return { error: { message: (error as Error).message } }; 49 | } 50 | }; 51 | 52 | const updateFinalBlock = async (): Promise => { 53 | try { 54 | const res = await rpcBlock('final'); 55 | 56 | if (res?.result?.header?.height) { 57 | setLatestFinalBlock(res.result.header.height); 58 | } else { 59 | setErrors('Failed to fetch final block height'); 60 | } 61 | } catch (error) { 62 | setErrors((error as Error).message || 'Error fetching final block height'); 63 | } 64 | }; 65 | 66 | updateFinalBlock().catch((error) => { 67 | console.error('Failed to fetch or process data:', error); 68 | }); 69 | 70 | const intervalId = setInterval(() => { 71 | updateFinalBlock().catch((error) => { 72 | console.error('Failed to update final block:', error); 73 | }); 74 | }, 1000); 75 | 76 | return () => { 77 | clearInterval(intervalId); 78 | }; 79 | }, []); 80 | 81 | return ( 82 |
83 | {latestFinalBlock !== null && props.indexerBlockHeight !== undefined 84 | ? `Indexer is ${latestFinalBlock - props.indexerBlockHeight} blocks or ${calculateBlockTimeDifference( 85 | latestFinalBlock, 86 | props.indexerBlockHeight, 87 | )} behind the blockchain tip` 88 | : `Indexer is not yet synced Latest Final Block Height: ${latestFinalBlock as number}`} 89 | {errors &&
Error: {errors}
} 90 |
91 | ); 92 | }; 93 | 94 | export default LatestBlock; 95 | -------------------------------------------------------------------------------- /frontend/src/components/CreateNewIndexer/CreateNewIndexer.js: -------------------------------------------------------------------------------- 1 | import Editor from '@/components/Editor/EditorComponents/Editor'; 2 | 3 | const CreateNewIndexer = () => { 4 | return ; 5 | }; 6 | 7 | export default CreateNewIndexer; 8 | -------------------------------------------------------------------------------- /frontend/src/components/CreateNewIndexer/index.js: -------------------------------------------------------------------------------- 1 | import CreateNewIndexer from './CreateNewIndexer'; 2 | export default CreateNewIndexer; 3 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorComponents/FileSwitcher.jsx: -------------------------------------------------------------------------------- 1 | import React, { useContext } from 'react'; 2 | import { IndexerDetailsContext } from '@/contexts/IndexerDetailsContext'; 3 | import AlertSquareIcon from '@/components/Common/Icons/AlertSquareIcon'; 4 | import CheckMarkSquareIcon from '@/components/Common/Icons/CheckMarkSquareIcon'; 5 | 6 | import CustomTooltip, { TooltipDirection } from '@/components/Common/CustomTooltip'; 7 | const IndexerErrorMessage = 'There was an error with the Indexer.'; 8 | 9 | export function FileSwitcher({ fileName, setFileName, schemaError, indexerError }) { 10 | const { isCreateNewIndexer } = useContext(IndexerDetailsContext); 11 | return ( 12 |
13 | 30 | 47 | {!isCreateNewIndexer && ( 48 | 56 | )} 57 |
58 | ); 59 | } 60 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorComponents/GlyphContainer.js: -------------------------------------------------------------------------------- 1 | // Used to render Glyphs in the Editor 2 | import styled from 'styled-components'; 3 | 4 | const GlyphContainer = styled.div` 5 | .glyphSuccess { 6 | background: transparent; 7 | } 8 | 9 | .glyphError { 10 | background: red; 11 | } 12 | `; 13 | export { GlyphContainer }; 14 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorComponents/custom.d.ts: -------------------------------------------------------------------------------- 1 | declare module '!!raw-loader!*' { 2 | const content: string; 3 | export default content; 4 | } 5 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorComponents/index.js: -------------------------------------------------------------------------------- 1 | import Editor from './Editor'; 2 | 3 | export default Editor; 4 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorViewContainer/BlockPickerContainer.tsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from 'react'; 2 | 3 | import BlockPickerView from '../EditorView/BlockPickerView'; 4 | 5 | interface BlockPickerContainerProps { 6 | heights: string[]; 7 | setHeights: React.Dispatch>; 8 | executeIndexerFunction: () => void; 9 | latestHeight: number; 10 | isExecuting: boolean; 11 | stopExecution: () => void; 12 | } 13 | 14 | const BlockPickerContainer: React.FC = ({ 15 | heights = [], 16 | setHeights, 17 | executeIndexerFunction, 18 | latestHeight, 19 | isExecuting, 20 | stopExecution, 21 | }) => { 22 | const [inputValue, setInputValue] = useState(String(latestHeight)); 23 | 24 | const addHeight = (): void => { 25 | if (heights.length < 10 && inputValue !== '') { 26 | setHeights([...heights, inputValue]); 27 | setInputValue(''); 28 | } 29 | }; 30 | 31 | return ( 32 | 41 | ); 42 | }; 43 | 44 | export default BlockPickerContainer; 45 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/EditorViewContainer/DeveloperToolsContainer.tsx: -------------------------------------------------------------------------------- 1 | import React, { useContext } from 'react'; 2 | 3 | import { IndexerDetailsContext } from '../../../contexts/IndexerDetailsContext'; 4 | import DeveloperToolsView from '../EditorView/DeveloperToolsView'; 5 | 6 | interface DeveloperToolsContainerProps { 7 | handleFormating: () => void; 8 | handleCodeGen: () => void; 9 | isExecuting: boolean; 10 | executeIndexerFunction: () => void; 11 | heights: number[]; 12 | setHeights: React.Dispatch>; 13 | stopExecution: () => void; 14 | latestHeight: number | undefined; 15 | diffView: boolean; 16 | setDiffView: React.Dispatch>; 17 | } 18 | 19 | const DeveloperToolsContainer: React.FC = ({ 20 | handleFormating, 21 | handleCodeGen, 22 | executeIndexerFunction, 23 | isExecuting, 24 | stopExecution, 25 | heights, 26 | setHeights, 27 | latestHeight, 28 | diffView, 29 | setDiffView, 30 | }) => { 31 | const { setShowResetCodeModel, debugMode, setDebugMode } = useContext(IndexerDetailsContext); 32 | 33 | const removeHeight = (index: number): void => { 34 | setHeights(heights.filter((_, i) => i !== 
index)); 35 | }; 36 | 37 | return ( 38 | 59 | ); 60 | }; 61 | 62 | export default DeveloperToolsContainer; 63 | -------------------------------------------------------------------------------- /frontend/src/components/Editor/QueryApiStorageManager.tsx: -------------------------------------------------------------------------------- 1 | export default class QueryAPIStorageManager { 2 | private indexerCodeStorageKey: string; 3 | private schemaCodeStorageKey: string; 4 | private schemaTypesStorageKey: string; 5 | private cursorPositionKey: string; 6 | private debugListStorageKey: string; 7 | 8 | constructor(accountID: string, indexerName: string) { 9 | this.indexerCodeStorageKey = this.createStorageKey('IndexerCode', accountID, indexerName); 10 | this.schemaCodeStorageKey = this.createStorageKey('SchemaCode', accountID, indexerName); 11 | this.schemaTypesStorageKey = this.createStorageKey('SchemaTypes', accountID, indexerName); 12 | this.cursorPositionKey = this.createStorageKey('CursorPosition', accountID, indexerName); 13 | this.debugListStorageKey = this.createStorageKey('DebugList', accountID, indexerName); 14 | } 15 | 16 | private createStorageKey(type: string, accountID: string, indexerName: string): string { 17 | return `QueryAPI:${type}:${accountID}#${indexerName || 'new'}`; 18 | } 19 | 20 | private saveToLocalStorage(key: string, data: any): void { 21 | localStorage.setItem(key, JSON.stringify(data)); 22 | } 23 | 24 | private getFromLocalStorage(key: string): any { 25 | const data = localStorage.getItem(key); 26 | return data ? JSON.parse(data) : null; 27 | } 28 | 29 | setIndexerCode(data: any): void { 30 | this.saveToLocalStorage(this.indexerCodeStorageKey, data); 31 | } 32 | 33 | getIndexerCode(): any { 34 | return this.getFromLocalStorage(this.indexerCodeStorageKey); 35 | } 36 | 37 | setSchemaCode(data: any): void { 38 | this.saveToLocalStorage(this.schemaCodeStorageKey, data); 39 | } 40 | 41 | getSchemaCode(): any { 42 | return this.getFromLocalStorage(this.schemaCodeStorageKey); 43 | } 44 | 45 | setSchemaTypes(data: any): void { 46 | this.saveToLocalStorage(this.schemaTypesStorageKey, data); 47 | } 48 | 49 | getSchemaTypes(): any { 50 | return this.getFromLocalStorage(this.schemaTypesStorageKey); 51 | } 52 | 53 | setCursorPosition(data: any): void { 54 | this.saveToLocalStorage(this.cursorPositionKey, data); 55 | } 56 | 57 | getCursorPosition(): any { 58 | return this.getFromLocalStorage(this.cursorPositionKey); 59 | } 60 | 61 | setDebugList(data: any): void { 62 | this.saveToLocalStorage(this.debugListStorageKey, data); 63 | } 64 | 65 | getDebugList(): any { 66 | return this.getFromLocalStorage(this.debugListStorageKey); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/GraphQL/Query.ts: -------------------------------------------------------------------------------- 1 | export const Query = (tableName: string): string => ` 2 | query getLogsQuery( 3 | $limit: Int, 4 | $offset: Int, 5 | $order_by_timestamp: order_by, 6 | $level: String_comparison_exp = {}, 7 | $type: String_comparison_exp= {}, 8 | $timestamp: timestamp_comparison_exp = {}, 9 | $message: String_comparison_exp = {}, 10 | $block_height: numeric_comparison_exp = {} 11 | ) { 12 | ${tableName}( 13 | limit: $limit, 14 | offset: $offset, 15 | order_by: {timestamp: $order_by_timestamp}, 16 | where: { 17 | message: $message, 18 | _or: [ 19 | { message: $message }, 20 | { block_height: $block_height }, 21 | ], 22 | _and: [ 23 | {level: $level}, 
24 | {type: $type}, 25 | {timestamp: $timestamp} 26 | ] 27 | } 28 | ) { 29 | block_height 30 | level 31 | message 32 | timestamp 33 | type 34 | } 35 | ${tableName}_aggregate( 36 | where: { 37 | message: $message, 38 | _or: [ 39 | { message: $message }, 40 | { block_height: $block_height }, 41 | ], 42 | _and: [ 43 | {level: $level}, 44 | {type: $type}, 45 | {timestamp: $timestamp} 46 | ] 47 | } 48 | ) { 49 | aggregate { 50 | count 51 | } 52 | } 53 | } 54 | `; 55 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/GraphQL/QueryValidation.ts: -------------------------------------------------------------------------------- 1 | import { calculateTimestamp } from '@/utils/calculateTimestamp'; 2 | 3 | interface Variables { 4 | limit: number; 5 | offset: number; 6 | order_by_timestamp: 'asc' | 'desc'; 7 | level?: string; 8 | type?: string; 9 | timestamp?: string; 10 | keyword?: string; 11 | } 12 | 13 | interface QueryFilter { 14 | _eq?: string | number; 15 | _ilike?: string; 16 | _gte?: string; 17 | } 18 | interface QueryValidationResult { 19 | limit: number; 20 | offset: number; 21 | order_by_timestamp: 'asc' | 'desc'; 22 | level?: QueryFilter; 23 | type?: QueryFilter; 24 | timestamp?: QueryFilter; 25 | keyword?: string; 26 | message?: { _ilike: string }; 27 | block_height?: { _eq: number }; 28 | } 29 | /* eslint-disable-next-line */ 30 | export const QueryValidation = ({ 31 | limit, 32 | offset, 33 | order_by_timestamp, 34 | level, 35 | type, 36 | timestamp, 37 | keyword, 38 | }: Variables): QueryValidationResult => { 39 | const levelFormat: { level?: QueryFilter } = level ? { level: { _eq: level } } : {}; 40 | const typeFormat: { type?: QueryFilter } = type ? { type: { _eq: type } } : {}; 41 | const timestampFormat: { timestamp?: QueryFilter } = timestamp 42 | ? { timestamp: { _gte: calculateTimestamp(timestamp) } } 43 | : {}; 44 | const messageFormat = keyword ? { message: { _ilike: `%${keyword}%` } } : {}; 45 | const blockHeightFormat = keyword && !isNaN(Number(keyword)) ? 
{ block_height: { _eq: Number(keyword) } } : {}; 46 | 47 | return { 48 | limit, 49 | offset, 50 | order_by_timestamp, 51 | ...levelFormat, 52 | ...typeFormat, 53 | ...timestampFormat, 54 | ...messageFormat, 55 | ...blockHeightFormat, 56 | }; 57 | }; 58 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/ClearButtonView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Button } from 'react-bootstrap'; 3 | import { ClearIcon } from '@/components/Common/Icons/ClearIcon'; 4 | 5 | const ClearButtonView = ({ onClick }) => { 6 | return ( 7 | 15 | ); 16 | }; 17 | 18 | export default ClearButtonView; 19 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/DateSelectorView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import OptionSelectorContainer from '../LogsViewContainer/OptionSelectorContainer'; 3 | 4 | const DateSelectorView = ({ options, selectedOption, onOptionChange }) => { 5 | return ; 6 | }; 7 | 8 | export default DateSelectorView; 9 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/IndexerLogsView.jsx: -------------------------------------------------------------------------------- 1 | import { Container, Row, Col } from 'react-bootstrap'; 2 | import LogsMenu from '../LogsMenu'; 3 | import LogFieldCardView from './LogFieldCardView'; 4 | import 'gridjs/dist/theme/mermaid.css'; 5 | import styled from 'styled-components'; 6 | 7 | const CustomGridContainer = styled.div` 8 | .gridjs-wrapper { 9 | border-radius: 0 !important; 10 | box-shadow: none !important; 11 | border: 1px solid #d2d2d2 !important; 12 | border-collapse: collapse !important; 13 | } 14 | .gridjs-container { 15 | padding: 0 2px !important; 16 | } 17 | .gridjs-table { 18 | border-collapse: collapse; 19 | } 20 | .gridjs-td { 21 | border: none; 22 | } 23 | .gridjs-search { 24 | width: 100% !important; 25 | } 26 | .gridjs-search-input { 27 | width: 100% !important; 28 | padding: 18px !important; 29 | border-radius: 4px 4px 0px 0 !important; 30 | border: 1px solid #d2d2d2 !important; 31 | } 32 | .gridjs-head { 33 | padding: 0 !important; 34 | margin: 0 !important; 35 | } 36 | `; 37 | 38 | const IndexerLogsView = ({ 39 | severity, 40 | setSeverity, 41 | logType, 42 | setLogType, 43 | startTime, 44 | setStartTime, 45 | functionName, 46 | tableName, 47 | latestHeight, 48 | currentIndexerDetails, 49 | currentUserAccountId, 50 | getIndexerLogsConfig, 51 | getSearchConfig, 52 | getPaginationConfig, 53 | getGridStyle, 54 | getGridConfig, 55 | reloadData, 56 | gridContainerRef, 57 | }) => { 58 | return ( 59 | <> 60 | 67 | 68 | 69 | 70 | 71 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | ); 87 | }; 88 | 89 | export default IndexerLogsView; 90 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/LogFieldCardView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Card, Accordion } from 'react-bootstrap'; 3 | import SeveritySelectorContainer from '../LogsViewContainer/SeveritySelectorContainer'; 4 | import LogTypeSelectorContainer from '../LogsViewContainer/LogTypeSelectorContainer'; 5 | import DateSelectorContainer from 
'../LogsViewContainer/DateSelectorContainer'; 6 | import styled from 'styled-components'; 7 | 8 | const CustomAccordianWrapper = styled.div` 9 | .accordion-button { 10 | background-color: #f8f8f8 !important; 11 | } 12 | .accordion-item:first-of-type { 13 | border-top-left-radius: 0 !important; 14 | border-top-right-radius: 0 !important; 15 | border-collapse: collapse !important; 16 | } 17 | .accordion-item:first-of-type > .accordion-header .accordion-button { 18 | border-top-left-radius: 0 !important; 19 | border-top-right-radius: 0 !important; 20 | } 21 | .accordian { 22 | .--bs-accordion-border-width: 0 !important; 23 | } 24 | `; 25 | 26 | const LogFieldCardView = ({ 27 | severity, 28 | handleSeverityChange, 29 | logType, 30 | handleLogTypeChange, 31 | dateFilter, 32 | handleDateFilter, 33 | }) => { 34 | return ( 35 | 36 | Filters 37 | 38 | 39 | 40 | Severity 41 | 42 | 43 | 44 | 45 | 46 | 47 | Log Type 48 | 49 | 50 | 51 | 52 | 53 | 54 | Date 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | ); 63 | }; 64 | 65 | export default LogFieldCardView; 66 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/LogTypeSelectorView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import OptionSelectorContainer from '../LogsViewContainer/OptionSelectorContainer'; 3 | 4 | const LogTypeSelectorView = ({ options, selectedOption, onOptionChange }) => { 5 | return ; 6 | }; 7 | 8 | export default LogTypeSelectorView; 9 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/OptionSelectorView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { Row, Col, Form, Button } from 'react-bootstrap'; 3 | import { CheckmarkIcon } from '@/components/Common/Icons/CheckMarkIcon'; 4 | import ClearButtonContainer from '../LogsViewContainer/ClearButtonContainer'; 5 | 6 | const OptionSelectorView = ({ options, selectedOption, onOptionChange, handleOptionChange, handleClearSelection }) => { 7 | return ( 8 |
9 | 10 | {options.map((option, index) => ( 11 | 12 |
handleOptionChange(option)} 18 | > 19 | 20 | {selectedOption === option && } 21 |
{option}
22 |
23 | {selectedOption === option && } 24 |
25 | 26 | ))} 27 |
28 |
29 | ); 30 | }; 31 | 32 | export default OptionSelectorView; 33 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsView/SeveritySelectorView.jsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import OptionSelectorContainer from '../LogsViewContainer/OptionSelectorContainer'; 3 | 4 | const SeveritySelectorView = ({ options, selectedOption, onOptionChange }) => { 5 | return ; 6 | }; 7 | 8 | export default SeveritySelectorView; 9 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsViewContainer/ClearButtonContainer.tsx: -------------------------------------------------------------------------------- 1 | import React, { type FC, type MouseEvent } from 'react'; 2 | 3 | import ClearButtonView from '../LogsView/ClearButtonView'; 4 | 5 | interface ClearButtonProps { 6 | onClick: () => void; 7 | } 8 | 9 | const ClearButton: FC = ({ onClick }) => { 10 | const handleClick = (event: MouseEvent): void => { 11 | event.stopPropagation(); 12 | onClick(); 13 | }; 14 | 15 | return ; 16 | }; 17 | 18 | export default ClearButton; 19 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsViewContainer/DateSelectorContainer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import { TIME_INTERVALS_MAP } from '@/constants/DurationMap'; 4 | 5 | import DateSelectorView from '../LogsView/DateSelectorView'; 6 | 7 | interface DateSelectorProps { 8 | selectedDate: string; 9 | onDateChange: (selectedDate: Date) => void; 10 | } 11 | 12 | const DateSelectorContainer: React.FC = ({ selectedDate, onDateChange }) => { 13 | const dateOptions: string[] = Array.from(TIME_INTERVALS_MAP.values()); 14 | 15 | return ; 16 | }; 17 | 18 | export default DateSelectorContainer; 19 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsViewContainer/LogTypeSelectorContainer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import LogTypeSelectorView from '../LogsView/LogTypeSelectorView'; 4 | 5 | interface LogTypeSelectorContainerProps { 6 | selectedLogType: string; 7 | onLogTypeChange: (logType: string) => void; 8 | } 9 | 10 | const LogTypeSelectorContainer: React.FC = ({ selectedLogType, onLogTypeChange }) => { 11 | const logTypeOptions: string[] = ['system', 'user']; 12 | 13 | return ( 14 | 15 | ); 16 | }; 17 | 18 | export default LogTypeSelectorContainer; 19 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsViewContainer/OptionSelectorContainer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import OptionSelectorView from '../LogsView/OptionSelectorView'; 4 | interface OptionSelectorContainerProps { 5 | options: string[]; 6 | selectedOption: string; 7 | onOptionChange: (option: string) => void; 8 | } 9 | 10 | const OptionSelectorContainer: React.FC = ({ 11 | options, 12 | selectedOption, 13 | onOptionChange, 14 | }) => { 15 | const handleOptionChange = (value: string): void => { 16 | onOptionChange(value); 17 | }; 18 | 19 | const handleClearSelection = (): void => { 20 | onOptionChange(''); 21 | }; 22 | 23 | return ( 24 | 31 | 
); 32 | }; 33 | 34 | export default OptionSelectorContainer; 35 | -------------------------------------------------------------------------------- /frontend/src/components/Logs/LogsViewContainer/SeveritySelectorContainer.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | import SeverityRadioButtonGroupView from '../LogsView/SeveritySelectorView'; 4 | 5 | interface SeveritySelectorProps { 6 | selectedSeverity: string; 7 | onSeverityChange: (severity: string) => void; 8 | } 9 | 10 | const SeveritySelectorContainer: React.FC = ({ selectedSeverity, onSeverityChange }) => { 11 | // Refactor to fetch fields from graphql 12 | const severityOptions: string[] = ['INFO', 'DEBUG', 'WARNING', 'ERROR']; 13 | 14 | return ( 15 | 20 | ); 21 | }; 22 | 23 | export default SeveritySelectorContainer; 24 | -------------------------------------------------------------------------------- /frontend/src/components/Modals/PublishModal.jsx: -------------------------------------------------------------------------------- 1 | import React, { useContext, useState } from 'react'; 2 | import { Button, Modal, Alert } from 'react-bootstrap'; 3 | import PublishFormContainer from './ModalsContainer/PublishFormContainer'; 4 | import { IndexerDetailsContext } from '../../contexts/IndexerDetailsContext'; 5 | import { validateContractIds } from '../../utils/validators'; 6 | 7 | export const PublishModal = ({ registerFunction, actionButtonText }) => { 8 | const { indexerDetails, showPublishModal, setShowPublishModal } = useContext(IndexerDetailsContext); 9 | const [indexerConfig, setIndexerConfig] = useState({ filter: 'social.near', startBlockHeight: null }); 10 | const [indexerName, setIndexerName] = useState(''); 11 | const [error, setError] = useState(null); 12 | 13 | const updateConfig = (indexerName, filter, height, startBlock) => { 14 | setIndexerConfig({ filter, startBlock, height }); 15 | setIndexerName(indexerName); 16 | }; 17 | 18 | const register = async () => { 19 | if (indexerName === undefined || indexerName === '') { 20 | setError(() => 'Please provide an Indexer Name'); 21 | return; 22 | } 23 | 24 | if (!validateContractIds(indexerConfig.filter)) { 25 | setError(() => 'Please provide a valid contract name'); 26 | return; 27 | } 28 | setError(null); 29 | registerFunction(indexerName, indexerConfig); 30 | setShowPublishModal(false); 31 | }; 32 | 33 | return ( 34 | setShowPublishModal(false)} className="bg-gray-50"> 35 | 36 | Enter Indexer Details 37 | 38 | 39 | 40 | {error && ( 41 | 45 | {error} 46 | 47 | )} 48 | 49 | 50 | 57 | 60 | 61 | 62 | ); 63 | }; 64 | -------------------------------------------------------------------------------- /frontend/src/components/Modals/ResetChangesModal.jsx: -------------------------------------------------------------------------------- 1 | import { IndexerDetailsContext } from '../../contexts/IndexerDetailsContext'; 2 | import React, { useContext } from 'react'; 3 | import { Button, Modal } from 'react-bootstrap'; 4 | export const ResetChangesModal = ({ handleResetCodeChanges }) => { 5 | const { showResetCodeModel, setShowResetCodeModel } = useContext(IndexerDetailsContext); 6 | return ( 7 | setShowResetCodeModel(false)}> 8 | 9 | Are you sure? 10 | 11 | The changes you have made in the editor will be deleted. 
12 | 13 | 16 | 19 | 20 | 21 | ); 22 | }; 23 | -------------------------------------------------------------------------------- /frontend/src/components/Playground/index.js: -------------------------------------------------------------------------------- 1 | import dynamic from 'next/dynamic'; 2 | 3 | const DynamicGraphiQLPlayground = dynamic( 4 | () => import('./graphiql.jsx').then((mod) => mod.GraphqlPlayground), 5 | { ssr: false }, // This will load the component only on client side 6 | ); 7 | 8 | function GraphqlPlayground() { 9 | return ( 10 |
11 | 12 |
13 | ); 14 | } 15 | 16 | export default GraphqlPlayground; 17 | -------------------------------------------------------------------------------- /frontend/src/constants/DurationMap.ts: -------------------------------------------------------------------------------- 1 | export const TIME_INTERVALS_MAP = new Map([ 2 | ['15s', 'Last 15 seconds (15s)'], 3 | ['30s', 'Last 30 seconds (30s)'], 4 | ['1m', 'Last 1 minute (1m)'], 5 | ['5m', 'Last 5 minutes (5m)'], 6 | ['10m', 'Last 10 minutes (10m)'], 7 | ['15m', 'Last 15 minutes (15m)'], 8 | ['30m', 'Last 30 minutes (30m)'], 9 | ['45m', 'Last 45 minutes (45m)'], 10 | ['1h', 'Last 1 hour (1h)'], 11 | ['3h', 'Last 3 hours (3h)'], 12 | ['6h', 'Last 6 hours (6h)'], 13 | ['12h', 'Last 12 hours (12h)'], 14 | ['1d', 'Last 1 day (1d)'], 15 | ['2d', 'Last 2 days (2d)'], 16 | ['7d', 'Last 7 days (7d)'], 17 | ['14d', 'Last 14 days (14d)'], 18 | ['30d', 'Last 30 days (30d)'], 19 | ]); 20 | 21 | export const DURATION_MAP: Record<string, number> = {}; 22 | 23 | TIME_INTERVALS_MAP.forEach((description: string, key: string) => { 24 | let duration: number; 25 | const unit: string = key.slice(-1); 26 | const value: number = parseInt(key.slice(0, -1), 10); 27 | 28 | switch (unit) { 29 | case 's': 30 | duration = value * 1000; 31 | break; 32 | case 'm': 33 | duration = value * 60 * 1000; 34 | break; 35 | case 'h': 36 | duration = value * 60 * 60 * 1000; 37 | break; 38 | case 'd': 39 | duration = value * 24 * 60 * 60 * 1000; 40 | break; 41 | default: 42 | throw new Error(`Unknown unit: ${unit}`); 43 | } 44 | 45 | DURATION_MAP[description] = duration; 46 | }); 47 | -------------------------------------------------------------------------------- /frontend/src/constants/RegexExp.js: -------------------------------------------------------------------------------- 1 | export const CONTRACT_NAME_REGEX = RegExp( 2 | /^(([a-z\d]+[-_])*[a-z\d]+(\.([a-z\d]+[-_])*[a-z\d]+)*\.([a-z\d]+)|([a-z\d]+))$/, 3 | ); 4 | export const WILD_CARD_REGEX = RegExp(/^\*\./); 5 | export const WILD_CARD = '*'; 6 | -------------------------------------------------------------------------------- /frontend/src/constants/Strings.js: -------------------------------------------------------------------------------- 1 | // error messages 2 | export const CODE_GENERAL_ERROR_MESSAGE = 'There is an error in your code. Please check the console for more details.'; 3 | export const CODE_FORMATTING_ERROR_MESSAGE = 4 | 'There was an error while formatting your code. Please check the console for more details.'; 5 | export const SCHEMA_TYPE_GENERATION_ERROR_MESSAGE = 6 | 'There was an error while generating types for your SQL schema. Please ensure your schema is valid SQL DDL.'; 7 | export const SCHEMA_FORMATTING_ERROR_MESSAGE = 8 | 'There was an error while formatting your schema. Please, check the console for more details.'; 9 | export const INDEXER_REGISTER_TYPE_GENERATION_ERROR = 10 | 'There was an error generating types from your schema, so the context.db object cannot be created. 
Would you like to proceed with registering the Indexer despite this?'; 11 | 12 | //error types 13 | export const FORMATTING_ERROR_TYPE = 'formatting_error'; 14 | export const TYPE_GENERATION_ERROR_TYPE = 'type_generation_error_type'; 15 | -------------------------------------------------------------------------------- /frontend/src/contexts/ModalContext.js: -------------------------------------------------------------------------------- 1 | import React, { createContext, useContext, useState } from 'react'; 2 | 3 | const ModalContext = createContext({ 4 | openModal: false, 5 | message: '', 6 | data: {}, 7 | showModal: () => {}, 8 | hideModal: () => {}, 9 | }); 10 | 11 | export const useModal = () => useContext(ModalContext); 12 | 13 | export const ModalProvider = ({ children }) => { 14 | const [openModal, setOpenModal] = useState(false); 15 | const [message, setMessage] = useState(''); 16 | const [data, setData] = useState(); 17 | 18 | const showModal = (errorMessage, data = null) => { 19 | setOpenModal(true); 20 | setMessage(errorMessage); 21 | setData(data); 22 | }; 23 | 24 | const hideModal = () => { 25 | setOpenModal(false); 26 | setMessage(''); 27 | setData(); 28 | }; 29 | 30 | return ( 31 | {children} 32 | ); 33 | }; 34 | -------------------------------------------------------------------------------- /frontend/src/core/InfoModal.jsx: -------------------------------------------------------------------------------- 1 | import { Button, Modal } from 'react-bootstrap'; 2 | import PropTypes from 'prop-types'; 3 | 4 | export const InfoModal = ({ 5 | open, 6 | title, 7 | message, 8 | okButtonText = 'OK', 9 | cancelButtonText = 'CANCEL', 10 | onOkButtonPressed, 11 | onCancelButtonPressed, 12 | onClose, 13 | }) => { 14 | const handleClose = () => { 15 | if (onClose) { 16 | onClose(); 17 | } 18 | }; 19 | 20 | const handleOnOkButtonPressed = () => { 21 | onOkButtonPressed(); 22 | onClose(); 23 | }; 24 | 25 | return ( 26 | 27 | 28 | {title} 29 | 30 | 31 |

{message}

32 |
33 | 34 | {onCancelButtonPressed && ( 35 | 38 | )} 39 | {onOkButtonPressed && ( 40 | 43 | )} 44 | 45 |
46 | ); 47 | }; 48 | 49 | InfoModal.propTypes = { 50 | open: PropTypes.bool.isRequired, 51 | title: PropTypes.string.isRequired, 52 | message: PropTypes.string.isRequired, 53 | okButtonText: PropTypes.string, 54 | onOkButtonPressed: PropTypes.func, 55 | cancelButtonText: PropTypes.string, 56 | onCancelButtonPressed: PropTypes.func, 57 | onClose: PropTypes.func, 58 | }; 59 | -------------------------------------------------------------------------------- /frontend/src/pages/_app.jsx: -------------------------------------------------------------------------------- 1 | import 'bootstrap/dist/css/bootstrap.min.css'; 2 | import 'near-social-bridge/near-social-bridge.css'; 3 | import './global.css'; 4 | import 'regenerator-runtime/runtime'; 5 | 6 | import React from 'react'; 7 | 8 | import { Spinner } from 'near-social-bridge'; 9 | import { overrideLocalStorage, NearSocialBridgeProvider } from 'near-social-bridge'; 10 | import { ApolloClient, InMemoryCache, ApolloProvider } from '@apollo/client'; 11 | import { ModalProvider } from '@/contexts/ModalContext'; 12 | import { IndexerDetailsProvider } from '@/contexts/IndexerDetailsContext'; 13 | 14 | overrideLocalStorage(); 15 | 16 | export default function App({ Component, pageProps }) { 17 | const client = new ApolloClient({ 18 | uri: `${process.env.NEXT_PUBLIC_HASURA_ENDPOINT}/v1/graphql`, 19 | cache: new InMemoryCache(), 20 | }); 21 | 22 | return ( 23 | }> 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | ); 33 | } 34 | -------------------------------------------------------------------------------- /frontend/src/pages/_document.jsx: -------------------------------------------------------------------------------- 1 | import { Html, Head, Main, NextScript } from 'next/document'; 2 | 3 | export default function Document() { 4 | return ( 5 | 6 | 7 | 8 |
9 | 10 | 11 | 12 | ); 13 | } 14 | -------------------------------------------------------------------------------- /frontend/src/pages/api/generateCode.ts: -------------------------------------------------------------------------------- 1 | import type { NextApiRequest, NextApiResponse } from 'next'; 2 | 3 | import { createSchema } from 'genson-js'; 4 | import type { Schema } from 'genson-js/dist/types'; 5 | import { WizardCodeGenerator } from './WizardCodeGenerator'; 6 | 7 | export type Method = { 8 | method_name: string; 9 | schema: Schema; 10 | }; 11 | 12 | export type Event = { 13 | event_name: string; 14 | schema: Schema; 15 | }; 16 | 17 | export interface RequestBody { 18 | contractFilter: string | string[]; 19 | selectedMethods: Method[]; 20 | selectedEvents?: Event[]; 21 | } 22 | 23 | export const isStringOrArray = (value: any): value is string | string[] => 24 | (typeof value === 'string' && value !== '') || 25 | (Array.isArray(value) && value.every((item) => typeof item === 'string')); 26 | 27 | export const isValidSchema = (schema: any): boolean => { 28 | try { 29 | createSchema(schema); 30 | return true; 31 | } catch { 32 | return false; 33 | } 34 | }; 35 | 36 | export const validateRequestBody = (body: any): body is RequestBody => { 37 | return ( 38 | isStringOrArray(body.contractFilter) && 39 | Array.isArray(body.selectedMethods) && 40 | body.selectedMethods.every(isValidMethod) && 41 | Array.isArray(body.selectedEvents) && 42 | body.selectedEvents.every(isValidEvent) 43 | ); 44 | }; 45 | 46 | export const isValidMethod = (item: any): item is Method => 47 | typeof item === 'object' && 48 | typeof item.method_name === 'string' && 49 | item.method_name.trim() !== '' && 50 | isValidSchema(item.schema); 51 | 52 | export const isValidEvent = (item: any): item is Event => 53 | typeof item === 'object' && 54 | typeof item.event_name === 'string' && 55 | item.event_name.trim() !== '' && 56 | isValidSchema(item.schema); 57 | 58 | export default function handler(req: NextApiRequest, res: NextApiResponse): void { 59 | res.setHeader('Access-Control-Allow-Origin', '*'); 60 | res.setHeader('Access-Control-Allow-Methods', 'POST, OPTIONS'); 61 | res.setHeader('Access-Control-Allow-Headers', 'Content-Type'); 62 | if (req.method === 'OPTIONS') { 63 | res.status(200).end(); 64 | return; 65 | } 66 | 67 | if (req.method !== 'POST') { 68 | res.status(405).json({ error: 'Method Not Allowed' }); 69 | return; 70 | } 71 | 72 | if (!validateRequestBody(req.body)) { 73 | res.status(400).json({ 74 | error: 'Invalid request body: selectedMethods and selectedEvents must be arrays of objects with correct shape', 75 | }); 76 | return; 77 | } 78 | 79 | const { contractFilter, selectedMethods, selectedEvents } = req.body; 80 | const filterString = Array.isArray(contractFilter) ? 
contractFilter.join(', ') : contractFilter; 81 | 82 | const generator = new WizardCodeGenerator(filterString, selectedMethods, selectedEvents); 83 | const { jsCode, sqlCode } = generator.generateCode(); 84 | 85 | res.status(200).json({ jsCode, sqlCode }); 86 | } 87 | -------------------------------------------------------------------------------- /frontend/src/pages/create-new-indexer/index.js: -------------------------------------------------------------------------------- 1 | import { withRouter } from 'next/router'; 2 | import React, { useContext, useEffect } from 'react'; 3 | import { Alert } from 'react-bootstrap'; 4 | 5 | import CreateNewIndexer from '@/components/CreateNewIndexer'; 6 | import { IndexerDetailsContext } from '@/contexts/IndexerDetailsContext'; 7 | 8 | const CreateNewIndexerPage = ({ router }) => { 9 | const { accountId } = router.query; 10 | const { setAccountId, setIsCreateNewIndexer } = useContext(IndexerDetailsContext); 11 | 12 | useEffect(() => { 13 | setIsCreateNewIndexer(true); 14 | setAccountId(accountId); 15 | }, [accountId, setAccountId, setIsCreateNewIndexer]); 16 | 17 | if (accountId == undefined) { 18 | return ( 19 | <> 20 | 21 | AccountId needs to be specified in the URL 22 | 23 | 24 | ); 25 | } 26 | 27 | return ; 28 | }; 29 | 30 | export default withRouter(CreateNewIndexerPage); 31 | -------------------------------------------------------------------------------- /frontend/src/pages/global.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss/base"; 2 | @import "tailwindcss/components"; 3 | @import "tailwindcss/utilities"; 4 | 5 | /* Override Tailwind CSS collapse class */ 6 | .accordion-collapse.collapse { 7 | @apply visible; 8 | } 9 | -------------------------------------------------------------------------------- /frontend/src/pages/index.jsx: -------------------------------------------------------------------------------- 1 | export default function Home() { 2 | return <>; 3 | } 4 | -------------------------------------------------------------------------------- /frontend/src/pages/query-api-editor/index.js: -------------------------------------------------------------------------------- 1 | import { withRouter } from 'next/router'; 2 | import React, { useContext, useEffect } from 'react'; 3 | import { Alert } from 'react-bootstrap'; 4 | 5 | import Editor from '@/components/Editor/EditorComponents/Editor'; 6 | import IndexerLogsContainer from '@/components/Logs/LogsViewContainer/IndexerLogsContainer'; 7 | import { IndexerDetailsContext } from '@/contexts/IndexerDetailsContext'; 8 | 9 | const QueryApiEditorPage = ({ router }) => { 10 | const { accountId, indexerName } = router.query; 11 | const { setAccountId, setIndexerName, showLogsView } = useContext(IndexerDetailsContext); 12 | 13 | useEffect(() => { 14 | if (!accountId || !indexerName) return; 15 | setAccountId(accountId); 16 | setIndexerName(indexerName); 17 | }, [accountId, indexerName]); 18 | 19 | if (accountId == undefined || indexerName == undefined) { 20 | return ( 21 | 22 | Both accountId and IndexerName need to be specified in the URL. 23 | 24 | ); 25 | } 26 | 27 | return showLogsView ? 
: ; 28 | }; 29 | 30 | export default withRouter(QueryApiEditorPage); 31 | -------------------------------------------------------------------------------- /frontend/src/utils/calculateBlockTimeDifference.ts: -------------------------------------------------------------------------------- 1 | export const calculateBlockTimeDifference = (latestBlockHeight: number, currentBlockHeight: number): string => { 2 | const averageBlockTimeSeconds = 1.1; 3 | const blocksDifference: number = Math.abs(currentBlockHeight - latestBlockHeight); 4 | 5 | const timeDifferenceSeconds: number = blocksDifference * averageBlockTimeSeconds; 6 | 7 | const days: number = Math.floor(timeDifferenceSeconds / (3600 * 24)); 8 | const hours: number = Math.floor((timeDifferenceSeconds % (3600 * 24)) / 3600); 9 | const minutes: number = Math.floor((timeDifferenceSeconds % 3600) / 60); 10 | const seconds: number = Math.floor(timeDifferenceSeconds % 60); 11 | 12 | let timeDifferenceString = ''; 13 | if (days > 0) { 14 | timeDifferenceString += `${days}day${days > 1 ? 's' : ''} `; 15 | } 16 | if (hours > 0) { 17 | timeDifferenceString += `${hours} hr${hours > 1 ? 's' : ''} `; 18 | } 19 | if (minutes > 0 || hours > 0) { 20 | timeDifferenceString += `${minutes} min${minutes > 1 ? 's' : ''} `; 21 | } 22 | timeDifferenceString += `${seconds} s`; 23 | 24 | return timeDifferenceString.trim(); 25 | }; 26 | -------------------------------------------------------------------------------- /frontend/src/utils/calculateTimestamp.ts: -------------------------------------------------------------------------------- 1 | import { DURATION_MAP } from '@/constants/DurationMap'; 2 | 3 | export const calculateTimestamp = (selectedOption: string): string => { 4 | const currentTime: number = Date.now(); 5 | const duration: number | undefined = DURATION_MAP[selectedOption]; 6 | if (duration !== undefined) { 7 | return new Date(currentTime - duration).toISOString(); 8 | } else { 9 | console.log('invalid option'); 10 | return ''; 11 | } 12 | }; 13 | -------------------------------------------------------------------------------- /frontend/src/utils/debounce.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Provided a duration and a function, returns a new function which is called 3 | * `duration` milliseconds after the last call. 
4 | */ 5 | export default function debounce(duration, fn) { 6 | let timeout; 7 | return function (...args) { 8 | if (timeout) { 9 | window.clearTimeout(timeout); 10 | } 11 | timeout = window.setTimeout(() => { 12 | timeout = null; 13 | fn(...args); 14 | }, duration); 15 | }; 16 | } 17 | -------------------------------------------------------------------------------- /frontend/src/utils/fetchBlock.js: -------------------------------------------------------------------------------- 1 | const BLOCK_FETCHER_API = 'https://70jshyr5cb.execute-api.eu-central-1.amazonaws.com/block/'; 2 | 3 | const GENESIS_BLOCK_HEIGHT = 52945886; 4 | export async function fetchBlockDetails(blockHeight) { 5 | if (blockHeight <= GENESIS_BLOCK_HEIGHT) { 6 | throw new Error(`Block Height must be greater than genesis block height #${GENESIS_BLOCK_HEIGHT}`); 7 | } 8 | try { 9 | const response = await fetch(`${BLOCK_FETCHER_API}${String(blockHeight)}`); 10 | const block_details = await response.json(); 11 | return block_details; 12 | } catch { 13 | throw new Error(`Error Fetching Block Height details at BlockHeight #${blockHeight}`); 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /frontend/src/utils/formatTimestamp.ts: -------------------------------------------------------------------------------- 1 | export const formatTimestamp = (timestamp: string): string => { 2 | const date = new Date(timestamp); 3 | 4 | const year = date.getFullYear(); 5 | const month = String(date.getMonth() + 1).padStart(2, '0'); 6 | const day = String(date.getDate()).padStart(2, '0'); 7 | 8 | const hours = String(date.getHours()).padStart(2, '0'); 9 | const minutes = String(date.getMinutes()).padStart(2, '0'); 10 | const seconds = String(date.getSeconds()).padStart(2, '0'); 11 | 12 | const milliseconds = String(date.getMilliseconds()).padStart(3, '0'); 13 | 14 | return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${milliseconds}`; 15 | }; 16 | -------------------------------------------------------------------------------- /frontend/src/utils/formatters.test.js: -------------------------------------------------------------------------------- 1 | import { formatIndexingCode, formatSQL } from './formatters'; 2 | 3 | const inputSQL1 = `CREATE TABLE\n "indexer_storage" (\n "function_name" TEXT NOT NULL,\n "key_name" TEXT NOT NULL,\n "value" TEXT NOT NULL,\n PRIMARY KEY ("function_name", "key_name")\n )\n`; 4 | const expectedOutput1 = `CREATE TABLE 5 | "indexer_storage" ( 6 | "function_name" TEXT NOT NULL, 7 | "key_name" TEXT NOT NULL, 8 | "value" TEXT NOT NULL, 9 | PRIMARY KEY ("function_name", "key_name") 10 | ) 11 | `; 12 | 13 | test('Basic formatting for SQL', () => { 14 | expect(formatSQL(inputSQL1)).toEqual(expectedOutput1); 15 | }); 16 | 17 | const inputSQL2 = `CREATE INVALID TABLE indexer_storage"`; 18 | 19 | test('Formatting invalid SQL input returns the invalid unformatted input', () => { 20 | console.log(formatSQL(inputSQL2)); 21 | expect(formatSQL(inputSQL2)).toEqual(inputSQL2); 22 | }); 23 | 24 | const inputJS2 = 25 | '\n const h = block.header().height;\n console.log("About to write demo_blockheight", h);\n await context.set("demo_height", h);\n'; 26 | const expectedOutput2 = `const h = block.header().height; 27 | console.log("About to write demo_blockheight", h); 28 | await context.set("demo_height", h);\n`; 29 | 30 | test('formatting for JS code without wrapCode', () => { 31 | expect(formatIndexingCode(inputJS2, false)).toEqual(expectedOutput2); 32 | }); 33 | 34 | const 
expectedOutput3 = `import { Block } from "@near-lake/primitives"; 35 | /** 36 | * Note: We only support javascript at the moment. We will support Rust, Typescript in a further release. 37 | */ 38 | 39 | /** 40 | * getBlock(block, context) applies your custom logic to a Block on Near and commits the data to a database. 41 | * context is a global variable that contains helper methods. 42 | * context.db is a subfield which contains helper methods to interact with your database. 43 | * 44 | * Learn more about indexers here: https://docs.near.org/concepts/advanced/indexers 45 | * 46 | * @param {block} Block - A Near Protocol Block 47 | */ 48 | async function getBlock(block: Block) { 49 | const h = block.header().height; 50 | console.log("About to write demo_blockheight", h); 51 | await context.set("demo_height", h); 52 | } 53 | `; 54 | 55 | test('formatting for JS code with wrapCode', () => { 56 | expect(formatIndexingCode(inputJS2, true)).toEqual(expectedOutput3); 57 | }); 58 | 59 | const inputJS3 = 'const a = block.header().height;\nawait context.set("demo_height", h\n'; 60 | 61 | test('Handling invalid JS input returns original', () => { 62 | expect(formatIndexingCode(inputJS3, false)).toEqual(inputJS3); 63 | }); 64 | -------------------------------------------------------------------------------- /frontend/src/utils/getLatestBlockHeight.js: -------------------------------------------------------------------------------- 1 | import { providers } from 'near-api-js'; 2 | 3 | //network config (replace testnet with mainnet or betanet) 4 | const provider = new providers.JsonRpcProvider('https://archival-rpc.mainnet.near.org'); 5 | 6 | // get latest block height 7 | export const getLatestBlockHeight = async () => { 8 | const provider = new providers.JsonRpcProvider('https://archival-rpc.mainnet.near.org'); 9 | const latestBlock = await provider.block({ 10 | finality: 'final', 11 | }); 12 | return latestBlock.header.height; 13 | }; 14 | -------------------------------------------------------------------------------- /frontend/src/utils/helpers.ts: -------------------------------------------------------------------------------- 1 | export const sanitizeString = (str: string): string => { 2 | return str.replace(/[^a-zA-Z0-9]/g, '_').replace(/^([0-9])/, '_$1'); 3 | }; 4 | 5 | export const sanitizeIndexerName = (name: string): string => { 6 | return name.replaceAll('-', '_').trim().toLowerCase(); 7 | }; 8 | 9 | export const sanitizeAccountId = (accountId: string): string => { 10 | return accountId.replaceAll('.', '_'); 11 | }; 12 | -------------------------------------------------------------------------------- /frontend/src/utils/queryIndexerFunction.js: -------------------------------------------------------------------------------- 1 | import { providers } from 'near-api-js'; 2 | const REGISTRY_CONTRACT = process.env.NEXT_PUBLIC_REGISTRY_CONTRACT_ID || 'dev-queryapi.dataplatform.near'; 3 | //network config (replace testnet with mainnet or betanet) 4 | const provider = new providers.JsonRpcProvider('https://rpc.mainnet.near.org'); 5 | 6 | export const queryIndexerFunctionDetails = async (accountId, functionName) => { 7 | let args = { account_id: accountId }; 8 | 9 | try { 10 | const result = await provider.query({ 11 | request_type: 'call_function', 12 | account_id: REGISTRY_CONTRACT, 13 | // TODO Create method to query single indexer 14 | method_name: 'list_by_account', 15 | args_base64: Buffer.from(JSON.stringify(args)).toString('base64'), 16 | finality: 'optimistic', 17 | }); 18 | 19 | const 
indexers = result.result && result.result.length > 0 && JSON.parse(Buffer.from(result.result).toString()); 20 | 21 | if (!indexers) { 22 | return null; 23 | } 24 | 25 | return indexers[functionName]; 26 | } catch (error) { 27 | console.log( 28 | `Could not query indexer function details from registry ${REGISTRY_CONTRACT}, for ${accountId}/${functionName}`, 29 | ); 30 | console.log(error, 'error'); 31 | return null; 32 | } 33 | }; 34 | -------------------------------------------------------------------------------- /frontend/tailwind.config.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | module.exports = { 3 | purge: ['./src/**/*.{js,jsx,ts,tsx}'], 4 | darkMode: false, 5 | theme: { 6 | extend: { 7 | colors: { 8 | transparent: 'transparent', 9 | current: 'currentColor', 10 | green: { 11 | 50: '#f0fdf4', 12 | 100: '#dcfce7', 13 | 900: '#22543d', 14 | }, 15 | black: '#000000', 16 | white: '#ffffff', 17 | gray: { 18 | 50: '#fafafa', 19 | 100: '#f3f4f6', 20 | 900: '#111827', 21 | }, 22 | primary: { 23 | light: '#6ee7b7', 24 | DEFAULT: '#38b2ac', 25 | dark: '#0d9488', 26 | }, 27 | secondary: { 28 | light: '#d6bcfa', 29 | DEFAULT: '#a78bfa', 30 | dark: '#6a4f9e', 31 | }, 32 | }, 33 | fontFamily: { 34 | sans: ['Inter', 'sans-serif'], 35 | serif: ['Georgia', 'serif'], 36 | }, 37 | fontSize: { 38 | xxs: '0.60rem', 39 | xs: '0.75rem', 40 | sm: '0.875rem', 41 | base: '1rem', 42 | lg: '1.125rem', 43 | xl: '1.25rem', 44 | '2xl': '1.5rem', 45 | '3xl': '1.875rem', 46 | '4xl': '2.25rem', 47 | '5xl': '3rem', 48 | '6xl': '4rem', 49 | '7xl': '5rem', 50 | }, 51 | spacing: { 52 | px: '1px', 53 | 0: '0', 54 | 1: '0.25rem', 55 | 2: '0.5rem', 56 | 3: '0.75rem', 57 | 4: '1rem', 58 | 5: '1.25rem', 59 | 6: '1.5rem', 60 | 8: '2rem', 61 | 10: '2.5rem', 62 | }, 63 | transformOrigin: { 64 | 'top-left': 'top left', 65 | }, 66 | }, 67 | }, 68 | variants: { 69 | extend: {}, 70 | }, 71 | plugins: [], 72 | }; 73 | -------------------------------------------------------------------------------- /frontend/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2018", 4 | "lib": [ 5 | "es2021" 6 | ], 7 | "module": "commonjs", 8 | "rootDirs": [ 9 | "./src", 10 | "./tests" 11 | ], 12 | "allowJs": true, 13 | "skipLibCheck": true, 14 | "strict": true, 15 | "forceConsistentCasingInFileNames": true, 16 | "noEmit": true, 17 | "esModuleInterop": true, 18 | "moduleResolution": "node", 19 | "resolveJsonModule": true, 20 | "isolatedModules": true, 21 | "jsx": "preserve", 22 | "incremental": true, 23 | "baseUrl": ".", 24 | "paths": { 25 | "@/*": [ 26 | "./src/*" 27 | ], 28 | } 29 | }, 30 | "include": [ 31 | "next-env.d.ts", 32 | "**/*.ts", 33 | "**/*.tsx", 34 | ], 35 | "exclude": [ 36 | "node_modules", 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /frontend/widgets/examples/feed/src/QueryApi.Examples.Feed.jsx: -------------------------------------------------------------------------------- 1 | const GRAPHQL_ENDPOINT = 2 | props.GRAPHQL_ENDPOINT || "https://near-queryapi.api.pagoda.co"; 3 | const APP_OWNER = props.APP_OWNER || "dataplatform.near"; 4 | const loadMorePosts = props.loadMorePosts; 5 | const hasMore = props.hasMore || false; 6 | const posts = props.posts || []; 7 | 8 | const Subheading = styled.h2` 9 | display: block; 10 | margin: 0; 11 | font-size: 14px; 12 | line-height: 10px; 13 | color: ${(p) => (p.bold ? 
"#11181C !important" : "#687076 !important")}; 14 | font-weight: ${(p) => (p.bold ? "600" : "400")}; 15 | font-size: ${(p) => (p.small ? "12px" : "14px")}; 16 | overflow: ${(p) => (p.ellipsis ? "hidden" : "visible")}; 17 | text-overflow: ${(p) => (p.ellipsis ? "ellipsis" : "unset")}; 18 | white-space: nowrap; 19 | outline: none; 20 | `; 21 | 22 | 23 | const Post = styled.div` 24 | border-bottom: 1px solid #ECEEF0; 25 | padding: 24px 0 12px; 26 | 27 | @media (max-width: 1200px) { 28 | padding: 12px 0 0; 29 | } 30 | `; 31 | 32 | const renderItem = (item, i) => { 33 | if (item.accounts_liked.length !== 0) { 34 | item.accounts_liked = JSON.parse(item.accounts_liked); 35 | } 36 | return ( 37 | 38 | 50 | 51 | ); 52 | }; 53 | 54 | const renderedItems = posts.map(renderItem); 55 | 56 | const Loader = () => { 57 | return( 58 |
59 |
) 66 | } 67 | 68 | if (!posts) return() 69 | 70 | return ( 71 | 77 | 88 | ); 89 | -------------------------------------------------------------------------------- /frontend/widgets/examples/feed/src/QueryApi.Feed.jsx: -------------------------------------------------------------------------------- 1 | const GRAPHQL_ENDPOINT = "https://near-queryapi.api.pagoda.co"; 2 | const APP_OWNER = "dataplatform.near"; 3 | 4 | let lastPostSocialApi = Social.index("post", "main", { 5 | limit: 1, 6 | order: "desc", 7 | }); 8 | 9 | State.init({ 10 | shouldFallback: props.shouldFallback ?? false, 11 | }); 12 | 13 | function fetchGraphQL(operationsDoc, operationName, variables) { 14 | return asyncFetch(`${GRAPHQL_ENDPOINT}/v1/graphql`, { 15 | method: "POST", 16 | headers: { "x-hasura-role": "dataplatform_near" }, 17 | body: JSON.stringify({ 18 | query: operationsDoc, 19 | variables: variables, 20 | operationName: operationName, 21 | }), 22 | }); 23 | } 24 | 25 | const lastPostQuery = ` 26 | query IndexerQuery { 27 | dataplatform_near_social_feed_posts( limit: 1, order_by: { block_height: desc }) { 28 | block_height 29 | } 30 | } 31 | `; 32 | 33 | fetchGraphQL(lastPostQuery, "IndexerQuery", {}) 34 | .then((feedIndexerResponse) => { 35 | if (feedIndexerResponse && feedIndexerResponse.body.data.dataplatform_near_social_feed_posts.length > 0) { 36 | const nearSocialBlockHeight = lastPostSocialApi[0].blockHeight; 37 | const feedIndexerBlockHeight = 38 | feedIndexerResponse.body.data.dataplatform_near_social_feed_posts[0] 39 | .block_height; 40 | 41 | const lag = nearSocialBlockHeight - feedIndexerBlockHeight; 42 | 43 | let shouldFallback = lag > 2 || !feedIndexerBlockHeight; 44 | 45 | // console.log(`Social API block height: ${nearSocialBlockHeight}`); 46 | // console.log(`Feed block height: ${feedIndexerBlockHeight}`); 47 | // console.log(`Lag: ${lag}`); 48 | // console.log(`Fallback to old widget? ${shouldFallback}`); 49 | 50 | State.update({ shouldFallback }); 51 | } else { 52 | console.log("Falling back to old widget."); 53 | State.update({ shouldFallback: true }); 54 | } 55 | }) 56 | .catch((error) => { 57 | console.log("Error while fetching GraphQL(falling back to old widget): ", error); 58 | State.update({ shouldFallback: true }); 59 | }); 60 | 61 | return ( 62 | <> 63 | {state.shouldFallback == true ? ( 64 | 65 | ) : ( 66 | 73 | )} 74 | 75 | ); 76 | -------------------------------------------------------------------------------- /frontend/widgets/examples/feed/src/QueryApi.dev.Feed.jsx: -------------------------------------------------------------------------------- 1 | const GRAPHQL_ENDPOINT = 2 | "https://near-queryapi.dev.api.pagoda.co"; 3 | const APP_OWNER = "dev-queryapi.dataplatform.near"; 4 | 5 | let lastPostSocialApi = Social.index("post", "main", { 6 | limit: 1, 7 | order: "desc", 8 | }); 9 | 10 | State.init({ 11 | shouldFallback: props.shouldFallback ?? 
false, 12 | }); 13 | 14 | function fetchGraphQL(operationsDoc, operationName, variables) { 15 | return asyncFetch(`${GRAPHQL_ENDPOINT}/v1/graphql`, { 16 | method: "POST", 17 | headers: { "x-hasura-role": "dataplatform_near" }, 18 | body: JSON.stringify({ 19 | query: operationsDoc, 20 | variables: variables, 21 | operationName: operationName, 22 | }), 23 | }); 24 | } 25 | 26 | const lastPostQuery = ` 27 | query IndexerQuery { 28 | dataplatform_near_social_feed_posts( limit: 1, order_by: { block_height: desc }) { 29 | block_height 30 | } 31 | } 32 | `; 33 | 34 | fetchGraphQL(lastPostQuery, "IndexerQuery", {}) 35 | .then((feedIndexerResponse) => { 36 | if (feedIndexerResponse && feedIndexerResponse.body.data.dataplatform_near_social_feed_posts.length > 0) { 37 | const nearSocialBlockHeight = lastPostSocialApi[0].blockHeight; 38 | const feedIndexerBlockHeight = 39 | feedIndexerResponse.body.data.dataplatform_near_social_feed_posts[0] 40 | .block_height; 41 | 42 | const lag = nearSocialBlockHeight - feedIndexerBlockHeight; 43 | 44 | let shouldFallback = lag > 2 || !feedIndexerBlockHeight; 45 | 46 | // console.log(`Social API block height: ${nearSocialBlockHeight}`); 47 | // console.log(`Feed block height: ${feedIndexerBlockHeight}`); 48 | // console.log(`Lag: ${lag}`); 49 | // console.log(`Fallback to old widget? ${shouldFallback}`); 50 | 51 | State.update({ shouldFallback }); 52 | } else { 53 | console.log("Falling back to old widget."); 54 | State.update({ shouldFallback: true }); 55 | } 56 | }) 57 | .catch((error) => { 58 | console.log("Error while fetching GraphQL(falling back to old widget): ", error); 59 | State.update({ shouldFallback: true }); 60 | }); 61 | 62 | return ( 63 | <> 64 | {state.shouldFallback == true ? ( 65 | 66 | ) : ( 67 | 74 | )} 75 | 76 | ); 77 | -------------------------------------------------------------------------------- /frontend/widgets/src/NearQueryApi.metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Seamlessly create, manage, and discover indexers", 3 | "image": { 4 | "ipfs_cid": "bafkreihx3wowmjrv3taztqxwgubt6mijaqwzvo6573wi6lv4omxfh3ogdm" 5 | }, 6 | "name": "Near Query API waitlist", 7 | "tags": { 8 | "indexers": "", 9 | "data-platform": "", 10 | "waitlist": "" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.App.jsx: -------------------------------------------------------------------------------- 1 | const view = props.view; 2 | const path = props.path; 3 | const tab = props.tab; 4 | const activeIndexerView = props.activeIndexerView; 5 | const selectedIndexerPath = props.selectedIndexerPath; 6 | 7 | return ( 8 | 18 | ); 19 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.App.metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Main entrypoint to Near QueryAPI's production widget which allows you to seamlessly create, manage, and discover new indexers", 3 | "image": { 4 | "ipfs_cid": "bafkreihx3wowmjrv3taztqxwgubt6mijaqwzvo6573wi6lv4omxfh3ogdm" 5 | }, 6 | "name": "Near QueryAPI", 7 | "tags": { 8 | "app": "", 9 | "indexers": "", 10 | "data-platform": "" 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.Dashboard.metadata.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "description": "Main dashboard for Near QueryAPI which allows you to seamlessly create, manage, and discover indexers", 3 | "image": { 4 | }, 5 | "name": "Near QueryAPI Dashboard", 6 | "tags": { 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.Editor.metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Helper widget for QueryApi.Dashboard. Loads QueryAPI's React App which allows you to edit indexers inside the browser", 3 | "image": { 4 | }, 5 | "name": "Editor", 6 | "tags": { 7 | "indexers": "", 8 | "data-platform": "", 9 | "react": "" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.IndexerStatus.metadata.json: -------------------------------------------------------------------------------- 1 | { 2 | "description": "Helper widget for QueryApi.Dashboard to display Indexer Status", 3 | "image": { 4 | }, 5 | "name": "Indexer Status", 6 | "tags": { 7 | "indexers": "", 8 | "data-platform": "" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /frontend/widgets/src/QueryApi.NotFound.jsx: -------------------------------------------------------------------------------- 1 | const PageNotFoundContainer = styled.div` 2 | display: flex; 3 | flex-direction: column; 4 | justify-content: center; 5 | align-items: center; 6 | height: 100vh; 7 | text-align: center; 8 | background-color: #f9f9f9; 9 | color: #333; 10 | `; 11 | 12 | const Heading = styled.h1` 13 | font-size: 3rem; 14 | margin: 0; 15 | `; 16 | 17 | const Subheading = styled.p` 18 | font-size: 1.5rem; 19 | margin: 1rem 0; 20 | `; 21 | 22 | const HomeLink = styled.a` 23 | font-size: 1rem; 24 | color: #007bff; 25 | text-decoration: none; 26 | margin-top: 2rem; 27 | 28 | &:hover { 29 | text-decoration: underline; 30 | } 31 | `; 32 | 33 | return ( 34 | 35 | 404 36 | Page Not Found 37 | Go back to Home 38 | 39 | ); 40 | -------------------------------------------------------------------------------- /frontend/widgets/src/components/toggle.jsx: -------------------------------------------------------------------------------- 1 | const ToggleRoot = styled.div` 2 | justify-content: space-between; 3 | width: fit-content; 4 | max-width: 100%; 5 | `; 6 | 7 | const ToggleSwitchRoot = styled("Switch.Root")` 8 | all: unset; 9 | display: block; 10 | width: 42px; 11 | height: 25px; 12 | background-color: #d1d1d1; 13 | border-radius: 9999px; 14 | position: relative; 15 | box-shadow: 0 2px 10px var(--blackA7); 16 | 17 | &[data-state="checked"] { 18 | background-color: #00d084; 19 | } 20 | 21 | &[data-disabled=""] { 22 | opacity: 0.7; 23 | } 24 | `; 25 | 26 | const ToggleSwitchThumb = styled("Switch.Thumb")` 27 | all: unset; 28 | display: block; 29 | width: 21px; 30 | height: 21px; 31 | border-radius: 9999px; 32 | transition: transform 100ms; 33 | transform: translateX(2px); 34 | will-change: transform; 35 | 36 | &[data-state="checked"] { 37 | transform: translateX(19px); 38 | } 39 | `; 40 | 41 | const ToggleLabel = styled.label` 42 | white-space: nowrap; 43 | `; 44 | 45 | const Toggle = ({ 46 | active, 47 | className, 48 | direction, 49 | disabled, 50 | key, 51 | label, 52 | onSwitch, 53 | ...rest 54 | }) => ( 55 | 63 | {label} 64 | 65 | 73 | {!disabled && } 74 | 75 | 76 | ); 77 | 78 | return 
Toggle(props); 79 | -------------------------------------------------------------------------------- /frontend/widgets/src/props.json: -------------------------------------------------------------------------------- 1 | {} -------------------------------------------------------------------------------- /hasura-authentication-service/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hasura-authentication-service" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | actix-web = "4.2.1" 8 | actix-cors = "0.6" 9 | dotenv = "0.15.0" 10 | env_logger = "0.9" 11 | log = "0.4" 12 | serde = { version = "1", features = ["derive"] } 13 | serde_json = "1" 14 | -------------------------------------------------------------------------------- /hasura-authentication-service/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM rust:1.67.1 AS builder 2 | WORKDIR /tmp/ 3 | COPY Cargo.toml Cargo.lock ./ 4 | COPY src/ src/ 5 | RUN cargo build --release 6 | 7 | FROM ubuntu:20.04 8 | RUN apt update && apt install -yy openssl ca-certificates 9 | RUN apt-get install libpq5 -y 10 | COPY --from=builder /tmp/target/release/hasura-authentication-service . 11 | ENTRYPOINT [ "./hasura-authentication-service" ] 12 | -------------------------------------------------------------------------------- /hasura-authentication-service/src/main.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | 3 | use actix_cors::Cors; 4 | use actix_web::{middleware, App, HttpServer}; 5 | use dotenv::dotenv; 6 | 7 | mod services; 8 | 9 | use crate::services::auth; 10 | 11 | #[actix_web::main] 12 | async fn main() -> io::Result<()> { 13 | dotenv().ok(); 14 | env_logger::init_from_env(env_logger::Env::new().default_filter_or("info")); 15 | 16 | let port = std::env::var("PORT") 17 | .expect("PORT must be set") 18 | .parse::() 19 | .expect("PORT must be numeric"); 20 | 21 | log::info!("starting HTTP server on port {}", port); 22 | 23 | HttpServer::new(move || { 24 | App::new() 25 | .service(auth) 26 | .wrap(Cors::permissive()) 27 | .wrap(middleware::Logger::default()) 28 | }) 29 | .workers(2) 30 | .bind(("0.0.0.0", port))? 
31 | .run() 32 | .await 33 | } 34 | -------------------------------------------------------------------------------- /hasura-authentication-service/src/services.rs: -------------------------------------------------------------------------------- 1 | use actix_web::{route, HttpRequest, HttpResponse, Responder}; 2 | 3 | #[derive(serde::Serialize)] 4 | struct AuthResponse { 5 | #[serde(rename(serialize = "X-Hasura-Role"))] 6 | role_header: String, 7 | } 8 | 9 | #[route("/auth", method = "GET", method = "POST")] 10 | pub(crate) async fn auth(req: HttpRequest) -> impl Responder { 11 | let role_header = match req.headers().get("X-Hasura-Role") { 12 | Some(role_header) => role_header.to_str().unwrap().to_string(), 13 | None => std::env::var("DEFAULT_HASURA_ROLE").unwrap(), 14 | }; 15 | 16 | if role_header == "admin" { 17 | return HttpResponse::Unauthorized().finish(); 18 | } 19 | 20 | HttpResponse::Ok().json(AuthResponse { role_header }) 21 | } 22 | -------------------------------------------------------------------------------- /postgres/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM postgres:14 2 | 3 | RUN apt-get update && apt-get install -y postgresql-14-cron 4 | 5 | RUN echo "shared_preload_libraries = 'pg_cron'" >> /usr/share/postgresql/postgresql.conf.sample 6 | 7 | COPY ./init.sql /docker-entrypoint-initdb.d/ 8 | 9 | EXPOSE 5432 10 | 11 | CMD ["postgres"] 12 | -------------------------------------------------------------------------------- /postgres/init.sql: -------------------------------------------------------------------------------- 1 | -- pgbouncer 2 | CREATE ROLE pgbouncer LOGIN; 3 | ALTER ROLE pgbouncer WITH PASSWORD 'pgbouncer'; 4 | CREATE OR REPLACE FUNCTION public.user_lookup(in i_username text, out uname text, out phash text) 5 | RETURNS record AS $$ 6 | BEGIN 7 | SELECT usename, passwd FROM pg_catalog.pg_shadow 8 | WHERE usename = i_username INTO uname, phash; 9 | RETURN; 10 | END; 11 | $$ LANGUAGE plpgsql SECURITY DEFINER; 12 | REVOKE ALL ON FUNCTION public.user_lookup(text) FROM public; 13 | GRANT EXECUTE ON FUNCTION public.user_lookup(text) TO pgbouncer; 14 | 15 | -- pg_cron 16 | CREATE EXTENSION pg_cron; 17 | -------------------------------------------------------------------------------- /prometheus.yml: -------------------------------------------------------------------------------- 1 | global: 2 | scrape_interval: 1s 3 | 4 | scrape_configs: 5 | - job_name: 'queryapi-runner' 6 | static_configs: 7 | - targets: ['host.docker.internal:9180'] -------------------------------------------------------------------------------- /registry/contract/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "registry" 3 | version = "1.0.0" 4 | authors = ["Near Inc "] 5 | edition = "2021" 6 | 7 | [lib] 8 | crate-type = ["cdylib"] 9 | 10 | [dependencies] 11 | borsh = "1.0.0" 12 | near-sdk = "5.0.0-alpha.1" 13 | uint = { version = "0.9.3", default-features = false } 14 | registry-types = { path = "../types", features = ["near-sdk"] } 15 | 16 | [profile.release] 17 | codegen-units = 1 18 | opt-level = "z" 19 | lto = true 20 | debug = false 21 | panic = "abort" 22 | overflow-checks = true 23 | -------------------------------------------------------------------------------- /registry/contract/README.md: -------------------------------------------------------------------------------- 1 | # IndexerFunction Registry 2 | 3 | A registry of indexer functions that are run by 
QueryAPI 4 | 5 |
6 | 7 | When an IndexerFunction is added to the registry, the calling user's account is prepended to the name of the function. 8 | Example: when `developer.near` calls `register_indexer_function({ "function_name": "index_all_the_things", "code": "bunch of code here" })`, the function 9 | will be registered as `developer.near/index_all_the_things`. 10 | It can then be read by calling `read_indexer_function({ "function_name": "developer.near/index_all_the_things" })`. 11 | 12 | ## Methods 13 | 14 | ``` 15 | register_indexer_function({ function_name, code }) // Note that the name will be prefixed with the calling account 16 | read_indexer_function({ function_name }) 17 | remove_indexer_function({ function_name }) // Note that the name will be prefixed with the calling account 18 | list_indexer_functions() 19 | ``` 20 | 21 |
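For consumers outside the NEAR CLI, the same view methods can be called over JSON-RPC. The sketch below is illustrative only: it reuses the `provider.query` pattern already used by `frontend/src/utils/queryIndexerFunction.js`, and the contract id and RPC URL are assumptions taken from the deploy scripts, so adjust them for your network and deployment.

```typescript
// Minimal sketch: read a registered indexer function over JSON-RPC.
// Assumes the production registry account from deploy-prod.sh and the
// public mainnet RPC endpoint; both are placeholders, not requirements.
import { providers } from 'near-api-js';

const REGISTRY_CONTRACT = 'queryapi.dataplatform.near';
const provider = new providers.JsonRpcProvider('https://rpc.mainnet.near.org');

export const readIndexerFunction = async (functionName: string): Promise<unknown> => {
  // Call the registry's `read_indexer_function` view method with the
  // account-prefixed name, e.g. "developer.near/index_all_the_things".
  const result: any = await provider.query({
    request_type: 'call_function',
    account_id: REGISTRY_CONTRACT,
    method_name: 'read_indexer_function',
    args_base64: Buffer.from(JSON.stringify({ function_name: functionName })).toString('base64'),
    finality: 'optimistic',
  });

  // View calls return the function result as raw bytes containing JSON.
  return JSON.parse(Buffer.from(result.result).toString());
};
```

The equivalent `near view` CLI invocations are listed in the next section.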
22 | 23 | ### Example Calls 24 | 25 | ```bash 26 | near view registry.queryapi.testnet read_indexer_function '{"function_name":"developer.testnet/log"}' 27 | near view registry.queryapi.testnet list_indexer_functions 28 | near call registry.queryapi.testnet register_indexer_function '{"function_name":"log", "code": "console.log(`Block #${streamerMessage.block.header.height});"}' --accountId 29 | near call registry.queryapi.testnet remove_indexer_function '{"function_name":"log"}' --accountId 30 | ``` 31 | 32 | TODO: 33 | Add multisig deployment administration for Data team and SREs (DAO?). 34 | 35 | ## Deployment commands 36 | 37 | ```bash 38 | ./build.sh 39 | near deploy --wasmFile ./target/wasm32-unknown-unknown/release/registry.wasm --accountId registry.queryapi.testnet 40 | ``` 41 | -------------------------------------------------------------------------------- /registry/contract/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | echo ">> Building contract" 4 | 5 | rustup target add wasm32-unknown-unknown 6 | cargo build --all --target wasm32-unknown-unknown --release 7 | 8 | # handle 9 | # https://github.com/near/nearcore/issues/8358 10 | cargo install wasm-opt --locked 11 | wasm-opt -Oz --signext-lowering target/wasm32-unknown-unknown/release/registry.wasm -o target/wasm32-unknown-unknown/release/registry.wasm; 12 | -------------------------------------------------------------------------------- /registry/contract/deploy-dev.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | near contract deploy dev-queryapi.dataplatform.near use-file ./target/wasm32-unknown-unknown/release/registry.wasm without-init-call network-config mainnet sign-with-keychain send 3 | -------------------------------------------------------------------------------- /registry/contract/deploy-local-testing.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | near deploy --wasmFile ./target/wasm32-unknown-unknown/release/registry.wasm --accountId registry.queryapi.near 3 | -------------------------------------------------------------------------------- /registry/contract/deploy-prod.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | near contract deploy queryapi.dataplatform.near use-file ./target/wasm32-unknown-unknown/release/registry.wasm without-init-call network-config mainnet sign-with-keychain send 3 | -------------------------------------------------------------------------------- /registry/types/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "registry-types" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | borsh = { version = "1.2.1", features = ["derive"] } 8 | serde = { version = "1.0.193" } 9 | near-account-id = { version = "1.0.0" } 10 | 11 | near-sdk = { version = "5.0.0-alpha.1", optional = true } 12 | -------------------------------------------------------------------------------- /runner-client/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "runner" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | prost = "0.12.3" 8 | tonic = "0.10.2" 9 | tokio = { version = "1.28.0", features = ["full"]} 10 | 11 | [build-dependencies] 12 | tonic-build = "0.10" 13 | 
-------------------------------------------------------------------------------- /runner-client/build.rs: -------------------------------------------------------------------------------- 1 | fn main() -> Result<(), Box> { 2 | tonic_build::compile_protos("proto/runner.proto")?; 3 | tonic_build::compile_protos("proto/data-layer.proto")?; 4 | 5 | Ok(()) 6 | } 7 | -------------------------------------------------------------------------------- /runner-client/examples/check_provisioning_task_status.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use runner::data_layer::data_layer_client::DataLayerClient; 4 | use runner::data_layer::CheckProvisioningTaskStatusRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = DataLayerClient::connect("http://localhost:7001").await?; 9 | 10 | let response = client 11 | .check_provisioning_task_status(Request::new(CheckProvisioningTaskStatusRequest { 12 | account_id: "morgs.near".to_string(), 13 | function_name: "test2".to_string(), 14 | })) 15 | .await?; 16 | 17 | println!("{:#?}", response.into_inner()); 18 | 19 | Ok(()) 20 | } 21 | -------------------------------------------------------------------------------- /runner-client/examples/list_executors.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use runner::runner_client::RunnerClient; 4 | use runner::ListExecutorsRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = RunnerClient::connect("http://localhost:7001").await?; 9 | 10 | let response = client 11 | .list_executors(Request::new(ListExecutorsRequest {})) 12 | .await?; 13 | 14 | println!("{:#?}", response.into_inner()); 15 | 16 | Ok(()) 17 | } 18 | -------------------------------------------------------------------------------- /runner-client/examples/start_executor.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use runner::runner_client::RunnerClient; 4 | use runner::StartExecutorRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = RunnerClient::connect("http://localhost:7001").await?; 9 | 10 | let response = client 11 | .start_executor(Request::new(StartExecutorRequest { 12 | account_id: "account_near".to_string(), 13 | function_name: "sample_indexer".to_string(), 14 | code: " 15 | console.log('Hello, world!'); 16 | await context.db.IndexerStorage.insert({ 17 | \"function_name\": \"sample_indexer\", 18 | \"key_name\": block.blockHeight.toString(), 19 | \"value\": \"Hello, world!\" 20 | }); 21 | " 22 | .to_string(), 23 | schema: "CREATE TABLE \"indexer_storage\" ( 24 | \"function_name\" TEXT NOT NULL, 25 | \"key_name\" TEXT NOT NULL, 26 | \"value\" TEXT NOT NULL, 27 | PRIMARY KEY (\"function_name\", \"key_name\") 28 | );" 29 | .to_string(), 30 | redis_stream: "test:stream".to_string(), 31 | version: 123, 32 | })) 33 | .await?; 34 | 35 | println!("{:#?}", response.into_inner()); 36 | 37 | Ok(()) 38 | } 39 | -------------------------------------------------------------------------------- /runner-client/examples/start_provisioning_task.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use runner::data_layer::data_layer_client::DataLayerClient; 4 | use runner::data_layer::ProvisionRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut 
client = DataLayerClient::connect("http://localhost:7001").await?; 9 | 10 | let response = client 11 | .start_provisioning_task(Request::new(ProvisionRequest { 12 | account_id: "test.near".to_string(), 13 | function_name: "data_layer_example".to_string(), 14 | schema: "create table blocks();".to_string(), 15 | })) 16 | .await?; 17 | 18 | println!("{:#?}", response.into_inner()); 19 | 20 | Ok(()) 21 | } 22 | -------------------------------------------------------------------------------- /runner-client/examples/stop_executor.rs: -------------------------------------------------------------------------------- 1 | use tonic::Request; 2 | 3 | use runner::runner_client::RunnerClient; 4 | use runner::StopExecutorRequest; 5 | 6 | #[tokio::main] 7 | async fn main() -> Result<(), Box> { 8 | let mut client = RunnerClient::connect("http://localhost:50007").await?; 9 | 10 | let response = client 11 | .stop_executor(Request::new(StopExecutorRequest { 12 | // Deterministic ID for morgs.near/test 13 | executor_id: "be21b48c307671c1b3768ed84439f736c1cbbd77f815986354e855d44efd16e6" 14 | .to_string(), 15 | })) 16 | .await?; 17 | 18 | println!("{:#?}", response.into_inner()); 19 | 20 | Ok(()) 21 | } 22 | -------------------------------------------------------------------------------- /runner-client/proto/data-layer.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package data_layer; 4 | 5 | service DataLayer { 6 | // Starts async provisioning task 7 | rpc StartProvisioningTask (ProvisionRequest) returns (StartTaskResponse); 8 | 9 | // Start async deprovisioning task 10 | rpc StartDeprovisioningTask (DeprovisionRequest) returns (StartTaskResponse); 11 | 12 | // Checks the status of provisioning/deprovisioning 13 | rpc GetTaskStatus (GetTaskStatusRequest) returns (GetTaskStatusResponse); 14 | 15 | } 16 | 17 | message StartTaskResponse { 18 | string task_id = 1; 19 | } 20 | 21 | message ProvisionRequest { 22 | string account_id = 1; 23 | string function_name = 2; 24 | string schema = 3; 25 | } 26 | 27 | message DeprovisionRequest { 28 | string account_id = 1; 29 | string function_name = 2; 30 | } 31 | 32 | 33 | message GetTaskStatusRequest { 34 | string task_id = 1; 35 | } 36 | 37 | enum TaskStatus { 38 | UNSPECIFIED = 0; 39 | PENDING = 1; 40 | COMPLETE = 2; 41 | FAILED = 3; 42 | } 43 | 44 | message GetTaskStatusResponse { 45 | TaskStatus status = 1; 46 | } 47 | -------------------------------------------------------------------------------- /runner-client/proto/runner.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package runner; 3 | 4 | service Runner { 5 | // Starts a new Runner executor 6 | rpc StartExecutor (StartExecutorRequest) returns (StartExecutorResponse); 7 | 8 | // Stops an existing Runner executor 9 | rpc StopExecutor (StopExecutorRequest) returns (StopExecutorResponse); 10 | 11 | // Lists all Runner executor 12 | rpc ListExecutors (ListExecutorsRequest) returns (ListExecutorsResponse); 13 | 14 | // Get Executor info 15 | rpc GetExecutor (GetExecutorRequest) returns (ExecutorInfo); 16 | } 17 | 18 | // Get Executor request 19 | message GetExecutorRequest { 20 | string account_id = 1; 21 | string function_name = 2; 22 | } 23 | 24 | // Start Executor Request 25 | message StartExecutorRequest { 26 | string redis_stream = 1; 27 | string account_id = 2; 28 | string function_name = 3; 29 | string code = 4; 30 | string schema = 5; 31 | // Block height corresponding to 
the created/updated height of the indexer 32 | uint64 version = 6; 33 | } 34 | 35 | // Start Executor Response 36 | message StartExecutorResponse { 37 | string executor_id = 1; 38 | } 39 | 40 | // Stop Executor Request 41 | message StopExecutorRequest { 42 | string executor_id = 1; 43 | } 44 | 45 | // Stop Executor Response 46 | message StopExecutorResponse { 47 | string executor_id = 1; 48 | } 49 | 50 | // List Executor Request 51 | message ListExecutorsRequest { 52 | } 53 | 54 | // List Executor Response 55 | message ListExecutorsResponse { 56 | // List of all executors, including stopped or crashed ones 57 | repeated ExecutorInfo executors = 1; 58 | } 59 | 60 | // Information about a single BlockExecutor instance. 61 | message ExecutorInfo { 62 | string executor_id = 1; 63 | string account_id = 2; 64 | string function_name = 3; 65 | // Block height corresponding to the created/updated height of the indexer 66 | uint64 version = 5; 67 | Health health = 6; 68 | } 69 | 70 | // Contains health information for the Executor 71 | message Health { 72 | ExecutionState execution_state = 1; 73 | } 74 | 75 | enum ExecutionState { 76 | UNSPECIFIED = 0; 77 | // Running as expected 78 | RUNNING = 1; 79 | // Executor is running, but the execution is erroring 80 | FAILING = 2; 81 | // Waiting for some internal condition to be met before proceeding 82 | WAITING = 3; 83 | // Intentionally stopped 84 | STOPPED = 4; 85 | // Unintentionally stopped 86 | STALLED = 5; 87 | } 88 | -------------------------------------------------------------------------------- /runner-client/src/lib.rs: -------------------------------------------------------------------------------- 1 | mod runner { 2 | tonic::include_proto!("runner"); 3 | } 4 | 5 | pub use runner::*; 6 | 7 | pub mod data_layer { 8 | tonic::include_proto!("data_layer"); 9 | } 10 | -------------------------------------------------------------------------------- /runner/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: '@typescript-eslint/parser', 3 | env: { 4 | es2021: true, 5 | node: true, 6 | }, 7 | overrides: [ 8 | { 9 | files: ['.eslintrc.js', 'jest.config.js'], 10 | parser: 'espree', 11 | extends: ['standard'], 12 | rules: { 13 | semi: ['error', 'always'], 14 | 'comma-dangle': ['error', 'only-multiline'], 15 | }, 16 | }, 17 | { 18 | files: ['./src/**/*', './tests/**/*'], 19 | parserOptions: { 20 | project: './tsconfig.json', 21 | tsconfigRootDir: __dirname, 22 | }, 23 | extends: [ 24 | 'standard-with-typescript', 25 | ], 26 | rules: { 27 | '@typescript-eslint/semi': ['error', 'always'], 28 | '@typescript-eslint/comma-dangle': ['error', 'only-multiline'], 29 | '@typescript-eslint/strict-boolean-expressions': 'off', 30 | }, 31 | }, 32 | ], 33 | }; 34 | -------------------------------------------------------------------------------- /runner/.gitignore: -------------------------------------------------------------------------------- 1 | **/dist 2 | /node_modules 3 | src/generated/ 4 | -------------------------------------------------------------------------------- /runner/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:18.18.2 AS builder 2 | WORKDIR /usr/src/app 3 | COPY . . 
4 | RUN npm install 5 | RUN npm run build 6 | 7 | FROM node:18.18.2 8 | WORKDIR /usr/src/app 9 | COPY --from=builder /usr/src/app/package*.json ./ 10 | RUN npm install --omit=dev 11 | COPY --from=builder /usr/src/app/dist ./dist 12 | COPY protos ./protos 13 | CMD [ "npm", "run", "start:docker" ] 14 | -------------------------------------------------------------------------------- /runner/examples/list-executors.ts: -------------------------------------------------------------------------------- 1 | // Run with 'npx ts-node src/test-client.ts' 2 | 3 | import runnerClient from '../src/server/services/runner/runner-client'; 4 | 5 | void (async function main () { 6 | runnerClient.ListExecutors({}, (err, response) => { 7 | if (err) { 8 | console.error('List request error: ', err); 9 | } else { 10 | console.log('list response: ', JSON.stringify({ response }, null, 2)); 11 | } 12 | }); 13 | })(); 14 | -------------------------------------------------------------------------------- /runner/examples/start-executor.ts: -------------------------------------------------------------------------------- 1 | // Run with 'npx ts-node src/test-client.ts' 2 | 3 | import runnerClient from '../src/server/services/runner/runner-client'; 4 | 5 | const schema = ` 6 | CREATE TABLE 7 | "indexer_storage" ( 8 | "function_name" TEXT NOT NULL, 9 | "key_name" TEXT NOT NULL, 10 | "value" TEXT NOT NULL, 11 | PRIMARY KEY ("function_name", "key_name") 12 | ) 13 | `; 14 | 15 | const code = ` 16 | // do nothing 17 | `; 18 | 19 | const indexer = { 20 | account_id: 'account.near', // Can be anything 21 | redis_stream: 'test:stream', // Redis stream will need messages for indexer to run. This is just an example. 22 | function_name: 'sample_indexer', // Can be anything 23 | code, 24 | schema, 25 | }; 26 | 27 | void (async function main () { 28 | runnerClient.StartExecutor({ 29 | redisStream: indexer.redis_stream, 30 | accountId: indexer.account_id, 31 | functionName: indexer.function_name, 32 | code: indexer.code, 33 | schema: indexer.schema 34 | }, (err, response) => { 35 | if (err) { 36 | console.error('error: ', err); 37 | } else { 38 | console.log('start response: ', JSON.stringify({ response }, null, 2)); 39 | } 40 | }); 41 | })(); 42 | -------------------------------------------------------------------------------- /runner/examples/stop-executor.ts: -------------------------------------------------------------------------------- 1 | // Run with 'npx ts-node src/test-client.ts' 2 | 3 | import runnerClient from '../src/server/services/runner/runner-client'; 4 | 5 | runnerClient.StopExecutor({ 6 | executorId: '0293a6b1dcd2259a8be6b59a8cd3e7b4285e540a64a7cbe99639947f7b7e2f9a' 7 | }, (err, response) => { 8 | if (err) { 9 | console.error('error: ', err); 10 | } else { 11 | console.log('stop request: ', JSON.stringify({ response }, null, 2)); 12 | } 13 | }); 14 | -------------------------------------------------------------------------------- /runner/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | preset: 'ts-jest', 3 | testEnvironment: 'node', 4 | roots: ['./src', './tests'], 5 | }; 6 | -------------------------------------------------------------------------------- /runner/protos/data-layer.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package data_layer; 4 | 5 | service DataLayer { 6 | // Starts async provisioning task 7 | rpc StartProvisioningTask (ProvisionRequest) returns 
(StartTaskResponse); 8 | 9 | // Start async deprovisioning task 10 | rpc StartDeprovisioningTask (DeprovisionRequest) returns (StartTaskResponse); 11 | 12 | // Checks the status of provisioning/deprovisioning 13 | rpc GetTaskStatus (GetTaskStatusRequest) returns (GetTaskStatusResponse); 14 | 15 | } 16 | 17 | message StartTaskResponse { 18 | string task_id = 1; 19 | } 20 | 21 | message ProvisionRequest { 22 | string account_id = 1; 23 | string function_name = 2; 24 | string schema = 3; 25 | } 26 | 27 | message DeprovisionRequest { 28 | string account_id = 1; 29 | string function_name = 2; 30 | } 31 | 32 | 33 | message GetTaskStatusRequest { 34 | string task_id = 1; 35 | } 36 | 37 | enum TaskStatus { 38 | UNSPECIFIED = 0; 39 | PENDING = 1; 40 | COMPLETE = 2; 41 | FAILED = 3; 42 | } 43 | 44 | message GetTaskStatusResponse { 45 | TaskStatus status = 1; 46 | } 47 | -------------------------------------------------------------------------------- /runner/protos/runner.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | package runner; 3 | 4 | service Runner { 5 | // Starts a new Runner executor 6 | rpc StartExecutor (StartExecutorRequest) returns (StartExecutorResponse); 7 | 8 | // Stops an existing Runner executor 9 | rpc StopExecutor (StopExecutorRequest) returns (StopExecutorResponse); 10 | 11 | // Lists all Runner executor 12 | rpc ListExecutors (ListExecutorsRequest) returns (ListExecutorsResponse); 13 | 14 | // Get Executor info 15 | rpc GetExecutor (GetExecutorRequest) returns (ExecutorInfo); 16 | } 17 | 18 | // Get Executor request 19 | message GetExecutorRequest { 20 | string account_id = 1; 21 | string function_name = 2; 22 | } 23 | 24 | // Start Executor Request 25 | message StartExecutorRequest { 26 | string redis_stream = 1; 27 | string account_id = 2; 28 | string function_name = 3; 29 | string code = 4; 30 | string schema = 5; 31 | // Block height corresponding to the created/updated height of the indexer 32 | uint64 version = 6; 33 | } 34 | 35 | // Start Executor Response 36 | message StartExecutorResponse { 37 | string executor_id = 1; 38 | } 39 | 40 | // Stop Executor Request 41 | message StopExecutorRequest { 42 | string executor_id = 1; 43 | } 44 | 45 | // Stop Executor Response 46 | message StopExecutorResponse { 47 | string executor_id = 1; 48 | } 49 | 50 | // List Executor Request 51 | message ListExecutorsRequest { 52 | } 53 | 54 | // List Executor Response 55 | message ListExecutorsResponse { 56 | // List of all executors, including stopped or crashed ones 57 | repeated ExecutorInfo executors = 1; 58 | } 59 | 60 | // Information about a single BlockExecutor instance. 
61 | message ExecutorInfo { 62 | string executor_id = 1; 63 | string account_id = 2; 64 | string function_name = 3; 65 | // Block height corresponding to the created/updated height of the indexer 66 | uint64 version = 5; 67 | Health health = 6; 68 | } 69 | 70 | // Contains health information for the Executor 71 | message Health { 72 | ExecutionState execution_state = 1; 73 | } 74 | 75 | enum ExecutionState { 76 | UNSPECIFIED = 0; 77 | // Running as expected 78 | RUNNING = 1; 79 | // Executor is running, but the execution is erroring 80 | FAILING = 2; 81 | // Waiting for some internal condition to be met before proceeding 82 | WAITING = 3; 83 | // Intentionally stopped 84 | STOPPED = 4; 85 | // Unintentionally stopped 86 | STALLED = 5; 87 | // Waiting to start 88 | IDLE = 6; 89 | } 90 | -------------------------------------------------------------------------------- /runner/src/globals.d.ts: -------------------------------------------------------------------------------- 1 | declare namespace NodeJS { 2 | export interface ProcessEnv { 3 | HASURA_ENDPOINT: string 4 | HASURA_ADMIN_SECRET: string 5 | PGHOST: string 6 | PGHOST_HASURA?: string 7 | PGPORT: string 8 | PGUSER: string 9 | PGPASSWORD: string 10 | PGDATABASE: string 11 | PORT: string 12 | CRON_DATABASE: string 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /runner/src/index.ts: -------------------------------------------------------------------------------- 1 | import { startServer as startMetricsServer } from './metrics'; 2 | import { startServer as startGrpcServer } from './server'; 3 | import logger from './logger'; 4 | 5 | startGrpcServer(); 6 | 7 | startMetricsServer().catch((err) => { 8 | logger.error('Failed to start metrics server', err); 9 | }); 10 | -------------------------------------------------------------------------------- /runner/src/indexer-config/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './indexer-config'; 2 | export { ProvisioningConfig, LocalIndexerConfig } from './indexer-config'; 3 | -------------------------------------------------------------------------------- /runner/src/indexer-config/indexer-config.test.ts: -------------------------------------------------------------------------------- 1 | import { LogLevel } from '../indexer-meta/log-entry'; 2 | import IndexerConfig from './indexer-config'; 3 | 4 | describe('IndexerConfig unit tests', () => { 5 | const REDIS_STREAM = 'test:stream'; 6 | const ACCOUNT_ID = 'test-account.near'; 7 | const FUNCTION_NAME = 'test-indexer'; 8 | const SCHEMA = ''; 9 | 10 | test('constructor sets executorId correctly', () => { 11 | const indexerConfig = new IndexerConfig(REDIS_STREAM, ACCOUNT_ID, FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 12 | 13 | expect(indexerConfig.executorId).toEqual('d43da7e3e466961f28ddaa99c8f7c2b44f25ef8d44931c677e48a6fd051bb966'); 14 | }); 15 | 16 | test('exposes full indexer name correctly', () => { 17 | const indexerConfig = new IndexerConfig(REDIS_STREAM, ACCOUNT_ID, FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 18 | 19 | expect(indexerConfig.fullName()).toEqual('test-account.near/test-indexer'); 20 | }); 21 | 22 | test('returns correct hasura values', () => { 23 | const indexerConfig = new IndexerConfig(REDIS_STREAM, ACCOUNT_ID, FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 24 | 25 | expect(indexerConfig.hasuraRoleName()).toEqual('test_account_near'); 26 | 
expect(indexerConfig.hasuraFunctionName()).toEqual('test_indexer'); 27 | }); 28 | 29 | test('returns correct hasura values for account starting with number', () => { 30 | const indexerConfig = new IndexerConfig(REDIS_STREAM, '0xSome-Account', FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 31 | 32 | expect(indexerConfig.hasuraRoleName()).toEqual('_0xSome_Account'); 33 | expect(indexerConfig.hasuraFunctionName()).toEqual('test_indexer'); 34 | }); 35 | 36 | test('returns correct postgres values', () => { 37 | const indexerConfig = new IndexerConfig(REDIS_STREAM, ACCOUNT_ID, FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 38 | 39 | expect(indexerConfig.userName()).toEqual('test_account_near'); 40 | expect(indexerConfig.databaseName()).toEqual('test_account_near'); 41 | expect(indexerConfig.schemaName()).toEqual('test_account_near_test_indexer'); 42 | }); 43 | 44 | test('returns correct postgres values for account starting with number', () => { 45 | const indexerConfig = new IndexerConfig(REDIS_STREAM, '0xSome-Account', FUNCTION_NAME, 0, '', SCHEMA, LogLevel.INFO); 46 | 47 | expect(indexerConfig.userName()).toEqual('_0xSome_Account'); 48 | expect(indexerConfig.databaseName()).toEqual('_0xSome_Account'); 49 | expect(indexerConfig.schemaName()).toEqual('_0xSome_Account_test_indexer'); 50 | }); 51 | }); 52 | -------------------------------------------------------------------------------- /runner/src/indexer-meta/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './indexer-meta'; 2 | export { IndexerStatus, METADATA_TABLE_UPSERT, MetadataFields } from './indexer-meta'; 3 | export { default as LogEntry } from './log-entry'; 4 | -------------------------------------------------------------------------------- /runner/src/indexer-meta/log-entry.ts: -------------------------------------------------------------------------------- 1 | export enum LogLevel { 2 | DEBUG = 2, 3 | INFO = 5, 4 | WARN = 6, 5 | ERROR = 8, 6 | } 7 | 8 | export enum LogType { 9 | SYSTEM = 'system', 10 | USER = 'user', 11 | } 12 | 13 | export default class LogEntry { 14 | public readonly timestamp: Date; 15 | 16 | constructor ( 17 | public readonly message: string, 18 | public readonly level: LogLevel, 19 | public readonly type: LogType, 20 | public readonly blockHeight?: number 21 | ) { 22 | this.timestamp = new Date(); 23 | } 24 | 25 | static createLog (message: string, level: LogLevel, type: LogType, blockHeight?: number): LogEntry { 26 | return new LogEntry(message, level, type, blockHeight); 27 | } 28 | 29 | static systemDebug (message: string, blockHeight?: number): LogEntry { 30 | return LogEntry.createLog(message, LogLevel.DEBUG, LogType.SYSTEM, blockHeight); 31 | } 32 | 33 | static systemInfo (message: string, blockHeight?: number): LogEntry { 34 | return LogEntry.createLog(message, LogLevel.INFO, LogType.SYSTEM, blockHeight); 35 | } 36 | 37 | static systemWarn (message: string, blockHeight?: number): LogEntry { 38 | return LogEntry.createLog(message, LogLevel.WARN, LogType.SYSTEM, blockHeight); 39 | } 40 | 41 | static systemError (message: string, blockHeight?: number): LogEntry { 42 | return LogEntry.createLog(message, LogLevel.ERROR, LogType.SYSTEM, blockHeight); 43 | } 44 | 45 | static userDebug (message: string, blockHeight?: number): LogEntry { 46 | return LogEntry.createLog(message, LogLevel.DEBUG, LogType.USER, blockHeight); 47 | } 48 | 49 | static userInfo (message: string, blockHeight?: number): LogEntry { 50 | return LogEntry.createLog(message, 
LogLevel.INFO, LogType.USER, blockHeight); 51 | } 52 | 53 | static userWarn (message: string, blockHeight?: number): LogEntry { 54 | return LogEntry.createLog(message, LogLevel.WARN, LogType.USER, blockHeight); 55 | } 56 | 57 | static userError (message: string, blockHeight?: number): LogEntry { 58 | return LogEntry.createLog(message, LogLevel.ERROR, LogType.USER, blockHeight); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /runner/src/indexer-meta/no-op-indexer-meta.ts: -------------------------------------------------------------------------------- 1 | import { type LocalIndexerConfig } from '../indexer-config/indexer-config'; 2 | import { type IndexerStatus, type IndexerMetaInterface } from './indexer-meta'; 3 | import type LogEntry from './log-entry'; 4 | import { LogLevel } from './log-entry'; 5 | 6 | export default class NoOpIndexerMeta implements IndexerMetaInterface { 7 | constructor ( 8 | private readonly indexerConfig: LocalIndexerConfig, 9 | ) {} 10 | 11 | private shouldLog (logLevel: LogLevel): boolean { 12 | return logLevel >= this.indexerConfig.logLevel; 13 | } 14 | 15 | async writeLogs (logEntries: LogEntry[]): Promise<void> { 16 | const entriesArray = logEntries.filter(entry => this.shouldLog(entry.level)); 17 | if (entriesArray.length === 0) { 18 | return; 19 | }; 20 | entriesArray.forEach(entry => { 21 | console.log(`[${LogLevel[entry.level]}] [${entry.timestamp.toString()}] ${entry.message}`); 22 | }); 23 | } 24 | 25 | async setStatus (status: IndexerStatus): Promise<void> { 26 | console.log(`Setting status to ${status}`); 27 | } 28 | 29 | async updateBlockHeight (blockHeight: number): Promise<void> { 30 | console.log(`Setting last processed block height to ${blockHeight}`); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /runner/src/indexer/context-builder/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './context-builder'; 2 | export type { ContextObject } from './context-builder'; 3 | -------------------------------------------------------------------------------- /runner/src/indexer/dml-handler/index.ts: -------------------------------------------------------------------------------- 1 | export { default as DmlHandler } from './dml-handler'; 2 | export { default as InMemoryDmlHandler } from './in-memory-dml-handler'; 3 | export type { DmlHandlerInterface } from './dml-handler'; 4 | -------------------------------------------------------------------------------- /runner/src/indexer/index.ts: -------------------------------------------------------------------------------- 1 | export { default as Indexer } from './indexer'; 2 | export { default as LocalIndexer } from './local-indexer'; 3 | export type { TableDefinitionNames } from './indexer'; 4 | -------------------------------------------------------------------------------- /runner/src/indexer/local-indexer.ts: -------------------------------------------------------------------------------- 1 | import ContextBuilder, { type ContextObject } from './context-builder'; 2 | import InMemoryDmlHandler from './dml-handler/in-memory-dml-handler'; 3 | import IndexerConfig from '../indexer-config'; 4 | import { type LocalIndexerConfig } from '../indexer-config'; 5 | import NoOpIndexerMeta from '../indexer-meta/no-op-indexer-meta'; 6 | import Indexer from './indexer'; 7 | import LakeClient from '../lake-client/lake-client'; 8 | 9 | export default class LocalIndexer { 10 |
public readonly indexer: Indexer; 11 | private readonly lakeClient: LakeClient; 12 | 13 | constructor (config: LocalIndexerConfig) { 14 | const fullIndexerConfig: IndexerConfig = IndexerConfig.fromObject({ 15 | redisStreamKey: 'local-indexer', 16 | accountId: config.accountId, 17 | functionName: config.functionName, 18 | version: 0, 19 | code: config.code, 20 | schema: config.schema, 21 | logLevel: config.logLevel, 22 | }); 23 | const dmlHandler = new InMemoryDmlHandler(config.schema); 24 | const contextBuilder = new ContextBuilder(fullIndexerConfig, { dmlHandler }); 25 | const indexerMeta = new NoOpIndexerMeta(config); 26 | this.indexer = new Indexer(fullIndexerConfig, { indexerMeta, contextBuilder }); 27 | this.lakeClient = new LakeClient(); 28 | } 29 | 30 | getContext (): ContextObject { 31 | return this.indexer.deps.contextBuilder.buildContext(0, []); 32 | } 33 | 34 | async executeOnBlock (blockHeight: number): Promise { 35 | // TODO: Cache Block data locally 36 | const block = await this.lakeClient.fetchBlock(blockHeight); 37 | await this.indexer.execute(block); 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /runner/src/instrumentation/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './tracer'; 2 | -------------------------------------------------------------------------------- /runner/src/lake-client/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './lake-client'; 2 | -------------------------------------------------------------------------------- /runner/src/lake-client/lake-client.test.ts: -------------------------------------------------------------------------------- 1 | import { GetObjectCommand, type S3Client } from '@aws-sdk/client-s3'; 2 | import LakeClient from './lake-client'; 3 | 4 | describe('LakeClient', () => { 5 | test('Indexer.fetchBlock() should fetch the block and shards from S3 upon cache miss', async () => { 6 | const blockHeight = 85233529; 7 | const blockHash = 'xyz'; 8 | const mockSend = jest.fn() 9 | .mockReturnValueOnce({ // block 10 | Body: { 11 | transformToString: () => JSON.stringify({ 12 | chunks: [0, 1, 2, 3], 13 | header: { 14 | height: blockHeight, 15 | hash: blockHash, 16 | } 17 | }) 18 | } 19 | }) 20 | .mockReturnValue({ // shard 21 | Body: { 22 | transformToString: () => JSON.stringify({}) 23 | } 24 | }); 25 | const mockS3 = { 26 | send: mockSend, 27 | } as unknown as S3Client; 28 | const client = new LakeClient('mainnet', mockS3); 29 | 30 | const block = await client.fetchBlock(blockHeight); 31 | 32 | expect(mockSend).toHaveBeenCalledTimes(5); 33 | expect(JSON.stringify(mockSend.mock.calls[0][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ 34 | Bucket: 'near-lake-data-mainnet', 35 | Key: `${blockHeight.toString().padStart(12, '0')}/block.json` 36 | }))); 37 | expect(JSON.stringify(mockSend.mock.calls[1][0])).toStrictEqual(JSON.stringify(new GetObjectCommand({ 38 | Bucket: 'near-lake-data-mainnet', 39 | Key: `${blockHeight.toString().padStart(12, '0')}/shard_0.json` 40 | }))); 41 | 42 | expect(block.blockHeight).toEqual(blockHeight); 43 | expect(block.blockHash).toEqual(blockHash); 44 | }); 45 | }); 46 | -------------------------------------------------------------------------------- /runner/src/logger.ts: -------------------------------------------------------------------------------- 1 | import winston from 'winston'; 2 | import { LoggingWinston 
} from '@google-cloud/logging-winston'; 3 | import Transport from 'winston-transport'; 4 | 5 | import { METRICS } from './metrics'; 6 | 7 | const { format, transports } = winston; 8 | 9 | class LogCounter extends Transport { 10 | log (info: { level: string }, callback: () => void): void { 11 | METRICS.LOGS_COUNT.labels({ level: info.level }).inc(); 12 | 13 | callback(); 14 | } 15 | } 16 | 17 | const logger = winston.createLogger({ 18 | level: 'info', 19 | format: format.combine( 20 | format.timestamp(), 21 | format.errors({ stack: true }), 22 | ), 23 | transports: [new LogCounter()], 24 | }); 25 | 26 | if (process.env.GCP_LOGGING_ENABLED) { 27 | logger.add(new LoggingWinston({ redirectToStdout: true })); 28 | } else { 29 | logger.add(new transports.Console({ 30 | format: format.combine( 31 | format.colorize(), 32 | format.simple(), 33 | ), 34 | silent: process.env.NODE_ENV === 'test' 35 | })); 36 | } 37 | 38 | export default logger; 39 | -------------------------------------------------------------------------------- /runner/src/pg-client.ts: -------------------------------------------------------------------------------- 1 | import { Pool, type PoolConfig, type QueryResult, type QueryResultRow } from 'pg'; 2 | import pgFormatModule from 'pg-format'; 3 | 4 | import logger from './logger'; 5 | 6 | export interface PostgresConnectionParams { 7 | user: string 8 | password: string 9 | host: string 10 | port: number | string 11 | database: string 12 | } 13 | 14 | export default class PgClient { 15 | private readonly logger = logger.child({ service: 'PgClient' }); 16 | private readonly pgPool: Pool; 17 | public format: typeof pgFormatModule; 18 | 19 | constructor ( 20 | connectionParams: PostgresConnectionParams, 21 | poolConfig: PoolConfig = { max: Number(process.env.MAX_PG_POOL_SIZE ?? 
10), idleTimeoutMillis: 3000 }, 22 | PgPool: typeof Pool = Pool, 23 | pgFormat: typeof pgFormatModule = pgFormatModule, 24 | onError: (err: Error) => void = (err) => { this.logger.error(err); } 25 | ) { 26 | this.pgPool = new PgPool({ 27 | user: connectionParams.user, 28 | password: connectionParams.password, 29 | host: connectionParams.host, 30 | port: Number(connectionParams.port), 31 | database: connectionParams.database, 32 | ...poolConfig, 33 | }); 34 | 35 | this.pgPool.on('error', onError); 36 | 37 | this.format = pgFormat; 38 | } 39 | 40 | async end (): Promise<void> { 41 | await this.pgPool.end(); 42 | } 43 | 44 | async query<R extends QueryResultRow = any>(query: string, params: any[] = []): Promise<QueryResult<R>> { 45 | // Automatically manages client connections to pool 46 | return await this.pgPool.query(query, params); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /runner/src/provisioner/__snapshots__/provisioner.test.ts.snap: -------------------------------------------------------------------------------- 1 | // Jest Snapshot v1, https://goo.gl/fbAQLP 2 | 3 | exports[`Provisioner provisionUserApi formats user input before executing the query 1`] = ` 4 | [ 5 | [ 6 | "CREATE DATABASE \"databaseName UNION SELECT * FROM users --\"", 7 | ], 8 | [ 9 | "CREATE USER morgs_near WITH PASSWORD 'pass; DROP TABLE users;--'", 10 | ], 11 | [ 12 | "GRANT ALL PRIVILEGES ON DATABASE \"databaseName UNION SELECT * FROM users --\" TO morgs_near", 13 | ], 14 | [ 15 | "REVOKE CONNECT ON DATABASE \"databaseName UNION SELECT * FROM users --\" FROM PUBLIC", 16 | ], 17 | ] 18 | `; 19 | -------------------------------------------------------------------------------- /runner/src/provisioner/hasura-client/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './hasura-client'; 2 | export type { HasuraMetadata, HasuraSource, HasuraConfiguration, HasuraDatabaseConnectionParameters, HasuraTableMetadata, HasuraRolePermission } from './hasura-client'; 3 | export { HASURA_PERMISSION_TYPES } from './hasura-client'; 4 | -------------------------------------------------------------------------------- /runner/src/provisioner/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './provisioner'; 2 | export { METADATA_TABLE_NAME, LOGS_TABLE_NAME } from './provisioner'; 3 | -------------------------------------------------------------------------------- /runner/src/provisioner/provisioning-state/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './provisioning-state'; 2 | -------------------------------------------------------------------------------- /runner/src/provisioner/schemas/logs-table.ts: -------------------------------------------------------------------------------- 1 | export const logsTableDDL = (schemaName: string): string => ` 2 | CREATE TABLE sys_logs ( 3 | id BIGSERIAL NOT NULL, 4 | block_height NUMERIC(20), 5 | date DATE NOT NULL, 6 | timestamp TIMESTAMP NOT NULL, 7 | type TEXT NOT NULL, 8 | level TEXT NOT NULL, 9 | message TEXT NOT NULL, 10 | PRIMARY KEY (date, id) 11 | ) PARTITION BY RANGE (date); 12 | 13 | CREATE INDEX sys_logs_timestamp_idx ON sys_logs USING btree (timestamp); 14 | CREATE INDEX sys_logs_type_idx ON sys_logs USING btree (type); 15 | CREATE INDEX sys_logs_level_idx ON sys_logs USING btree (level); 16 | CREATE INDEX sys_logs_block_height_idx ON sys_logs USING btree (block_height); 17 |
CREATE INDEX sys_logs_search_vector_idx ON sys_logs USING GIN (to_tsvector('english', message)); 18 | 19 | 20 | CREATE OR REPLACE FUNCTION fn_create_partition(_tbl text, _date date, _interval_start text, _interval_end text) 21 | RETURNS void 22 | LANGUAGE plpgsql AS 23 | $func$ 24 | DECLARE 25 | _start text; 26 | _end text; 27 | _partition_name text; 28 | BEGIN 29 | _start := TO_CHAR(date_trunc('day', _date + (_interval_start)::interval), 'YYYY-MM-DD'); 30 | _end := TO_CHAR(date_trunc('day', _date + (_interval_end)::interval), 'YYYY-MM-DD'); 31 | _partition_name := TO_CHAR(date_trunc('day', _date + (_interval_start)::interval), 'YYYYMMDD'); 32 | -- Create partition 33 | EXECUTE 'CREATE TABLE IF NOT EXISTS ' || _tbl || '_p' || _partition_name || ' PARTITION OF ' || _tbl || ' FOR VALUES FROM (''' || _start || ''') TO (''' || _end || ''')'; 34 | END 35 | $func$; 36 | 37 | SELECT fn_create_partition('${schemaName}.sys_logs', CURRENT_DATE, '0 day', '1 day'); 38 | SELECT fn_create_partition('${schemaName}.sys_logs', CURRENT_DATE, '1 day', '2 day'); 39 | 40 | CREATE OR REPLACE FUNCTION fn_delete_partition(_tbl text, _date date, _interval_start text, _interval_end text) 41 | RETURNS void 42 | LANGUAGE plpgsql AS 43 | $func$ 44 | DECLARE 45 | _start text; 46 | _end text; 47 | _partition_name text; 48 | BEGIN 49 | _start := TO_CHAR(date_trunc('day', _date + (_interval_start)::interval), 'YYYY-MM-DD'); 50 | _end := TO_CHAR(date_trunc('day', _date + (_interval_end)::interval), 'YYYY-MM-DD'); 51 | _partition_name := TO_CHAR(date_trunc('day', _date + (_interval_start)::interval), 'YYYYMMDD'); 52 | -- Detach partition 53 | EXECUTE 'ALTER TABLE ' || _tbl || ' DETACH PARTITION ' || _tbl || '_p' || _partition_name; 54 | EXECUTE 'DROP TABLE ' || _tbl || '_p' || _partition_name; 55 | END 56 | $func$; 57 | `; 58 | -------------------------------------------------------------------------------- /runner/src/provisioner/schemas/metadata-table.ts: -------------------------------------------------------------------------------- 1 | export const metadataTableDDL = (): string => ` 2 | CREATE TABLE IF NOT EXISTS sys_metadata ( 3 | attribute TEXT NOT NULL, 4 | value TEXT NOT NULL, 5 | PRIMARY KEY (attribute) 6 | ); 7 | `; 8 | -------------------------------------------------------------------------------- /runner/src/server/index.ts: -------------------------------------------------------------------------------- 1 | import * as grpc from '@grpc/grpc-js'; 2 | import * as protoLoader from '@grpc/proto-loader'; 3 | import assert from 'assert'; 4 | 5 | import logger from '../logger'; 6 | import { getRunnerService } from './services/runner'; 7 | import { createDataLayerService } from './services/data-layer'; 8 | import { type ProtoGrpcType as RunnerProtoGrpcType } from '../generated/runner'; 9 | import { type ProtoGrpcType as DataLayerProtoGrpcType } from '../generated/data-layer'; 10 | 11 | export function startServer (): grpc.Server { 12 | const server = new grpc.Server(); 13 | 14 | const runnerProto = (grpc.loadPackageDefinition( 15 | protoLoader.loadSync('protos/runner.proto') 16 | ) as unknown) as RunnerProtoGrpcType; 17 | server.addService(runnerProto.runner.Runner.service, getRunnerService()); 18 | 19 | const dataLayerProto = (grpc.loadPackageDefinition( 20 | protoLoader.loadSync('protos/data-layer.proto') 21 | ) as unknown) as DataLayerProtoGrpcType; 22 | server.addService(dataLayerProto.data_layer.DataLayer.service, createDataLayerService()); 23 | 24 | const credentials = grpc.ServerCredentials; 25 | 26 | 
assert(process.env.GRPC_SERVER_PORT, 'GRPC_SERVER_PORT is not defined'); 27 | 28 | server.bindAsync( 29 | `0.0.0.0:${process.env.GRPC_SERVER_PORT}`, 30 | credentials.createInsecure(), // TODO: Use secure credentials with allow for Coordinator 31 | (err: Error | null, port: number) => { 32 | if (err) { 33 | logger.error('gRPC server error', err); 34 | } else { 35 | logger.info(`gRPC server bound on: 0.0.0.0:${port}`); 36 | } 37 | } 38 | ); 39 | 40 | return server; 41 | } 42 | -------------------------------------------------------------------------------- /runner/src/server/services/data-layer/index.ts: -------------------------------------------------------------------------------- 1 | export { createDataLayerService } from './data-layer-service'; 2 | -------------------------------------------------------------------------------- /runner/src/server/services/runner/index.ts: -------------------------------------------------------------------------------- 1 | export { getRunnerService } from './runner-service'; 2 | -------------------------------------------------------------------------------- /runner/src/server/services/runner/runner-client.ts: -------------------------------------------------------------------------------- 1 | import * as grpc from '@grpc/grpc-js'; 2 | import * as protoLoader from '@grpc/proto-loader'; 3 | import { type ProtoGrpcType } from '../../../generated/runner'; 4 | import { type RunnerClient } from '../../../generated/runner/Runner'; 5 | 6 | const PROTO_PATH = 'protos/runner.proto'; 7 | 8 | const packageDefinition = protoLoader.loadSync(PROTO_PATH); 9 | const runner = (grpc.loadPackageDefinition(packageDefinition) as unknown) as ProtoGrpcType; 10 | 11 | const serverPort = process.env.GRPC_SERVER_PORT ?? '7001'; 12 | 13 | const runnerClient: RunnerClient = new runner.runner.Runner(`localhost:${serverPort}`, grpc.credentials.createInsecure()); 14 | 15 | export default runnerClient; 16 | -------------------------------------------------------------------------------- /runner/src/stream-handler/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './stream-handler'; 2 | -------------------------------------------------------------------------------- /runner/src/stream-handler/redis-client/index.ts: -------------------------------------------------------------------------------- 1 | export { default } from './redis-client'; 2 | -------------------------------------------------------------------------------- /runner/src/stream-handler/redis-client/redis-client.ts: -------------------------------------------------------------------------------- 1 | import { createClient, type RedisClientType } from 'redis'; 2 | 3 | import logger from '../../logger'; 4 | 5 | interface StreamMessage { 6 | id: string 7 | message: { 8 | block_height: string 9 | } 10 | } 11 | 12 | export default class RedisClient { 13 | SMALLEST_STREAM_ID = '0'; 14 | LARGEST_STREAM_ID = '+'; 15 | STREAMS_SET_KEY = 'streams'; 16 | STREAMER_MESSAGE_HASH_KEY_BASE = 'streamer_message:'; 17 | 18 | private readonly logger: typeof logger; 19 | 20 | constructor ( 21 | private readonly client: RedisClientType = createClient({ url: process.env.REDIS_CONNECTION_STRING }) 22 | ) { 23 | this.logger = logger.child({ service: this.constructor.name }); 24 | 25 | client.on('error', (err) => { this.logger.error('Redis Client Error', err); }); 26 | client.connect().catch(this.logger.error.bind(this)); 27 | } 28 | 29 | async disconnect (): Promise { 30 | await 
this.client.disconnect(); 31 | } 32 | 33 | async getStreamMessages ( 34 | streamKey: string, 35 | streamId = this.SMALLEST_STREAM_ID, 36 | count = 1 37 | ): Promise<StreamMessage[] | undefined> { 38 | const results = await this.client.xRead( 39 | { key: streamKey, id: streamId }, 40 | { COUNT: count } 41 | ); 42 | 43 | return results?.[0].messages as StreamMessage[]; 44 | }; 45 | 46 | async deleteStreamMessage ( 47 | streamKey: string, 48 | id: string, 49 | ): Promise<void> { 50 | await this.client.xDel(streamKey, id); 51 | }; 52 | 53 | async getUnprocessedStreamMessageCount ( 54 | streamKey: string, 55 | ): Promise<number> { 56 | const results = await this.client.xLen(streamKey); 57 | 58 | return results; 59 | }; 60 | 61 | async getStreams (): Promise<string[]> { 62 | return await this.client.sMembers(this.STREAMS_SET_KEY); 63 | } 64 | 65 | async getStreamerMessage (blockHeight: number): Promise<string | null> { 66 | return await this.client.get(`${this.STREAMER_MESSAGE_HASH_KEY_BASE}${blockHeight}`); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /runner/src/utility.ts: -------------------------------------------------------------------------------- 1 | import { type Tracer } from '@opentelemetry/api'; 2 | import VError from 'verror'; 3 | 4 | export async function wrapError<T> (fn: () => Promise<T>, errorMessage: string): Promise<T> { 5 | try { 6 | return await fn(); 7 | } catch (error) { 8 | if (error instanceof Error) { 9 | throw new VError(error, errorMessage); 10 | } 11 | throw new VError(errorMessage); 12 | } 13 | } 14 | 15 | export async function wrapSpan<T> (fn: (...vars: any[]) => Promise<T>, tracer: Tracer, spanName: string): Promise<T> { 16 | const span = tracer.startSpan(spanName); 17 | try { 18 | return await fn(); 19 | } finally { 20 | span.end(); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /runner/tests/testcontainers/hasura.ts: -------------------------------------------------------------------------------- 1 | import { type Readable } from 'stream'; 2 | import { AbstractStartedContainer, GenericContainer, type StartedTestContainer, Wait, type StartedNetwork } from 'testcontainers'; 3 | 4 | import { logConsumer } from './utils'; 5 | 6 | export class HasuraGraphQLContainer { 7 | private databaseUrl?: string; 8 | private adminSecret = 'adminsecret'; 9 | 10 | private readonly PORT = 8080; 11 | 12 | constructor (private readonly container = new GenericContainer('hasura/graphql-engine:latest')) { 13 | container.withExposedPorts(this.PORT) 14 | .withWaitStrategy(Wait.forLogMessage(/.*Starting API server.*/i)) 15 | .withLogConsumer(logConsumer) 16 | .withStartupTimeout(120_000); 17 | } 18 | 19 | public withNetwork (network: StartedNetwork): this { 20 | this.container.withNetwork(network); 21 | return this; 22 | } 23 | 24 | public withLogConsumer (consumer: (stream: Readable) => unknown): this { 25 | this.container.withLogConsumer(consumer); 26 | return this; 27 | } 28 | 29 | public withDatabaseUrl (databaseUrl: string): this { 30 | this.databaseUrl = databaseUrl; 31 | return this; 32 | } 33 | 34 | public withAdminSecret (adminSecret: string): this { 35 | this.adminSecret = adminSecret; 36 | return this; 37 | } 38 | 39 | public async start (): Promise<StartedHasuraGraphQLContainer> { 40 | if (!this.databaseUrl) { 41 | throw new Error('Database URL is required'); 42 | } 43 | 44 | this.container.withEnvironment({ 45 | HASURA_GRAPHQL_DATABASE_URL: this.databaseUrl, 46 | HASURA_GRAPHQL_ENABLE_CONSOLE: 'true', 47 | ...(this.adminSecret && { HASURA_GRAPHQL_ADMIN_SECRET: this.adminSecret }), 48
| }); 49 | return new StartedHasuraGraphQLContainer(await this.container.start(), this.databaseUrl, this.adminSecret, this.PORT); 50 | } 51 | } 52 | 53 | export class StartedHasuraGraphQLContainer extends AbstractStartedContainer { 54 | constructor ( 55 | startedTestContainer: StartedTestContainer, 56 | private readonly databaseUrl: string, 57 | private readonly adminSecret: string, 58 | private readonly port: number 59 | ) { 60 | super(startedTestContainer); 61 | } 62 | 63 | public getPort (networkName?: string): string { 64 | return networkName ? this.port.toString() : this.getMappedPort(this.port).toString(); 65 | } 66 | 67 | public getDatabaseUrl (): string { 68 | return this.databaseUrl; 69 | } 70 | 71 | public getAdminSecret (): string { 72 | return this.adminSecret; 73 | } 74 | 75 | public getIpAddress (networkName?: string): string { 76 | return networkName ? super.getIpAddress(networkName) : this.getHost(); 77 | } 78 | 79 | public getEndpoint (networkName?: string): string { 80 | return `http://${this.getIpAddress(networkName)}:${this.getPort(networkName)}`; 81 | } 82 | } 83 | -------------------------------------------------------------------------------- /runner/tests/testcontainers/utils.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from 'stream'; 2 | 3 | export const logConsumer = (stream: Readable): void => { 4 | const readable = new Readable().wrap(stream); 5 | readable.on('data', (chunk) => { 6 | console.log(chunk.toString()); 7 | }); 8 | }; 9 | -------------------------------------------------------------------------------- /runner/tsconfig.build.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig.json", 3 | "include": ["./src"], 4 | "exclude": ["node_modules", "dist", "**/*.test.*", "scripts"] 5 | } 6 | -------------------------------------------------------------------------------- /runner/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2018", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ 4 | "lib": ["es2021"], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ 5 | "module": "commonjs", /* Specify what module code is generated. */ 6 | "rootDirs": ["./src", "./tests", "./scripts"], 7 | "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */ 8 | "resolveJsonModule": true, /* Enable importing .json files. */ 9 | "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ 10 | "outDir": "dist", /* Specify an output folder for all emitted files. */ 11 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ 12 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ 13 | "strict": true, /* Enable all strict type-checking options. */ 14 | "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */ 15 | "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. 
*/ 16 | "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ 17 | "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ 18 | "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ 19 | "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ 20 | "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ 21 | "skipLibCheck": true /* Skip type checking all .d.ts files. */ 22 | }, 23 | "include": ["./src", "./tests", "./scripts"], 24 | "exclude": ["node_modules", "dist"] 25 | } 26 | -------------------------------------------------------------------------------- /scripts/wipe-database.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # PostgreSQL superuser credentials 4 | PG_SUPERUSER="postgres" 5 | PG_SUPERUSER_PASSWORD="postgrespassword" 6 | 7 | # Exclude these databases and users 8 | EXCLUDED_DATABASES="'postgres', 'template0', 'template1', 'cron'" 9 | EXCLUDED_USERS="'postgres', 'pgbouncer'" 10 | 11 | # Get a list of databases, excluding the defaults 12 | DATABASES=$(psql -U $PG_SUPERUSER -t -c "SELECT datname FROM pg_database WHERE datname NOT IN ($EXCLUDED_DATABASES);") 13 | 14 | # Get a list of users, excluding 'postgres' 15 | USERS=$(psql -U $PG_SUPERUSER -t -c "SELECT usename FROM pg_user WHERE usename NOT IN ($EXCLUDED_USERS);") 16 | 17 | # Drop each database 18 | for db in $DATABASES; do 19 | echo "Dropping database: $db" 20 | psql -U $PG_SUPERUSER -c "DROP DATABASE IF EXISTS $db;" 21 | done 22 | 23 | # Drop each user 24 | for user in $USERS; do 25 | echo "Revoking privileges for user: $user" 26 | psql -U $PG_SUPERUSER -c "REVOKE ALL PRIVILEGES ON FUNCTION cron.schedule_in_database(text,text,text,text,text,boolean) FROM $user;" 27 | psql -U $PG_SUPERUSER -c "REVOKE ALL PRIVILEGES ON SCHEMA cron FROM $user;" 28 | echo "Dropping user: $user" 29 | psql -U $PG_SUPERUSER -c "DROP USER IF EXISTS $user;" 30 | 31 | done 32 | 33 | echo "All non-default databases and users have been dropped." 
34 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/coordinator-v1.tf: -------------------------------------------------------------------------------- 1 | #TODO: Remove this file 2 | module "gce-container" { 3 | source = "terraform-google-modules/container-vm/google" 4 | version = "v3.1.0" 5 | 6 | container = { 7 | args = ["mainnet", "from-interruption"] 8 | image = "us-central1-docker.pkg.dev/pagoda-data-stack-dev/queryapi/queryapi-coordinator-v1:latest" 9 | 10 | env = [ 11 | { 12 | name = "DATABASE_URL" 13 | value = data.google_secret_manager_secret_version.queryapi_testnet_database_url.secret_data 14 | }, 15 | { 16 | name = "AWS_ACCESS_KEY_ID" 17 | value = data.google_secret_manager_secret_version.queryapi_testnet_lake_aws_access_key.secret_data 18 | }, 19 | { 20 | name = "AWS_SECRET_ACCESS_KEY" 21 | value = data.google_secret_manager_secret_version.queryapi_testnet_lake_aws_secret_access_key.secret_data 22 | }, 23 | { 24 | name = "AWS_REGION" 25 | value = "eu-central-1" 26 | }, 27 | { 28 | name = "REDIS_CONNECTION_STRING" 29 | # value = module.redis.redis_host_ip 30 | value = data.google_secret_manager_secret_version.queryapi_testnet_redis_connection_string.secret_data 31 | }, 32 | { 33 | name = "REGISTRY_CONTRACT_ID" 34 | value = "dev-queryapi.dataplatform.near" 35 | }, 36 | { 37 | name = "PORT" 38 | value = "9180" 39 | } 40 | ] 41 | } 42 | restart_policy = "Always" 43 | } 44 | 45 | resource "google_compute_address" "queryapi_static_ip" { 46 | name = "queryapi-coordinator-static-ip" 47 | region = "europe-west1" 48 | address = "10.101.0.104" 49 | address_type = "INTERNAL" 50 | subnetwork = data.google_compute_subnetwork.dev_eu_subnetwork.id 51 | } 52 | 53 | resource "google_compute_firewall" "http-access" { 54 | name = "queryapi-coordinator-access" 55 | project = "pagoda-shared-infrastructure" 56 | network = data.google_compute_network.dev_network.name 57 | 58 | allow { 59 | protocol = "tcp" 60 | ports = [ 61 | "22", # SSH 62 | "9180", # Prometheus API 63 | ] 64 | } 65 | 66 | source_ranges = ["0.0.0.0/0"] 67 | target_tags = ["queryapi"] 68 | } 69 | 70 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/iam/.terraform.lock.hcl: -------------------------------------------------------------------------------- 1 | # This file is maintained automatically by "terraform init". 2 | # Manual edits may be lost in future updates. 
3 | 4 | provider "registry.terraform.io/hashicorp/google" { 5 | version = "4.53.0" 6 | hashes = [ 7 | "h1:7SFrTbReTOk8LlaKRYXv8gThdpadraBnscv1iODZAUs=", 8 | "zh:203c010f76ef5f5bb5fc3e02e712ed59eddcd7f1a41868e2e57f6e95000245f4", 9 | "zh:22f872c417e7d0c6a736aa2da5e6aeed50e587c8dc03f21c825ab4997d48852a", 10 | "zh:3a7c39e2af65a808bc349f69354f07ab3343b4f64d5fd4afc7000d2e44208250", 11 | "zh:6931c3df70dbe8d6533a9044fed7453fb9cda4049ffb360bef7f2bc3422e055a", 12 | "zh:6f8d2ce64a93c12c42971dcb442f196b4865353fe1d978521eb517dfcc07533b", 13 | "zh:82c0dae4d7e360e63c2221af177df26cacdb646eb5fd2b5be08ca04f0e387b71", 14 | "zh:9f066e0eab55876ba39bab9167f8556b014eee117129bfbc411dd94bdb73d9be", 15 | "zh:ce5e468b807a84dc6fe16f262f6de4c07d2a1e5267e00acc9eb61ad350a622d3", 16 | "zh:d92060c3c4a6a2dac6755ed1411698b1a58cac811f61d84c8e3a2a98be7e6146", 17 | "zh:e59053ce6218c74840523b031de0851041a1948523c8cbc5e0af2c605961aea7", 18 | "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", 19 | "zh:f8c204d278663e2e65b5d3f74717dcc9e9b9781ddd8a99f37d7eb44cab25be03", 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/iam/main.tf: -------------------------------------------------------------------------------- 1 | resource "google_service_account" "queryapi_sa" { 2 | account_id = "queryapi-sa" 3 | display_name = "queryapi-sa" 4 | } 5 | 6 | resource "google_artifact_registry_repository_iam_member" "member" { 7 | location = "us-central1" 8 | repository = "projects/pagoda-data-stack-dev/locations/us-central1/repositories/queryapi" 9 | role = "roles/artifactregistry.reader" 10 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 11 | } 12 | 13 | resource "google_project_iam_member" "query_api_log_writer" { 14 | project = var.project_id 15 | role = "roles/logging.logWriter" 16 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 17 | } 18 | 19 | resource "google_project_iam_member" "query_api_metrics_writer" { 20 | project = var.project_id 21 | role = "roles/monitoring.metricWriter" 22 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 23 | } 24 | 25 | resource "google_project_iam_custom_role" "deploy_to_gce_vm_role" { 26 | role_id = "DeployToGCEVM" 27 | title = "Deploy Container to VM" 28 | description = "Role to be able to update a VM running a docker container" 29 | permissions = ["compute.instances.setMetadata", "compute.instances.get", "compute.instances.stop", "compute.instances.start"] 30 | } 31 | 32 | resource "google_project_iam_member" "cloudbuild_gce_deploy_role" { 33 | project = var.project_id 34 | role = google_project_iam_custom_role.deploy_to_gce_vm_role.id 35 | member = "serviceAccount:851370737288@cloudbuild.gserviceaccount.com" 36 | } 37 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/iam/resources.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | default = "pagoda-data-stack-dev" 3 | description = "The default project id to use for resources in this directory." 
4 | } 5 | 6 | terraform { 7 | backend "gcs" { 8 | bucket = "terraform-pagoda-shared-infrastructure" 9 | prefix = "state/data_stack/queryapi/pagoda_data_stack_dev/iam" 10 | } 11 | } 12 | 13 | provider "google" { 14 | project = "pagoda-data-stack-dev" 15 | } 16 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/redis.tf: -------------------------------------------------------------------------------- 1 | variable "redis_configs" { 2 | type = map(string) 3 | default = { 4 | maxmemory-policy = "volatile-lru" 5 | maxmemory-gb = "3.5" 6 | } 7 | } 8 | 9 | resource "google_redis_instance" "queryapi-redis" { 10 | name = "queryapi-redis" 11 | memory_size_gb = 5 12 | region = "europe-west1" 13 | location_id = "europe-west1-b" 14 | 15 | tier = "STANDARD_HA" 16 | redis_version = "REDIS_6_X" 17 | connect_mode = "PRIVATE_SERVICE_ACCESS" 18 | authorized_network = data.google_compute_network.dev_network.id 19 | 20 | redis_configs = var.redis_configs 21 | } 22 | 23 | output "redis_host_ip" { 24 | description = "The IP address of the instance." 25 | value = google_redis_instance.queryapi-redis.host 26 | } 27 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-dev/resources.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | default = "pagoda-data-stack-dev" 3 | description = "The default project id to use for resources in this directory." 4 | } 5 | 6 | terraform { 7 | backend "gcs" { 8 | bucket = "terraform-pagoda-shared-infrastructure" 9 | prefix = "state/data_stack/queryapi/pagoda_data_stack_dev" 10 | } 11 | } 12 | 13 | provider "google" { 14 | project = "pagoda-data-stack-dev" 15 | } 16 | 17 | data "google_compute_subnetwork" "dev_subnetwork" { 18 | name = "dev-us-central1" 19 | project = "pagoda-shared-infrastructure" 20 | region = "us-central1" 21 | } 22 | 23 | data "google_compute_subnetwork" "dev_eu_subnetwork" { 24 | name = "dev-europe-west1" 25 | project = "pagoda-shared-infrastructure" 26 | region = "europe-west1" 27 | } 28 | 29 | data "google_compute_network" "dev_network" { 30 | name = "dev" 31 | project = "pagoda-shared-infrastructure" 32 | } 33 | 34 | data "google_service_account" "queryapi_sa" { 35 | account_id = "queryapi_sa" 36 | } 37 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-prod/iam/.terraform.lock.hcl: -------------------------------------------------------------------------------- 1 | # This file is maintained automatically by "terraform init". 2 | # Manual edits may be lost in future updates. 
3 | 4 | provider "registry.terraform.io/hashicorp/google" { 5 | version = "4.64.0" 6 | hashes = [ 7 | "h1:oT2shsj9Mb4dGGwzlbWQPMTGSex6yDtJZcF5xQJ7rdE=", 8 | "zh:097fcb0a45fa41c2476deeb7a9adeadf5142e35e4d1a9eeb7b1720900a06807c", 9 | "zh:177e6e34f10efb5cec16b4106af5aef5240f20c33d91d40f3ea73fdc6ce9a24a", 10 | "zh:3331b0f62f900f8f1447e654a7318f3db03723739ac5dcdc446f1a1b1bf5fd0b", 11 | "zh:39e5a19693f8d598d35968660837d1b55ca82d7c314cd433fd957d1c2a5b6616", 12 | "zh:44d09cb871e7ec242610d84f93367755d0c532f744e5871a032cdba430e39ec7", 13 | "zh:77769c0f8ace0be3f85b702b7d4cc0fd43d89bfbea1493166c4f288338222f0a", 14 | "zh:a83ca3e204a85d1d04ee7a6432fdabc7b7e2ef7f46513b6309d8e30ea9e855a3", 15 | "zh:bbf1e983d24877a690886aacd48085b37c8c61dc65e128707f36b7ae6de11abf", 16 | "zh:c359fcf8694af0ec490a1784575eeb355d6e5a922b225f49d5307a06e9715ad0", 17 | "zh:f0df551e19cf8cc9a021a4148518a610b856a50a55938710837fa55b4fbd252f", 18 | "zh:f569b65999264a9416862bca5cd2a6177d94ccb0424f3a4ef424428912b9cb3c", 19 | "zh:fb171d37178d46d711f3e09107492343f8356c1237bc6df23114920dc23c4528", 20 | ] 21 | } 22 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-prod/iam/main.tf: -------------------------------------------------------------------------------- 1 | resource "google_service_account" "queryapi_sa" { 2 | account_id = "queryapi-sa" 3 | display_name = "queryapi-sa" 4 | } 5 | 6 | resource "google_service_account" "queryapi_frontend" { 7 | account_id = "queryapi-frontend" 8 | display_name = "queryapi-frontend" 9 | } 10 | 11 | resource "google_artifact_registry_repository_iam_member" "member" { 12 | location = "europe-west1" 13 | repository = "projects/pagoda-data-stack-prod/locations/europe-west1/repositories/queryapi" 14 | role = "roles/artifactregistry.reader" 15 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 16 | } 17 | 18 | resource "google_project_iam_member" "query_api_log_writer" { 19 | project = var.project_id 20 | role = "roles/logging.logWriter" 21 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 22 | } 23 | 24 | resource "google_project_iam_member" "query_api_metrics_writer" { 25 | project = var.project_id 26 | role = "roles/monitoring.metricWriter" 27 | member = "serviceAccount:${google_service_account.queryapi_sa.email}" 28 | } 29 | 30 | resource "google_project_iam_custom_role" "deploy_to_gce_role" { 31 | role_id = "DeployToGCE" 32 | title = "Deploy Container to VM" 33 | description = "Role to be able to update a VM running a docker container" 34 | permissions = ["compute.instances.setMetadata", "compute.instances.get", "compute.instances.stop", "compute.instances.start"] 35 | } 36 | 37 | resource "google_project_iam_member" "cloudbuild_gce_deploy_role" { 38 | project = var.project_id 39 | role = google_project_iam_custom_role.deploy_to_gce_role.id 40 | member = "serviceAccount:185940574553@cloudbuild.gserviceaccount.com" 41 | } 42 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-prod/iam/resources.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | default = "pagoda-data-stack-prod" 3 | description = "The default project id to use for resources in this directory." 
4 | } 5 | 6 | terraform { 7 | backend "gcs" { 8 | bucket = "terraform-pagoda-shared-infrastructure" 9 | prefix = "state/data_stack/queryapi/pagoda_data_stack_prod/iam" 10 | } 11 | } 12 | 13 | provider "google" { 14 | project = "pagoda-data-stack-prod" 15 | } 16 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-prod/redis.tf: -------------------------------------------------------------------------------- 1 | variable "redis_configs" { 2 | type = map(string) 3 | default = { 4 | maxmemory-policy = "volatile-lru" 5 | maxmemory-gb = "3.5" 6 | } 7 | } 8 | 9 | resource "google_redis_instance" "queryapi-redis" { 10 | name = "queryapi-redis" 11 | memory_size_gb = 5 12 | region = "europe-west1" 13 | location_id = "europe-west1-b" 14 | 15 | tier = "STANDARD_HA" 16 | redis_version = "REDIS_6_X" 17 | connect_mode = "PRIVATE_SERVICE_ACCESS" 18 | authorized_network = data.google_compute_network.prod_network.id 19 | 20 | redis_configs = var.redis_configs 21 | } 22 | 23 | output "host" { 24 | description = "The IP address of the instance." 25 | value = google_redis_instance.queryapi-redis.host 26 | } 27 | -------------------------------------------------------------------------------- /terraform/pagoda-data-stack-prod/resources.tf: -------------------------------------------------------------------------------- 1 | variable "project_id" { 2 | default = "pagoda-data-stack-prod" 3 | description = "The default project id to use for resources in this directory." 4 | } 5 | 6 | terraform { 7 | backend "gcs" { 8 | bucket = "terraform-pagoda-shared-infrastructure" 9 | prefix = "state/data_stack/queryapi/pagoda_data_stack_prod" 10 | } 11 | } 12 | 13 | provider "google" { 14 | project = "pagoda-data-stack-prod" 15 | } 16 | 17 | data "google_compute_subnetwork" "prod_subnetwork" { 18 | name = "prod-us-central1" 19 | project = "pagoda-shared-infrastructure" 20 | region = "us-central1" 21 | } 22 | 23 | data "google_compute_subnetwork" "prod_eu_subnetwork" { 24 | name = "prod-europe-west1" 25 | project = "pagoda-shared-infrastructure" 26 | region = "europe-west1" 27 | } 28 | 29 | data "google_compute_network" "prod_network" { 30 | name = "prod" 31 | project = "pagoda-shared-infrastructure" 32 | } 33 | 34 | data "google_service_account" "queryapi_sa" { 35 | account_id = "queryapi_sa" 36 | } 37 | --------------------------------------------------------------------------------
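
The runner examples above exercise only the Runner service; the DataLayer service defined in runner/protos/data-layer.proto can be called in the same way. The sketch below is illustrative and not a file in the repository: it assumes the runner gRPC server is listening on GRPC_SERVER_PORT (defaulting to 7001, as runner-client.ts does), builds a client dynamically with @grpc/proto-loader (so request fields are camelCased, matching the executor examples), and uses placeholder account, function, and schema values.

import * as grpc from '@grpc/grpc-js';
import * as protoLoader from '@grpc/proto-loader';

// Load the DataLayer service definition; the path is relative to the runner/ directory,
// mirroring how runner/src/server/services/runner/runner-client.ts loads runner.proto.
const packageDefinition = protoLoader.loadSync('protos/data-layer.proto');
const proto = grpc.loadPackageDefinition(packageDefinition) as any;

const serverPort = process.env.GRPC_SERVER_PORT ?? '7001'; // assumed default, as in runner-client.ts
const dataLayerClient = new proto.data_layer.DataLayer(
  `localhost:${serverPort}`,
  grpc.credentials.createInsecure()
);

// Start an async provisioning task, then poll its status once with the returned task id.
dataLayerClient.StartProvisioningTask(
  {
    accountId: 'account.near', // placeholder values
    functionName: 'sample_indexer',
    schema: 'CREATE TABLE "indexer_storage" ("key_name" TEXT PRIMARY KEY, "value" TEXT NOT NULL);',
  },
  (err: Error | null, response: { taskId: string }) => {
    if (err) {
      console.error('provisioning request error: ', err);
      return;
    }
    dataLayerClient.GetTaskStatus(
      { taskId: response.taskId },
      (statusErr: Error | null, statusResponse: unknown) => {
        if (statusErr) {
          console.error('status request error: ', statusErr);
        } else {
          console.log('task status: ', JSON.stringify({ statusResponse }, null, 2));
        }
      }
    );
  }
);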