├── .dockerignore
├── .github
│   ├── CODEOWNERS
│   ├── dependabot.yml
│   └── workflows
│       ├── ci.yml
│       ├── dependabot-auto-approve-minor.yml
│       ├── dependabot-auto-merge.yml
│       └── docker.yml
├── .gitignore
├── .mocharc.yaml
├── .prettierignore
├── Dockerfile
├── LICENSE.md
├── README.md
├── bin
│   └── checker.js
├── commands
│   └── checker.js
├── eslint.config.js
├── lib
│   ├── abi.json
│   ├── activity.js
│   ├── checker-id.js
│   ├── contracts.js
│   ├── metrics.js
│   ├── migrate.js
│   ├── paths.js
│   ├── rewards.js
│   ├── runtime.js
│   ├── subnets.js
│   ├── telemetry.js
│   └── zinnia.js
├── package-lock.json
├── package.json
├── scripts
│   └── post-install.js
├── test
│   ├── checker-id.test.js
│   ├── checker.js
│   ├── cli.js
│   ├── metrics.js
│   ├── storage.js
│   └── util.js
└── tsconfig.json
/.dockerignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | runtimes
3 | npm-debug.log
4 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @bajtos @juliangruber @pyropy @NikolasHaimerl
2 | package.json
3 | package-lock.json
4 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: 'npm'
4 | directory: '/'
5 | schedule:
6 | interval: 'daily'
7 | time: '09:00'
8 | timezone: 'Europe/Berlin'
9 | commit-message:
10 | prefix: 'deps'
11 | prefix-development: 'deps(dev)'
12 | reviewers:
13 | - 'bajtos'
14 | - 'juliangruber'
15 | - package-ecosystem: 'github-actions'
16 | directory: '/'
17 | schedule:
18 | interval: 'daily'
19 | time: '09:00'
20 | timezone: 'Europe/Berlin'
21 | commit-message:
22 | prefix: 'ci'
23 | reviewers:
24 | - 'bajtos'
25 | - 'juliangruber'
26 | - package-ecosystem: 'docker'
27 | directories:
28 | - '/'
29 | schedule:
30 | interval: 'daily'
31 | time: '09:00'
32 | timezone: 'Europe/Berlin'
33 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: CI
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 | branches: [main]
8 |
9 | jobs:
10 | build:
11 | runs-on: ${{ matrix.os }}
12 | strategy:
13 | matrix:
14 | os: [macos-latest, ubuntu-latest, windows-latest]
15 | node: ['18', '20', '21', '22']
16 | steps:
17 | - uses: actions/checkout@v4
18 | - uses: actions/setup-node@v4
19 | with:
20 | node-version: ${{ matrix.node }}
21 | - run: npm ci --omit=dev
22 | - run: npm ci
23 | - run: npm run test:unit
24 |
25 | lint:
26 | runs-on: ubuntu-latest
27 | steps:
28 | - uses: actions/checkout@v4
29 | - uses: actions/setup-node@v4
30 | - run: npm ci
31 | - run: npm run test:types
32 | - run: npm run lint
33 |
34 | test-locales:
35 | runs-on: ubuntu-latest
36 | strategy:
37 | matrix:
38 | lang:
39 | - en_GB.UTF-8
40 | steps:
41 | - uses: actions/checkout@v4
42 | - uses: actions/setup-node@v4
43 | - run: npm ci
44 | - run: npm run test:unit
45 | env:
46 | LANG: ${{ matrix.lang }}
47 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
48 |
49 | docker:
50 | name: Build and test Docker
51 | runs-on: ubuntu-latest
52 | permissions:
53 | contents: read
54 | packages: read
55 | env:
56 | REGISTRY: ghcr.io
57 |
58 | steps:
59 | - uses: actions/checkout@v4
60 |
61 | - uses: docker/login-action@v3
62 | with:
63 | registry: ${{ env.REGISTRY }}
64 | username: ${{ github.actor }}
65 | password: ${{ secrets.GITHUB_TOKEN }}
66 |
67 | - name: Set up QEMU
68 | uses: docker/setup-qemu-action@v3
69 | with:
70 | platforms: 'arm64'
71 |
72 | - name: Set up Docker Buildx
73 | uses: docker/setup-buildx-action@v3
74 |
75 | - name: Build and load Docker image
76 | id: docker_build
77 | uses: docker/build-push-action@v6
78 | with:
79 | context: .
80 | load: true
81 | tags: core-test
82 | cache-from: type=registry,ref=ghcr.io/checkernetwork/node
83 | cache-to: type=inline
84 |
85 | - name: Build Docker image for other platforms
86 | uses: docker/build-push-action@v6
87 | with:
88 | context: .
89 | platforms: linux/arm64
90 | cache-from: type=registry,ref=ghcr.io/checkernetwork/node
91 | cache-to: type=inline
92 |
93 | - name: Start Checker Node container with PASSPHRASE
94 | run: |
95 | docker run \
96 | --name checker \
97 | --detach \
98 | --env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
99 | --env PASSPHRASE=secret \
100 | $IMAGEID
101 | env:
102 | IMAGEID: ${{ steps.docker_build.outputs.imageid }}
103 |
104 | - name: Print checker activity
105 | run: |
106 | sleep 10 # Wait for Checker subnets to start
107 | docker logs checker
108 |
109 | - name: Check | Spark started
110 | run: docker logs checker | grep "Spark started"
111 |
112 | - name: Start Checker Node container without PASSPHRASE
113 | run: |
114 | docker run \
115 | --name checker_unsecure \
116 | --detach \
117 | --env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
118 | $IMAGEID
119 | env:
120 | IMAGEID: ${{ steps.docker_build.outputs.imageid }}
121 |
122 | - name: Print checker activity
123 | run: |
124 | sleep 10 # Wait for Checker subnets to start
125 | docker logs checker_unsecure
126 |
127 | - name: Check | Spark started
128 | run: docker logs checker_unsecure | grep "Spark started"
129 |
--------------------------------------------------------------------------------
/.github/workflows/dependabot-auto-approve-minor.yml:
--------------------------------------------------------------------------------
1 | name: Dependabot auto-approve minor updates
2 | on: pull_request
3 |
4 | permissions:
5 | pull-requests: write
6 |
7 | jobs:
8 | dependabot:
9 | runs-on: ubuntu-latest
10 | if: ${{ github.actor == 'dependabot[bot]' }}
11 | strategy:
12 | matrix:
13 | dependencyStartsWith:
14 | - '@checkernetwork/prettier-config'
15 | - undici
16 | - '@types/'
17 | - typescript
18 | - prettier
19 | - mocha
20 | - '@glif/'
21 | - tar
22 | - np
23 | - ethers
24 | - execa
25 | - '@ipld/car'
26 | - '@filecoin-station/spark-impact-evaluator'
27 | - neostandard
28 | - p-retry
29 | - cross-spawn
30 | - nanoid
31 | - w3name
32 | - '@web3-storage/car-block-validator'
33 | - node
34 | - ipfs-unixfs-exporter
35 | - eslint
36 | steps:
37 | - name: Dependabot metadata
38 | id: metadata
39 | uses: dependabot/fetch-metadata@v2
40 | with:
41 | github-token: '${{ secrets.GITHUB_TOKEN }}'
42 | - name: Approve a PR
43 | if: ${{startsWith(steps.metadata.outputs.dependency-names, matrix.dependencyStartsWith) && (steps.metadata.outputs.update-type == 'version-update:semver-patch' || steps.metadata.outputs.update-type == 'version-update:semver-minor')}}
44 | run: gh pr review --approve "$PR_URL"
45 | env:
46 | PR_URL: ${{github.event.pull_request.html_url}}
47 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
48 |
--------------------------------------------------------------------------------
/.github/workflows/dependabot-auto-merge.yml:
--------------------------------------------------------------------------------
1 | name: Dependabot auto-merge
2 | on: pull_request
3 |
4 | permissions:
5 | contents: write
6 | pull-requests: write
7 |
8 | jobs:
9 | dependabot:
10 | runs-on: ubuntu-latest
11 | if: ${{ github.actor == 'dependabot[bot]' }}
12 | steps:
13 | - name: Enable auto-merge for Dependabot PRs
14 | run: gh pr merge --auto --squash "$PR_URL"
15 | env:
16 | PR_URL: ${{github.event.pull_request.html_url}}
17 | GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
18 |
--------------------------------------------------------------------------------
/.github/workflows/docker.yml:
--------------------------------------------------------------------------------
1 | name: Docker build
2 | on:
3 | push:
4 | tags: ['*']
5 |
6 | env:
7 | REGISTRY: ghcr.io
8 | IMAGE_NAME: ${{ github.repository }}
9 |
10 | jobs:
11 | docker:
12 | runs-on: ubuntu-latest
13 | permissions:
14 | contents: read
15 | packages: write
16 |
17 | steps:
18 | - uses: actions/checkout@v4
19 |
20 | - uses: docker/login-action@v3
21 | with:
22 | registry: ${{ env.REGISTRY }}
23 | username: ${{ github.actor }}
24 | password: ${{ secrets.GITHUB_TOKEN }}
25 |
26 | - name: Set up QEMU
27 | uses: docker/setup-qemu-action@v3
28 | with:
29 | platforms: 'arm64'
30 |
31 | - name: Set up Docker Buildx
32 | uses: docker/setup-buildx-action@v3
33 |
34 | - uses: martinbeentjes/npm-get-version-action@main
35 | id: package-version
36 |
37 | - name: Build and push Docker image
38 | uses: docker/build-push-action@v6
39 | with:
40 | context: .
41 | push: true
42 | tags: |
43 | ghcr.io/checkernetwork/node
44 | ghcr.io/checkernetwork/node:${{ steps.package-version.outputs.current-version }}
45 | platforms: linux/amd64,linux/arm64
46 | cache-from: type=registry,ref=ghcr.io/checkernetwork/node
47 | cache-to: type=inline
48 |
49 | - if: failure()
50 | uses: slackapi/slack-github-action@v2.1.0
51 | with:
52 | method: chat.postMessage
53 | token: ${{ secrets.SLACK_BOT_TOKEN }}
54 | payload: |
55 | {
56 | "channel": "alerts",
57 | "text": "Building `${{ github.event.repository.name }}` docker image failed",
58 | "blocks": [
59 | {
60 | "type": "section",
61 | "text": {
62 | "type": "mrkdwn",
63 | "text": ":warning: *<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|Building `${{ github.event.repository.name }}` docker image failed>*"
64 | }
65 | }
66 | ]
67 | }
68 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 |
44 | # TypeScript v1 declaration files
45 | typings/
46 |
47 | # TypeScript cache
48 | *.tsbuildinfo
49 |
50 | # Optional npm cache directory
51 | .npm
52 |
53 | # Optional eslint cache
54 | .eslintcache
55 |
56 | # Microbundle cache
57 | .rpt2_cache/
58 | .rts2_cache_cjs/
59 | .rts2_cache_es/
60 | .rts2_cache_umd/
61 |
62 | # Optional REPL history
63 | .node_repl_history
64 |
65 | # Output of 'npm pack'
66 | *.tgz
67 |
68 | # Yarn Integrity file
69 | .yarn-integrity
70 |
71 | # dotenv environment variables file
72 | .env
73 | .env.test
74 |
75 | # parcel-bundler cache (https://parceljs.org/)
76 | .cache
77 |
78 | # Next.js build output
79 | .next
80 |
81 | # Gatsby files
82 | .cache/
83 | # Comment in the public line if your project uses Gatsby and *not* Next.js
84 | # https://nextjs.org/blog/next-9-1#public-directory-support
85 | # public
86 |
87 | # vuepress build output
88 | .vuepress/dist
89 |
90 | # Serverless directories
91 | .serverless/
92 |
93 | # FuseBox cache
94 | .fusebox/
95 |
96 | # DynamoDB Local files
97 | .dynamodb/
98 |
99 | # TernJS port file
100 | .tern-port
101 | .DS_Store
102 | runtimes
103 |
104 | dist
105 | .state
106 |
--------------------------------------------------------------------------------
/.mocharc.yaml:
--------------------------------------------------------------------------------
1 | timeout: 20000
2 | exit: true
3 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | # Let's keep LICENSE.md in the same formatting as we use in other PL repositories
2 | LICENSE.md
3 | /lib/abi.json
4 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:22.15.0-slim
2 | LABEL org.opencontainers.image.source=https://github.com/CheckerNetwork/node
3 | USER node
4 | WORKDIR /usr/src/app
5 | COPY . .
6 | RUN npm ci --omit=dev
7 | ENV DEPLOYMENT_TYPE=docker
8 | CMD [ "./bin/checker.js" ]
9 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | The contents of this repository are Copyright (c) corresponding authors and
2 | contributors, licensed under the `Permissive License Stack` meaning either of:
3 |
4 | - Apache-2.0 Software License: https://www.apache.org/licenses/LICENSE-2.0
5 | ([...4tr2kfsq](https://dweb.link/ipfs/bafkreiankqxazcae4onkp436wag2lj3ccso4nawxqkkfckd6cg4tr2kfsq))
6 |
7 | - MIT Software License: https://opensource.org/licenses/MIT
8 | ([...vljevcba](https://dweb.link/ipfs/bafkreiepofszg4gfe2gzuhojmksgemsub2h4uy2gewdnr35kswvljevcba))
9 |
10 | You may not use the contents of this repository except in compliance
11 | with one of the listed Licenses. For an extended clarification of the
12 | intent behind the choice of Licensing please refer to
13 | https://protocol.ai/blog/announcing-the-permissive-license-stack/
14 |
15 | Unless required by applicable law or agreed to in writing, software
16 | distributed under the terms listed in this notice is distributed on
17 | an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
18 | either express or implied. See each License for the specific language
19 | governing permissions and limitations under that License.
20 |
21 |
22 | `SPDX-License-Identifier: Apache-2.0 OR MIT`
23 |
24 | Verbatim copies of both licenses are included below:
25 |
26 | Apache-2.0 Software License
27 |
28 | ```
29 | Apache License
30 | Version 2.0, January 2004
31 | http://www.apache.org/licenses/
32 |
33 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
34 |
35 | 1. Definitions.
36 |
37 | "License" shall mean the terms and conditions for use, reproduction,
38 | and distribution as defined by Sections 1 through 9 of this document.
39 |
40 | "Licensor" shall mean the copyright owner or entity authorized by
41 | the copyright owner that is granting the License.
42 |
43 | "Legal Entity" shall mean the union of the acting entity and all
44 | other entities that control, are controlled by, or are under common
45 | control with that entity. For the purposes of this definition,
46 | "control" means (i) the power, direct or indirect, to cause the
47 | direction or management of such entity, whether by contract or
48 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
49 | outstanding shares, or (iii) beneficial ownership of such entity.
50 |
51 | "You" (or "Your") shall mean an individual or Legal Entity
52 | exercising permissions granted by this License.
53 |
54 | "Source" form shall mean the preferred form for making modifications,
55 | including but not limited to software source code, documentation
56 | source, and configuration files.
57 |
58 | "Object" form shall mean any form resulting from mechanical
59 | transformation or translation of a Source form, including but
60 | not limited to compiled object code, generated documentation,
61 | and conversions to other media types.
62 |
63 | "Work" shall mean the work of authorship, whether in Source or
64 | Object form, made available under the License, as indicated by a
65 | copyright notice that is included in or attached to the work
66 | (an example is provided in the Appendix below).
67 |
68 | "Derivative Works" shall mean any work, whether in Source or Object
69 | form, that is based on (or derived from) the Work and for which the
70 | editorial revisions, annotations, elaborations, or other modifications
71 | represent, as a whole, an original work of authorship. For the purposes
72 | of this License, Derivative Works shall not include works that remain
73 | separable from, or merely link (or bind by name) to the interfaces of,
74 | the Work and Derivative Works thereof.
75 |
76 | "Contribution" shall mean any work of authorship, including
77 | the original version of the Work and any modifications or additions
78 | to that Work or Derivative Works thereof, that is intentionally
79 | submitted to Licensor for inclusion in the Work by the copyright owner
80 | or by an individual or Legal Entity authorized to submit on behalf of
81 | the copyright owner. For the purposes of this definition, "submitted"
82 | means any form of electronic, verbal, or written communication sent
83 | to the Licensor or its representatives, including but not limited to
84 | communication on electronic mailing lists, source code control systems,
85 | and issue tracking systems that are managed by, or on behalf of, the
86 | Licensor for the purpose of discussing and improving the Work, but
87 | excluding communication that is conspicuously marked or otherwise
88 | designated in writing by the copyright owner as "Not a Contribution."
89 |
90 | "Contributor" shall mean Licensor and any individual or Legal Entity
91 | on behalf of whom a Contribution has been received by Licensor and
92 | subsequently incorporated within the Work.
93 |
94 | 2. Grant of Copyright License. Subject to the terms and conditions of
95 | this License, each Contributor hereby grants to You a perpetual,
96 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
97 | copyright license to reproduce, prepare Derivative Works of,
98 | publicly display, publicly perform, sublicense, and distribute the
99 | Work and such Derivative Works in Source or Object form.
100 |
101 | 3. Grant of Patent License. Subject to the terms and conditions of
102 | this License, each Contributor hereby grants to You a perpetual,
103 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
104 | (except as stated in this section) patent license to make, have made,
105 | use, offer to sell, sell, import, and otherwise transfer the Work,
106 | where such license applies only to those patent claims licensable
107 | by such Contributor that are necessarily infringed by their
108 | Contribution(s) alone or by combination of their Contribution(s)
109 | with the Work to which such Contribution(s) was submitted. If You
110 | institute patent litigation against any entity (including a
111 | cross-claim or counterclaim in a lawsuit) alleging that the Work
112 | or a Contribution incorporated within the Work constitutes direct
113 | or contributory patent infringement, then any patent licenses
114 | granted to You under this License for that Work shall terminate
115 | as of the date such litigation is filed.
116 |
117 | 4. Redistribution. You may reproduce and distribute copies of the
118 | Work or Derivative Works thereof in any medium, with or without
119 | modifications, and in Source or Object form, provided that You
120 | meet the following conditions:
121 |
122 | (a) You must give any other recipients of the Work or
123 | Derivative Works a copy of this License; and
124 |
125 | (b) You must cause any modified files to carry prominent notices
126 | stating that You changed the files; and
127 |
128 | (c) You must retain, in the Source form of any Derivative Works
129 | that You distribute, all copyright, patent, trademark, and
130 | attribution notices from the Source form of the Work,
131 | excluding those notices that do not pertain to any part of
132 | the Derivative Works; and
133 |
134 | (d) If the Work includes a "NOTICE" text file as part of its
135 | distribution, then any Derivative Works that You distribute must
136 | include a readable copy of the attribution notices contained
137 | within such NOTICE file, excluding those notices that do not
138 | pertain to any part of the Derivative Works, in at least one
139 | of the following places: within a NOTICE text file distributed
140 | as part of the Derivative Works; within the Source form or
141 | documentation, if provided along with the Derivative Works; or,
142 | within a display generated by the Derivative Works, if and
143 | wherever such third-party notices normally appear. The contents
144 | of the NOTICE file are for informational purposes only and
145 | do not modify the License. You may add Your own attribution
146 | notices within Derivative Works that You distribute, alongside
147 | or as an addendum to the NOTICE text from the Work, provided
148 | that such additional attribution notices cannot be construed
149 | as modifying the License.
150 |
151 | You may add Your own copyright statement to Your modifications and
152 | may provide additional or different license terms and conditions
153 | for use, reproduction, or distribution of Your modifications, or
154 | for any such Derivative Works as a whole, provided Your use,
155 | reproduction, and distribution of the Work otherwise complies with
156 | the conditions stated in this License.
157 |
158 | 5. Submission of Contributions. Unless You explicitly state otherwise,
159 | any Contribution intentionally submitted for inclusion in the Work
160 | by You to the Licensor shall be under the terms and conditions of
161 | this License, without any additional terms or conditions.
162 | Notwithstanding the above, nothing herein shall supersede or modify
163 | the terms of any separate license agreement you may have executed
164 | with Licensor regarding such Contributions.
165 |
166 | 6. Trademarks. This License does not grant permission to use the trade
167 | names, trademarks, service marks, or product names of the Licensor,
168 | except as required for reasonable and customary use in describing the
169 | origin of the Work and reproducing the content of the NOTICE file.
170 |
171 | 7. Disclaimer of Warranty. Unless required by applicable law or
172 | agreed to in writing, Licensor provides the Work (and each
173 | Contributor provides its Contributions) on an "AS IS" BASIS,
174 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
175 | implied, including, without limitation, any warranties or conditions
176 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
177 | PARTICULAR PURPOSE. You are solely responsible for determining the
178 | appropriateness of using or redistributing the Work and assume any
179 | risks associated with Your exercise of permissions under this License.
180 |
181 | 8. Limitation of Liability. In no event and under no legal theory,
182 | whether in tort (including negligence), contract, or otherwise,
183 | unless required by applicable law (such as deliberate and grossly
184 | negligent acts) or agreed to in writing, shall any Contributor be
185 | liable to You for damages, including any direct, indirect, special,
186 | incidental, or consequential damages of any character arising as a
187 | result of this License or out of the use or inability to use the
188 | Work (including but not limited to damages for loss of goodwill,
189 | work stoppage, computer failure or malfunction, or any and all
190 | other commercial damages or losses), even if such Contributor
191 | has been advised of the possibility of such damages.
192 |
193 | 9. Accepting Warranty or Additional Liability. While redistributing
194 | the Work or Derivative Works thereof, You may choose to offer,
195 | and charge a fee for, acceptance of support, warranty, indemnity,
196 | or other liability obligations and/or rights consistent with this
197 | License. However, in accepting such obligations, You may act only
198 | on Your own behalf and on Your sole responsibility, not on behalf
199 | of any other Contributor, and only if You agree to indemnify,
200 | defend, and hold each Contributor harmless for any liability
201 | incurred by, or claims asserted against, such Contributor by reason
202 | of your accepting any such warranty or additional liability.
203 |
204 | END OF TERMS AND CONDITIONS
205 | ```
206 |
207 |
208 | MIT Software License
209 |
210 | ```
211 | Permission is hereby granted, free of charge, to any person obtaining a copy
212 | of this software and associated documentation files (the "Software"), to deal
213 | in the Software without restriction, including without limitation the rights
214 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
215 | copies of the Software, and to permit persons to whom the Software is
216 | furnished to do so, subject to the following conditions:
217 |
218 | The above copyright notice and this permission notice shall be included in
219 | all copies or substantial portions of the Software.
220 |
221 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
222 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
223 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
224 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
225 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
226 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
227 | THE SOFTWARE.
228 | ```
229 |
230 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # :artificial_satellite: Checker Node
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 | > Checker Node is a node implementation for the
13 | > [Checker Network](https://checker.network), suitable for running on all kinds
14 | > of servers.
15 |
16 | [![CI](https://github.com/CheckerNetwork/node/actions/workflows/ci.yml/badge.svg)](https://github.com/CheckerNetwork/node/actions/workflows/ci.yml)
17 |
18 | ## Deployment
19 |
20 | Checker Node supports different deployment options:
21 |
22 | - [Docker](#docker)
23 | - [Manual Deployment (Ubuntu)](#manual-deployment-ubuntu)
24 |
25 | ## Installation
26 |
27 | > **Note**: Checker Node requires Node.js; we recommend using the latest LTS
28 | > version. You can install Node.js using your favorite package manager or get
29 | > the official installer from
30 | > [Node.js downloads](https://nodejs.org/en/download/).
31 |
32 | With Node.js installed, run `npm` to install Checker Node.
33 |
34 | ```bash
35 | $ npm install -g @checkernetwork/node
36 | ```
37 |
38 | ## Usage
39 |
40 | ```bash
41 | $ FIL_WALLET_ADDRESS=... PASSPHRASE=... checker
42 | ```
43 |
44 | ## Common Configuration
45 |
46 | Checker Node is configured using environment variables (see
47 | [The Twelve-Factor App](https://12factor.net/config)).
48 |
49 | The following configuration options are shared by all Checker commands:
50 |
51 | - `$CACHE_ROOT` _(string; optional)_: Checker stores temporary files (e.g.
52 | cached data) in this directory. Defaults to
53 | - Linux: `${XDG_CACHE_HOME:-~/.cache}/checker-network-node`
54 | - macOS: `~/Library/Caches/network.checker.node`
55 | - Windows: `%TEMP%/Checker Network Node`
56 | - `$STATE_ROOT` _(string; optional)_: Checker stores logs and subnet state in
57 | this directory. Defaults to
58 |
59 | - Linux: `${XDG_STATE_HOME:-~/.local/state}/checker-network-node`
60 | - macOS: `~/Library/Application Support/network.checker.node`
61 | - Windows: `%LOCALAPPDATA%/Checker Network Node`
62 |
63 | **IMPORTANT:** The `$STATE_ROOT` directory must be local to the computer
64 | running the Checker. This directory must not be shared with other computers
65 | operated by the user, e.g. via Windows Domain profile or cloud storage like
66 | iCloud Drive, Dropbox and OneDrive.
67 |
68 | ## Commands
69 |
70 | ### `$ checker`
71 |
72 | Start a new Checker process. The Checker will run in foreground and can be
73 | terminated by pressing Ctrl+C.
74 |
75 | This command supports the following additional configuration on top of the
76 | common configuration options described in
77 | [Common Configuration](#common-configuration):
78 |
79 | - `FIL_WALLET_ADDRESS` _(string; required)_: Address of the Filecoin wallet that
80 | will receive rewards. The value must be a mainnet address starting with
81 | `f410` or `0x`.
82 |
83 | `f1` addresses are currently not supported. Rewards for Checker operators are
84 | administered by a FEVM smart contract, and it is currently technically complex
85 | to make payments to `f1` addresses.
86 |
87 | If you just want to give Checker Node a quick spin, you can use the address
88 | `0x000000000000000000000000000000000000dEaD`. Please note that any earnings
89 | sent there will be lost.
90 |
91 | - `PASSPHRASE` _(string; optional)_: A passphrase to protect the Checker
92 | instance private key stored in a file inside the `STATE_ROOT` directory.
93 |
94 | - `SUBNET_FILTER` _(string; optional)_: Run only the subnet with the given name.
95 | E.g.:
96 | - `SUBNET_FILTER=spark`
97 |
98 | This command outputs metrics and activity events:
99 |
100 | ```bash
101 | $ checker
102 | {
103 | "totalJobsCompleted": 161,
104 | "rewardsScheduledForAddress": "0.041033208757289921"
105 | }
106 | [4/19/2023, 9:26:54 PM] INFO Saturn Node will try to connect to the Saturn Orchestrator...
107 | [4/19/2023, 9:26:54 PM] INFO Saturn Node was able to connect to the Orchestrator and will now start connecting to the Saturn network...
108 | ...
109 | ```
110 |
111 | ```bash
112 | $ checker --json
113 | {"type":"jobs-completed","total":161}
114 | {"type":"activity:info","subnet":"Saturn","message":"Saturn Node will try to connect to the Saturn Orchestrator..."}
115 | {"type":"activity:info","subnet":"Saturn","message":"Saturn Node was able to connect to the Orchestrator and will now start connecting to the Saturn network..."}
116 | ...
117 | ```
118 |
119 | For the JSON output, the following event types exist:
120 |
121 | - `jobs-completed`
122 | - `total`
123 | - `activity:info`
124 | - `subnet`
125 | - `message`
126 | - `activity:error`
127 | - `subnet`
128 | - `message`
129 |
130 | Set the flag `--experimental` to run subnets not yet considered safe for
131 | production use. _Run this at your own risk!_
132 |
133 | Checker subnets currently in experimental mode:
134 |
135 | - [Arweave](https://github.com/CheckerNetwork/arweave-checker/)
136 | - [Walrus](https://github.com/CheckerNetwork/walrus-checker/)
137 |
138 | ### `$ checker --help`
139 |
140 | Show help.
141 |
142 | ```bash
143 | $ checker --help
144 | Usage: checker [options]
145 |
146 | Options:
147 | -j, --json Output JSON [boolean]
148 | --experimental Also run experimental subnets [boolean]
149 | --recreateCheckerIdOnError Recreate Checker ID if it is corrupted
150 | [boolean]
151 | -v, --version Show version number [boolean]
152 | -h, --help Show help [boolean]
153 | ```
154 |
155 | ### `$ checker --version`
156 |
157 | Show version number.
158 |
159 | ```bash
160 | $ checker --version
161 | @checkernetwork/node: 1.0.1
162 | ```
163 |
164 | ## Docker
165 |
166 | Deploy Checker with [Docker](https://www.docker.com/). Please replace
167 | `FIL_WALLET_ADDRESS` and ensure the passed `state` folder is persisted across
168 | machine restarts.
169 |
170 | ```bash
171 | $ docker run \
172 | --name checker \
173 | --detach \
174 | --env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD \
175 | -v ./state:/home/node/.local/state/ \
176 | ghcr.io/checkernetwork/node
177 | ```
178 |
179 | ## Manual Deployment (Ubuntu)
180 |
181 | On a fresh [Ubuntu](https://ubuntu.com/) machine:
182 |
183 | ```bash
184 | # Install node.js
185 | $ curl -fsSL https://deb.nodesource.com/setup_20.x | sudo -E bash - &&\
186 | sudo apt-get install -y nodejs
187 |
188 | # Install Checker Node
189 | $ npm install -g @checkernetwork/node
190 |
191 | # Create systemd service
192 | # Don't forget to replace FIL_WALLET_ADDRESS and User
193 | $ sudo tee /etc/systemd/system/checker.service > /dev/null <
--------------------------------------------------------------------------------
/bin/checker.js:
--------------------------------------------------------------------------------
20 | yargs
21 | .option('json', {
22 | alias: 'j',
23 | type: 'boolean',
24 | description: 'Output JSON',
25 | })
26 | .option('experimental', {
27 | type: 'boolean',
28 | description: 'Also run experimental subnets',
29 | })
30 | .option('recreateCheckerIdOnError', {
31 | type: 'boolean',
32 | description: 'Recreate Checker ID if it is corrupted',
33 | }),
34 | ({ json, experimental, recreateCheckerIdOnError }) =>
35 | checker({ json, experimental, recreateCheckerIdOnError }),
36 | )
37 | .version(`${pkg.name}: ${pkg.version}`)
38 | .alias('v', 'version')
39 | .alias('h', 'help')
40 | .parse()
41 |
--------------------------------------------------------------------------------
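The `--json` flag defined above makes the checker emit newline-delimited JSON on stdout. A minimal consumer sketch, not part of the repository; the event shapes (`jobs-completed`, `activity:*`) follow the README and `commands/checker.js`, and the wallet address is the README's throwaway example:

```js
import { spawn } from 'node:child_process'
import readline from 'node:readline'

// Launch the installed CLI in JSON mode. Earnings sent to this example
// wallet address would be lost.
const child = spawn('checker', ['--json'], {
  env: {
    ...process.env,
    FIL_WALLET_ADDRESS: '0x000000000000000000000000000000000000dEaD',
  },
})

// stdout carries one JSON event per line.
const lines = readline.createInterface({ input: child.stdout })
for await (const line of lines) {
  const event = JSON.parse(line)
  if (event.type === 'jobs-completed') {
    console.log('jobs completed so far:', event.total)
  } else if (event.type.startsWith('activity:')) {
    console.log(`[${event.subnet}] ${event.message}`)
  }
}
```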
/commands/checker.js:
--------------------------------------------------------------------------------
1 | import { join } from 'node:path'
2 | import * as zinniaRuntime from '../lib/zinnia.js'
3 | import { formatActivityObject, activities } from '../lib/activity.js'
4 | import { runPingLoop, runMachinesLoop } from '../lib/telemetry.js'
5 | import fs from 'node:fs/promises'
6 | import { metrics } from '../lib/metrics.js'
7 | import { paths } from '../lib/paths.js'
8 | import { getCheckerId } from '../lib/checker-id.js'
9 | import pRetry from 'p-retry'
10 | import { fetch } from 'undici'
11 | import { ethAddressFromDelegated, isEthAddress } from '@glif/filecoin-address'
12 | import { ethers, formatEther } from 'ethers'
13 | import { runUpdateRewardsLoop } from '../lib/rewards.js'
14 | import { runUpdateContractsLoop } from '../lib/contracts.js'
15 |
16 | const { FIL_WALLET_ADDRESS, PASSPHRASE } = process.env
17 |
18 | const runtimeNames = ['zinnia']
19 |
20 | /**
21 | * @param {string} msg
22 | * @param {number} [exitCode]
23 | */
24 | const panic = (msg, exitCode = 1) => {
25 | console.error(msg)
26 | process.exit(exitCode)
27 | }
28 |
29 | export const checker = async ({
30 | json,
31 | recreateCheckerIdOnError,
32 | experimental,
33 | }) => {
34 | if (!FIL_WALLET_ADDRESS) panic('FIL_WALLET_ADDRESS required')
35 | if (FIL_WALLET_ADDRESS.startsWith('f1')) {
36 | panic(
37 | 'Invalid FIL_WALLET_ADDRESS: f1 addresses are currently not supported. Please use an f4 or 0x address.',
38 | )
39 | }
40 | if (
41 | !FIL_WALLET_ADDRESS.startsWith('f410') &&
42 | !FIL_WALLET_ADDRESS.startsWith('0x')
43 | ) {
44 | panic('FIL_WALLET_ADDRESS must start with f410 or 0x')
45 | }
46 | if (
47 | FIL_WALLET_ADDRESS.startsWith('0x') &&
48 | !isEthAddress(FIL_WALLET_ADDRESS)
49 | ) {
50 | panic('Invalid FIL_WALLET_ADDRESS ethereum address', 2)
51 | }
52 |
53 | const keypair = await getCheckerId({
54 | secretsDir: paths.secrets,
55 | passphrase: PASSPHRASE,
56 | recreateOnError: recreateCheckerIdOnError,
57 | })
58 | const CHECKER_ID = keypair.publicKey
59 |
60 | const fetchRes = await pRetry(
61 | () =>
62 | fetch(`https://station-wallet-screening.fly.dev/${FIL_WALLET_ADDRESS}`),
63 | {
64 | retries: 1000,
65 | onFailedAttempt: () =>
66 | console.error(
67 | 'Failed to validate FIL_WALLET_ADDRESS address. Retrying...',
68 | ),
69 | },
70 | )
71 | if (fetchRes.status === 403) panic('Invalid FIL_WALLET_ADDRESS address', 2)
72 | if (!fetchRes.ok) panic('Failed to validate FIL_WALLET_ADDRESS address')
73 | const ethAddress = FIL_WALLET_ADDRESS.startsWith('0x')
74 | ? FIL_WALLET_ADDRESS
75 | : ethAddressFromDelegated(FIL_WALLET_ADDRESS)
76 | for (const runtimeName of runtimeNames) {
77 | await fs.mkdir(join(paths.runtimeCache, runtimeName), { recursive: true })
78 | await fs.mkdir(join(paths.runtimeState, runtimeName), { recursive: true })
79 | }
80 |
81 | activities.onActivity((activity) => {
82 | if (json) {
83 | console.log(
84 | JSON.stringify({
85 | type: `activity:${activity.type}`,
86 | subnet: activity.source,
87 | message: activity.message,
88 | }),
89 | )
90 | } else {
91 | process.stdout.write(formatActivityObject(activity))
92 | }
93 | })
94 |
95 | metrics.onUpdate((metrics) => {
96 | if (json) {
97 | console.log(
98 | JSON.stringify({
99 | type: 'jobs-completed',
100 | total: metrics.totalJobsCompleted,
101 | rewardsScheduledForAddress: formatEther(
102 | metrics.rewardsScheduledForAddress,
103 | ),
104 | }),
105 | )
106 | } else {
107 | console.log(
108 | JSON.stringify(
109 | {
110 | totalJobsCompleted: metrics.totalJobsCompleted,
111 | rewardsScheduledForAddress: formatEther(
112 | metrics.rewardsScheduledForAddress,
113 | ),
114 | },
115 | null,
116 | 2,
117 | ),
118 | )
119 | }
120 | })
121 |
122 | const contracts = []
123 |
124 | const fetchRequest = new ethers.FetchRequest(
125 | 'https://api.node.glif.io/rpc/v1',
126 | )
127 | fetchRequest.setHeader(
128 | 'Authorization',
129 | 'Bearer RXQ2SKH/BVuwN7wisZh3b5uXStGPj1JQIrIWD+rxF0Y=',
130 | )
131 | const provider = new ethers.JsonRpcProvider(fetchRequest)
132 |
133 | await Promise.all([
134 | zinniaRuntime.run({
135 | provider,
136 | CHECKER_ID,
137 | FIL_WALLET_ADDRESS: ethAddress,
138 | ethAddress,
139 | STATE_ROOT: join(paths.runtimeState, 'zinnia'),
140 | CACHE_ROOT: join(paths.runtimeCache, 'zinnia'),
141 | subnetVersionsDir: paths.subnetVersionsDir,
142 | subnetSourcesDir: paths.subnetSourcesDir,
143 | onActivity: (activity) => {
144 | activities.submit({
145 | ...activity,
146 | // Zinnia will try to overwrite `source` if a subnet created the
147 | // activity. Using the spread syntax won't work because a
148 | // `source: null` would overwrite the default value.
149 | source: activity.source || 'Zinnia',
150 | })
151 | },
152 | onMetrics: (m) => metrics.submit('zinnia', m),
153 | experimental,
154 | }),
155 | runPingLoop({ CHECKER_ID }),
156 | runMachinesLoop({ CHECKER_ID }),
157 | runUpdateContractsLoop({
158 | provider,
159 | contracts,
160 | onActivity: (activity) => activities.submit(activity),
161 | }),
162 | runUpdateRewardsLoop({
163 | contracts,
164 | ethAddress,
165 | onMetrics: (m) => metrics.submit('zinnia', m),
166 | }),
167 | ])
168 | }
169 |
--------------------------------------------------------------------------------
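A standalone sketch of the wallet-address rules `checker()` enforces above: `f1` is rejected, `f410` delegated addresses are converted to their `0x` form via `ethAddressFromDelegated`, and `0x` addresses are validated with `isEthAddress`. The helper name is hypothetical; the repository inlines these checks instead of exporting them:

```js
import { ethAddressFromDelegated, isEthAddress } from '@glif/filecoin-address'

// Hypothetical helper mirroring the validation in commands/checker.js.
function normalizeWalletAddress(address) {
  if (address.startsWith('f1')) {
    throw new Error('f1 addresses are currently not supported')
  }
  if (!address.startsWith('f410') && !address.startsWith('0x')) {
    throw new Error('FIL_WALLET_ADDRESS must start with f410 or 0x')
  }
  if (address.startsWith('0x') && !isEthAddress(address)) {
    throw new Error('Invalid FIL_WALLET_ADDRESS ethereum address')
  }
  // f410 delegated addresses map 1:1 to FEVM 0x addresses.
  return address.startsWith('0x') ? address : ethAddressFromDelegated(address)
}

console.log(normalizeWalletAddress('0x000000000000000000000000000000000000dEaD'))
```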
/eslint.config.js:
--------------------------------------------------------------------------------
1 | import neostandard from 'neostandard'
2 |
3 | export default neostandard({
4 | noStyle: true, // Disable style-related rules, we use Prettier
5 | ts: true,
6 | env: ['mocha'],
7 | ignores: ['.cache/**', 'test/.cache/**'],
8 | })
9 |
--------------------------------------------------------------------------------
/lib/abi.json:
--------------------------------------------------------------------------------
1 | [{"type":"constructor","inputs":[{"internalType":"address","name":"admin","type":"address"}]},{"type":"function","name":"DEFAULT_ADMIN_ROLE","inputs":[],"outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"EVALUATE_ROLE","inputs":[],"outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"MAX_SCORE","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"MEASURE_ROLE","inputs":[],"outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"addBalances","inputs":[{"internalType":"address payable[]","name":"addresses","type":"address[]"},{"internalType":"uint256[]","name":"_balances","type":"uint256[]"}],"outputs":[],"stateMutability":"payable"},{"type":"function","name":"addMeasurements","inputs":[{"internalType":"string","name":"cid","type":"string"}],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"nonpayable"},{"type":"function","name":"adminAdvanceRound","inputs":[],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"availableBalance","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"balanceHeld","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"balances","inputs":[{"internalType":"address","name":"","type":"address"}],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"currentRoundEndBlockNumber","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"currentRoundIndex","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"currentRoundRoundReward","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"disableWithdraw","inputs":[],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"getRoleAdmin","inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"}],"outputs":[{"internalType":"bytes32","name":"","type":"bytes32"}],"stateMutability":"view"},{"type":"function","name":"grantRole","inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"hasRole","inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view"},{"type":"function","name":"maxTransfersPerTx","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"minBalanceForTransfer","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"nextRoundLength","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"participantCountReadyForTransfer","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability
":"view"},{"type":"function","name":"participantCountScheduledForTransfer","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"previousRoundIndex","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"previousRoundRoundReward","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"previousRoundTotalScores","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"readyForTransfer","inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"outputs":[{"internalType":"address payable","name":"","type":"address"}],"stateMutability":"view"},{"type":"function","name":"releaseRewards","inputs":[],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"renounceRole","inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"callerConfirmation","type":"address"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"revokeRole","inputs":[{"internalType":"bytes32","name":"role","type":"bytes32"},{"internalType":"address","name":"account","type":"address"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"rewardsScheduledFor","inputs":[{"internalType":"address","name":"participant","type":"address"}],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"roundReward","inputs":[],"outputs":[{"internalType":"uint256","name":"","type":"uint256"}],"stateMutability":"view"},{"type":"function","name":"scheduledForTransfer","inputs":[{"internalType":"uint256","name":"","type":"uint256"}],"outputs":[{"internalType":"address payable","name":"","type":"address"}],"stateMutability":"view"},{"type":"function","name":"setMaxTransfersPerTx","inputs":[{"internalType":"uint256","name":"_maxTransfersPerTx","type":"uint256"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"setMinBalanceForTransfer","inputs":[{"internalType":"uint256","name":"_minBalanceForTransfer","type":"uint256"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"setNextRoundLength","inputs":[{"internalType":"uint256","name":"_nextRoundLength","type":"uint256"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"setRoundReward","inputs":[{"internalType":"uint256","name":"_roundReward","type":"uint256"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"setScores","inputs":[{"internalType":"uint256","name":"roundIndex","type":"uint256"},{"internalType":"address payable[]","name":"addresses","type":"address[]"},{"internalType":"uint256[]","name":"scores","type":"uint256[]"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"supportsInterface","inputs":[{"internalType":"bytes4","name":"interfaceId","type":"bytes4"}],"outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view"},{"type":"function","name":"tick","inputs":[],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"withdraw","inputs":[{"internalType":"address 
payable","name":"destination","type":"address"}],"outputs":[],"stateMutability":"nonpayable"},{"type":"function","name":"withdrawDisabled","inputs":[],"outputs":[{"internalType":"bool","name":"","type":"bool"}],"stateMutability":"view"},{"type":"event","name":"MeasurementsAdded","inputs":[{"name":"cid","type":"string","indexed":false},{"name":"roundIndex","type":"uint256","indexed":true},{"name":"sender","type":"address","indexed":true}],"anonymous":false},{"type":"event","name":"RoleAdminChanged","inputs":[{"name":"role","type":"bytes32","indexed":true},{"name":"previousAdminRole","type":"bytes32","indexed":true},{"name":"newAdminRole","type":"bytes32","indexed":true}],"anonymous":false},{"type":"event","name":"RoleGranted","inputs":[{"name":"role","type":"bytes32","indexed":true},{"name":"account","type":"address","indexed":true},{"name":"sender","type":"address","indexed":true}],"anonymous":false},{"type":"event","name":"RoleRevoked","inputs":[{"name":"role","type":"bytes32","indexed":true},{"name":"account","type":"address","indexed":true},{"name":"sender","type":"address","indexed":true}],"anonymous":false},{"type":"event","name":"RoundStart","inputs":[{"name":"roundIndex","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"Transfer","inputs":[{"name":"to","type":"address","indexed":true},{"name":"amount","type":"uint256","indexed":false}],"anonymous":false},{"type":"event","name":"TransferFailed","inputs":[{"name":"to","type":"address","indexed":true},{"name":"amount","type":"uint256","indexed":false}],"anonymous":false},{"type":"error","name":"AccessControlBadConfirmation","inputs":[]},{"type":"error","name":"AccessControlUnauthorizedAccount","inputs":[{"internalType":"address","name":"account","type":"address"},{"internalType":"bytes32","name":"neededRole","type":"bytes32"}]},{"type":"receive"}]
--------------------------------------------------------------------------------
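This ABI includes the read-only `rewardsScheduledFor(address)` view that the checker queries when reporting scheduled rewards. A minimal query sketch; both addresses are placeholders, since the checker resolves the real contract addresses via IPNS (see `lib/contracts.js`):

```js
import { readFile } from 'node:fs/promises'
import { ethers } from 'ethers'

const abi = JSON.parse(await readFile('./lib/abi.json', 'utf8'))
const provider = new ethers.JsonRpcProvider('https://api.node.glif.io/rpc/v1')

// Placeholder addresses for illustration only.
const contract = new ethers.Contract('0x<contract-address>', abi, provider)
const rewards = await contract.rewardsScheduledFor('0x<participant-address>')
console.log(ethers.formatEther(rewards), 'FIL')
```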
/lib/activity.js:
--------------------------------------------------------------------------------
1 | import EventEmitter from 'node:events'
2 |
3 | export class ActivityEvent {
4 | /**
5 | * @param {Object} options
6 | * @param {'info' | 'error'} options.type
7 | * @param {String} options.source
8 | * @param {String} options.message
9 | */
10 | constructor({ type, source, message }) {
11 | this.type = type
12 | this.source = source
13 | this.message = message
14 | }
15 | }
16 |
17 | export const formatActivityObject = ({ type, message }) => {
18 | return (
19 | `${type.toUpperCase().padEnd(5)} ${message}`
20 | .trimEnd()
21 | .split(/\n/g)
22 | .map((line) => `[${new Date().toLocaleString()}] ${line}`)
23 | .join('\n') + '\n'
24 | )
25 | }
26 |
27 | class Activities {
28 | #events = new EventEmitter()
29 |
30 | /** @param {ActivityEvent} activity */
31 | submit(activity) {
32 | this.#events.emit('activity', activity)
33 | }
34 |
35 | /** @param {(activity: ActivityEvent) => void} fn */
36 | onActivity(fn) {
37 | this.#events.on('activity', fn)
38 | }
39 | }
40 |
41 | export const activities = new Activities()
42 |
--------------------------------------------------------------------------------
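A usage sketch for the singleton activity bus above: subscribers attach via `onActivity`, producers call `submit`, and `formatActivityObject` renders the human-readable log lines shown in the README:

```js
import { activities, formatActivityObject } from './lib/activity.js'

// Subscribe first; events are delivered synchronously via EventEmitter.
activities.onActivity((activity) => {
  process.stdout.write(formatActivityObject(activity))
})

activities.submit({ type: 'info', source: 'Spark', message: 'Spark started' })
// => [4/19/2023, 9:26:54 PM] INFO  Spark started (timestamp will vary)
```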
/lib/checker-id.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import fs from 'node:fs/promises'
3 | import path from 'node:path'
4 | import { subtle, getRandomValues } from 'node:crypto'
5 |
6 | /**
7 | * @param {object} args
8 | * @param {string} args.secretsDir
9 | * @param {string} args.passphrase
10 | * @param {boolean} [args.recreateOnError]
11 | * @param {import('node:console')} [args.log]
12 | * @returns {Promise<{ publicKey: string; privateKey: string }>}
13 | */
14 | export async function getCheckerId({
15 | secretsDir,
16 | passphrase,
17 | recreateOnError = false,
18 | log = console,
19 | }) {
20 | assert.strictEqual(typeof secretsDir, 'string', 'secretsDir must be a string')
21 |
22 | await fs.mkdir(secretsDir, { recursive: true })
23 | const keystore = path.join(secretsDir, 'checker_id')
24 |
25 | try {
26 | const keypair = await loadKeypair(keystore, passphrase, { log })
27 | log.error('Loaded Checker ID: %s', keypair.publicKey)
28 | return keypair
29 | } catch (err) {
30 | if (recreateOnError || (err.code === 'ENOENT' && err.path === keystore)) {
31 | if (recreateOnError) console.error(err)
32 | // the keystore file does not exist, create a new key
33 | return await generateKeypair(keystore, passphrase, { log })
34 | } else {
35 | throw new Error(
36 | `Cannot load Checker ID from file "${keystore}". ${err.message}`,
37 | { cause: err },
38 | )
39 | }
40 | }
41 | }
42 |
43 | /**
44 | * @param {string} keystore
45 | * @param {string} passphrase
46 | * @param {object} args
47 | * @param {import('node:console')} args.log
48 | * @returns {Promise<{ publicKey: string; privateKey: string }>}
49 | */
50 | async function loadKeypair(keystore, passphrase, { log }) {
51 | const ciphertext = await fs.readFile(keystore)
52 | let plaintext
53 |
54 | if (!passphrase) {
55 | plaintext = ciphertext
56 | } else {
57 | const looksLikeJson =
58 | ciphertext[0] === '{'.charCodeAt(0) &&
59 | ciphertext[ciphertext.length - 1] === '}'.charCodeAt(0)
60 |
61 | if (looksLikeJson) {
62 | const keypair = await tryUpgradePlaintextToCiphertext(
63 | passphrase,
64 | keystore,
65 | ciphertext,
66 | { log },
67 | )
68 | if (keypair) return keypair
69 | // fall back and continue the original path to decrypt the file
70 | }
71 |
72 | try {
73 | plaintext = await decrypt(passphrase, ciphertext)
74 | } catch (err) {
75 | throw new Error(
76 | 'Cannot decrypt Checker ID file. Did you configure the correct PASSPHRASE? Alternatively overwrite it using `--recreateCheckerIdOnError`',
77 | { cause: err },
78 | )
79 | }
80 | }
81 |
82 | return parseStoredKeys(plaintext)
83 | }
84 |
85 | /**
86 | * @param {string} passphrase
87 | * @param {string} keystore
88 | * @param {Buffer} maybeCiphertext
89 | * @param {object} args
90 | * @param {import('node:console')} args.log
91 | * @returns {Promise<{ publicKey: string; privateKey: string } | undefined>}
92 | */
93 | async function tryUpgradePlaintextToCiphertext(
94 | passphrase,
95 | keystore,
96 | maybeCiphertext,
97 | { log },
98 | ) {
99 | let keypair
100 | try {
101 | keypair = parseStoredKeys(maybeCiphertext)
102 | } catch (err) {
103 | // the file seems to be encrypted
104 | return undefined
105 | }
106 |
107 | // re-create the keypair file with encrypted keypair
108 | await storeKeypair(passphrase, keystore, keypair)
109 | log.error('Encrypted the Checker ID file using the provided PASSPHRASE.')
110 | return keypair
111 | }
112 | /**
113 | * @param {Buffer | ArrayBuffer} json
114 | * @returns {{ publicKey: string; privateKey: string }}
115 | */
116 | function parseStoredKeys(json) {
117 | const storedKeys = JSON.parse(Buffer.from(json).toString())
118 | assert.strictEqual(
119 | typeof storedKeys.publicKey,
120 | 'string',
121 | 'checker_id is corrupted: invalid publicKey',
122 | )
123 | assert.strictEqual(
124 | typeof storedKeys.privateKey,
125 | 'string',
126 | 'checker_id is corrupted: invalid privateKey',
127 | )
128 | return storedKeys
129 | }
130 |
131 | /**
132 | * @param {string} keystore
133 | * @param {string} passphrase
134 | * @param {object} args
135 | * @param {import('node:console')} [args.log]
136 | * @returns {Promise<{ publicKey: string; privateKey: string }>}
137 | */
138 | async function generateKeypair(keystore, passphrase, { log }) {
139 | if (!passphrase) {
140 | log.warn(`
141 | *****************************************************************************************
142 | The private key of the identity of your Checker instance will be stored in plaintext.
143 | We strongly recommend you to configure PASSPHRASE environment variable to enable
144 | Checker to encrypt the private key stored on the filesystem.
145 | *****************************************************************************************
146 | `)
147 | }
148 | const keyPair = /** @type {import('node:crypto').webcrypto.CryptoKeyPair} */ (
149 | /** @type {unknown} */ (
150 | await subtle.generateKey({ name: 'ED25519' }, true, ['sign', 'verify'])
151 | )
152 | )
153 | const publicKey = Buffer.from(
154 | await subtle.exportKey('spki', keyPair.publicKey),
155 | ).toString('hex')
156 | const privateKey = Buffer.from(
157 | await subtle.exportKey('pkcs8', keyPair.privateKey),
158 | ).toString('hex')
159 | log.error('Generated a new Checker ID:', publicKey)
160 | await storeKeypair(passphrase, keystore, { publicKey, privateKey })
161 | return { publicKey, privateKey }
162 | }
163 |
164 | /**
165 | * @param {string} passphrase
166 | * @param {string} keystore
167 | * @param {{ publicKey: string; privateKey: string }} keypair
168 | */
169 | async function storeKeypair(passphrase, keystore, { publicKey, privateKey }) {
170 | const plaintext = JSON.stringify({ publicKey, privateKey })
171 | const ciphertext = passphrase
172 | ? await encrypt(passphrase, Buffer.from(plaintext))
173 | : Buffer.from(plaintext)
174 | await fs.writeFile(keystore, ciphertext)
175 |
176 | const keys = { publicKey, privateKey }
177 | return keys
178 | }
179 |
180 | //
181 | // The implementation below is loosely based on the following articles
182 | // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/deriveKey#pbkdf2_2
183 | // https://bradyjoslin.com/blog/encryption-webcrypto/
184 | //
185 |
186 | /**
187 | * @param {string} passphrase
188 | * @param {Uint8Array} salt
189 | * @returns {Promise<CryptoKey>}
190 | */
191 | async function deriveKeyFromPassphrase(passphrase, salt) {
192 | // Create a password based key (PBKDF2) that will be used to derive
193 | // the AES-GCM key used for encryption / decryption.
194 | const keyMaterial = await subtle.importKey(
195 | 'raw',
196 | Buffer.from(passphrase),
197 | 'PBKDF2',
198 | /* extractable: */ false,
199 | ['deriveKey'],
200 | )
201 |
202 | // Derive the key used for encryption/decryption
203 | return await subtle.deriveKey(
204 | {
205 | name: 'PBKDF2',
206 | salt,
207 | iterations: 100_000,
208 | hash: 'SHA-256',
209 | },
210 | keyMaterial,
211 | { name: 'AES-GCM', length: 256 },
212 | /* extractable: */ true,
213 | ['encrypt', 'decrypt'],
214 | )
215 | }
216 |
217 | /**
218 | * @param {string} passphrase
219 | * @param {Buffer} plaintext
220 | * @returns {Promise<Buffer>}
221 | */
222 | export async function encrypt(passphrase, plaintext) {
223 | assert(Buffer.isBuffer(plaintext), 'plaintext must be a Buffer')
224 | const salt = getRandomValues(new Uint8Array(16))
225 | const iv = getRandomValues(new Uint8Array(12))
226 |
227 | const key = await deriveKeyFromPassphrase(passphrase, salt)
228 | const ciphertext = await subtle.encrypt(
229 | { name: 'AES-GCM', iv },
230 | key,
231 | plaintext,
232 | )
233 |
234 | const result = Buffer.alloc(
235 | salt.byteLength + iv.byteLength + ciphertext.byteLength,
236 | )
237 | result.set(salt, 0)
238 | result.set(iv, salt.byteLength)
239 | result.set(new Uint8Array(ciphertext), salt.byteLength + iv.byteLength)
240 | return result
241 | }
242 |
243 | /**
244 | * @param {string} passphrase
245 | * @param {Buffer} encryptedData
246 | * @returns {Promise<ArrayBuffer>}
247 | */
248 | export async function decrypt(passphrase, encryptedData) {
249 | assert(Buffer.isBuffer(encryptedData), 'encryptedData must be a Buffer')
250 | const salt = Uint8Array.prototype.slice.call(encryptedData, 0, 16)
251 | const iv = Uint8Array.prototype.slice.call(encryptedData, 16, 16 + 12)
252 | const ciphertext = Uint8Array.prototype.slice.call(encryptedData, 16 + 12)
253 |
254 | const key = await deriveKeyFromPassphrase(passphrase, salt)
255 | const plaintext = await subtle.decrypt(
256 | { name: 'AES-GCM', iv },
257 | key,
258 | ciphertext,
259 | )
260 | return plaintext
261 | }
262 |
--------------------------------------------------------------------------------
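
A quick round-trip sketch of the keystore format implemented in checker-id.js above: `encrypt()` produces `salt (16 bytes) + iv (12 bytes) + AES-GCM ciphertext`, with the AES key derived from the passphrase via 100,000 rounds of PBKDF2-SHA-256. Everything except the passphrase is safe to store on disk. (Illustrative standalone script; the relative import path is an assumption.)

// round-trip.js - illustrative sketch, not part of the codebase
import assert from 'node:assert'
import { encrypt, decrypt } from './lib/checker-id.js'

const passphrase = 'correct horse battery staple'
const secret = Buffer.from('super secret key material')

const blob = await encrypt(passphrase, secret)
// Layout: PBKDF2 salt | AES-GCM iv | ciphertext. Salt and iv are public
// parameters; encrypt() generates a fresh random pair on every call.
const salt = blob.subarray(0, 16)
const iv = blob.subarray(16, 28)
console.log(salt.length, iv.length, blob.length - 28) // 16 12 <ciphertext bytes>

const roundTripped = Buffer.from(await decrypt(passphrase, blob))
assert.strictEqual(roundTripped.toString(), secret.toString())

// A wrong passphrase fails AES-GCM authentication rather than returning garbage
await assert.rejects(decrypt(passphrase + 'x', blob))
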
/lib/contracts.js:
--------------------------------------------------------------------------------
1 | import timers from 'node:timers/promises'
2 | import pRetry from 'p-retry'
3 | import * as Name from 'w3name'
4 | import { ethers } from 'ethers'
5 | import * as SparkImpactEvaluator from '@filecoin-station/spark-impact-evaluator'
6 | import { reportW3NameError } from './telemetry.js'
7 |
8 | const {
9 | // https://github.com/filecoin-station/contract-addresses
10 | CONTRACT_ADDRESSES_IPNS_KEY = 'k51qzi5uqu5dmaqrefqazad0ca8b24fb79zlacfjw2awdt5gjf2cr6jto5jyqe',
11 | } = process.env
12 |
13 | const DELAY_IN_MINUTES = 10
14 |
15 | export const runUpdateContractsLoop = async ({
16 | provider,
17 | contracts,
18 | onActivity,
19 | }) => {
20 | await timers.setTimeout(2_000)
21 | while (true) {
22 | try {
23 | const newContracts = await getContractsWithRetry({ provider })
24 | contracts.splice(0)
25 | contracts.push(...newContracts)
26 | } catch (err) {
27 | console.error(
28 | 'Failed to update the list of contract addresses. Will retry later.',
29 | err,
30 | )
31 | onActivity({
32 | type: 'error',
33 |         message: `Cannot update the list of contract addresses. Will retry in ${DELAY_IN_MINUTES} minutes.`,
34 | })
35 | }
36 |
37 | const delay = DELAY_IN_MINUTES * 60 * 1000
38 | const jitter = Math.random() * 20_000 - 10_000 // +- 10 seconds
39 | try {
40 | await timers.setTimeout(delay + jitter)
41 | } catch (err) {
42 | if (err.name === 'AbortError') return
43 | throw err
44 | }
45 | }
46 | }
47 |
48 | async function getContractsWithRetry({ provider }) {
49 | const contractAddresses = await pRetry(getContractAddresses, {
50 | retries: 10,
51 | onFailedAttempt: (err) => {
52 | console.error(err)
53 | console.error('Failed to get contract addresses. Retrying...')
54 | if (String(err).includes('You are being rate limited')) {
55 | const delaySeconds = 60 + Math.random() * 60
56 |       // Don't DDoS the w3name services
57 | console.error(`Rate limited. Waiting ${delaySeconds} seconds...`)
58 | return timers.setTimeout(delaySeconds * 1000)
59 | }
60 | },
61 | })
62 | console.error(`Meridian contract addresses: ${contractAddresses.join(', ')}`)
63 | return contractAddresses.map((address) => {
64 | return new ethers.Contract(address, SparkImpactEvaluator.ABI, provider)
65 | })
66 | }
67 |
68 | async function getContractAddresses() {
69 | const name = Name.parse(CONTRACT_ADDRESSES_IPNS_KEY)
70 | let revision
71 | try {
72 | revision = await Name.resolve(name)
73 | } catch (err) {
74 | reportW3NameError()
75 | throw err
76 | }
77 | return revision.value.split('\n').filter(Boolean)
78 | }
79 |
--------------------------------------------------------------------------------
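
Note that `runUpdateContractsLoop` deliberately mutates the shared `contracts` array in place (`splice` + `push`) instead of reassigning it, so any other loop holding the same reference picks up new contract addresses automatically. A minimal wiring sketch (the RPC endpoint and import paths are assumptions; `runUpdateRewardsLoop` is defined in lib/rewards.js below):

// wiring sketch - both loops run forever and share one array reference
import { ethers } from 'ethers'
import { runUpdateContractsLoop } from './lib/contracts.js'
import { runUpdateRewardsLoop } from './lib/rewards.js'

const provider = new ethers.JsonRpcProvider('https://api.node.glif.io/rpc/v1')
const contracts = [] // mutated in place by runUpdateContractsLoop

await Promise.all([
  runUpdateContractsLoop({
    provider,
    contracts,
    onActivity: ({ type, message }) => console.error(type, message),
  }),
  // runUpdateRewardsLoop spins until `contracts` is non-empty, then starts
  // polling rewardsScheduledFor() on every contract in the shared array
  runUpdateRewardsLoop({
    contracts,
    ethAddress: '0x000000000000000000000000000000000000dEaD',
    onMetrics: (m) => console.error('rewards:', m.rewardsScheduledForAddress),
  }),
])
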
/lib/metrics.js:
--------------------------------------------------------------------------------
1 | import { writeClient } from './telemetry.js'
2 | import { Point } from '@influxdata/influxdb-client'
3 | import EventEmitter from 'node:events'
4 |
5 | export class MetricsEvent {
6 | /**
7 | * @param {Object} options
8 |    * @param {number} options.totalJobsCompleted
9 | * @param {bigint} options.rewardsScheduledForAddress
10 | */
11 | constructor({ totalJobsCompleted, rewardsScheduledForAddress }) {
12 | this.totalJobsCompleted = totalJobsCompleted
13 | this.rewardsScheduledForAddress = rewardsScheduledForAddress
14 | }
15 | }
16 |
17 | export class Metrics {
18 | #events = new EventEmitter()
19 |
20 | constructor() {
21 | this.mergedMetrics = null
22 |     /** @type {Map<string, MetricsEvent>} */
23 | this.subnetMetrics = new Map()
24 | this.lastErrorReportedAt = 0
25 | }
26 |
27 | /**
28 | * - Filters duplicate entries
29 | * - Writes `jobs-completed` to InfluxDB
30 | * - Merges metrics from all subnets
31 | *
32 |    * @param {string} subnetName
33 |    * @param {Partial<MetricsEvent>} metrics
34 | */
35 | submit(subnetName, metrics) {
36 | /** @type {MetricsEvent} */
37 | const resolvedMetrics = {
38 | // initial values
39 | totalJobsCompleted: 0,
40 | rewardsScheduledForAddress: 0n,
41 | // or values submitted previously
42 | ...this.subnetMetrics.get(subnetName),
43 | // or values submitted now
44 | ...metrics,
45 | }
46 |
47 | if (
48 | typeof metrics.totalJobsCompleted === 'number' &&
49 | typeof this.subnetMetrics.get(subnetName)?.totalJobsCompleted === 'number'
50 | ) {
51 | const diff =
52 | metrics.totalJobsCompleted -
53 | this.subnetMetrics.get(subnetName).totalJobsCompleted
54 | if (diff > 0) {
55 | writeClient.writePoint(
56 | new Point('jobs-completed')
57 | .tag('subnet', subnetName)
58 | .intField('value', diff),
59 | )
60 | }
61 | }
62 | this.subnetMetrics.set(subnetName, resolvedMetrics)
63 | const mergedMetrics = {
64 | totalJobsCompleted: 0,
65 | rewardsScheduledForAddress: 0n,
66 | }
67 | for (const [, metrics] of this.subnetMetrics) {
68 | mergedMetrics.totalJobsCompleted += metrics.totalJobsCompleted
69 | // Merging rewards metrics should be revisited as more subnets start
70 | // paying rewards
71 | mergedMetrics.rewardsScheduledForAddress +=
72 | metrics.rewardsScheduledForAddress
73 | }
74 | const isChanged =
75 | this.mergedMetrics === null ||
76 | Object.entries(this.mergedMetrics).some(
77 | ([key, value]) => mergedMetrics[key] !== value,
78 | )
79 | if (isChanged) {
80 | this.mergedMetrics = mergedMetrics
81 | this.#events.emit('update', mergedMetrics)
82 | }
83 | }
84 |
85 | /** @param {(metrics: MetricsEvent) => void} fn */
86 | onUpdate(fn) {
87 | this.#events.on('update', fn)
88 | }
89 | }
90 |
91 | export const metrics = new Metrics()
92 |
--------------------------------------------------------------------------------
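
A small sketch of the dedup behaviour described in the `submit()` docblock: the merged snapshot is compared field by field against the previous one, and the `update` event only fires when something actually changed. (Illustrative, not part of the codebase; subnet names are placeholders.)

import { Metrics } from './lib/metrics.js'

const metrics = new Metrics()
metrics.onUpdate((m) => console.error(m))

metrics.submit('spark', { totalJobsCompleted: 5 })
// -> { totalJobsCompleted: 5, rewardsScheduledForAddress: 0n }
metrics.submit('spark', { totalJobsCompleted: 5 })
// -> no event: merged snapshot unchanged (and diff 0 means no InfluxDB write)
metrics.submit('walrus', { totalJobsCompleted: 2 })
// -> { totalJobsCompleted: 7, rewardsScheduledForAddress: 0n }
metrics.submit('spark', { rewardsScheduledForAddress: 10n })
// -> { totalJobsCompleted: 7, rewardsScheduledForAddress: 10n }
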
/lib/migrate.js:
--------------------------------------------------------------------------------
1 | import { join } from 'node:path'
2 | import { stat, rename, mkdir } from 'node:fs/promises'
3 | import { paths, getOldStateRoot } from './paths.js'
4 |
5 | const exists = async (path) => {
6 | try {
7 | await stat(path)
8 | return true
9 | } catch {
10 | return false
11 | }
12 | }
13 |
14 | export const maybeMigrateRuntimeState = async () => {
15 | const newRuntimeStatePath = paths.runtimeState
16 | const oldRuntimeStatePath = join(getOldStateRoot(), 'modules')
17 | const hasNewState = await exists(newRuntimeStatePath)
18 | const hasOldState = await exists(oldRuntimeStatePath)
19 | if (!hasNewState && hasOldState) {
20 | console.error(
21 | `Migrating runtime state files from ${oldRuntimeStatePath} to ${newRuntimeStatePath}`,
22 | )
23 | await mkdir(join(newRuntimeStatePath, '..'), { recursive: true })
24 | await rename(oldRuntimeStatePath, newRuntimeStatePath)
25 | console.error('Migration complete')
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/lib/paths.js:
--------------------------------------------------------------------------------
1 | import { join } from 'node:path'
2 | import { homedir, platform } from 'node:os'
3 | import assert from 'node:assert'
4 | import { fileURLToPath } from 'node:url'
5 |
6 | const {
7 | CACHE_ROOT,
8 | STATE_ROOT,
9 | LOCALAPPDATA,
10 | TEMP,
11 | XDG_CACHE_HOME = join(homedir(), '.cache'),
12 | XDG_STATE_HOME = join(homedir(), '.local', 'state'),
13 | } = process.env
14 |
15 | const getPaths = ({ cacheRoot, stateRoot }) => ({
16 | secrets: join(stateRoot, 'secrets'),
17 | runtimeCache: join(cacheRoot, 'runtimes'),
18 | runtimeState: join(stateRoot, 'runtimes'),
19 | subnetSourcesDir: join(cacheRoot, 'subnets', 'sources'),
20 | subnetVersionsDir: join(stateRoot, 'subnets', 'latest'),
21 | lockFile: join(stateRoot, '.lock'),
22 | })
23 |
24 | export const getDefaultRootDirs = () => {
25 | switch (platform()) {
26 | case 'darwin': {
27 | const appId = 'network.checker.node'
28 | return {
29 | cacheRoot: CACHE_ROOT || join(homedir(), 'Library', 'Caches', appId),
30 | stateRoot:
31 | STATE_ROOT ||
32 | join(homedir(), 'Library', 'Application Support', appId),
33 | }
34 | }
35 | case 'win32': {
36 | assert(TEMP || CACHE_ROOT, '%TEMP% required')
37 | assert(LOCALAPPDATA || STATE_ROOT, '%LOCALAPPDATA% required')
38 | const appName = 'Checker Network Node'
39 | return {
40 | cacheRoot: CACHE_ROOT || join(TEMP, appName),
41 |       // We must use LOCALAPPDATA (not APPDATA) to avoid sharing the state with other computers
42 |       // inside a Windows Domain
43 | stateRoot: STATE_ROOT || join(LOCALAPPDATA, appName),
44 | }
45 | }
46 | case 'linux': {
47 | const appSlug = 'checker-network-node'
48 | return {
49 | cacheRoot: CACHE_ROOT || join(XDG_CACHE_HOME, appSlug),
50 | stateRoot: STATE_ROOT || join(XDG_STATE_HOME, appSlug),
51 | }
52 | }
53 | default:
54 | throw new Error(`Unsupported platform: ${platform()}`)
55 | }
56 | }
57 |
58 | export const getOldStateRoot = () => {
59 | switch (platform()) {
60 | case 'darwin':
61 | return (
62 | STATE_ROOT ||
63 | join(homedir(), 'Library', 'Application Support', 'app.filstation.core')
64 | )
65 | case 'win32':
66 | return STATE_ROOT || join(LOCALAPPDATA, 'Filecoin Station Core')
67 | case 'linux':
68 | return STATE_ROOT || join(XDG_STATE_HOME, 'filecoin-station-core')
69 | default:
70 | throw new Error(`Unsupported platform: ${platform()}`)
71 | }
72 | }
73 |
74 | export const paths = getPaths(getDefaultRootDirs())
75 | export const runtimeBinaries = fileURLToPath(
76 | new URL('../runtimes', import.meta.url),
77 | )
78 | export const packageJSON = fileURLToPath(
79 | new URL('../package.json', import.meta.url),
80 | )
81 |
--------------------------------------------------------------------------------
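
For reference, with no `CACHE_ROOT`/`STATE_ROOT` overrides this resolves to the platform's conventional directories, e.g. `~/.cache/checker-network-node` and `~/.local/state/checker-network-node` on Linux. A tiny inspection sketch (import path assumed):

// where-is-my-state.js - print the resolved directories on this machine
import { paths, getDefaultRootDirs } from './lib/paths.js'

console.log(getDefaultRootDirs())
// Linux example: { cacheRoot: '/home/alice/.cache/checker-network-node',
//                  stateRoot: '/home/alice/.local/state/checker-network-node' }
console.log(paths.secrets) // <stateRoot>/secrets - holds the checker_id keystore
console.log(paths.subnetSourcesDir) // <cacheRoot>/subnets/sources - downloaded subnet code
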
/lib/rewards.js:
--------------------------------------------------------------------------------
1 | import timers from 'node:timers/promises'
2 | import assert from 'node:assert/strict'
3 |
4 | /**
5 | * @param {object} args
6 | * @param {import('ethers').Contract[]} args.contracts
7 | * @param {string} args.ethAddress
8 |  * @param {(m: Partial<import('./metrics.js').MetricsEvent>) => void} args.onMetrics
9 | */
10 | export const runUpdateRewardsLoop = async ({
11 | contracts,
12 | ethAddress,
13 | onMetrics,
14 | }) => {
15 | while (true) {
16 | while (!contracts.length) {
17 | await timers.setTimeout(1000)
18 | }
19 | const rewards = await Promise.all([
20 | ...contracts.map(async (contract) => {
21 | return getContractScheduledRewardsWithFallback(contract, ethAddress)
22 | }),
23 | getOffchainScheduledRewardsWithFallback(ethAddress),
24 | ])
25 | const totalRewards = rewards.reduce((a, b) => a + b, 0n)
26 | onMetrics({ rewardsScheduledForAddress: totalRewards })
27 |
28 | const delay = 10 * 60 * 1000 // 10 minutes
29 | const jitter = Math.random() * 20_000 - 10_000 // +- 10 seconds
30 | await timers.setTimeout(delay + jitter)
31 | }
32 | }
33 |
34 | async function getOffchainScheduledRewardsWithFallback(ethAddress) {
35 | try {
36 | const res = await fetch(
37 | `https://spark-rewards.fly.dev/scheduled-rewards/${ethAddress}`,
38 | )
39 | const json = await res.json()
40 | assert(typeof json === 'string')
41 | return BigInt(json)
42 | } catch (err) {
43 | console.error('Failed to get scheduled rewards:', err.stack)
44 | return 0n
45 | }
46 | }
47 |
48 | async function getContractScheduledRewardsWithFallback(contract, ethAddress) {
49 | try {
50 | return await contract.rewardsScheduledFor(ethAddress)
51 | } catch (err) {
52 | console.error('Failed to get scheduled rewards:', err.stack)
53 | return 0n
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
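
The reward values are bigints in FIL's smallest denomination (attoFIL, 10^18 attoFIL per FIL, following Filecoin convention - an assumption, since this module never formats them). A sketch of turning the merged bigint into a display string:

// formatting sketch - hypothetical helper, not part of the codebase
const ATTO_FIL = 10n ** 18n

function formatFIL(attoFil) {
  const whole = attoFil / ATTO_FIL
  const milli = ((attoFil % ATTO_FIL) * 1000n) / ATTO_FIL // keep 3 decimals
  return `${whole}.${milli.toString().padStart(3, '0')} FIL`
}

console.log(formatFIL(1_500_000_000_000_000_000n)) // "1.500 FIL"
console.log(formatFIL(0n)) // "0.000 FIL"
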
/lib/runtime.js:
--------------------------------------------------------------------------------
1 | import os from 'node:os'
2 | import assert from 'node:assert'
3 | import { join } from 'node:path'
4 | import { mkdir, chmod } from 'node:fs/promises'
5 | import { fetch } from 'undici'
6 | import { pipeline } from 'node:stream/promises'
7 | import unzip from 'unzip-stream'
8 | import { createWriteStream } from 'node:fs'
9 | import { runtimeBinaries } from './paths.js'
10 | import * as tar from 'tar'
11 |
12 | /** @typedef {import('unzip-stream').UnzipStreamEntry} UnzipStreamEntry */
13 |
14 | const { GITHUB_TOKEN } = process.env
15 | const authorization = GITHUB_TOKEN ? `Bearer ${GITHUB_TOKEN}` : undefined
16 |
17 | export const getRuntimeExecutable = ({ runtime, executable }) => {
18 | return join(runtimeBinaries, runtime, getExecutableFileName(executable))
19 | }
20 |
21 | const getExecutableFileName = (executable) => {
22 | return `${executable}${os.platform() === 'win32' ? '.exe' : ''}`
23 | }
24 |
25 | export const installRuntime = async ({
26 | runtime,
27 | repo,
28 | distTag,
29 | executable,
30 | targets,
31 | arch,
32 | }) => {
33 | console.log(
34 | `[${runtime}] GitHub client: ${authorization ? 'authorized' : 'anonymous'}`,
35 | )
36 | const target = targets.find(
37 | (target) => target.platform === os.platform() && target.arch === arch,
38 | )
39 | assert(target, `[${runtime}] Unsupported platform: ${os.platform()} ${arch}`)
40 |
41 | await mkdir(runtimeBinaries, { recursive: true })
42 | const outFile = join(runtimeBinaries, runtime)
43 |
44 | console.log(`[${runtime}] ⇣ downloading ${os.platform()} ${arch}`)
45 | const res = await fetch(
46 | `https://github.com/${repo}/releases/download/${distTag}/${target.asset}`,
47 | {
48 | headers: {
49 | ...(authorization ? { authorization } : {}),
50 | },
51 | redirect: 'follow',
52 | },
53 | )
54 |
55 | if (res.status >= 300) {
56 | throw new Error(
57 | `[${runtime}] Cannot fetch binary ${os.platform()} ${arch}: ${res.status}\n` +
58 | (await res.text()),
59 | )
60 | }
61 |
62 | if (!res.body) {
63 | throw new Error(
64 | `[${runtime}] Cannot fetch binary ${os.platform()} ${arch}: no response body`,
65 | )
66 | }
67 |
68 | if (target.asset.endsWith('tar.gz')) {
69 | await mkdir(outFile, { recursive: true })
70 | await pipeline(
71 | /** @type {any} */ (res.body),
72 | /** @type {any} */ (tar.x({ C: outFile })),
73 | )
74 | } else {
75 | await mkdir(join(runtimeBinaries, runtime), { recursive: true })
76 | const parser = unzip.Parse()
77 | parser.on('entry', async (entry) => {
78 | const executableFileName = getExecutableFileName(executable)
79 | const outPath = join(runtimeBinaries, runtime, entry.path)
80 | await pipeline(entry, createWriteStream(outPath))
81 | if (entry.path === executableFileName) {
82 | await chmod(outPath, 0o755)
83 | }
84 | })
85 | await pipeline(res.body, parser)
86 | }
87 | console.log(`[${runtime}] ✓ ${outFile}`)
88 | }
89 |
--------------------------------------------------------------------------------
/lib/subnets.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import { join } from 'node:path'
3 | import { mkdir, rm, readFile, writeFile, stat } from 'node:fs/promises'
4 | import { fetch, Headers } from 'undici'
5 | import { pipeline } from 'node:stream/promises'
6 | import * as Name from 'w3name'
7 | import { CarReader } from '@ipld/car'
8 | import { validateBlock } from '@web3-storage/car-block-validator'
9 | import { recursive as exporter } from 'ipfs-unixfs-exporter'
10 | import { reportW3NameError } from './telemetry.js'
11 | import * as tar from 'tar'
12 |
13 | const gateways = ['w3s.link', 'cf-ipfs.com', 'dweb.link']
14 |
15 | async function getLatestCID(ipnsKey) {
16 | const name = Name.parse(ipnsKey)
17 | let revision
18 | try {
19 | revision = await Name.resolve(name)
20 | } catch (err) {
21 | reportW3NameError()
22 | throw err
23 | }
24 | // /ipfs/:cid
25 | return revision.value.split('/').pop()
26 | }
27 |
28 | async function getLastSeenSubnetCID({ subnet, subnetVersionsDir }) {
29 | try {
30 | return await readFile(join(subnetVersionsDir, subnet), 'utf-8')
31 | } catch (err) {
32 | if (err.code !== 'ENOENT') {
33 | throw err
34 | }
35 | }
36 | return undefined
37 | }
38 |
39 | async function setLastSeenSubnetCID({ subnet, cid, subnetVersionsDir }) {
40 | await mkdir(subnetVersionsDir, { recursive: true })
41 | await writeFile(join(subnetVersionsDir, subnet), cid)
42 | }
43 |
44 | export async function updateSourceFiles({
45 | subnet,
46 | ipnsKey,
47 | subnetVersionsDir,
48 | subnetSourcesDir,
49 | noCache,
50 | }) {
51 | await mkdir(subnetSourcesDir, { recursive: true })
52 | const outDir = join(subnetSourcesDir, subnet)
53 |
54 | const lastSeenCID = await getLastSeenSubnetCID({ subnet, subnetVersionsDir })
55 | if (lastSeenCID !== undefined) {
56 | // Use `console.error` because with `--json` stdout needs to be JSON only
57 | console.error(`[${subnet}] ⇣ checking for updates`)
58 | }
59 |
60 | const cid = await getLatestCID(ipnsKey)
61 | const isUpdate = lastSeenCID !== cid
62 | if (!isUpdate) {
63 | try {
64 | await stat(join(outDir, 'main.js'))
65 | console.error(`[${subnet}] ✓ no update available`)
66 | return false
67 | } catch (err) {
68 | console.error(`[${subnet}] Cannot find sources on disk`)
69 | }
70 | }
71 |
72 | let res
73 | for (const gateway of gateways) {
74 | try {
75 | const url = `https://${cid}.ipfs.${gateway}?format=car`
76 | console.error(`[${subnet}] ⇣ downloading source files via ${url}`)
77 | const headers = new Headers()
78 | if (noCache) headers.append('Cache-Control', 'no-cache')
79 | res = await fetch(url, {
80 | signal: AbortSignal.timeout(10_000),
81 | headers,
82 | })
83 |
84 | if (res.status >= 300) {
85 | throw new Error(
86 | `[${subnet}] Cannot fetch ${subnet} archive for ${cid}: ${res.status}\n` +
87 | (await res.text()),
88 | )
89 | }
90 |
91 | if (!res.body) {
92 | throw new Error(
93 | `[${subnet}] Cannot fetch ${subnet} archive for ${cid}: no response body`,
94 | )
95 | }
96 | break
97 | } catch (err) {
98 | if (gateway === gateways[gateways.length - 1]) {
99 | throw new Error(
100 | `[${subnet}] Can't download subnet sources from any gateway`,
101 | { cause: err },
102 | )
103 | } else {
104 | console.error(err)
105 | }
106 | }
107 | }
108 |
109 | const tarExtractWarnings = []
110 | const tarExtractEntries = []
111 | try {
112 | const reader = await CarReader.fromIterable(res.body)
113 | const entries = exporter(cid, {
114 | async get(blockCid) {
115 | const block = await reader.get(blockCid)
116 | try {
117 | await validateBlock(block)
118 | } catch (err) {
119 | throw new Error(`Invalid block ${blockCid} of root ${cid}`, {
120 | cause: err,
121 | })
122 | }
123 | return block.bytes
124 | },
125 | })
126 | const { value: entry } = await entries.next()
127 | assert(entry, `No entries in ${subnet} archive`)
128 | // Depending on size, entries might be packaged as `file` or `raw`
129 | // https://github.com/web3-storage/w3up/blob/e8bffe2ee0d3a59a977d2c4b7efe425699424e19/packages/upload-client/src/unixfs.js#L11
130 | if (entry.type === 'file' || entry.type === 'raw') {
131 | await mkdir(outDir, { recursive: true })
132 | // `{ strip: 1 }` tells tar to remove the top-level directory (e.g. `mod-peer-checker-v1.0.0`)
133 | await pipeline(
134 | /** @type {any} */ (entry.content()),
135 | /** @type {any} */ (
136 | tar.x({
137 | strip: 1,
138 | C: outDir,
139 | onwarn(code, message, data) {
140 | tarExtractWarnings.push({ code, message, data })
141 | },
142 | onReadEntry(entry) {
143 | tarExtractEntries.push(entry.path)
144 | },
145 | })
146 | ),
147 | )
148 | await stat(join(outDir, 'main.js'))
149 | }
150 | } catch (err) {
151 | try {
152 | await rm(outDir, { recursive: true })
153 |     } catch (rmErr) {
154 |       if (rmErr.code !== 'ENOENT') {
155 |         throw rmErr
156 |       }
157 |     }
158 | err.tarExtractWarnings = tarExtractWarnings
159 | err.tarExtractEntries = tarExtractEntries
160 | throw err
161 | }
162 |
163 | await setLastSeenSubnetCID({ subnet, cid, subnetVersionsDir })
164 | console.error(`[${subnet}] ✓ ${outDir}`)
165 |
166 | return isUpdate
167 | }
168 |
--------------------------------------------------------------------------------
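
An invocation sketch for the flow above: resolve the subnet's IPNS name to a CID, fetch the CAR archive from the first gateway that responds, verify every block against its CID, and untar into `subnetSourcesDir/<subnet>`. The return value signals whether a restart is needed. (Illustrative; the IPNS key is the Spark key from lib/zinnia.js below, import paths assumed.)

import { updateSourceFiles } from './lib/subnets.js'
import { paths } from './lib/paths.js'

const isUpdate = await updateSourceFiles({
  subnet: 'spark',
  ipnsKey: 'k51qzi5uqu5dlej5gtgal40sjbowuau5itwkr6mgyuxdsuhagjxtsfqjd6ym3g',
  subnetVersionsDir: paths.subnetVersionsDir,
  subnetSourcesDir: paths.subnetSourcesDir,
  noCache: false, // pass true to send Cache-Control: no-cache on retries
})
console.error(isUpdate ? 'sources changed - restart the runtime' : 'up to date')
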
/lib/telemetry.js:
--------------------------------------------------------------------------------
1 | import { InfluxDB, Point } from '@influxdata/influxdb-client'
2 | import { createHash, randomUUID } from 'node:crypto'
3 | import assert from 'node:assert'
4 | import { platform, arch, cpus, totalmem } from 'node:os'
5 | import fs from 'node:fs/promises'
6 | import * as paths from './paths.js'
7 | import timers from 'node:timers/promises'
8 |
9 | const { FIL_WALLET_ADDRESS, DEPLOYMENT_TYPE = 'cli' } = process.env
10 |
11 | const validDeploymentTypes = ['cli', 'docker', 'checker-app']
12 | assert(
13 | validDeploymentTypes.includes(DEPLOYMENT_TYPE),
14 | `Invalid DEPLOYMENT_TYPE: ${DEPLOYMENT_TYPE}. Options: ${validDeploymentTypes.join(', ')}`,
15 | )
16 |
17 | const pkg = JSON.parse(await fs.readFile(paths.packageJSON, 'utf8'))
18 | const processUUID = randomUUID()
19 |
20 | const client = new InfluxDB({
21 | url: 'https://eu-central-1-1.aws.cloud2.influxdata.com',
22 | token:
23 | // station-core-21-0-2
24 | 'MveJoNJL5I_333ehxXCjaPvUBGN46SprEzC4GzSCIXQHmwdvTN3y6utV-UxmxugL6hSY7eejvgFY161FsrDycQ==',
25 | })
26 |
27 | export const writeClient = client.getWriteApi(
28 | 'Filecoin Station', // org
29 | 'station', // bucket
30 | 'ns', // precision
31 | )
32 |
33 | const writeClientMachines = client.getWriteApi(
34 | 'Filecoin Station', // org
35 | 'station-machines', // bucket
36 | 's', // precision
37 | )
38 |
39 | export const runPingLoop = async ({ CHECKER_ID }) => {
40 | assert(FIL_WALLET_ADDRESS)
41 |
42 | while (true) {
43 | const point = new Point('ping')
44 | point.stringField(
45 | 'wallet',
46 | createHash('sha256').update(FIL_WALLET_ADDRESS).digest('hex'),
47 | )
48 | point.stringField('checker_id', CHECKER_ID)
49 | point.stringField('process_uuid', processUUID)
50 | point.stringField('version', pkg.version)
51 | point.tag('station', 'core')
52 | point.tag('platform', platform())
53 | point.tag('arch', arch())
54 | point.tag('deployment_type', DEPLOYMENT_TYPE)
55 | writeClient.writePoint(point)
56 | writeClient.flush().catch(() => {})
57 | await timers.setTimeout(10 * 60 * 1000) // 10 minutes
58 | }
59 | }
60 |
61 | export const runMachinesLoop = async ({ CHECKER_ID }) => {
62 | while (true) {
63 | const point = new Point('machine')
64 | point.stringField('checker_id', CHECKER_ID)
65 | point.stringField('process_uuid', processUUID)
66 | point.intField('cpu_count', cpus().length)
67 | if (cpus().length > 0) {
68 | const cpu = cpus()[0]
69 | point.intField('cpu_speed_mhz', cpu.speed)
70 | const model = cpu.model.toLowerCase()
71 | const brand = model.includes('intel')
72 | ? 'intel'
73 | : model.includes('amd')
74 | ? 'amd'
75 | : model.includes('apple')
76 | ? 'apple'
77 | : 'unknown'
78 | point.tag('cpu_brand', brand)
79 | if (brand === 'unknown') {
80 | point.stringField('cpu_model_unknown_brand', model)
81 | }
82 | }
83 | point.tag('platform', platform())
84 | point.tag('arch', arch())
85 | point.intField('memory_total_b', totalmem())
86 | writeClientMachines.writePoint(point)
87 | writeClientMachines.flush().catch(() => {})
88 | await timers.setTimeout(24 * 3600 * 1000) // 1 day
89 | }
90 | }
91 |
92 | export const reportW3NameError = () => {
93 | const point = new Point('w3name-error')
94 | point.stringField('version', pkg.version)
95 | writeClient.writePoint(point)
96 | writeClient.flush().catch(() => {})
97 | }
98 |
--------------------------------------------------------------------------------
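
Privacy note on the ping loop above: the raw wallet address never leaves the machine; only its SHA-256 digest is reported. The equivalent transformation in isolation (sketch; the address is the placeholder from test/util.js):

import { createHash } from 'node:crypto'

// Same hashing as runPingLoop's `wallet` field: stable per wallet across
// pings, but not reversible to the original address in practice.
const walletDigest = createHash('sha256')
  .update('0x000000000000000000000000000000000000dEaD')
  .digest('hex')
console.log(walletDigest) // 64 lowercase hex characters
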
/lib/zinnia.js:
--------------------------------------------------------------------------------
1 | import { execa } from 'execa'
2 | import { installRuntime, getRuntimeExecutable } from './runtime.js'
3 | import { updateSourceFiles } from './subnets.js'
4 | import os from 'node:os'
5 | import pRetry from 'p-retry'
6 | import timers from 'node:timers/promises'
7 | import { join } from 'node:path'
8 |
9 | const ZINNIA_DIST_TAG = 'v0.22.2'
10 | const SUBNETS = [
11 | {
12 | subnet: 'spark',
13 | ipnsKey: 'k51qzi5uqu5dlej5gtgal40sjbowuau5itwkr6mgyuxdsuhagjxtsfqjd6ym3g',
14 | experimental: false,
15 | },
16 | {
17 | subnet: 'arweave',
18 | ipnsKey: 'k51qzi5uqu5dgwm6tk4gibgfqbqjopwdtlphvyczrixay6oesadjdxt1eorimg',
19 | experimental: true,
20 | },
21 | {
22 | subnet: 'walrus',
23 | ipnsKey: 'k51qzi5uqu5dghv7chp14rx3w89xwbbi2pwzpz1xt02ddpcev6j7guyg60yi4m',
24 | experimental: true,
25 | },
26 | ]
27 | const {
28 | TARGET_ARCH = os.arch(),
29 | SUBNET_FILTER = '',
30 | MODULE_FILTER, // Legacy
31 | } = process.env
32 |
33 | if (MODULE_FILTER) {
34 | console.error('MODULE_FILTER is deprecated, use SUBNET_FILTER instead')
35 | process.exit(1)
36 | }
37 |
38 | export const install = () =>
39 | installRuntime({
40 | runtime: 'zinnia',
41 | repo: 'CheckerNetwork/zinnia',
42 | distTag: ZINNIA_DIST_TAG,
43 | executable: 'zinniad',
44 | arch: TARGET_ARCH,
45 | targets: [
46 | { platform: 'darwin', arch: 'arm64', asset: 'zinniad-macos-arm64.zip' },
47 | { platform: 'darwin', arch: 'x64', asset: 'zinniad-macos-x64.zip' },
48 | { platform: 'linux', arch: 'arm64', asset: 'zinniad-linux-arm64.tar.gz' },
49 | { platform: 'linux', arch: 'x64', asset: 'zinniad-linux-x64.tar.gz' },
50 | { platform: 'win32', arch: 'x64', asset: 'zinniad-windows-x64.zip' },
51 | ],
52 | })
53 |
54 | const matchesSubnetFilter = (subnet) =>
55 | SUBNET_FILTER === '' || subnet === SUBNET_FILTER
56 |
57 | const capitalize = (str) => `${str.charAt(0).toUpperCase()}${str.slice(1)}`
58 |
59 | const updateAllSourceFiles = async ({
60 | subnetVersionsDir,
61 | subnetSourcesDir,
62 | signal,
63 | }) => {
64 | const subnets = await Promise.all(
65 |     SUBNETS
66 | .filter(({ subnet }) => matchesSubnetFilter(subnet))
67 | .map(({ subnet, ipnsKey }) =>
68 | pRetry(
69 | (attemptNumber) =>
70 | updateSourceFiles({
71 | subnet,
72 | ipnsKey,
73 | subnetVersionsDir,
74 | subnetSourcesDir,
75 | noCache: attemptNumber > 1,
76 | }),
77 | {
78 | signal,
79 | retries: 10,
80 | onFailedAttempt: (err) => {
81 | console.error(err)
82 | const msg = `Failed to download ${subnet} source. Retrying...`
83 | console.error(msg)
84 | if (String(err).includes('You are being rate limited')) {
85 | const delaySeconds = 60 + Math.random() * 60
86 |               // Don't DDoS the w3name services
87 | console.error(
88 | `Rate limited. Waiting ${delaySeconds} seconds...`,
89 | )
90 | return timers.setTimeout(delaySeconds * 1000)
91 | }
92 | },
93 | },
94 | ),
95 | ),
96 | )
97 |   const hasUpdated = subnets.some((updated) => updated === true)
98 | return hasUpdated
99 | }
100 |
101 | const runUpdateSourceFilesLoop = async ({
102 | controller,
103 | signal,
104 | onActivity,
105 | subnetVersionsDir,
106 | subnetSourcesDir,
107 | }) => {
108 | while (true) {
109 | if (signal.aborted) {
110 | return
111 | }
112 | const delay = 10 * 60 * 1000 // 10 minutes
113 | const jitter = Math.random() * 20_000 - 10_000 // +- 10 seconds
114 | try {
115 | await timers.setTimeout(delay + jitter, null, { signal })
116 | } catch (err) {
117 | if (err.name === 'AbortError') return
118 | throw err
119 | }
120 | try {
121 | const shouldRestart = await updateAllSourceFiles({
122 | subnetVersionsDir,
123 | subnetSourcesDir,
124 | signal,
125 | })
126 | if (shouldRestart) {
127 | onActivity({
128 | type: 'info',
129 | message: 'Updated subnet source code, restarting...',
130 | })
131 | controller.abort()
132 | return
133 | }
134 | } catch (err) {
135 | onActivity({
136 | type: 'error',
137 | message: 'Failed to update subnet source code',
138 | })
139 | console.error(err)
140 | }
141 | }
142 | }
143 |
144 | const catchChildProcessExit = async ({
145 | childProcesses,
146 | controller,
147 | onActivity,
148 | }) => {
149 | try {
150 | const tasks = childProcesses.map((p) =>
151 | (async () => {
152 | try {
153 | await p
154 | onActivity({
155 | type: 'info',
156 | message: `${capitalize(p.subnetName)} exited`,
157 | })
158 | } catch (err) {
159 |           // When a child process crashes, attach the subnet name & the exit reason to the error object
160 | const exitReason = p.exitCode
161 | ? `with exit code ${p.exitCode}`
162 | : p.signalCode
163 | ? `via signal ${p.signalCode}`
164 | : undefined
165 | throw Object.assign(err, {
166 | subnetName: p.subnetName,
167 | exitReason,
168 | signalCode: p.signalCode,
169 | })
170 | }
171 | })(),
172 | )
173 |
174 | await Promise.race(tasks)
175 | } catch (err) {
176 | if (err.name !== 'AbortError') {
177 | const subnetName = capitalize(err.subnetName ?? 'Zinnia')
178 | const exitReason = err.exitReason ?? 'for unknown reason'
179 | const message = `${subnetName} crashed ${exitReason}`
180 | onActivity({ type: 'error', message })
181 |
182 | const subnetErr = new Error(message, { cause: err })
183 |         // Store the full error message including stdout & stderr in the top-level `details` property
184 | Object.assign(subnetErr, { details: err.message })
185 |
186 | if (
187 | err.signalCode &&
188 | ['SIGTERM', 'SIGKILL', 'SIGINT'].includes(err.signalCode)
189 | ) {
190 | // These signal codes are triggered when somebody terminates the process from outside.
191 | // It's not a problem in Zinnia, there is nothing we can do about this.
192 | // Don't print the stack trace to stderr,
193 | // treat this as a regular exit (successful completion of the process).
194 | // (Note that this event has been already logged via `onActivity()` call above.)
195 | return
196 | }
197 | }
198 | throw err
199 | } finally {
200 | controller.abort()
201 | }
202 | }
203 |
204 | export async function run({
205 | provider,
206 | CHECKER_ID,
207 | FIL_WALLET_ADDRESS,
208 | ethAddress,
209 | STATE_ROOT,
210 | CACHE_ROOT,
211 | subnetVersionsDir,
212 | subnetSourcesDir,
213 | onActivity,
214 | onMetrics,
215 | isUpdated = false,
216 | experimental = false,
217 | }) {
218 | const zinniadExe = getRuntimeExecutable({
219 | runtime: 'zinnia',
220 | executable: 'zinniad',
221 | })
222 |
223 | if (!isUpdated) {
224 | try {
225 | onActivity({
226 | type: 'info',
227 | message: 'Updating source code for subnets...',
228 | })
229 | await updateAllSourceFiles({
230 | subnetVersionsDir,
231 | subnetSourcesDir,
232 | signal: null,
233 | })
234 | onActivity({
235 | type: 'info',
236 | message: 'Subnet source code up to date',
237 | })
238 | } catch (err) {
239 | onActivity({
240 | type: 'error',
241 | message: 'Failed to download latest Subnet source code',
242 | })
243 | throw err
244 | }
245 | }
246 |
247 | const controller = new AbortController()
248 | const { signal } = controller
249 | const childProcesses = []
250 |
251 | for (const { subnet, experimental: subnetIsExperimental } of SUBNETS) {
252 | const skipExperimentalSubnet = !experimental && subnetIsExperimental
253 | if (!matchesSubnetFilter(subnet) || skipExperimentalSubnet) continue
254 |
255 |     // the subnet's main.js path is resolved relative to the child's cwd (subnetSourcesDir)
256 | const childProcess = execa(zinniadExe, [join(subnet, 'main.js')], {
257 | cwd: subnetSourcesDir,
258 | env: {
259 | STATION_ID: CHECKER_ID,
260 | FIL_WALLET_ADDRESS,
261 | STATE_ROOT,
262 | CACHE_ROOT,
263 | },
264 | cancelSignal: signal,
265 | })
266 | childProcesses.push(Object.assign(childProcess, { subnetName: subnet }))
267 |
268 | let timeoutId
269 | const resetTimeout = () => {
270 | clearTimeout(timeoutId)
271 | timeoutId = setTimeout(
272 | () => {
273 | onActivity({
274 | type: 'error',
275 | message: `${capitalize(subnet)} has been inactive for 5 minutes, restarting...`,
276 | })
277 |
278 | controller.abort()
279 | },
280 | 5 * 60 * 1000,
281 | )
282 | }
283 | resetTimeout()
284 | signal.addEventListener('abort', () => clearTimeout(timeoutId))
285 |
286 | childProcess.stdout.setEncoding('utf-8')
287 | childProcess.stdout.on('data', (data) => {
288 | resetTimeout()
289 | handleEvents({
290 | subnet,
291 | onActivity,
292 | onMetrics,
293 | text: data,
294 | }).catch((err) => {
295 | console.error(err)
296 | })
297 | })
298 | childProcess.stderr.setEncoding('utf-8')
299 | childProcess.stderr.on('data', (data) => {
300 | resetTimeout()
301 | process.stderr.write(data)
302 | })
303 | }
304 |
305 | try {
306 | await Promise.all([
307 | runUpdateSourceFilesLoop({
308 | controller,
309 | signal,
310 | onActivity,
311 | subnetVersionsDir,
312 | subnetSourcesDir,
313 | }),
314 | catchChildProcessExit({ childProcesses, onActivity, controller }),
315 | ])
316 | console.error('Zinnia main loop ended')
317 | } catch (err) {
318 | if (err.name === 'AbortError') {
319 | console.error('Zinnia main loop aborted')
320 | } else {
321 | console.error('Zinnia main loop errored', err)
322 | }
323 | } finally {
324 | controller.abort()
325 | }
326 |
327 | // This infinite recursion has no risk of exceeding the maximum call stack
328 | // size, as awaiting promises unwinds the stack
329 | return run({
330 | provider,
331 | CHECKER_ID,
332 | FIL_WALLET_ADDRESS,
333 | ethAddress,
334 | STATE_ROOT,
335 | CACHE_ROOT,
336 | subnetVersionsDir,
337 | subnetSourcesDir,
338 | onActivity,
339 | onMetrics,
340 | isUpdated: true,
341 | experimental,
342 | })
343 | }
344 |
345 | const jobsCompleted = {}
346 |
347 | async function handleEvents({ subnet, onActivity, onMetrics, text }) {
348 | for (const line of text.trimEnd().split(/\n/g)) {
349 | let event
350 | try {
351 | event = JSON.parse(line)
352 |     } catch {
353 |       console.error('Ignoring malformed Zinnia event:', line)
354 |     }
355 |     if (event === undefined) continue
356 | try {
357 | switch (event.type) {
358 | case 'activity:started':
359 | onActivity({
360 | type: 'info',
361 | message: `${capitalize(subnet)} started`,
362 | source: subnet,
363 | })
364 | break
365 | case 'activity:info':
366 | onActivity({
367 | type: 'info',
368 | message: event.message.replace(
369 | /Module Runtime/,
370 | capitalize(subnet),
371 | ),
372 |             source: subnet,
373 | })
374 | break
375 |
376 | case 'activity:error':
377 | onActivity({
378 | type: 'error',
379 | message: event.message.replace(
380 | /Module Runtime/,
381 | capitalize(subnet),
382 | ),
383 |             source: subnet,
384 | })
385 | break
386 |
387 | case 'jobs-completed': {
388 | jobsCompleted[subnet] = event.total
389 | const totalJobsCompleted = Object.values(jobsCompleted).reduce(
390 | (a, b) => a + b,
391 | 0,
392 | )
393 | onMetrics({ totalJobsCompleted })
394 | break
395 | }
396 |
397 | default:
398 | console.error('Ignoring Zinnia event of unknown type:', event)
399 | }
400 | } catch (err) {
401 | console.error('Cannot handle Zinnia event: %s', line)
402 | console.error(err)
403 | }
404 | }
405 | }
406 |
--------------------------------------------------------------------------------
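
For reference, the zinniad child processes speak newline-delimited JSON on stdout, which `handleEvents()` above dispatches on `event.type`. The shapes below are inferred from the switch statement and are a sketch, not an official protocol spec (`handleEvents` itself is module-private):

// One JSON object per stdout line; field names inferred from the handlers
const sampleStdoutLines = [
  // -> onActivity({ type: 'info', message: 'Spark started', source: 'spark' })
  '{"type":"activity:started"}',
  // -> onActivity({ type: 'info', message: '<Subnet> is reporting retrievals' })
  //    with "Module Runtime" rewritten to the capitalized subnet name
  '{"type":"activity:info","message":"Module Runtime is reporting retrievals"}',
  // -> onMetrics({ totalJobsCompleted }) where the total sums all subnets
  '{"type":"jobs-completed","total":42}',
  // anything else is logged as "Ignoring Zinnia event of unknown type"
  '{"type":"something-else"}',
]
console.log(sampleStdoutLines.join('\n'))
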
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@checkernetwork/node",
3 | "version": "22.3.1",
4 | "description": "Checker Network Node",
5 | "repository": {
6 | "type": "git",
7 | "url": "git+https://github.com/CheckerNetwork/node.git"
8 | },
9 | "license": "(Apache-2.0 AND MIT)",
10 | "type": "module",
11 | "main": "index.js",
12 | "types": "dist/index.d.ts",
13 | "bin": {
14 | "checker": "bin/checker.js"
15 | },
16 | "files": [
17 | "bin",
18 | "commands",
19 | "dist",
20 | "lib",
21 | "scripts"
22 | ],
23 | "scripts": {
24 | "format": "prettier --write .",
25 | "postinstall": "node ./scripts/post-install.js",
26 | "lint": "eslint && prettier --check .",
27 | "lint:fix": "eslint --fix && prettier --write .",
28 | "release": "np",
29 | "start": "cross-env FIL_WALLET_ADDRESS=0x000000000000000000000000000000000000dEaD STATE_ROOT=.state CACHE_ROOT=.cache node ./bin/checker.js",
30 | "test": "npm run lint && npm run test:types && npm run test:unit",
31 | "test:types": "tsc -p .",
32 | "test:unit": "cross-env STATE_ROOT=test/.state CACHE_ROOT=test/.cache node --no-warnings=ExperimentalWarning node_modules/mocha/bin/_mocha"
33 | },
34 | "prettier": "@checkernetwork/prettier-config",
35 | "dependencies": {
36 | "@filecoin-station/spark-impact-evaluator": "^1.1.0",
37 | "@glif/filecoin-address": "^4.0.0",
38 | "@influxdata/influxdb-client": "^1.33.2",
39 | "@ipld/car": "^5.2.6",
40 | "@web3-storage/car-block-validator": "^1.2.0",
41 | "ethers": "^6.8.1",
42 | "execa": "^9.0.2",
43 | "ipfs-unixfs-exporter": "^13.4.0",
44 | "p-retry": "^6.1.0",
45 | "tar": "^7.0.0",
46 | "undici": "^7.0.0",
47 | "unzip-stream": "^0.3.1",
48 | "w3name": "^1.0.8",
49 | "yargs": "^17.7.1"
50 | },
51 | "devDependencies": {
52 | "@checkernetwork/prettier-config": "^1.0.0",
53 | "@types/yargs": "^17.0.24",
54 | "cross-env": "^7.0.3",
55 | "eslint": "^9.23.0",
56 | "get-stream": "^9.0.0",
57 | "mocha": "^11.0.1",
58 | "neostandard": "^0.12.1",
59 | "np": "^10.0.0",
60 | "prettier": "^3.5.3",
61 | "prettier-plugin-jsdoc": "^1.3.2",
62 | "prettier-plugin-multiline-arrays": "^4.0.3",
63 | "prettier-plugin-packagejson": "^2.5.10",
64 | "stream-match": "^1.2.1",
65 | "typescript": "^5.0.4"
66 | },
67 | "engines": {
68 | "node": ">=18"
69 | },
70 | "standard": {
71 | "env": [
72 | "mocha"
73 | ]
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/scripts/post-install.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | import * as zinnia from '../lib/zinnia.js'
4 |
5 | await zinnia.install()
6 |
--------------------------------------------------------------------------------
/test/checker-id.test.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import fs from 'node:fs/promises'
3 | import path from 'node:path'
4 | import { decrypt, encrypt, getCheckerId } from '../lib/checker-id.js'
5 | import { getUniqueTempDir } from './util.js'
6 |
7 | const log = {
8 | error() {},
9 | warn() {},
10 | }
11 |
12 | describe('checker-id', () => {
13 | describe('getCheckerId', () => {
14 | it('creates a new key and stores it in the given path', async () => {
15 | const secretsDir = getUniqueTempDir()
16 | const generated = await getCheckerId({
17 | secretsDir,
18 | passphrase: 'secret',
19 | log,
20 | })
21 | assert.match(generated.publicKey, /^[0-9a-z]+$/)
22 | assert.match(generated.privateKey, /^[0-9a-z]+$/)
23 |
24 | await fs.stat(path.join(secretsDir, 'checker_id'))
25 | // the check passes if the statement above does not throw
26 |
27 | const loaded = await getCheckerId({
28 | secretsDir,
29 | passphrase: 'secret',
30 | log,
31 | })
32 | assert.deepStrictEqual(loaded, generated)
33 | })
34 |
35 | it('returns a public key that is exactly 88 characters long', async () => {
36 | // spark-api is enforcing this constraint and rejecting measurements containing checkerId
37 | // in a different format
38 | const secretsDir = getUniqueTempDir()
39 |       const { publicKey } = await getCheckerId({
40 | secretsDir,
41 | passphrase: 'secret',
42 | log,
43 | })
44 | assert.strictEqual(publicKey.length, 88, 'publicKey.length')
45 | assert.match(publicKey, /^[0-9A-Za-z]*$/)
46 | })
47 |
48 | it('skips encryption when passphrase is not set', async () => {
49 | const secretsDir = getUniqueTempDir()
50 | const generated = await getCheckerId({ secretsDir, passphrase: '', log })
51 | assert.match(generated.publicKey, /^[0-9a-z]+$/)
52 | assert.match(generated.privateKey, /^[0-9a-z]+$/)
53 |
54 | await fs.stat(path.join(secretsDir, 'checker_id'))
55 | // the check passes if the statement above does not throw
56 |
57 | const loaded = await getCheckerId({ secretsDir, passphrase: '', log })
58 | assert.deepStrictEqual(loaded, generated)
59 | })
60 |
61 | it('provides a helpful error message when the file cannot be decrypted', async () => {
62 | const secretsDir = getUniqueTempDir()
63 | await getCheckerId({ secretsDir, passphrase: 'secret', log })
64 | await assert.rejects(
65 | getCheckerId({ secretsDir, passphrase: 'wrong pass', log }),
66 | /Cannot decrypt Checker ID file. Did you configure the correct PASSPHRASE/,
67 | )
68 | })
69 |
70 | it('recreates unreadable checker ids on demand', async () => {
71 | const secretsDir = getUniqueTempDir()
72 | await getCheckerId({ secretsDir, passphrase: 'secret', log })
73 | await getCheckerId({
74 | secretsDir,
75 | passphrase: 'new pass',
76 | recreateOnError: true,
77 | log,
78 | })
79 | })
80 |
81 | it('encrypts plaintext checker_id file when PASSPHRASE is provided', async () => {
82 | const secretsDir = getUniqueTempDir()
83 | const generated = await getCheckerId({ secretsDir, passphrase: '', log })
84 | const plaintext = await fs.readFile(path.join(secretsDir, 'checker_id'))
85 |
86 | const loaded = await getCheckerId({
87 | secretsDir,
88 | passphrase: 'super-secret',
89 | log,
90 | })
91 | assert.deepStrictEqual(loaded, generated)
92 |
93 | const ciphertext = await fs.readFile(path.join(secretsDir, 'checker_id'))
94 | assert.notStrictEqual(
95 | plaintext.toString('hex'),
96 | ciphertext.toString('hex'),
97 | )
98 | })
99 | })
100 |
101 | describe('passphrase-based encryption', () => {
102 | it('encrypts and decrypts plaintext', async () => {
103 | const plaintext = 'hello world'
104 | const passphrase = 'some secret words'
105 | const encrypted = await encrypt(passphrase, Buffer.from(plaintext))
106 | assert(Buffer.isBuffer(encrypted), 'encrypted value is a buffer')
107 |
108 | const data = await decrypt(passphrase, encrypted)
109 | assert(data instanceof ArrayBuffer, 'decrypted value is an ArrayBuffer')
110 | assert.strictEqual(Buffer.from(data).toString(), plaintext)
111 | })
112 | })
113 | })
114 |
--------------------------------------------------------------------------------
/test/checker.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import { execa } from 'execa'
3 | import {
4 | checker,
5 | FIL_WALLET_ADDRESS,
6 | PASSPHRASE,
7 | getUniqueTempDir,
8 | } from './util.js'
9 | import streamMatch from 'stream-match'
10 | import getStream from 'get-stream'
11 | import { once } from 'node:events'
12 |
13 | describe('Checker', () => {
14 | it('runs Zinnia', async () => {
15 | const ps = startChecker()
16 | await Promise.race([
17 | once(ps, 'exit'),
18 | Promise.all([
19 | streamMatch(ps.stdout, 'totalJobsCompleted'),
20 | streamMatch(ps.stdout, 'Spark started'),
21 | ]),
22 | ])
23 | // Assert that the process did not exit prematurely
24 | assert.strictEqual(ps.exitCode, null)
25 | stopChecker()
26 | })
27 | it('runs Spark Checker', async () => {
28 | const ps = startChecker()
29 | await Promise.race([
30 | once(ps, 'exit'),
31 | streamMatch(ps.stdout, 'SPARK started reporting retrievals'),
32 | ])
33 | // Assert that the process did not exit prematurely
34 | assert.strictEqual(ps.exitCode, null)
35 | stopChecker()
36 | })
37 |   describe('runs experimental subnets', () => {
38 | it('runs Arweave & Walrus', async () => {
39 | const ps = startChecker(['--experimental'])
40 | await streamMatch(ps.stdout, 'Arweave subnet started.')
41 | await streamMatch(ps.stdout, 'Walrus subnet started.')
42 | stopChecker()
43 | })
44 | })
45 | it('outputs events', async () => {
46 | const ps = startChecker()
47 | await Promise.all([
48 | streamMatch(ps.stdout, 'totalJobsCompleted'),
49 | streamMatch(ps.stdout, 'Spark started'),
50 | ])
51 | stopChecker()
52 | })
53 | it('outputs events json', async () => {
54 | const ps = startChecker(['--json'])
55 |
56 | await Promise.all([
57 | streamMatch(ps.stdout, 'jobs-completed'),
58 | streamMatch(ps.stdout, /activity:info.*(Spark started)/),
59 | ])
60 |
61 | stopChecker()
62 | })
63 |
64 | let ps, stdout, stderr
65 | function startChecker(cliArgs = []) {
66 | assert(!ps, 'Checker is already running')
67 |
68 | const CACHE_ROOT = getUniqueTempDir()
69 | const STATE_ROOT = getUniqueTempDir()
70 | ps = execa(checker, cliArgs, {
71 | env: { CACHE_ROOT, STATE_ROOT, FIL_WALLET_ADDRESS, PASSPHRASE },
72 | })
73 | stdout = getStream(ps.stdout)
74 | stderr = getStream(ps.stderr)
75 | return ps
76 | }
77 |
78 | function stopChecker() {
79 | ps.kill()
80 | ps = undefined
81 | }
82 |
83 | afterEach(async () => {
84 | if (!ps) return
85 | // The test failed and did not stop the Checker process
86 | // Let's stop the process and print stdout & stderr for troubleshooting
87 | stopChecker()
88 |
89 |     console.log('== CHECKER STDOUT ==\n%s', await stdout)
90 |     console.log('== CHECKER STDERR ==\n%s', await stderr)
91 | console.log('== END ==')
92 | })
93 | })
94 |
--------------------------------------------------------------------------------
/test/cli.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import { execa } from 'execa'
3 | import {
4 | checker,
5 | FIL_WALLET_ADDRESS,
6 | PASSPHRASE,
7 | getUniqueTempDir,
8 | } from './util.js'
9 | import { once } from 'node:events'
10 |
11 | describe('CLI', () => {
12 | describe('FIL_WALLET_ADDRESS', () => {
13 | it('fails without address', async () => {
14 | await assert.rejects(
15 | execa(checker, {
16 | env: {
17 | STATE_ROOT: getUniqueTempDir(),
18 | PASSPHRASE,
19 | },
20 | }),
21 | )
22 | })
23 | it('fails with sanctioned address', async () => {
24 | try {
25 | await execa(checker, {
26 | env: {
27 | STATE_ROOT: getUniqueTempDir(),
28 | PASSPHRASE,
29 | FIL_WALLET_ADDRESS: '0x1da5821544e25c636c1417ba96ade4cf6d2f9b5a',
30 | },
31 | })
32 | } catch (err) {
33 | assert.strictEqual(err.exitCode, 2)
34 | return
35 | }
36 | assert.fail('Expected Checker to return a non-zero exit code')
37 | })
38 | it('fails with invalid 0x address', async () => {
39 | try {
40 | await execa(checker, {
41 | env: {
42 | STATE_ROOT: getUniqueTempDir(),
43 | PASSPHRASE,
44 | FIL_WALLET_ADDRESS: '0x1da5821544e25c636c1417ba96ade4cf6d2f9b5',
45 | },
46 | })
47 | } catch (err) {
48 | assert.strictEqual(err.exitCode, 2)
49 | return
50 | }
51 | assert.fail('Expected Checker to return a non-zero exit code')
52 | })
53 | it('starts without passphrase in a fresh install', async () => {
54 | const ps = execa(checker, {
55 | env: {
56 | STATE_ROOT: getUniqueTempDir(),
57 | FIL_WALLET_ADDRESS,
58 | },
59 | })
60 | await once(ps.stdout, 'data')
61 | ps.kill()
62 | })
63 | it('works with address and passphrase', async () => {
64 | const ps = execa(checker, {
65 | env: {
66 | STATE_ROOT: getUniqueTempDir(),
67 | FIL_WALLET_ADDRESS,
68 | PASSPHRASE,
69 | },
70 | })
71 | await once(ps.stdout, 'data')
72 | ps.kill()
73 | })
74 |     it('fails with the wrong passphrase', async () => {
75 | const STATE_ROOT = getUniqueTempDir()
76 | const ps = execa(checker, {
77 | env: {
78 | STATE_ROOT,
79 | FIL_WALLET_ADDRESS,
80 | PASSPHRASE,
81 | },
82 | })
83 | await once(ps.stdout, 'data')
84 | ps.kill()
85 | try {
86 | await execa(checker, {
87 | env: {
88 | STATE_ROOT,
89 | FIL_WALLET_ADDRESS,
90 | PASSPHRASE: `${PASSPHRASE}x`,
91 | },
92 | })
93 | } catch (err) {
94 | assert.strictEqual(err.exitCode, 1)
95 | return
96 | }
97 | assert.fail('Expected Checker to return a non-zero exit code')
98 | })
99 | })
100 |
101 | describe('--recreateCheckerIdOnError', () => {
102 | it('recreates the checker id on demand', async () => {
103 | const STATE_ROOT = getUniqueTempDir()
104 | {
105 | const ps = execa(checker, {
106 | env: {
107 | STATE_ROOT,
108 | FIL_WALLET_ADDRESS,
109 | PASSPHRASE,
110 | },
111 | })
112 | await once(ps.stdout, 'data')
113 | ps.kill()
114 | }
115 | {
116 | const ps = execa(checker, ['--recreateCheckerIdOnError'], {
117 | env: {
118 | STATE_ROOT,
119 | FIL_WALLET_ADDRESS,
120 | PASSPHRASE: `${PASSPHRASE}x`,
121 | },
122 | })
123 | await once(ps.stdout, 'data')
124 | ps.kill()
125 | }
126 | })
127 | })
128 |
129 | describe('--version', () => {
130 | it('outputs version', async () => {
131 | await execa(checker, ['--version'])
132 | await execa(checker, ['-v'])
133 | })
134 | })
135 |
136 | describe('--help', () => {
137 | it('outputs help text', async () => {
138 | await execa(checker, ['--help'])
139 | await execa(checker, ['-h'])
140 | })
141 | })
142 | })
143 |
--------------------------------------------------------------------------------
/test/metrics.js:
--------------------------------------------------------------------------------
1 | import assert from 'node:assert'
2 | import { Metrics } from '../lib/metrics.js'
3 |
4 | describe('Metrics', () => {
5 | describe('submit', () => {
6 | it('should merge metrics', () => {
7 | const metrics = new Metrics()
8 | metrics.submit('subnet1', {
9 | totalJobsCompleted: 1,
10 | rewardsScheduledForAddress: 1n,
11 | })
12 | metrics.submit('subnet2', {
13 | totalJobsCompleted: 2,
14 | rewardsScheduledForAddress: 2n,
15 | })
16 | assert.deepStrictEqual(metrics.mergedMetrics, {
17 | totalJobsCompleted: 3,
18 | rewardsScheduledForAddress: 3n,
19 | })
20 | })
21 | it('should filter duplicate entries', () => {
22 | const metrics = new Metrics()
23 | let i = 0
24 | metrics.onUpdate((metrics) => {
25 | if (i === 0) {
26 | assert.deepStrictEqual(metrics, {
27 | totalJobsCompleted: 1,
28 | rewardsScheduledForAddress: 0n,
29 | })
30 | } else if (i === 1) {
31 | assert.deepStrictEqual(metrics, {
32 | totalJobsCompleted: 2,
33 | rewardsScheduledForAddress: 0n,
34 | })
35 | } else {
36 | throw new Error('should not be called')
37 | }
38 | i++
39 | })
40 | metrics.submit('subnet1', {
41 | totalJobsCompleted: 1,
42 | rewardsScheduledForAddress: 0n,
43 | })
44 | metrics.submit('subnet1', {
45 | totalJobsCompleted: 1,
46 | rewardsScheduledForAddress: 0n,
47 | })
48 | metrics.submit('subnet2', {
49 | totalJobsCompleted: 1,
50 | rewardsScheduledForAddress: 0n,
51 | })
52 | })
53 | })
54 | })
55 |
--------------------------------------------------------------------------------
/test/storage.js:
--------------------------------------------------------------------------------
1 | import { execa } from 'execa'
2 | import { checker, FIL_WALLET_ADDRESS, PASSPHRASE } from './util.js'
3 | import { once } from 'node:events'
4 | import { tmpdir } from 'node:os'
5 | import fs from 'node:fs/promises'
6 | import { randomUUID } from 'node:crypto'
7 | import { join } from 'node:path'
8 |
9 | describe('Storage', () => {
10 | it('creates files', async () => {
11 | const CACHE_ROOT = join(tmpdir(), randomUUID())
12 | const STATE_ROOT = join(tmpdir(), randomUUID())
13 | const ps = execa(checker, {
14 | env: { CACHE_ROOT, STATE_ROOT, FIL_WALLET_ADDRESS, PASSPHRASE },
15 | })
16 | while (true) {
17 | await once(ps.stdout, 'data')
18 | try {
19 | await fs.stat(CACHE_ROOT)
20 | break
21 | } catch {}
22 | }
23 | ps.kill()
24 | await fs.stat(join(CACHE_ROOT, 'runtimes'))
25 | await fs.stat(STATE_ROOT)
26 | await fs.stat(join(STATE_ROOT, 'runtimes'))
27 | })
28 | })
29 |
--------------------------------------------------------------------------------
/test/util.js:
--------------------------------------------------------------------------------
1 | import { randomUUID } from 'node:crypto'
2 | import { tmpdir } from 'node:os'
3 | import { join } from 'node:path'
4 | import { fileURLToPath } from 'node:url'
5 |
6 | export const FIL_WALLET_ADDRESS = '0x000000000000000000000000000000000000dEaD'
7 | export const PASSPHRASE = 'secret'
8 |
9 | export const checker = fileURLToPath(
10 | new URL('../bin/checker.js', import.meta.url),
11 | )
12 |
13 | export const getUniqueTempDir = () => {
14 | return join(tmpdir(), randomUUID())
15 | }
16 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowJs": true,
4 | "checkJs": true,
5 | "skipLibCheck": true,
6 | "lib": ["es2022"],
7 | "target": "es2022",
8 | "module": "Node16",
9 | "moduleResolution": "node16",
10 |
11 | // TODO
12 | // "strict": true,
13 | "forceConsistentCasingInFileNames": true,
14 |
15 | "declaration": true,
16 | "emitDeclarationOnly": true,
17 | "outDir": "dist",
18 | "declarationMap": true,
19 | "resolveJsonModule": true
20 | },
21 | "include": ["index.js", "bin", "commands", "lib", "scripts", "tests"],
22 | "exclude": ["dist/**/*"]
23 | }
24 |
--------------------------------------------------------------------------------