├── .benchrc.yaml ├── .cargo └── config.toml ├── .eslintrc.js ├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ ├── feature_request.md │ └── general_question.md ├── stale.yml └── workflows │ └── CI.yml ├── .gitignore ├── .mocharc.yaml ├── .npmignore ├── .prettierrc.yml ├── .vscode ├── launch.json └── settings.json ├── CONTRIBUTING.md ├── Cargo.toml ├── LICENSE ├── README.md ├── build.rs ├── doc ├── assets │ └── object-model.png ├── blst.md ├── building.md ├── class-hierarchy.md ├── classes.md ├── debugging.md ├── definitions.md ├── environment.md ├── errors.md ├── js-perspective-on-c.md ├── maybe.md ├── memory-model.md ├── multi-threading.md ├── native-node.md ├── readme.md ├── reference.md ├── repo.md ├── structuring-addons.md └── values.md ├── index.d.ts ├── index.js ├── npm ├── darwin-arm64 │ ├── README.md │ └── package.json ├── darwin-x64 │ ├── README.md │ └── package.json ├── linux-arm64-gnu │ ├── README.md │ └── package.json ├── linux-arm64-musl │ ├── README.md │ └── package.json ├── linux-x64-gnu │ ├── README.md │ └── package.json ├── linux-x64-musl │ ├── README.md │ └── package.json └── win32-x64-msvc │ ├── README.md │ └── package.json ├── package.json ├── rustfmt.toml ├── src └── lib.rs ├── test ├── __fixtures__ │ └── index.ts ├── fuzz │ ├── exec.ts │ ├── fuzz.test.ts │ ├── fuzzTarget.ts │ └── testCases.ts ├── memory │ ├── memory.test.ts │ └── napi.heapdump.zst ├── perf │ ├── PublicKey.test.ts │ ├── SecretKey.test.ts │ ├── Signature.test.ts │ └── functions.test.ts ├── spec │ ├── downloadTests.ts │ ├── functions.ts │ ├── index.test.ts │ ├── specTestVersioning.ts │ └── utils.ts ├── unit │ ├── PublicKey.test.ts │ ├── SecretKey.test.ts │ ├── Signature.test.ts │ ├── aggregatePublicKeys.test.ts │ ├── aggregateSignatures.test.ts │ ├── aggregateWithRandomness.test.ts │ ├── bindings.test.ts │ ├── utils.test.ts │ ├── verify.test.ts │ └── verifyMultipleAggregateSignatures.test.ts └── utils │ ├── helpers.ts │ ├── index.ts │ ├── memory │ ├── index.ts 
│ ├── testRunner.ts │ └── types.ts │ ├── testSets.ts │ └── types.ts ├── tsconfig.fuzz.json ├── tsconfig.json └── yarn.lock /.benchrc.yaml: -------------------------------------------------------------------------------- 1 | # Mocha opts 2 | extension: ["ts"] 3 | colors: true 4 | node-option: 5 | - "loader=ts-node/register" 6 | 7 | # benchmark opts 8 | threshold: 3 9 | maxMs: 60_000 10 | minRuns: 10 11 | -------------------------------------------------------------------------------- /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.x86_64-pc-windows-msvc] 2 | rustflags = ["-C", "target-feature=+crt-static"] 3 | 4 | [target.aarch64-unknown-linux-musl] 5 | linker = "aarch64-linux-musl-gcc" 6 | rustflags = ["-C", "target-feature=-crt-static"] -------------------------------------------------------------------------------- /.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | root: true, 3 | env: { 4 | browser: true, 5 | es6: true, 6 | node: true, 7 | mocha: true, 8 | }, 9 | globals: { 10 | BigInt: true, 11 | }, 12 | parser: "@typescript-eslint/parser", 13 | parserOptions: { 14 | ecmaVersion: "latest", 15 | project: "./tsconfig.json", 16 | sourceType: "script", 17 | }, 18 | plugins: ["@typescript-eslint", "eslint-plugin-import", "eslint-plugin-node", "prettier"], 19 | extends: [ 20 | "eslint:recommended", 21 | "plugin:import/errors", 22 | "plugin:import/warnings", 23 | "plugin:import/typescript", 24 | "plugin:@typescript-eslint/recommended", 25 | ], 26 | rules: { 27 | "prettier/prettier": "error", 28 | //doesnt work, it reports false errors 29 | "constructor-super": "off", 30 | "@typescript-eslint/await-thenable": "error", 31 | "@typescript-eslint/explicit-function-return-type": [ 32 | "error", 33 | { 34 | allowExpressions: true, 35 | }, 36 | ], 37 | "@typescript-eslint/func-call-spacing": "error", 38 | 
"@typescript-eslint/member-ordering": "error", 39 | "@typescript-eslint/no-explicit-any": "error", 40 | "@typescript-eslint/no-require-imports": "error", 41 | "@typescript-eslint/no-unused-vars": [ 42 | "error", 43 | { 44 | varsIgnorePattern: "^_", 45 | }, 46 | ], 47 | "@typescript-eslint/ban-ts-comment": "warn", 48 | "@typescript-eslint/no-use-before-define": "off", 49 | "@typescript-eslint/semi": "error", 50 | "@typescript-eslint/type-annotation-spacing": "error", 51 | "@typescript-eslint/no-floating-promises": "error", 52 | "@typescript-eslint/explicit-member-accessibility": ["error", {accessibility: "no-public"}], 53 | "@typescript-eslint/no-unsafe-call": "error", 54 | "@typescript-eslint/no-unsafe-return": "error", 55 | "import/no-extraneous-dependencies": [ 56 | "error", 57 | { 58 | devDependencies: false, 59 | optionalDependencies: false, 60 | peerDependencies: false, 61 | }, 62 | ], 63 | "func-call-spacing": "off", 64 | //if --fix is run it messes imports like /lib/presets/minimal & /lib/presets/mainnet 65 | "import/no-duplicates": "off", 66 | "node/no-deprecated-api": "error", 67 | "new-parens": "error", 68 | "no-caller": "error", 69 | "no-bitwise": "off", 70 | "no-cond-assign": "error", 71 | "no-consecutive-blank-lines": 0, 72 | "no-console": "warn", 73 | "no-var": "error", 74 | "object-curly-spacing": ["error", "never"], 75 | "object-literal-sort-keys": 0, 76 | "no-prototype-builtins": 0, 77 | "prefer-const": "error", 78 | quotes: ["error", "double"], 79 | semi: "off", 80 | }, 81 | settings: { 82 | "import/core-modules": ["node:child_process", "node:crypto", "node:fs", "node:os", "node:path", "node:util"], 83 | }, 84 | overrides: [ 85 | { 86 | files: ["test/**/*.ts"], 87 | rules: { 88 | "import/no-extraneous-dependencies": "off", 89 | "@typescript-eslint/no-explicit-any": "off", 90 | }, 91 | }, 92 | { 93 | files: ["test/utils/testSets.ts"], 94 | rules: { 95 | "no-console": "off", 96 | }, 97 | }, 98 | ], 99 | }; 100 | 
-------------------------------------------------------------------------------- /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # These owners will be the default owners for everything in 2 | # the repo. Unless a later match takes precedence, 3 | # They will be requested for 4 | # review when someone opens a pull request. 5 | * @ChainSafe/lodestar 6 | 7 | # Order is important; the last matching pattern takes the most 8 | # precedence. When someone opens a pull request that only 9 | # modifies md files, only md owners and not the global 10 | # owner(s) will be requested for a review. 11 | *.md @ChainSafe/lodestar 12 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | --- 5 | 6 | 7 | 8 | 9 | **Describe the bug** 10 | 11 | 12 | 13 | **Expected behavior** 14 | 15 | 16 | 17 | **Steps to Reproduce** 18 | 19 | 25 | 26 | **Screenshots** 27 | 28 | 29 | 30 | **Desktop (please complete the following information):** 31 | 32 | - OS: 33 | - Version: 34 | - Branch: 35 | - Commit hash: 36 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | --- 5 | 6 | 9 | 10 | **Is your feature request related to a problem? 
Please describe.** 11 | 12 | 13 | 14 | **Describe the solution you'd like** 15 | 16 | 17 | 18 | **Describe alternatives you've considered** 19 | 20 | 21 | 22 | **Additional context** 23 | 24 | 25 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/general_question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Architecture/Planning Question 3 | about: Suggest an idea for this project 4 | --- 5 | 6 | 10 | 11 | **What is your question?** 12 | 13 | 14 | -------------------------------------------------------------------------------- /.github/stale.yml: -------------------------------------------------------------------------------- 1 | # Configuration for probot-stale - https://github.com/probot/stale 2 | 3 | # Number of days of inactivity before an Issue or Pull Request becomes stale 4 | daysUntilStale: 60 5 | 6 | # Number of days of inactivity before an Issue or Pull Request with the stale label is closed. 7 | # Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale. 8 | daysUntilClose: 15 9 | 10 | # Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled) 11 | onlyLabels: [] 12 | 13 | # Issues or Pull Requests with these labels will never be considered stale. 
Set to `[]` to disable 14 | exemptLabels: 15 | - "PR state: on-ice" 16 | - "Good First Issue" 17 | - "Status: On Ice" 18 | - "Priority: 4 - Low" 19 | - "Priority: 3 - Medium" 20 | - "Priority: 2 - High" 21 | - "Priority: 1 - Critical" 22 | - "discussion" 23 | - "Discussion" 24 | - "Epic" 25 | - "Good First Issue" 26 | - "help wanted" 27 | 28 | # Set to true to ignore issues in a project (defaults to false) 29 | exemptProjects: false 30 | 31 | # Set to true to ignore issues in a milestone (defaults to false) 32 | exemptMilestones: true 33 | 34 | # Set to true to ignore issues with an assignee (defaults to false) 35 | exemptAssignees: true 36 | 37 | # Label to use when marking as stale 38 | staleLabel: bot:stale 39 | 40 | # Comment to post when marking as stale. Set to `false` to disable 41 | markComment: > 42 | This issue has been automatically marked as stale because it has not had 43 | recent activity. It will be closed in 15 days if no further activity occurs. Thank you 44 | for your contributions. 45 | 46 | # Comment to post when removing the stale label. 47 | # unmarkComment: > 48 | # Your comment here 49 | 50 | # Comment to post when closing a stale Issue or Pull Request. 51 | closeComment: > 52 | This issue or pull request has been automatically been closed due to inactivity. 53 | 54 | # Limit the number of actions per hour, from 1-30. Default is 30 55 | limitPerRun: 30 56 | # Limit to only `issues` or `pulls` 57 | # only: issues 58 | 59 | # Optionally, specify configuration settings that are specific to just 'issues' or 'pulls': 60 | # pulls: 61 | # daysUntilStale: 30 62 | # markComment: > 63 | # This pull request has been automatically marked as stale because it has not had 64 | # recent activity. It will be closed if no further activity occurs. Thank you 65 | # for your contributions. 
66 | 67 | # issues: 68 | # exemptLabels: 69 | # - confirmed 70 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | coverage 2 | build 3 | release 4 | dist 5 | node_modules 6 | prebuild 7 | 8 | npm-debug.log 9 | yarn-error.log 10 | 11 | # Eth2.0 spec tests data 12 | spec-tests 13 | spec-tests-bls 14 | 15 | # Fuzz Test Meta 16 | fuzz-tests 17 | 18 | # Created by https://www.toptal.com/developers/gitignore/api/node 19 | # Edit at https://www.toptal.com/developers/gitignore?templates=node 20 | 21 | ### Node ### 22 | # Logs 23 | logs 24 | *.log 25 | npm-debug.log* 26 | yarn-debug.log* 27 | yarn-error.log* 28 | lerna-debug.log* 29 | 30 | # Diagnostic reports (https://nodejs.org/api/report.html) 31 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 32 | 33 | # Runtime data 34 | pids 35 | *.pid 36 | *.seed 37 | *.pid.lock 38 | 39 | # Directory for instrumented libs generated by jscoverage/JSCover 40 | lib-cov 41 | 42 | # Coverage directory used by tools like istanbul 43 | coverage 44 | *.lcov 45 | 46 | # nyc test coverage 47 | .nyc_output 48 | 49 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 50 | .grunt 51 | 52 | # Bower dependency directory (https://bower.io/) 53 | bower_components 54 | 55 | # node-waf configuration 56 | .lock-wscript 57 | 58 | # Compiled binary addons (https://nodejs.org/api/addons.html) 59 | build/Release 60 | 61 | # Dependency directories 62 | node_modules/ 63 | jspm_packages/ 64 | 65 | # TypeScript v1 declaration files 66 | typings/ 67 | 68 | # TypeScript cache 69 | *.tsbuildinfo 70 | 71 | # Optional npm cache directory 72 | .npm 73 | 74 | # Optional eslint cache 75 | .eslintcache 76 | 77 | # Microbundle cache 78 | .rpt2_cache/ 79 | .rts2_cache_cjs/ 80 | .rts2_cache_es/ 81 | .rts2_cache_umd/ 82 | 83 | # Optional REPL history 84 | .node_repl_history 85 | 86 | # Output of 'npm pack' 87 | *.tgz 
88 | 89 | # Yarn Integrity file 90 | .yarn-integrity 91 | 92 | # dotenv environment variables file 93 | .env 94 | .env.test 95 | 96 | # parcel-bundler cache (https://parceljs.org/) 97 | .cache 98 | 99 | # Next.js build output 100 | .next 101 | 102 | # Nuxt.js build / generate output 103 | .nuxt 104 | dist 105 | 106 | # Gatsby files 107 | .cache/ 108 | # Comment in the public line in if your project uses Gatsby and not Next.js 109 | # https://nextjs.org/blog/next-9-1#public-directory-support 110 | # public 111 | 112 | # vuepress build output 113 | .vuepress/dist 114 | 115 | # Serverless directories 116 | .serverless/ 117 | 118 | # FuseBox cache 119 | .fusebox/ 120 | 121 | # DynamoDB Local files 122 | .dynamodb/ 123 | 124 | # TernJS port file 125 | .tern-port 126 | 127 | # Stores VSCode versions used for testing VSCode extensions 128 | .vscode-test 129 | 130 | # End of https://www.toptal.com/developers/gitignore/api/node 131 | 132 | # Created by https://www.toptal.com/developers/gitignore/api/macos 133 | # Edit at https://www.toptal.com/developers/gitignore?templates=macos 134 | 135 | ### macOS ### 136 | # General 137 | .DS_Store 138 | .AppleDouble 139 | .LSOverride 140 | 141 | # Icon must end with two 142 | Icon 143 | 144 | 145 | # Thumbnails 146 | ._* 147 | 148 | # Files that might appear in the root of a volume 149 | .DocumentRevisions-V100 150 | .fseventsd 151 | .Spotlight-V100 152 | .TemporaryItems 153 | .Trashes 154 | .VolumeIcon.icns 155 | .com.apple.timemachine.donotpresent 156 | 157 | # Directories potentially created on remote AFP share 158 | .AppleDB 159 | .AppleDesktop 160 | Network Trash Folder 161 | Temporary Items 162 | .apdisk 163 | 164 | ### macOS Patch ### 165 | # iCloud generated files 166 | *.icloud 167 | 168 | # End of https://www.toptal.com/developers/gitignore/api/macos 169 | 170 | # Created by https://www.toptal.com/developers/gitignore/api/windows 171 | # Edit at https://www.toptal.com/developers/gitignore?templates=windows 172 | 173 | ### 
Windows ### 174 | # Windows thumbnail cache files 175 | Thumbs.db 176 | Thumbs.db:encryptable 177 | ehthumbs.db 178 | ehthumbs_vista.db 179 | 180 | # Dump file 181 | *.stackdump 182 | 183 | # Folder config file 184 | [Dd]esktop.ini 185 | 186 | # Recycle Bin used on file shares 187 | $RECYCLE.BIN/ 188 | 189 | # Windows Installer files 190 | *.cab 191 | *.msi 192 | *.msix 193 | *.msm 194 | *.msp 195 | 196 | # Windows shortcuts 197 | *.lnk 198 | 199 | # End of https://www.toptal.com/developers/gitignore/api/windows 200 | 201 | #Added by cargo 202 | 203 | /target 204 | Cargo.lock 205 | 206 | .pnp.* 207 | .yarn/* 208 | !.yarn/patches 209 | !.yarn/plugins 210 | !.yarn/releases 211 | !.yarn/sdks 212 | !.yarn/versions 213 | 214 | *.node 215 | -------------------------------------------------------------------------------- /.mocharc.yaml: -------------------------------------------------------------------------------- 1 | colors: true 2 | require: ts-node/register 3 | exit: true 4 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | target 2 | Cargo.lock 3 | .cargo 4 | .github 5 | npm 6 | .eslintrc 7 | .prettierignore 8 | rustfmt.toml 9 | yarn.lock 10 | *.node 11 | .yarn 12 | __test__ 13 | renovate.json 14 | -------------------------------------------------------------------------------- /.prettierrc.yml: -------------------------------------------------------------------------------- 1 | printWidth: 120 2 | tabWidth: 2 3 | useTabs: false 4 | semi: true 5 | singleQuote: false 6 | quoteProps: "as-needed" 7 | trailingComma: "es5" 8 | bracketSpacing: false 9 | arrowParens: "always" 10 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 
3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Debug Multi-Thread Tests", 11 | "runtimeExecutable": "${workspaceFolder}/node_modules/.bin/ts-node", 12 | "args": [ 13 | "${workspaceFolder}/node_modules/.bin/benchmark", 14 | "--config", 15 | ".benchrc.yaml", 16 | "test/perf/multithreading.test.ts" 17 | ], 18 | "env": { 19 | "UV_THREADPOOL_SIZE": "9", 20 | }, 21 | "console": "integratedTerminal", 22 | "protocol": "inspector", 23 | "cwd": "${workspaceFolder}" 24 | }, 25 | { 26 | "type": "lldb", 27 | "request": "launch", 28 | "name": "Debug Native Addon", 29 | "cwd": "${workspaceRoot}", 30 | "preLaunchTask": "npm: build:debug", 31 | "program": "node", 32 | "args": [ 33 | "node_modules/.bin/mocha", 34 | "test/unit/**/*.test.ts" 35 | ] 36 | } 37 | ] 38 | } -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "rust-analyzer.procMacro.ignored": { 3 | "napi-derive": [ 4 | "napi" 5 | ] 6 | } 7 | } -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | ## Welcome 4 | 5 | Thank you for your interest in contribution to the `blst-ts` project. This file will serve as your guide using the repo and some of the nuances of the architecture used within. Note that this repo contains a git submodule. Make sure the git submodule `blst` is populated before attempting to build locally. 
6 | 7 | ```sh 8 | git submodule update --init --recursive 9 | yarn 10 | ``` 11 | 12 | ### Scripts 13 | 14 | #### `download-spec-tests` 15 | 16 | Pulls the spec test from the `ethereum/consensus-spec` repo and puts them in the `spec-tests` folder. 17 | 18 | #### `test:unit` 19 | 20 | Runs the unit tests in `test/unit` via mocha 21 | 22 | #### `test:spec` 23 | 24 | Runs the unit tests in `test/spec` via mocha. It is important do download the spec tests before running this. 25 | 26 | #### `test:memory` 27 | 28 | Runs a test rig for creating thousands of object instances to get a mean-reversion value for the memory consumed by a single instance. 29 | 30 | #### `test:perf` 31 | 32 | Uses `@dapplion/benchmark` to run the test in `test/perf`. Results from these tests are posted to PR bodies and checked against the values on `master` to make sure there are no regressions and to highlight significant performance increases. 33 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | edition = "2021" 3 | name = "chainsafe_blst" 4 | version = "0.0.0" 5 | 6 | [lib] 7 | crate-type = ["cdylib"] 8 | 9 | [dependencies] 10 | blst = { version = "0.3.13", features = ["portable"] } 11 | hex = "0.4.3" 12 | napi = { version = "2.16.8", default-features = false, features = ["napi8"] } 13 | napi-derive = "2.16.8" 14 | rand = "0.8.5" 15 | 16 | [build-dependencies] 17 | napi-build = "2.1.3" 18 | 19 | [profile.release] 20 | lto = true 21 | strip = "symbols" 22 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2024 ChainSafe Systems 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 
5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # blst-ts 2 | 3 | ![ETH2.0_Spec_Version 1.4.0](https://img.shields.io/badge/ETH2.0_Spec_Version-1.4.0-2e86c1.svg) 4 | ![ES Version](https://img.shields.io/badge/ES-2017-yellow) 5 | ![Node Version](https://img.shields.io/badge/node-16.x-green) 6 | 7 | Typescript wrapper for [supranational/blst](https://github.com/supranational/blst) native bindings, a highly performant BLS12-381 signature library. 8 | 9 | ## Supported Environments 10 | 11 | | OS / Arch | binary name | Node | 12 | | ------------- | ----------- | ---------------------------------- | 13 | | Linux / x64 | linux-x64 | 16, 18, 20, 21 | 14 | | Linux / arm64 | linux-arm64 | 16, 18, 20, 21 | 15 | | Windows / x64 | win32-x64 | 16, 18, 20, 21 | 16 | | macOS / x64 | darwin-x64 | 16, 18, 20, 21 | 17 | | macOS / arm64 | darwin-arm64 | 16, 18, 20, 21 | 18 | 19 | ## Usage 20 | 21 | ```bash 22 | yarn add @chainsafe/blst 23 | ``` 24 | 25 | This library comes with pre-compiled bindings for most platforms. You can check current support in [releases](https://github.com/ChainSafe/blst-ts/releases). If your platform is not supported, bindings will be compiled from source as a best effort with node-gyp. 
26 | 27 | ```ts 28 | import crypto from "crypto"; 29 | import {SecretKey, verify, BLST_CONSTANTS} from "@chainsafe/blst"; 30 | 31 | const msg = Buffer.from("sample-msg"); 32 | const sk = SecretKey.fromKeygen(crypto.randomBytes(BLST_CONSTANTS.SECRET_KEY_LENGTH)); 33 | const pk = sk.toPublicKey(); 34 | const sig = sk.sign(msg); 35 | 36 | console.log(verify(msg, pk, sig)); // true 37 | ``` 38 | 39 | This library exposes a classes for secret keys, public keys and signatures: `SecretKey`, `PublicKey` & `Signature` 40 | 41 | The `PublicKey` and `Signature` contain an affine point (x,y) encoding of P1 in G1 and P2 in G2 respectively. 42 | 43 | ## Spec versioning 44 | 45 | This library has a hardcoded configuration compatible with Eth2.0 spec: 46 | 47 | | Setting | value | 48 | | -------------- | --------------------------------------------- | 49 | | PK_IN | `G1` | 50 | | HASH_OR_ENCODE | `true` | 51 | | DST | `BLS_SIG_BLS12381G2_XMD:SHA-256_SSWU_RO_POP_` | 52 | | RAND_BITS | `64` | 53 | 54 | > [spec](https://github.com/ethereum/eth2.0-specs/blob/v0.11.1/specs/phase0/beacon-chain.md#bls-signatures) 55 | 56 | > [test vectors](https://github.com/ethereum/consensus-spec-tests/tree/master/tests/general) 57 | 58 | ## Contributing 59 | 60 | Please check out [CONTRIBUTING.md](./CONTRIBUTING.md) for more info on how to use the repo and for architectural details 61 | 62 | ## Release/Publishing 63 | 64 | ## Release 65 | 66 | The release process is automatically [triggered](.github/workflows/CI.yml#216) when the master branch has the version in package.json updated. 67 | 68 | To create a new release: 69 | 70 | 1. Increment the project version in [package.json](package.json#3) 71 | - A pre-release can be published by ensuring that the project version is appended with non-numeric characters, eg: `-beta` 72 | 2. run `yarn run version` 73 | 3. merge a commit with these changes 74 | 4. 
CI will run and result in a new release being published 75 | 76 | ## License 77 | 78 | Apache-2.0 -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | extern crate napi_build; 2 | 3 | fn main() { 4 | napi_build::setup(); 5 | } 6 | -------------------------------------------------------------------------------- /doc/assets/object-model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChainSafe/blst-ts/1c0711b98d6724381985611701db2e7922a7537e/doc/assets/object-model.png -------------------------------------------------------------------------------- /doc/blst.md: -------------------------------------------------------------------------------- 1 | # `supranational/blst` 2 | 3 | ## Overview 4 | 5 | `blst` (pronounced 'blast') is a BLS12-381 signature library focused on performance and security. It is written in C and assembly. 6 | 7 | The library deliberately abstains from dealing with memory management and multi-threading, with the rationale that these ultimately belong in language-specific bindings. Another responsibility that is left to the application is random number generation. All this in the name of run-time neutrality, which makes integration into more stringent environments like Intel SGX or ARM TrustZone trivial. 8 | 9 | The `@chainsafe/blst-ts` library was specifically built to focus on the multi-threading part. Random numbers are generated via `openssl`. 10 | 11 | ## Structure 12 | 13 | The most important code in `supranational/blst`, referred to as just `blst` for the rest of this readme, is in the `bindings` folder. While `src` is where the implementation happens, `bindings` is where you will find the exported interfaces. The two you will want to focus on are `blst.h` and `blst.hpp`. They are the `C` and `C++` api, respectively, to `#include`. 
14 | 15 | ## Existing `node` Bindings 16 | 17 | The existing `node.js` bindings are [`Swig`](https://www.swig.org/index.html) generated. All of the functions run synchronously on the main JS thread which is not ideal for server situations. Despite this, the `blst` library has other functionality that is quite useful. The scope of `blst-ts` focuses on public key infrastructure and only uses a small fraction of the full api. If you need more functionality, you can use the `Swig` bindings together with `blst-ts` safely. 18 | 19 | ## Initialization of `blst::Pairing` 20 | 21 | When implementing the `supranational/blst` library, note that the `blst::Pairing` is a 0-byte opaque struct. In `C`, use `blst::blst_pairing_sizeof()` to `malloc` the correct amount of space. For `C++`, the library overrides the [new operator](https://github.com/supranational/blst/blob/a7fd1f584d26b0ae6cdc427976ea1d8980f7e15d/bindings/blst.hpp#L889), so you can invoke that, or use a smart pointer as one would normally. 22 | 23 | Stack allocating `blst::Pairing` will definitely cause undefined behavior. In practice, it gets a frustrating "couple" (2... or maybe 40) lines past the allocation before segfault or code execution halting. Having the [debugger and it's manual](./debugging.md) working will solve this. :wink: 24 | -------------------------------------------------------------------------------- /doc/building.md: -------------------------------------------------------------------------------- 1 | # Building Addons 2 | 3 | ## Build tools 4 | 5 | It is possible to use a range of build tools but two are supported most readily. `node-gyp` is the tool that node uses natively for building code at `install` time. `cmake.js` is an alternative to `node-gyp` and an extension of `CMake` for building javascript modules. 6 | 7 | The `blst-ts` library is built with `node-gyp`. 
8 | 9 | ## `node-gyp` 10 | 11 | `node-gyp` is not known for being the most friendly build tool, but there are a few tips here that will help make life easier when working with it. It is a wrapper around `make` and will help to scaffold the files necessary (like `Makefile`'s) for compilation. 12 | 13 | It builds both `Release` and `Debug` builds and that can be controlled with the `--debug` flag when running the `configure`, `build` or `rebuild` commands. 14 | 15 | Before building one must first run `node-gyp configure` to setup the `build` folder. Note that running `npm install` automatically will run configure. You can read more about `node-gyp` commands [here](https://github.com/nodejs/node-gyp#how-to-use). 16 | 17 | ## `node-gyp` dependencies 18 | 19 | There is a common issue that comes up regarding `python` versions when installing and running `node-gyp`. Newer versions of mac have `python3` by default and `node-gyp` will not correctly find it on the `PATH`. There are a few options to resolve this but the easiest is to use `pyenv` to manage the versions that are installed and used. 20 | 21 | ## `binding.gyp` 22 | 23 | The complexity of `node-gyp` is not in its simple commands but its poorly documented configuration file named `binding.gyp`. [This](https://github.com/nodejs/node-gyp#the-bindinggyp-file) is the example of the file in the docs which is of little use. Under the [Further reading](https://github.com/nodejs/node-gyp#further-reading) heading there is a link to ["`binding.gyp` files out in the wild wiki page"](https://github.com/nodejs/node-gyp/blob/main/docs/binding.gyp-files-in-the-wild.md) and this is where one is expected to figure it out. Another helpful resource was going through the list of packages that depend on `node-addon-api` and looking through those projects' `binding.gyp`. 24 | 25 | ### `binding.gyp` Keys 26 | 27 | There is not a great resource for what each of the properties of the "object" that is in the `binding.gyp` file does. 
Hopefully the information below will make it a less confusing tool. 28 | 29 | #### `target_name` 30 | 31 | There are some nuances that will prevent the project from building or the `bindings` package from working correctly. In particular paying attention to the name of the `class` that implements `Napi::Addon` as that will get passed through and override the `target_name` field. There is also potentially a bug during linking if the `target_name` and entry filename don't match. 32 | 33 | #### `sources` 34 | 35 | This is a array of implementation files to compile. Generally this is a list of `.c`, `.cc`, or `.cpp` files (can be any c-compatible extension). 36 | 37 | #### `dependencies` 38 | 39 | It is possible to use properly formatted `node_modules` as dependencies. Check out how `node-addon-api` is structured for an example of this. 40 | 41 | #### `libraries` 42 | 43 | Use this to statically link to libraries 44 | 45 | ```json 46 | "libraries": [ 47 | "-lsodium", 48 | "<(module_root_dir)/deps/libblst.a", 49 | ], 50 | ``` 51 | 52 | #### `include_dirs` 53 | 54 | This is the equivalent to `include` directories in `make` or `CMake`. It is a list of directories that will be searched for header files. Anything placed here will be available in source files without a relative path. 55 | 56 | ```c 57 | #include "napi.h" 58 | ``` 59 | 60 | #### `defines` 61 | 62 | Preprocessor definitions that will be available at compile time. It is possible to pass with or "without" a value. 63 | 64 | ```json 65 | "defines": [ 66 | "NAPI_EXPERIMENTAL", 67 | "NAPI_DISABLE_CPP_EXCEPTIONS", 68 | "LIB_FANCY_SOME_ENV_VAR= in your native addon and use its functions, the linker resolves the corresponding OpenSSL symbols during the build process because [node-gyp knows](https://nodejs.org/api/addons.html#linking-to-libraries-included-with-nodejs) they are part of the Node.js binary. 
185 | 186 | At runtime, when Node.js loads (dynamically links) the native addon using require, your addon is able to access the OpenSSL functions because those functions are part of the Node.js binary. 187 | 188 | ### Other `node-gyp` Targets 189 | 190 | ```json 191 | { 192 | "target_name": "action_after_build", 193 | "type": "none", 194 | "dependencies": ["kzg"], 195 | "copies": [ 196 | { 197 | "files": ["./build/Release/kzg.node"], 198 | "destination": "./dist" 199 | }, 200 | { 201 | "files": ["./build/Release/kzg.node"], 202 | "destination": "./" 203 | } 204 | ] 205 | } 206 | ``` 207 | -------------------------------------------------------------------------------- /doc/class-hierarchy.md: -------------------------------------------------------------------------------- 1 | ```mermaid 2 | classDiagram 3 | 4 | class BlstTsAddon{ 5 | + std::string _dst 6 | + std::string _blst_error_strings[8] 7 | + Napi::FunctionReference _secret_key_ctr 8 | + napi_type_tag _secret_key_tag 9 | + Napi::FunctionReference _public_key_ctr 10 | + napi_type_tag _public_key_tag 11 | + Napi::FunctionReference _signature_ctr 12 | + napi_type_tag _signature_tag 13 | + BlstTsAddon(Napi::Env env, Napi::Object exports) 14 | + std::string GetBlstErrorString(const blst::BLST_ERROR &err) 15 | + bool GetRandomBytes(blst::byte *ikm, size_t length) 16 | + Napi::Value RunTest(const Napi::CallbackInfo &info) 17 | - Napi::Object BuildJsConstants(Napi::Env &env) 18 | } 19 | 20 | class BlstBase{ 21 | + bool IsZeroBytes(const uint8_t *data, size_t start_byte, size_t byte_length) 22 | + bool HasError() 23 | + std::string GetError() 24 | + size_t GetBadIndex() 25 | + void ThrowJsException() 26 | # BlstBase(Napi::Env env) 27 | # void SetError(const std::string &err, const size_t bad_index = 0) 28 | # Napi::Env _env 29 | # BlstTsAddon *_module 30 | # std::string _error 31 | # size_t _bad_index 32 | } 33 | 34 | class Napi_AsyncWorker{ 35 | + virtual void Execute() 36 | } 37 | 38 | class BlstAsyncWorker{ 39 | + 
BlstAsyncWorker(const Napi::CallbackInfo &info) 40 | + Napi::Value RunSync() 41 | + Napi::Value Run() 42 | # Napi::Env &_env 43 | # const Napi::CallbackInfo &_info 44 | # virtual void Setup() 45 | # virtual Napi::Value GetReturnValue() 46 | # void SetError(const std::string &err) 47 | # void OnOK() 48 | # void OnError(Napi::Error const &err) 49 | - Napi::Promise::Deferred _deferred 50 | - bool _use_deferred 51 | - Napi::Promise GetPromise() 52 | } 53 | 54 | class SecretKey{ 55 | + std::unique_ptr _key 56 | + bool _is_zero_key 57 | + static void Init(Napi::Env env, Napi::Object &exports, BlstTsAddon *module) 58 | + static Napi::Value FromKeygen(const Napi::CallbackInfo &info) 59 | + static Napi::Value FromKeygenSync(const Napi::CallbackInfo &info) 60 | + static Napi::Value Deserialize(const Napi::CallbackInfo &info) 61 | + SecretKey(const Napi::CallbackInfo &info) 62 | + Napi::Value Serialize(const Napi::CallbackInfo &info) 63 | + Napi::Value ToPublicKey(const Napi::CallbackInfo &info) 64 | + Napi::Value Sign(const Napi::CallbackInfo &info) 65 | + Napi::Value SignSync(const Napi::CallbackInfo &info) 66 | - BlstTsAddon *_module 67 | } 68 | 69 | class PublicKey{ 70 | + bool _is_zero_key 71 | + bool _has_jacobian 72 | + bool _has_affine 73 | + std::unique_ptr _jacobian 74 | + std::unique_ptr _affine 75 | + static void Init(Napi::Env env, Napi::Object &exports, BlstTsAddon *module) 76 | + static Napi::Value Deserialize(const Napi::CallbackInfo &info) 77 | + PublicKey(const Napi::CallbackInfo &info) 78 | + Napi::Value Serialize(const Napi::CallbackInfo &info) 79 | + Napi::Value KeyValidate(const Napi::CallbackInfo &info) 80 | + Napi::Value KeyValidateSync(const Napi::CallbackInfo &info) 81 | + const blst::P1 *AsJacobian() 82 | + const blst::P1_Affine *AsAffine() 83 | + bool NativeValidate() 84 | } 85 | 86 | class PublicKeyArg{ 87 | + PublicKeyArg(Napi::Env env) 88 | + PublicKeyArg(Napi::Env env, Napi::Value raw_arg) 89 | + const blst::P1 *AsJacobian() 90 | + const 
blst::P1_Affine *AsAffine() 91 | + bool NativeValidate() 92 | + const uint8_t *GetBytes() 93 | + size_t GetBytesLength() 94 | - PublicKey *_public_key 95 | - Uint8ArrayArg _bytes 96 | - Napi::Reference _ref 97 | } 98 | 99 | class PublicKeyArgArray{ 100 | + PublicKeyArgArray(Napi::Env env, Napi::Value raw_arg) 101 | + PublicKeyArg &operator[](size_t index) 102 | + size_t Size() 103 | + void Reserve(size_t size) 104 | - std::vector _keys 105 | } 106 | 107 | class Signature{ 108 | + bool _has_jacobian 109 | + bool _has_affine 110 | + std::unique_ptr _jacobian 111 | + std::unique_ptr _affine 112 | + static void Init(Napi::Env env, Napi::Object &exports, BlstTsAddon *module) 113 | + static Napi::Value Deserialize(const Napi::CallbackInfo &info) 114 | + Signature(const Napi::CallbackInfo &info) 115 | + Napi::Value Serialize(const Napi::CallbackInfo &info) 116 | + Napi::Value SigValidate(const Napi::CallbackInfo &info) 117 | + Napi::Value SigValidateSync(const Napi::CallbackInfo &info) 118 | + const blst::P2 *AsJacobian() 119 | + const blst::P2_Affine *AsAffine() 120 | } 121 | 122 | class SignatureArg{ 123 | + Signature *_signature 124 | + SignatureArg(Napi::Env env) 125 | + SignatureArg(Napi::Env env, Napi::Value raw_arg) 126 | + const blst::P2 *AsJacobian() 127 | + const blst::P2_Affine *AsAffine() 128 | - Uint8ArrayArg _bytes 129 | - Napi::Reference _ref 130 | } 131 | 132 | class SignatureArgArray{ 133 | + SignatureArgArray(Napi::Env env, Napi::Value raw_arg) 134 | + SignatureArg &operator[](size_t index) 135 | + size_t Size() 136 | + void Reserve(size_t size) 137 | - std::vector _signatures 138 | } 139 | 140 | 141 | class AggregateVerifyWorker { 142 | + AggregateVerifyWorker(const Napi::CallbackInfo &info) 143 | # void Setup() override 144 | # void Execute() override 145 | + Napi::Value GetReturnValue() override 146 | - bool _invalid_args 147 | - bool _no_keys 148 | - bool _no_msgs 149 | - bool _result 150 | - std::unique_ptr _ctx 151 | - Uint8ArrayArgArray _msgs 152 
| - PublicKeyArgArray _public_keys 153 | - SignatureArg _signature 154 | } 155 | 156 | class SignatureSet { 157 | - Uint8ArrayArg _msg 158 | - PublicKeyArg _publicKey 159 | - SignatureArg _signature 160 | + SignatureSet(Napi::Env env, const Napi::Value &raw_arg) 161 | } 162 | 163 | class SignatureSetArray { 164 | - std::vector _sets 165 | + SignatureSetArray(Napi::Env env, const Napi::Value &raw_arg) 166 | } 167 | 168 | class VerifyMultipleAggregateSignaturesWorker { 169 | + VerifyMultipleAggregateSignaturesWorker(const Napi::CallbackInfo &info) 170 | # void Setup() override 171 | # void Execute() override 172 | + Napi::Value GetReturnValue() override 173 | - bool _result 174 | - std::unique_ptr _ctx 175 | - SignatureSetArray _sets 176 | } 177 | 178 | class AggregatePublicKeysWorker { 179 | + AggregatePublicKeysWorker(const Napi::CallbackInfo &info, size_t arg_position) 180 | # void Setup() override 181 | # void Execute() override 182 | + Napi::Value GetReturnValue() override 183 | - bool _is_valid 184 | - blst::P1 _result 185 | - PublicKeyArgArray _public_keys 186 | } 187 | 188 | class AggregateSignaturesWorker { 189 | + AggregateSignaturesWorker(const Napi::CallbackInfo &info, size_t arg_position) 190 | # void Setup() override 191 | # void Execute() override 192 | + Napi::Value GetReturnValue() override 193 | - bool _is_valid 194 | - blst::P2 _result 195 | - SignatureArgArray _signatures 196 | } 197 | 198 | class TestWorker { 199 | + TestWorker(const Napi::CallbackInfo &info) 200 | # void Setup() override 201 | # void Execute() override 202 | + Napi::Value GetReturnValue() override 203 | - TestPhase _test_phase 204 | - int32_t _test_case 205 | - std::string _return_value 206 | } 207 | 208 | Napi_Addon --|> BlstTsAddon 209 | BlstBase --|> BlstAsyncWorker 210 | Napi_AsyncWorker --|> BlstAsyncWorker 211 | 212 | BlstBase --|> Uint8ArrayArg 213 | BlstBase --|> Uint8ArrayArgArray 214 | 215 | BlstBase --|> SecretKey 216 | Napi_ObjectWrap --|> SecretKey 217 | 218 | 
BlstBase --|> PublicKey 219 | Napi_ObjectWrap --|> PublicKey 220 | BlstBase --|> PublicKeyArg 221 | BlstBase --|> PublicKeyArgArray 222 | 223 | BlstBase --|> Signature 224 | Napi_ObjectWrap --|> Signature 225 | BlstBase --|> SignatureArg 226 | BlstBase --|> SignatureArgArray 227 | 228 | BlstAsyncWorker --|> AggregateVerifyWorker 229 | BlstAsyncWorker --|> VerifyMultipleAggregateSignaturesWorker 230 | BlstAsyncWorker --|> AggregatePublicKeysWorker 231 | BlstAsyncWorker --|> AggregateSignaturesWorker 232 | BlstAsyncWorker --|> TestWorker 233 | BlstBase --|> SignatureSet 234 | BlstBase --|> SignatureSetArray 235 | ``` 236 | -------------------------------------------------------------------------------- /doc/classes.md: -------------------------------------------------------------------------------- 1 | # Classes 2 | 3 | ```c++ 4 | template 5 | class InstanceWrap {} 6 | 7 | template 8 | class ObjectWrap : public InstanceWrap, public Reference {} 9 | 10 | template 11 | class Addon : public InstanceWrap {} 12 | ``` 13 | -------------------------------------------------------------------------------- /doc/debugging.md: -------------------------------------------------------------------------------- 1 | # Debugging Addons 2 | 3 | Special thanks to [@a7ul](https://github.com/a7ul/) for writing [this blog post](https://medium.com/@a7ul/debugging-nodejs-c-addons-using-vs-code-27e9940fc3ad). It was very helpful. 4 | 5 | [This issue](https://github.com/nodejs/node/issues/26667) in `nodejs/node` also gave a lot of insight and a [comment](https://github.com/nodejs/node/issues/26667#issuecomment-475329557) provided a good combination of `valgrind` flags to successfully highlight a memory leak using `Docker` on my mac. 6 | 7 | ## Setting-Up `Valgrind` 8 | 9 | ## Debugging by Example 10 | 11 | _**note:** This section was driven by a learning challenge. There was a very difficult bug that was induced by a compiler-generated function. 
Compiler-generated functions are a big reason hard-core `c` programmers do not like `c++`, but that is a hotly debated topic. 12 | 13 | ### The "bug" 14 | 15 | To be fair it happened two different times, in two slightly different contexts. It's not a "bug", it's a "feature"... I was attempting to create a vector of complex objects, that each had a `unique_ptr` as a member variable like this: 16 | 17 | ```c++ 18 | class PublicKey { 19 | private: 20 | std::unique_ptr; 21 | } 22 | 23 | main() { 24 | std::vector arr; 25 | PublicKey key{...}; 26 | arr.push_back(key) // segfault 27 | } 28 | ``` 29 | 30 | There were a few iterations of this rough idea but the gist is that an invalid "default" function was causing a segfault. I had gone through a bunch of refactoring to remove the segfault so in some cases it was the copy constructor and other it was the move assignment operator overload. Bottom line though is this is how I figured it out and that process was a huge learning hurdle for me. 31 | 32 | ### Finding and Fixing "the bug" 33 | 34 | No matter how I wrote the code it did not work. There was no obvious bugs in any of the code that I looked at and the truth is there were in-fact 0 bugs in the written code. That is why I was not able to spot it. The bug was in a compiler-generated function. 35 | 36 | In order to find this elusive bug I had to learn to use the debugger. I needed to step through execution to get more insight. The obvious insight, that I kinda already figured out empirically, was the error was in the `push_back`. But stepping-in led to a few "flashes/jumps" and bam segfault again. 37 | 38 | I saw nothing helpful and was a bit defeated. I had no more tutorials to watch, no more breadcrumbs to google and most importantly no idea what was causing the error. I ended up working on something else and came back to this after a refactor of some other stuff. 
39 | 40 | In between the first encounter and seeing the issue again under different circumstances I had really studied move semantics closely. Specifically I learned to delete the operators and constructors that I did not want the compiler to auto-generate. 41 | 42 | This time a lightning bolt struck. The debugger stepped to the `= default` function declaration and I was able to see the name of the function causing the crash. I forget now if it was a move or a copy I was going for that attempt, but I hand wrote the "default" constructor and assignment-operator and finally saw what line the actual crash was on. After playing with it for a while the suite or problems stemmed from invalid smart pointer initialization or attempting to copy the `unique_ptr`, not move it (move semantics is not a default thing). 43 | 44 | When in debugger mode, compiler-generated functions don't have a "line" to jump to because it's just the machine code that gets infilled. The map doesn't have line reference so nothing shows and all one sees is flashes while the IDE is rendering whatever it is it renders in the instant before the debug segfault screen shows up. 45 | 46 | Once I saw what happened it was a simple matter of googling the surrounding topic, and the rule of 3 and 5 become prominent reminders of a hard fought battle. 47 | -------------------------------------------------------------------------------- /doc/definitions.md: -------------------------------------------------------------------------------- 1 | # Definitions 2 | 3 | ## `Isolate` 4 | 5 | A `v8::Isolate` is the representation of a complete JavaScript execution environment. It manages execution flow, the stack and the garbage collector. It is also the place where all the JavaScript "values" live. 
6 | 7 | ## `HandleScope` 8 | 9 | `Napi::HandleScope` is the abi-stable representation of the [HandleScopeWrapper](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/js_native_api_v8.cc#L113) around a [`v8::HandleScope`](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/deps/v8/include/v8-local-handle.h#L77). 10 | 11 | ## `v8::Context` 12 | 13 | `Context` is a sand-boxed execution environment that allows separate, unrelated, JavaScript code to run in a single instance of the JavaScript engine. The JavaScript virtual machine implements the [Command Pattern](https://en.wikipedia.org/wiki/Command_pattern), and each message in the callback queue is a "request for invocation" in an explicit evaluation context. The context does a few things but most importantly is setting up the lexical environment associated with the function. 14 | 15 | This is an excerpt from "context.h": 16 | 17 | ```c++ 18 | /** 19 | * JSFunctions are pairs (context, function code), sometimes also called 20 | * closures. A Context object is used to represent function contexts and 21 | * dynamically pushed 'with' contexts (or 'scopes' in ECMA-262 speak). 22 | * 23 | * At runtime, the contexts build a stack in parallel to the execution 24 | * stack, with the top-most context being the current context. All contexts 25 | * have the following slots: 26 | * 27 | * [ scope_info ] This is the scope info describing the current context. It 28 | * contains the names of statically allocated context slots, 29 | * and stack-allocated locals. 30 | * 31 | * [ previous ] A pointer to the previous context. 32 | */ 33 | class Context : public TorqueGeneratedContext { 34 | /* 35 | * Lookup the slot called 'name', starting with the current context. 
36 | * There are three possibilities: 37 | * 38 | * 1) result->IsContext() 39 | * 40 | * 2) result->IsJSObject(): 41 | * The binding was found as a named property in a context extension 42 | * object, as a property on the subject, or as a property of the global 43 | * object. 44 | * 45 | * 3) result->IsModule(): 46 | * 47 | * 4) result.is_null(): 48 | * There was no binding found 49 | */ 50 | static Handle Lookup( 51 | Handle context, 52 | Handle name, 53 | ...); 54 | } 55 | ``` 56 | -------------------------------------------------------------------------------- /doc/environment.md: -------------------------------------------------------------------------------- 1 | # Environment 2 | 3 | The first argument of every `napi` function is an `napi_env` object. This object represents the environment in which a function was called. `Napi::Env` is the `C++` wrapper around `napi_env`, which more specifically is an [opaque structure](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/js_native_api_types.h#L24) whose [implementation](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/js_native_api_v8.h#L52) instructs how the underlying constructs all relate to each other. It is a abi-stable simplification of the full [node::Environment](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/env.h#L533) 4 | 5 | ## Bringing It All Together 6 | 7 | The `node::Environment` is where the [`v8::Isolate`](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/env.h#L1001) meets, [`libuv`](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/env.h#L1003-L1009) and the [process meta](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/env.h#L1039-L1050). 
8 | 9 | "[Environment](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/env.h#L533) is a per-isolate data structure that represents an execution environment. Each environment has a principal realm and can create multiple subsidiary synthetic realms." `node::Realm` is a container for a set of JavaScript objects and functions that associated with a particular global environment. 10 | 11 | While `v8` does manage "all thing JavaScript", the event loop is not actually part of the JavaScript spec. [Event Loops](https://html.spec.whatwg.org/multipage/webappapis.html#event-loops) are included as part of the html spec and are implemented by the browser, not the engine. `libuv` provides the Node.js event loop and the asynchronous callback (and I/O) abstraction. The `Environment` is the glue that binds everything together. 12 | -------------------------------------------------------------------------------- /doc/errors.md: -------------------------------------------------------------------------------- 1 | # Errors 2 | 3 | It is [possible](https://github.com/nodejs/node-addon-api/blob/main/doc/setup.md) to build Addons with, or without, errors turned on. It was [one](./readme.md#error-handling) of the decisions that was made when implementing this library. 4 | 5 | ## `C` Errors 6 | 7 | There is no `throw` keyword in `C`. See [Passing and Returning from Functions](./js-perspective-on-c.md#passing-to-and-returning-from-functions) for more information. Make sure to check all `napi_status` return values. How errors are queued on the `napi_env` is beyond the scope of this document. 8 | 9 | ## `C++` Errors 10 | 11 | It's possible to `throw` when building as `C++`. Errors that get `throw`n percolate up to the JS context. 
12 | 13 | ```c++ 14 | throw Napi::Error::New(env, "The answer is NOT 42!!!"); 15 | ``` 16 | 17 | My understanding is stack unwinds are more "violent" at the native level, so it is also possible to hail errors in JS but execution to run smoothly in `C++`. 18 | 19 | ```c++ 20 | if (answer != 42) { 21 | Napi::Error::New(env, "Something went wrong!").ThrowAsJavaScriptException(); 22 | return env.Undefined(); 23 | } 24 | ``` 25 | 26 | In that situation the native function returns cleanly and the JS function will throw as expected. It should be noted that it is still possible for an underlying library error to throw with exceptions turned on. 27 | 28 | ## Turning `C++` Exceptions Off 29 | 30 | This is possible but not something implemented in `@chainsafe/blst-ts`. In [Napi::Value](./values.md#napi-values) there was mention about `Napi::Maybe` and this is what it is for. It represents the potential for a value to get returned. In error cases the maybe is empty. See [this](https://github.com/nodejs/node-addon-api/blob/main/doc/error_handling.md#examples-with-maybe-type-and-c-exceptions-disabled) section of the `node-addon-api` for more info. 31 | -------------------------------------------------------------------------------- /doc/js-perspective-on-c.md: -------------------------------------------------------------------------------- 1 | # A JS Developer's Perspective on `C/C++` 2 | 3 | ## What JS Developers Take For Granted 4 | 5 | JS has some serious luxuries. The runtime is forgiving because it is a single threaded, event driven, garbage collected language. The language is forgiving because it is dynamically typed and interpreted. All of that is a function of lines of code in `node`, `v8`, `libuv` and a host of other factors including good design and decent language semantics. 6 | 7 | When building native code, those things need to be done manually. Memory will need to be managed. Threading context will need to be understood. 
This is not to say that it is impossible to write good code in `C/C++` but it is a lot more to think about than writing JS. 8 | 9 | As an example, async in JS can be confusing for new developers. Some docs and blogs were written very early and use patterns like callbacks. Some are from much later and use async generators. Along with structural changes, there have also been stylistic shifts over time. This same thing happened in `C/C++`. 10 | 11 | The differences that are seen across docs/blogs/etc stem from `C` being quite old and lots of patterns changed along the way. Another piece is that `C` and `C++` are very similar, but they are not the same. Much like `TS` is an extension of `JS`, `C++` is an extension of `C`. So like `JS/TS` there are `C` and `C++` ways to do things, and in many contexts both ways are "valid." 12 | 13 | ## Memory Management 14 | 15 | Stack and heap allocation are both similar and different to JS and it is important to understand the nuances because they have greatly influenced this library. In particular, focus on how everything gets freed. It's easy to allocate memory but tracking and cleaning up the allocations is where the challenge lies. Many of the paradigms in `C/C++` are inherently, or explicitly, implemented for resource management. Because this library is primarily `C++` we will use [RAII](https://en.cppreference.com/w/cpp/language/raii) and OOP to help us along the way. 16 | 17 | One of the big things I noticed, on my journey to native code, is that its generally much easier to deal with stack allocated objects. Working with the heap feels harder and more error prone. Tutorials read like horror novels and its all a bit frightening. Stack allocation is much more natural as it mimics JS, but there are a few sticky wickets to pay attention to. 18 | 19 | First is that not everything can be stack allocated (see the discussion about [`blst::Pairing`](./blst.md#initialization-of-blstpairing) for a got-ya). 
There was another thing that caught me by surprise but intuitively makes sense once I thought about it. Stack overflow is real and arrays have lots of stuff in them. In native code there is the possibility that user data may be bigger than the stack can hold. I know... This is intuitive... But in JS those are things we don't really think about so it wasn't second nature yet. This is a real and dangerous risk. 20 | 21 | Another big piece is that in `C`, all functions are pass AND return by value. Full stop. It is possible to pass references and pointers yes, but those are both first class citizens that get passed by value. The whole premise behind creating the references/pointers is that the "value" associated is much smaller so the copy operation is faster. In general, a pointer is the same size as the system bytes (ie. 32-bit systems have 4 byte memory addresses and 64-bit systems have 8 byte memory addresses) and references depend on both system and compiler specifics. Complex objects on the other hand can get VERY large. 22 | 23 | While the solution to most efficiency problems will be passing by ref or pointer, the specifics of how to allocate and return need to be well understood to avoid segfault. For the rest of this tutorial we will reference the following class: 24 | 25 | ```c++ 26 | class SomethingFancy { 27 | SomethingFancy(int &value) {} 28 | }; 29 | ``` 30 | 31 | ## Compiler-Generated Functions 32 | 33 | Compiler-Generated functions are code that gets auto-generated by the compiler to ease the overhead of OOP and reduce the verbosity of `C++` code, among other benefits. They are also something that can [bite hard](./debugging.md#debugging-by-example). 34 | 35 | `C++` was built to organize large code-bases by creating objects that encapsulate data with the functional behavior. This helps to organize stuff but lends to complex semantics to get around creating, destroying and moving complex objects. Move semantics is a great example. 
Not only are they complex but they are pure `C++` and represent a huge efficiency boost to keep the data throughput as high as `C`. The following are the move constructor and move-assignment operators for `SomethingFancy`: 36 | 37 | ```c++ 38 | SomethingFancy::SomethingFancy(SomethingFancy &&other); 39 | SomethingFancy &SomethingFancy::operator=(SomethingFancy &&other); 40 | ``` 41 | 42 | Just knowing that those exist and that they are NOT compiler generated is two pieces of information that do not fit into either the `C` or `JS` mental model. 43 | 44 | The following lines are NOT The same... 45 | 46 | ```c++ 47 | SomethingFancy a; 48 | SomethingFancy b{42}; 49 | SomethingFancy c = SomethingFancy{123}; 50 | ``` 51 | 52 | SomethingFancy 'a' is allocated and constructed using the "default constructor" and in this instance that is a "magic function" that is compiler created. 'b' is allocated and created in-place using the declared constructor as would be expected. 'c' is... complex. The compiler-created default constructor is being used to allocate memory at 'c' and a right-hand reference is created using the declared constructor. Then the compiler-created destructor deletes the temporary at 'c' leaving an empty allocation, and then the value in the right-hand reference is copied to the allocation at 'c' using the compiler-created "magic" copy assignment operator. Lots and lots of confusing compiler slight-of-hand. 53 | 54 | While this is not intended to be a full tutorial these idea were something that definitely took getting used to and those "magic" functions are very hard to debug. I would also like to make note that the above example is strictly a `C++` thing. `SomethingFancy` is not a built in type and constructor syntax with an initializer list are not `C`, they are '`C++`'. The ideas of right-hand reference, operator overloading and constructor/destructors are all "`++`" too. 
55 | 56 | ```c 57 | int a; 58 | int b{42}; // not valid C, but is valid C++ 59 | int c = 42; 60 | ``` 61 | 62 | It is valid, and often times required, to allocate memory but to no initialize it. In JS, the uninitialized value is `undefined`. In `C` using an uninitialized variable is "undefined behavior". 63 | 64 | With `C` as the context, 'a' is uninitialized and can be dangerous. 'b' is invalid syntax and 'c' is an integer initialized to 42. 65 | 66 | With `C++` as the context, 'a' is treated the same as raw `C`. 'b' is an integer initialized to 42 using constructor syntax. 'c' is an integer initialized to 42 using assignment syntax. 67 | 68 | ## Passing To and Returning From Functions 69 | 70 | Building functions happen for the same reasons in `C` as they do in JS. They help to ease the mental burden for "what something does". The differences in memory allocation drive very different coding paradigms though. 71 | 72 | In modern JS we generally pass in what is needed inside the function context, return back the values that result, and if an error occurs it is thrown. That pattern generally holds true for both sync and async functions. 73 | 74 | `C` stack-allocation semantics, and the resulting variable scoping, often result in a pattern where both input and output arguments are passed and then the function either returns `void` or `int` (to denote the error code where `0` represents no error) to show if valid execution occurred. Generally one sees variables allocated for both inputs and return(s) in the calling context and then those variables are passed into the function. Many times the function call is placed in a conditional to check execution status. 
75 | 76 | ```c 77 | KZGSettings settings; // this is the output of load_trusted_setup_file() 78 | FILE *file_handle = fopen(******, "r"); // this is the input of load_trusted_setup_file() 79 | C_KZG_RET ret = load_trusted_setup_file(&settings, file_handle); 80 | 81 | if (fclose(file_handle) != 0) { 82 | if (ret == C_KZG_OK) { 83 | free_trusted_setup(&(data->settings)); 84 | } 85 | } 86 | 87 | if (ret != C_KZG_OK) { 88 | return 1; 89 | } 90 | 91 | return 0; 92 | ``` 93 | 94 | There is one more paradigm that is worth explicitly stating. Note in this example that `fclose` is in the conditional and `load_trusted_setup_file` is not. `load_trusted_setup_file` is a function where the return value needs to be handled for either valid or invalid execution (i.e. valid but if `fclose` fails need to `free_trusted_setup` and for invalid execution the function returns `1`). `fclose` however only requires that invalid execution requires further work. 95 | 96 | There are a few challenges that emerge from the way that `C` functions are structured. In very large code-bases, its very difficult to track allocations and de-allocations because they do not always occur in the same functions. 97 | 98 | In `C++`, everything from above is true and there are some additional paradigms that should be presented. Objects have higher complexity for initialization/clean-up and bigger sizes. Those "enhancements" and the `class` paradigm are where most of the complexity in `C++` (relative to `C`) comes in. The trade off for the complexity is RAII. Or I suppose RAII emerged because of the complexity and it is a very nice paradigm for large and complex systems. 99 | 100 | RAII helps to overcome a lot of the challenges that emerge with allocations and de-allocations that occur in different locations in the codebase. Most of the "magic" that happens in `C++` is the compiler filling in "defaults" so that the class structure isn't overly verbose. 
It is not necessary to write functions for constructors, destructors, copy semantics nor move semantics. They get "auto" generated for us, sometimes with disastrous consequences. 101 | 102 | The "rule of 3" and "rule of 5" were created to crystalize the mental paradigm so that developers do not get bitten by the magic/helpers that get induced. 103 | 104 | ## Closing Thoughts 105 | 106 | It is simple to go watch some coding tutorials, like I did, and whip out a ton of code "that works". Where most `C/C++` code tutorials lack is this kind of stuff. Best-practices. Coding paradigms. Relational anecdotes to paradigms I already understood... The stuff that a senior dev would lean over and tell us Juniors if we still had a water cooler to talk by... 107 | 108 | If you too have had a hard-fought win while learning system-level programming as a JS dev, this is the place to share that red badge of courage. 109 | -------------------------------------------------------------------------------- /doc/maybe.md: -------------------------------------------------------------------------------- 1 | 2 | ```c++ 3 | inline const std::string& Error::Message() const NAPI_NOEXCEPT { 4 | if (_message.size() == 0 && _env != nullptr) { 5 | #ifdef NAPI_CPP_EXCEPTIONS 6 | try { 7 | _message = Get("message").As(); 8 | } catch (...) { 9 | // Catch all errors here, to include e.g. a std::bad_alloc from 10 | // the std::string::operator=, because this method may not throw. 
11 | } 12 | #else // NAPI_CPP_EXCEPTIONS 13 | #if defined(NODE_ADDON_API_ENABLE_MAYBE) 14 | Napi::Value message_val; 15 | if (Get("message").UnwrapTo(&message_val)) { 16 | _message = message_val.As(); 17 | } 18 | #else 19 | _message = Get("message").As(); 20 | #endif 21 | #endif // NAPI_CPP_EXCEPTIONS 22 | } 23 | return _message; 24 | } 25 | ``` 26 | -------------------------------------------------------------------------------- /doc/multi-threading.md: -------------------------------------------------------------------------------- 1 | # Multi-Threading 2 | 3 | It is possible to achieve multi-threading in a couple of ways with native `node` modules. The first option is to use `node` threads and let `libuv` do the scheduling of work. The second is to use native `c`-level `std::thread`(s). A third method is to use the two together in tandem. 4 | 5 | This library was first implemented using the third method and then rewritten to use the first. We will discuss them all in turn though. 6 | 7 | ## `std::thread` Multi-Threading 8 | 9 | Start from a basis that using native threads by themselves is not a great solution. It implies spawning native c threads while on the JS thread. Not only will the function call be blocking, but the native threads will compete with the `libuv` threads and there will be thread-level context switching. No bueno... 10 | 11 | ## Mixed Multi-Threading 12 | 13 | In the first pass of building the `@chainsafe/blst-ts` library this is the approach that was attempted. Use the method below to create an AsyncWorker and while on the worker thread spawn a native `C` thread pool and re-parallelize the library calls. It was shockingly fast. I have 10 cores so if a call with the SWIG bindings took 10 second, a call to the new bindings would take 1. Drop the mic. 14 | 15 | This is not as good as it seems though. 
While it looks great on paper, under real server load the library thread pool would compete for time with the node thread pool and there would be an unnecessary amount of context switching. It is probably better to treat the batch process as a unit of work and run it on a separate thread. Aggregated across the whole server load, the work will be managed much better. The heart of what JS does well is managing thread work, just let the engine do what it does best. 16 | 17 | ## `libuv` Multi-Threading 18 | 19 | The most predictable way to handle multi-threading is to use the built-in thread pool that ships with `node`. It is the magic behind "single-threaded execution" that makes `node` so easy to reason about. This method was ultimately implemented. 20 | 21 | Work is submitted to the thread-pool and queued for execution in the order it was received. There are a number of constructs available through `n-api` and the discussion of several can be found in the [`n-api`](./native-node.md) overview. 22 | 23 | The functions that are used by `blst-ts` are the `napi_*_async_work` functions. They are implemented using the `Napi::AsyncWorker` class. The constructor and destructor call `napi_create_async_work` and `napi_delete_async_work` respectively. `Napi::AsyncWorker::Queue` is responsible for calling `napi_queue_async_work`. You can see the implementation of [napi_create_async_work](https://github.com/nodejs/node/blob/4166d40d0873b6d8a0c7291872c8d20dc680b1d7/src/node_api.cc#L1138) to get an idea of how `node-addon-api` relates to `n-api` and how that integrates with underlying `v8` and `libuv` libraries. 24 | 25 | ## Returning Promises from Native Code 26 | 27 | There are some distinct challenges to structuring asynchronous code. The invocation will begin with context provided by the incoming `Napi::CallbackInfo` and that context will cease once the calling function returns. 
The actual work will happen at some point in the future, as will returning a value to the calling context. 28 | 29 | The `Napi::Promise::Deferred` will help us bridge the gap between the calling context and returning context. Holding handles to and from Phase 2, where the work happens, is up to the implementer. The section about [values](./values.md) will provide background on the topic. tl/dr; one needs to create some `References` and then some handles to the native-compatible underlying data. 30 | 31 | Most of the parsing MUST happen on thread as there are calls in the underlying code to `napi_env`. This is the critical no-no of native JS code. There can be zero interaction with the JS runtime unless the function is actively running on-thread. As an example a `Napi::String` will be what is delivered to native code but in order to get access to the `std::string` or `char *` one will need to get that on thread. Just having a `napi_value` handle does not necessarily mean you have access to the actual data. That needs to be done before returning from the original calling context (function coming in from JS). 32 | 33 | The same applies for return values that go back from worker-thread context to JS. As an example the value that returns from `*Verify` functions. The actual `bool` derived from the native function would be stored on the Worker and then in the `GetReturnValue` it would be converted via `Napi::Boolean::New`. All functions that run on-thread will take `napi_env` as the first argument and creating `New` values is no exception. 34 | 35 | This is a stripped-down version of the `BlstAsyncWorker` to show what members it holds. `_deferred` is the handle spoken about above. It holds the methods `_deferred.Resolve` and `_deferred.Reject` that are used to return values to JS. 
36 | 37 | ```c++ 38 | class BlstAsyncWorker : public Napi::AsyncWorker 39 | { 40 | protected: 41 | const Napi::CallbackInfo &_info; 42 | Napi::Env _env; 43 | BlstTsAddon *_module; 44 | 45 | private: 46 | Napi::Promise::Deferred _deferred; 47 | bool _use_deferred; 48 | std::string _error; 49 | }; 50 | ``` 51 | 52 | By placing the handles on a class they are available to the methods throughout the phases of execution. The class is structured so that one function runs on thread to parse the incoming `CallbackInfo` a second runs the "work" and third converts the return value from a native member of the Worker into a JS value that gets sent to the original context. 53 | -------------------------------------------------------------------------------- /doc/native-node.md: -------------------------------------------------------------------------------- 1 | # Native Node Modules 2 | 3 | Running "native" code via node.js is nothing new. What has changed over the years is how native modules are built. Originally, they were built by calling the underlying libraries like `v8` and `libuv` directly. All people would do was bring in a header file and off they went, roughly... 4 | 5 | There were lots of internal API changes that were breaking and addon code needed to be updated for each library change so gyp would build. I remember fondly watching c compile v8 warnings on `npm install` in the early days. NAN was created as an open source alternative to ease the maintenance burden, and it was popular enough that the node team built in something similar. 6 | 7 | Que `Node-API` 8 | 9 | tl/dr? Skip to the end of [`napi-rs`](#napi-rs) for a surprise. 10 | 11 | ## `Node-API` 12 | 13 | Node is a `C++` application so one would think that exporting a `C++` api would be the thing to do. 
But for portability the team created `Node-API` as a `C` API, and this design decision has opened up node to every language that is compatible with the `C` [FFI](https://en.wikipedia.org/wiki/Foreign_function_interface#Operation_of_an_FFI) standard. That is most btw. It is technically possible to write node addons in C, C++, Rust, Go, Java, C#, etc... 14 | 15 | `Node-API` is built and compiled in with node so those tokens are available at runtime to any dynamically linked library. All that is necessary to use it is the header file `node_api.h` which can be found [here](https://github.com/nodejs/node/blob/main/src/node_api.h). 16 | 17 | ## `node-addon-api` 18 | 19 | `node-addon-api` is a header-only `C++` library that is published and installed via [npm](https://www.npmjs.com/package/node-addon-api). It is broken into two files [napi.h](https://github.com/nodejs/node-addon-api/blob/main/napi.h), which has all of the class definitions, and [napi-inl.h](https://github.com/nodejs/node-addon-api/blob/main/napi-inl.h) that contains the function implementations. 20 | 21 | We are going to look at async work as an example and analyze how the two pieces fit together and how the `C` code patterns and `C++` patterns interact. 
22 | 23 | ```c++ 24 | /** 25 | * 26 | * `node-addon-api` 27 | * 28 | */ 29 | class AsyncWorker 30 | { 31 | public: 32 | void Queue() { 33 | napi_status status = napi_queue_async_work(_env, _work); 34 | NAPI_THROW_IF_FAILED_VOID(_env, status); 35 | } 36 | private: 37 | napi_env _env; 38 | napi_async_work _work; 39 | } 40 | /** 41 | * 42 | * Below here is `nodejs/node` 43 | * 44 | */ 45 | napi_status NAPI_CDECL napi_queue_async_work( 46 | napi_env env, 47 | napi_async_work work 48 | ) { 49 | CHECK_ENV(env); 50 | CHECK_ARG(env, work); 51 | 52 | uv_loop_t *event_loop = nullptr; 53 | STATUS_CALL(napi_get_uv_event_loop(env, &event_loop)); 54 | 55 | uvimpl::Work *w = reinterpret_cast(work); 56 | 57 | w->ScheduleWork(); 58 | 59 | return napi_clear_last_error(env); 60 | } 61 | ``` 62 | 63 | That code excerpt is a mix of the two files mentioned [above](#node-api) and shows the declaration and implementations together. Below the class is the `C` implementation of `napi_queue_async_work` that is found in the `nodejs/node`. 64 | 65 | The [full async implementation](./reference.md#full-async-implementations) is available in the reference section. 66 | 67 | ## Similarities and Differences 68 | 69 | The `C` and `C++` apis' are nearly identical. The 2 exceptions are: 70 | 71 | - New features arrive in `C` first. It generally takes a few months for the `C++` api to catch up 72 | - Not all features get implemented in `node-addon-api` but its a small list 73 | 74 | See the [`C/C++` big decision](./readme.md#c-vs-c) for more differences. 75 | 76 | ## `napi-rs` 77 | 78 | While its possible to write native addons in most languages, the only two that are officially supported are `C` and `C++`. `Rust` however, is not forgotten. `napi-rs` is a `Rust` library that uses `Node-API`, as `extern C`, and wraps it in a `Rust`-friendly API. 79 | 80 | You can find the docs [here](https://napi.rs/). 
81 | 82 | ### The `napi-rs` Kicker 83 | 84 | I considered using the [blst `Rust` bindings](https://github.com/supranational/blst/tree/master/bindings/rust) and wrapping them in `napi-rs` as it mostly has the functions we need. We could wrap the bindings in a thin wrapper to massage it into our PKI API. 85 | 86 | Unfortunately, the bindings are already multi-threaded and will suffer the same issues as the [mixed multi-threading approach](./multi-threading.md#mixed-multi-threading). It would also be a new language to learn. It also feels a bit silly going from `C` to `Rust` to `C` to `C++`. I am also not sure if there will be a performance hit for the extra layer of abstraction. Lots of "also's" there. I considered doing a performance analysis to see what turns up, but my guess is that there will be negligible. 87 | 88 | Given the feature sets and learning curves of `C++` vs `Rust`, and the memory paradigm of `Rust`, I would consider building bindings in `Rust` if we were building a new application from scratch. But for this project, I think `C++` is the right choice. 89 | -------------------------------------------------------------------------------- /doc/readme.md: -------------------------------------------------------------------------------- 1 | # Intro 2 | 3 | This document is an attempt to provide a more approachable introduction to `@chainsafe/blst-ts` for JavaScript developers. The docs for `Node-API` and the other dependencies are terse, long and geared towards `C/C++` developers. On top of that, this library is critical to runtime efficiency for [Lodestar](https://github.com/ChainSafe/lodestar) so more than one viewpoint will benefit both projects and the community. 4 | 5 | My goal is to share some resources that were helpful, decisions that were made, to give an overview of native addons, nuances of working with the dependencies, and most importantly how that all fits together at runtime. 
6 | 7 | Hopefully this guide will help inform the team for a thorough review process and onboard new contributors for maintenance. I also want to lower the barrier for using native code at ChainSafe, so if there is a potential opportunity through native modules, it will be an easier decision-making and development process. 8 | 9 | ## Table of Contents 10 | 11 | 1. [Introduction](./intro.md) 12 | - [Table of Contents](#table-of-contents) 13 | - [The Big Decisions](#the-big-decisions) 14 | - [`C` vs `C++`](#c-vs-c) 15 | - [Callbacks vs. Promises](#callbacks-vs-promises) 16 | - [Error Handling](#error-handling) 17 | 2. [`@chainsafe/blst-ts`](./repo.md) 18 | - [Organization](./repo.md#organization) 19 | - [Scripts](./repo.md#scripts) 20 | - [Dependencies](./repo.md#dependencies) 21 | - [Style Guide](./repo.md#style-guide) 22 | 3. [`supranational/blst`](./blst.md) 23 | - [Library Overview](./blst.md#overview) 24 | - [Repo Structure](./blst.md#structure) 25 | - [Existing `node.js` Bindings](./blst.md#existing-node-bindings) 26 | - [Initialization of `blst::Pairing`](./blst.md#initialization-of-blstpairing) 27 | 4. [Native Node Modules](./native-node.md) 28 | - [`Node-API`](./native-node.md#node-api) 29 | - [`node-addon-api`](./native-node.md#node-addon-api) 30 | - [Similarities and Differences](./native-node.md#similarites-and-differences) 31 | - [`napi-rs`](./native-node.md#napi-rs) 32 | 5. [Structuring Addons](./structuring-addons.md) 33 | - [Phases of Execution](./structuring-addons.md#phases-of-execution) 34 | - [Complex Data Types](./structuring-addons.md#complex-data-types) **_(still working)_** 35 | - [Context-Awareness](./structuring-addons.md#context-awareness) 36 | 6. [Building Addons](./building.md) 37 | - [Build Tools](./building.md#build-tools) 38 | - [`node-gyp`](./building.md#node-gyp) 39 | - [`binding.gyp`](./building.md#bindinggyp) 40 | - [Adding a Library as a Dependency](./building.md#adding-a-library-as-a-dependency) 41 | 7. 
[Debugging Addons](./debugging.md) 42 | - [Setting-Up `Valgrind`](./debugging.md#setting-up-valgrind) **_(still working)_** 43 | - [Debugging by Example](./debugging.md#compiler-generated-functions-in-c) 44 | - [The "bug"](./debugging.md#the-bug) 45 | - [Finding and Fixing "the bug"](./debugging.md#finding-and-fixing-the-bug) 46 | 8. [The `Environment`](./environment.md) 47 | - [Definitions](./environment.md#definitions) 48 | - [Bringing It All Together](./environment.md#bringing-it-all-together) 49 | 9. [JavaScript Values Under-the-Hood](./values.md) **_(still working)_** 50 | - [Allocation and De-Allocation](./values.md#allocation-and-de-allocation) 51 | - [`v8::Value` and `Napi::Value`](./values.md#v8value-and-napivalue) 52 | - [`v8::HandleScope` and `Napi::HandleScope`](./values.md#v8handlescope-and-napihandlescope) 53 | - [Lexical Context](./values.md#lexical-context) 54 | - [The Reference System](./values.md#the-reference-system) 55 | 10. [JS Classes](./classes.md) **_(still working)_** 56 | 11. [Multi-Threading](./multi-threading.md) 57 | - [`std::thread` Multi-Threading](./multi-threading.md#stdthread-multi-threading) 58 | - ["Mixed" Multi-Threading](./multi-threading.md#mixed-multi-threading) 59 | - [`libuv` Multi-Threading](./multi-threading.md#libuv-multi-threading) 60 | - [Returning Promises from Native Code](./multi-threading.md#returning-promises-from-native-code) 61 | 12. [Errors](./errors.md) 62 | - [`C` Errors](./errors.md#c-errors) 63 | - [`C++` Errors](./errors.md#js-errors) 64 | - [Turning `C++` Exceptions Off](./errors.md#turning-c-exceptions-off) 65 | 13. 
[A JS Developer's Perspective on `C/C++`](./js-perspective-on-c.md) 66 | - [What JS Developers Take For Granted](./js-perspective-on-c.md#what-js-developers-take-for-granted) 67 | - [Memory Management](./js-perspective-on-c.md#memory-management) 68 | - [Compiler-Generated Functions](./js-perspective-on-c.md#compiler-generated-functions) 69 | - [Passing To and Returning From Functions](./js-perspective-on-c.md#passing-to-and-returning-from-functions) 70 | - [Closing Thoughts](./js-perspective-on-c.md#closing-thoughts) 71 | 72 | ## The Big Decisions 73 | 74 | ### `C` vs `C++` 75 | 76 | Most of the docs and blogs are written for `C++` and while I was researching things I found it rare to see examples using the raw `Node-API`. After working with both it has become very clear to me why that is. In `C++` the best way to keep track of allocations is RAII, through implementation of OOP, where classes clean up themselves. 77 | 78 | In `C` the implementation of bindings code takes the member functions off of the classes. One creates a `struct` with a set of associated free functions. The functions CRUD the `struct` appropriately and it is generally passed as the first argument to the associated functions. 79 | 80 | The ultimate deciding factor was that using `node-addon-api` is easier. The class structure makes a lot of well informed choices that are difficult to implement independently. A big thing is that doing async is very tricky in `C`. There are a lot of phases that need to be handled explicitly and the classes [implement](./reference.md#node-addon-api) lines of code that would need to be written by hand just to make the `C` api "work". 81 | 82 | While writing the bindings for EIP-4844 I was [requested](https://github.com/ethereum/c-kzg-4844/pull/177#discussion_r1127851634) to use the `C` API for a section of code so it is definitely possible. 
That was synchronous boilerplate code that had an easy-to-follow [example](https://nodejs.github.io/node-addon-examples/special-topics/context-awareness/#bindingc). For complex situations like TS union types and multi-stage execution, `C` can be very difficult to implement. 83 | 84 | ### Callbacks vs Promises 85 | 86 | Callbacks feel antiquated and native-level support for promises exists. This was a pretty easy choice. 87 | 88 | ### Error Handling 89 | 90 | Turning errors off adds a lot of complexity with little benefit. See [Setup](https://github.com/nodejs/node-addon-api/blob/main/doc/setup.md) and [error handling](https://github.com/nodejs/node-addon-api/blob/main/doc/error_handling.md) in the `node-addon-api` docs for more info. 91 | -------------------------------------------------------------------------------- /doc/repo.md: -------------------------------------------------------------------------------- 1 | # The `@chainsafe/blst-ts` Repository 2 | 3 | ## Organization 4 | 5 | Some research was done looking through [this list](https://www.npmjs.com/browse/depended/node-addon-api) of projects that have built node bindings. Many use a similar structure to `nodejs/node` so this repo is structured similarly. 6 | 7 | | folder | contents | 8 | |---|---| 9 | | `.github` | github integration files | 10 | | `.vscode` | vscode helper files | 11 | | `benchmark` | benchmarking for the library | 12 | | `deps` | native dependencies | 13 | | `doc` | documentation | 14 | | `src` | c/c++ (or other c-export format compatible) code | 15 | | `lib` | js/ts wrapper code | 16 | | `test` | unit/spec/memory-leak testing and test fixtures | 17 | | `tools` | repo tools and scripts written in TypeScript | 18 | 19 | ## Scripts 20 | 21 | There are a number of scripts to help while working in this repo. The first two are the most important to be familiar with. 
22 | 23 | | folder | contents | 24 | |---|---| 25 | `dev` | Watches files and re-runs `build/test` as appropriate 26 | `download-spec-tests` | Pulls the official ethereum spec tests 27 | `build` | Runs `node-gyp` in `Release` mode 28 | `build:debug` | Runs `node-gyp` in `Debug` mode 29 | `build:clean` | Runs `clean` and then runs a full `build` 30 | `test` | Runs unit, spec and performance tests 31 | `test:unit` | Runs unit tests 32 | `test:spec` | Runs spec tests 33 | `test:perf` | Runs performance tests 34 | 35 | ## Dependencies 36 | 37 | There are a few dependencies, however, most will be installed by `yarn` 38 | 39 | ```sh 40 | yarn install 41 | ``` 42 | 43 | The only one that needs to be explicitly handled is `blst` which is installed as a submodule 44 | 45 | 46 | ```sh 47 | git submodule update --init 48 | ``` 49 | 50 | `Openssl` is not currently a dependency of this library. `node.js` builds `openssl` from source, and uses `RAND_bytes` so that symbol should be available via dynamic linking at runtime. 51 | 52 | TODO: Need to do more research about dll linking to verify we do not need to specifically build `openssl`. This is what I am finding empirically and also how I understand it but I honestly don't know for certain and would like 100% surety. It is easy enough to add to `deps` and [build](./building.md#adding-a-library-as-a-dependency) if needed. 53 | 54 | **_note_**: There is no road map for `node.js` to move away from `openssl` but if it does this library will need to build `RAND_bytes` or replace it with another random number generator. 55 | 56 | ## Style Guide 57 | 58 | An attempt was made to use the Google style guide for C++. The linting was done via [the C++ extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools) in VSCode. It's possible that the implementation does not fully comply with the style guide. While the code looks "readable" it may be worth going through the guide again and updating for compliance. 
The style guide can be found [here](https://google.github.io/styleguide/cppguide.html). 59 | -------------------------------------------------------------------------------- /doc/structuring-addons.md: -------------------------------------------------------------------------------- 1 | # Structuring Addons 2 | 3 | ## Phases of Execution 4 | 5 | There were a couple of design decisions that evolved over a couple of iterations of this library. The part about re-parallelization is covered in [Mixed Multi-Threading](./multi-threading.md#mixed-multi-threading). Another was providing both a sync and async implementation for analysis and post-MVP decision making. 6 | 7 | While building the POC version a lot of code was non-DRY. When I first started, some of the paradigms that are written about in this documentation had not yet evolved. One of the big things I sought to do for the second iteration was to DRY up places that could be consolidated, in particular: argument parsing, validation and function setup. 8 | 9 | I originally wrote the sync and async versions of the functions separately. That highlighted a pattern. Bindings code goes through specific phases of execution and they are very clearly delineated. As a note these will be the "phases" that are referred to in the rest of this document. 10 | 11 | 1. Argument parsing, validation and conversion to native format 12 | 2. Execution of native library code 13 | 3. Return value conversion to JS format 14 | 15 | On top of that, the code for the first and third phases were identical regardless of the second phase running on the main thread or if the work was submitted as `node::ThreadPoolWork`. This revelation removed a huge amount of code and centralized some critical sections that are prone to hard-to-debug errors. It also helped to solidify the "Worker Pattern" that is utilized throughout the library. 
16 | 17 | ```c++ 18 | class BlstAsyncWorker : public Napi::AsyncWorker { 19 | public: 20 | // all that is necessary to create the Worker is the incoming function context 21 | BlstAsyncWorker(const Napi::CallbackInfo &info); 22 | 23 | // execute work on main thread or on libuv worker thread. these are 24 | // both Phase 2 25 | Napi::Value RunSync(); 26 | Napi::Value Run(); 27 | 28 | protected: 29 | // saved to preserve context 30 | const Napi::CallbackInfo &_info; 31 | Napi::Env _env; 32 | 33 | // pure virtual functions that must be implemented by the function worker 34 | // 35 | // Setup is responsible for Phase 1 36 | virtual void Setup() = 0; 37 | // GetReturnValue is responsible for Phase 3 38 | virtual Napi::Value GetReturnValue() = 0; 39 | 40 | private: 41 | Napi::Promise::Deferred _deferred; 42 | }; 43 | ``` 44 | 45 | Each function has a Worker that extends `BlstAsyncWorker`. In this context it helps to simplify the setup/return conversions and simplifies the library structure. The Worker Pattern started because of the way a JS `Promise` is constructed. There is the JS side we are familiar with, and the native side is `Napi::Promise::Deferred`. `Napi::Promise::GetDeferred` is a method that returns a native handle to the `Promise`, that gets returned to JS. The handle is stored as `_deferred`, and needs to be maintained by the Worker to resolve/reject the `Promise`. RAII manages it as a member of the extended `Napi::AsyncWorker`. It was done that way for clean-up, since the `AsyncWorker` is designed to self-destruct after returning its value. 46 | 47 | ## Complex Data Types 48 | 49 | JS is much more forgiving than `C` when it comes to data types. In particular, converting TypeScript Union types to native data can be quite verbose. There needs to be explicit type checking and conversion for each step of the unwrapping process. It is much easier to do this with a helper class. 
It is possible to not use helper classes, however there is a lot of boilerplate code that needs to be written and maintained. The helper classes are designed to be used during class construction and the results can be checked during the `Setup` phase of the Worker. This not only simplifies the code but also makes error handling much easier. 50 | 51 | ## Context-Awareness 52 | 53 | "Context Awareness" is a term that popped up during a segfault googling session. While the fix was unrelated it highlighted an important idea. Node loads dll's once and `.js` files for each `Isolate`. 54 | 55 | In the case of native addons, the `.node` file may be `require`d by different threads and each has its own `Isolate` (think `Electron` and `Workers`). The problem is they share memory space. Even though the .dll is only loaded once it is initialized by each `Isolate`. If the initialization overwrites any information required by another `Environment` there will be a segfault or undefined behavior. 56 | 57 | To ensure that addons can function under any circumstance it is best to follow a few rules: 58 | 59 | - Do not use static or globally-scoped variables 60 | - Do not use static class members (these are stored similarly) 61 | - Current best practice is to use `napi_set_instance_data` and `napi_get_instance_data` to manage `Environment` specific data 62 | 63 | More info can be found [here](https://nodejs.github.io/node-addon-examples/special-topics/context-awareness/) 64 | 65 | While it is not necessary to set "globals" as "instance data", one should at a minimum make sure to use a [cleanup function](https://nodejs.org/dist/latest-v18.x/docs/api/n-api.html#cleanup-on-exit-of-the-current-nodejs-instance) instead of trying to manually manage the memory. 
66 | -------------------------------------------------------------------------------- /index.d.ts: -------------------------------------------------------------------------------- 1 | /* tslint:disable */ 2 | /* eslint-disable */ 3 | 4 | /* auto-generated by NAPI-RS */ 5 | 6 | /** The length of a secret key. */ 7 | export const SECRET_KEY_LENGTH: number 8 | /** The length of a serialized public key. */ 9 | export const PUBLIC_KEY_LENGTH_COMPRESSED: number 10 | export const PUBLIC_KEY_LENGTH_UNCOMPRESSED: number 11 | /** The length of a serialized signature. */ 12 | export const SIGNATURE_LENGTH_COMPRESSED: number 13 | export const SIGNATURE_LENGTH_UNCOMPRESSED: number 14 | export interface SignatureSet { 15 | msg: Uint8Array 16 | pk: PublicKey 17 | sig: Signature 18 | } 19 | export interface PkAndSerializedSig { 20 | pk: PublicKey 21 | sig: Uint8Array 22 | } 23 | export interface PkAndSig { 24 | pk: PublicKey 25 | sig: Signature 26 | } 27 | /** 28 | * Aggregate multiple public keys into a single public key. 29 | * 30 | * If `pks_validate` is `true`, the public keys will be infinity and group checked. 31 | */ 32 | export declare function aggregatePublicKeys(pks: Array, pksValidate?: boolean | undefined | null): PublicKey 33 | /** 34 | * Aggregate multiple signatures into a single signature. 35 | * 36 | * If `sigs_groupcheck` is `true`, the signatures will be group checked. 37 | */ 38 | export declare function aggregateSignatures(sigs: Array, sigsGroupcheck?: boolean | undefined | null): Signature 39 | /** 40 | * Aggregate multiple serialized public keys into a single public key. 41 | * 42 | * If `pks_validate` is `true`, the public keys will be infinity and group checked. 43 | */ 44 | export declare function aggregateSerializedPublicKeys(pks: Array, pksValidate?: boolean | undefined | null): PublicKey 45 | /** 46 | * Aggregate multiple serialized signatures into a single signature. 47 | * 48 | * If `sigs_groupcheck` is `true`, the signatures will be group checked. 
49 | */ 50 | export declare function aggregateSerializedSignatures(sigs: Array, sigsGroupcheck?: boolean | undefined | null): Signature 51 | /** 52 | * Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature. 53 | * 54 | * Signatures are deserialized and validated with infinity and group checks before aggregation. 55 | */ 56 | export declare function aggregateWithRandomness(sets: Array): PkAndSig 57 | /** 58 | * Aggregate multiple public keys and multiple serialized signatures into a single blinded public key and blinded signature. 59 | * 60 | * Signatures are deserialized and validated with infinity and group checks before aggregation. 61 | */ 62 | export declare function asyncAggregateWithRandomness(sets: Array): Promise 63 | /** 64 | * Verify a signature against a message and public key. 65 | * 66 | * If `pk_validate` is `true`, the public key will be infinity and group checked. 67 | * 68 | * If `sig_groupcheck` is `true`, the signature will be group checked. 69 | */ 70 | export declare function verify(msg: Uint8Array, pk: PublicKey, sig: Signature, pkValidate?: boolean | undefined | null, sigGroupcheck?: boolean | undefined | null): boolean 71 | /** 72 | * Verify an aggregated signature against multiple messages and multiple public keys. 73 | * 74 | * If `pk_validate` is `true`, the public keys will be infinity and group checked. 75 | * 76 | * If `sigs_groupcheck` is `true`, the signatures will be group checked. 77 | */ 78 | export declare function aggregateVerify(msgs: Array, pks: Array, sig: Signature, pkValidate?: boolean | undefined | null, sigsGroupcheck?: boolean | undefined | null): boolean 79 | /** 80 | * Verify an aggregated signature against a single message and multiple public keys. 81 | * 82 | * Proof-of-possession is required for public keys. 83 | * 84 | * If `sigs_groupcheck` is `true`, the signatures will be group checked. 
85 | */ 86 | export declare function fastAggregateVerify(msg: Uint8Array, pks: Array, sig: Signature, sigsGroupcheck?: boolean | undefined | null): boolean 87 | /** 88 | * Verify multiple aggregated signatures against multiple messages and multiple public keys. 89 | * 90 | * If `pks_validate` is `true`, the public keys will be infinity and group checked. 91 | * 92 | * If `sigs_groupcheck` is `true`, the signatures will be group checked. 93 | * 94 | * See https://ethresear.ch/t/fast-verification-of-multiple-bls-signatures/5407 95 | */ 96 | export declare function verifyMultipleAggregateSignatures(sets: Array, pksValidate?: boolean | undefined | null, sigsGroupcheck?: boolean | undefined | null): boolean 97 | export declare class SecretKey { 98 | /** 99 | * Generate a secret key deterministically from a secret byte array `ikm`. 100 | * 101 | * `ikm` must be at least 32 bytes long. 102 | * 103 | * Optionally pass `key_info` bytes to derive multiple independent keys from the same `ikm`. 104 | * By default, the `key_info` is empty. 105 | */ 106 | static fromKeygen(ikm: Uint8Array, keyInfo?: Uint8Array | undefined | null): SecretKey 107 | /** 108 | * Generate a master secret key deterministically from a secret byte array `ikm` based on EIP-2333. 109 | * 110 | * `ikm` must be at least 32 bytes long. 111 | * 112 | * See https://eips.ethereum.org/EIPS/eip-2333 113 | */ 114 | static deriveMasterEip2333(ikm: Uint8Array): SecretKey 115 | /** 116 | * Derive a child secret key from a parent secret key based on EIP-2333. 117 | * 118 | * See https://eips.ethereum.org/EIPS/eip-2333 119 | */ 120 | deriveChildEip2333(index: number): SecretKey 121 | /** Deserialize a secret key from a byte array. */ 122 | static fromBytes(bytes: Uint8Array): SecretKey 123 | /** Deserialize a secret key from a hex string. */ 124 | static fromHex(hex: string): SecretKey 125 | /** Serialize a secret key to a byte array. */ 126 | toBytes(): Uint8Array 127 | /** Serialize a secret key to a hex string. 
*/ 128 | toHex(): string 129 | /** Return the corresponding public key */ 130 | toPublicKey(): PublicKey 131 | sign(msg: Uint8Array): Signature 132 | } 133 | export declare class PublicKey { 134 | /** 135 | * Deserialize a public key from a byte array. 136 | * 137 | * If `pk_validate` is `true`, the public key will be infinity and group checked. 138 | */ 139 | static fromBytes(bytes: Uint8Array, pkValidate?: boolean | undefined | null): PublicKey 140 | /** 141 | * Deserialize a public key from a hex string. 142 | * 143 | * If `pk_validate` is `true`, the public key will be infinity and group checked. 144 | */ 145 | static fromHex(hex: string, pkValidate?: boolean | undefined | null): PublicKey 146 | /** Serialize a public key to a byte array. */ 147 | toBytes(compress?: boolean | undefined | null): Uint8Array 148 | /** Serialize a public key to a hex string. */ 149 | toHex(compress?: boolean | undefined | null): string 150 | /** Validate a public key with infinity and group check. */ 151 | keyValidate(): void 152 | } 153 | export declare class Signature { 154 | /** 155 | * Deserialize a signature from a byte array. 156 | * 157 | * If `sig_validate` is `true`, the public key will be infinity and group checked. 158 | * 159 | * If `sig_infcheck` is `false`, the infinity check will be skipped. 160 | */ 161 | static fromBytes(bytes: Uint8Array, sigValidate?: boolean | undefined | null, sigInfcheck?: boolean | undefined | null): Signature 162 | /** 163 | * Deserialize a signature from a hex string. 164 | * 165 | * If `sig_validate` is `true`, the public key will be infinity and group checked. 166 | * 167 | * If `sig_infcheck` is `false`, the infinity check will be skipped. 168 | */ 169 | static fromHex(hex: string, sigValidate?: boolean | undefined | null, sigInfcheck?: boolean | undefined | null): Signature 170 | /** Serialize a signature to a byte array. */ 171 | toBytes(compress?: boolean | undefined | null): Uint8Array 172 | /** Serialize a signature to a hex string. 
*/ 173 | toHex(compress?: boolean | undefined | null): string 174 | /** 175 | * Validate a signature with infinity and group check. 176 | * 177 | * If `sig_infcheck` is `false`, the infinity check will be skipped. 178 | */ 179 | sigValidate(sigInfcheck?: boolean | undefined | null): void 180 | } 181 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | /* tslint:disable */ 2 | /* eslint-disable */ 3 | /* prettier-ignore */ 4 | 5 | /* auto-generated by NAPI-RS */ 6 | 7 | const { existsSync, readFileSync } = require('fs') 8 | const { join } = require('path') 9 | 10 | const { platform, arch } = process 11 | 12 | let nativeBinding = null 13 | let localFileExisted = false 14 | let loadError = null 15 | 16 | function isMusl() { 17 | // For Node 10 18 | if (!process.report || typeof process.report.getReport !== 'function') { 19 | try { 20 | const lddPath = require('child_process').execSync('which ldd').toString().trim() 21 | return readFileSync(lddPath, 'utf8').includes('musl') 22 | } catch (e) { 23 | return true 24 | } 25 | } else { 26 | const { glibcVersionRuntime } = process.report.getReport().header 27 | return !glibcVersionRuntime 28 | } 29 | } 30 | 31 | switch (platform) { 32 | case 'android': 33 | switch (arch) { 34 | case 'arm64': 35 | localFileExisted = existsSync(join(__dirname, 'blst.android-arm64.node')) 36 | try { 37 | if (localFileExisted) { 38 | nativeBinding = require('./blst.android-arm64.node') 39 | } else { 40 | nativeBinding = require('@chainsafe/blst-android-arm64') 41 | } 42 | } catch (e) { 43 | loadError = e 44 | } 45 | break 46 | case 'arm': 47 | localFileExisted = existsSync(join(__dirname, 'blst.android-arm-eabi.node')) 48 | try { 49 | if (localFileExisted) { 50 | nativeBinding = require('./blst.android-arm-eabi.node') 51 | } else { 52 | nativeBinding = require('@chainsafe/blst-android-arm-eabi') 53 | } 54 | } catch (e) { 
55 | loadError = e 56 | } 57 | break 58 | default: 59 | throw new Error(`Unsupported architecture on Android ${arch}`) 60 | } 61 | break 62 | case 'win32': 63 | switch (arch) { 64 | case 'x64': 65 | localFileExisted = existsSync( 66 | join(__dirname, 'blst.win32-x64-msvc.node') 67 | ) 68 | try { 69 | if (localFileExisted) { 70 | nativeBinding = require('./blst.win32-x64-msvc.node') 71 | } else { 72 | nativeBinding = require('@chainsafe/blst-win32-x64-msvc') 73 | } 74 | } catch (e) { 75 | loadError = e 76 | } 77 | break 78 | case 'ia32': 79 | localFileExisted = existsSync( 80 | join(__dirname, 'blst.win32-ia32-msvc.node') 81 | ) 82 | try { 83 | if (localFileExisted) { 84 | nativeBinding = require('./blst.win32-ia32-msvc.node') 85 | } else { 86 | nativeBinding = require('@chainsafe/blst-win32-ia32-msvc') 87 | } 88 | } catch (e) { 89 | loadError = e 90 | } 91 | break 92 | case 'arm64': 93 | localFileExisted = existsSync( 94 | join(__dirname, 'blst.win32-arm64-msvc.node') 95 | ) 96 | try { 97 | if (localFileExisted) { 98 | nativeBinding = require('./blst.win32-arm64-msvc.node') 99 | } else { 100 | nativeBinding = require('@chainsafe/blst-win32-arm64-msvc') 101 | } 102 | } catch (e) { 103 | loadError = e 104 | } 105 | break 106 | default: 107 | throw new Error(`Unsupported architecture on Windows: ${arch}`) 108 | } 109 | break 110 | case 'darwin': 111 | localFileExisted = existsSync(join(__dirname, 'blst.darwin-universal.node')) 112 | try { 113 | if (localFileExisted) { 114 | nativeBinding = require('./blst.darwin-universal.node') 115 | } else { 116 | nativeBinding = require('@chainsafe/blst-darwin-universal') 117 | } 118 | break 119 | } catch {} 120 | switch (arch) { 121 | case 'x64': 122 | localFileExisted = existsSync(join(__dirname, 'blst.darwin-x64.node')) 123 | try { 124 | if (localFileExisted) { 125 | nativeBinding = require('./blst.darwin-x64.node') 126 | } else { 127 | nativeBinding = require('@chainsafe/blst-darwin-x64') 128 | } 129 | } catch (e) { 130 | 
loadError = e 131 | } 132 | break 133 | case 'arm64': 134 | localFileExisted = existsSync( 135 | join(__dirname, 'blst.darwin-arm64.node') 136 | ) 137 | try { 138 | if (localFileExisted) { 139 | nativeBinding = require('./blst.darwin-arm64.node') 140 | } else { 141 | nativeBinding = require('@chainsafe/blst-darwin-arm64') 142 | } 143 | } catch (e) { 144 | loadError = e 145 | } 146 | break 147 | default: 148 | throw new Error(`Unsupported architecture on macOS: ${arch}`) 149 | } 150 | break 151 | case 'freebsd': 152 | if (arch !== 'x64') { 153 | throw new Error(`Unsupported architecture on FreeBSD: ${arch}`) 154 | } 155 | localFileExisted = existsSync(join(__dirname, 'blst.freebsd-x64.node')) 156 | try { 157 | if (localFileExisted) { 158 | nativeBinding = require('./blst.freebsd-x64.node') 159 | } else { 160 | nativeBinding = require('@chainsafe/blst-freebsd-x64') 161 | } 162 | } catch (e) { 163 | loadError = e 164 | } 165 | break 166 | case 'linux': 167 | switch (arch) { 168 | case 'x64': 169 | if (isMusl()) { 170 | localFileExisted = existsSync( 171 | join(__dirname, 'blst.linux-x64-musl.node') 172 | ) 173 | try { 174 | if (localFileExisted) { 175 | nativeBinding = require('./blst.linux-x64-musl.node') 176 | } else { 177 | nativeBinding = require('@chainsafe/blst-linux-x64-musl') 178 | } 179 | } catch (e) { 180 | loadError = e 181 | } 182 | } else { 183 | localFileExisted = existsSync( 184 | join(__dirname, 'blst.linux-x64-gnu.node') 185 | ) 186 | try { 187 | if (localFileExisted) { 188 | nativeBinding = require('./blst.linux-x64-gnu.node') 189 | } else { 190 | nativeBinding = require('@chainsafe/blst-linux-x64-gnu') 191 | } 192 | } catch (e) { 193 | loadError = e 194 | } 195 | } 196 | break 197 | case 'arm64': 198 | if (isMusl()) { 199 | localFileExisted = existsSync( 200 | join(__dirname, 'blst.linux-arm64-musl.node') 201 | ) 202 | try { 203 | if (localFileExisted) { 204 | nativeBinding = require('./blst.linux-arm64-musl.node') 205 | } else { 206 | nativeBinding 
= require('@chainsafe/blst-linux-arm64-musl') 207 | } 208 | } catch (e) { 209 | loadError = e 210 | } 211 | } else { 212 | localFileExisted = existsSync( 213 | join(__dirname, 'blst.linux-arm64-gnu.node') 214 | ) 215 | try { 216 | if (localFileExisted) { 217 | nativeBinding = require('./blst.linux-arm64-gnu.node') 218 | } else { 219 | nativeBinding = require('@chainsafe/blst-linux-arm64-gnu') 220 | } 221 | } catch (e) { 222 | loadError = e 223 | } 224 | } 225 | break 226 | case 'arm': 227 | if (isMusl()) { 228 | localFileExisted = existsSync( 229 | join(__dirname, 'blst.linux-arm-musleabihf.node') 230 | ) 231 | try { 232 | if (localFileExisted) { 233 | nativeBinding = require('./blst.linux-arm-musleabihf.node') 234 | } else { 235 | nativeBinding = require('@chainsafe/blst-linux-arm-musleabihf') 236 | } 237 | } catch (e) { 238 | loadError = e 239 | } 240 | } else { 241 | localFileExisted = existsSync( 242 | join(__dirname, 'blst.linux-arm-gnueabihf.node') 243 | ) 244 | try { 245 | if (localFileExisted) { 246 | nativeBinding = require('./blst.linux-arm-gnueabihf.node') 247 | } else { 248 | nativeBinding = require('@chainsafe/blst-linux-arm-gnueabihf') 249 | } 250 | } catch (e) { 251 | loadError = e 252 | } 253 | } 254 | break 255 | case 'riscv64': 256 | if (isMusl()) { 257 | localFileExisted = existsSync( 258 | join(__dirname, 'blst.linux-riscv64-musl.node') 259 | ) 260 | try { 261 | if (localFileExisted) { 262 | nativeBinding = require('./blst.linux-riscv64-musl.node') 263 | } else { 264 | nativeBinding = require('@chainsafe/blst-linux-riscv64-musl') 265 | } 266 | } catch (e) { 267 | loadError = e 268 | } 269 | } else { 270 | localFileExisted = existsSync( 271 | join(__dirname, 'blst.linux-riscv64-gnu.node') 272 | ) 273 | try { 274 | if (localFileExisted) { 275 | nativeBinding = require('./blst.linux-riscv64-gnu.node') 276 | } else { 277 | nativeBinding = require('@chainsafe/blst-linux-riscv64-gnu') 278 | } 279 | } catch (e) { 280 | loadError = e 281 | } 282 | } 283 
| break 284 | case 's390x': 285 | localFileExisted = existsSync( 286 | join(__dirname, 'blst.linux-s390x-gnu.node') 287 | ) 288 | try { 289 | if (localFileExisted) { 290 | nativeBinding = require('./blst.linux-s390x-gnu.node') 291 | } else { 292 | nativeBinding = require('@chainsafe/blst-linux-s390x-gnu') 293 | } 294 | } catch (e) { 295 | loadError = e 296 | } 297 | break 298 | default: 299 | throw new Error(`Unsupported architecture on Linux: ${arch}`) 300 | } 301 | break 302 | default: 303 | throw new Error(`Unsupported OS: ${platform}, architecture: ${arch}`) 304 | } 305 | 306 | if (!nativeBinding) { 307 | if (loadError) { 308 | throw loadError 309 | } 310 | throw new Error(`Failed to load native binding`) 311 | } 312 | 313 | const { SECRET_KEY_LENGTH, PUBLIC_KEY_LENGTH_COMPRESSED, PUBLIC_KEY_LENGTH_UNCOMPRESSED, SIGNATURE_LENGTH_COMPRESSED, SIGNATURE_LENGTH_UNCOMPRESSED, SecretKey, PublicKey, Signature, aggregatePublicKeys, aggregateSignatures, aggregateSerializedPublicKeys, aggregateSerializedSignatures, aggregateWithRandomness, asyncAggregateWithRandomness, verify, aggregateVerify, fastAggregateVerify, verifyMultipleAggregateSignatures } = nativeBinding 314 | 315 | module.exports.SECRET_KEY_LENGTH = SECRET_KEY_LENGTH 316 | module.exports.PUBLIC_KEY_LENGTH_COMPRESSED = PUBLIC_KEY_LENGTH_COMPRESSED 317 | module.exports.PUBLIC_KEY_LENGTH_UNCOMPRESSED = PUBLIC_KEY_LENGTH_UNCOMPRESSED 318 | module.exports.SIGNATURE_LENGTH_COMPRESSED = SIGNATURE_LENGTH_COMPRESSED 319 | module.exports.SIGNATURE_LENGTH_UNCOMPRESSED = SIGNATURE_LENGTH_UNCOMPRESSED 320 | module.exports.SecretKey = SecretKey 321 | module.exports.PublicKey = PublicKey 322 | module.exports.Signature = Signature 323 | module.exports.aggregatePublicKeys = aggregatePublicKeys 324 | module.exports.aggregateSignatures = aggregateSignatures 325 | module.exports.aggregateSerializedPublicKeys = aggregateSerializedPublicKeys 326 | module.exports.aggregateSerializedSignatures = aggregateSerializedSignatures 327 | 
module.exports.aggregateWithRandomness = aggregateWithRandomness 328 | module.exports.asyncAggregateWithRandomness = asyncAggregateWithRandomness 329 | module.exports.verify = verify 330 | module.exports.aggregateVerify = aggregateVerify 331 | module.exports.fastAggregateVerify = fastAggregateVerify 332 | module.exports.verifyMultipleAggregateSignatures = verifyMultipleAggregateSignatures 333 | -------------------------------------------------------------------------------- /npm/darwin-arm64/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-darwin-arm64` 2 | 3 | This is the **aarch64-apple-darwin** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/darwin-arm64/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-darwin-arm64", 3 | "version": "2.2.0", 4 | "os": [ 5 | "darwin" 6 | ], 7 | "cpu": [ 8 | "arm64" 9 | ], 10 | "main": "blst.darwin-arm64.node", 11 | "files": [ 12 | "blst.darwin-arm64.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | } 31 | } -------------------------------------------------------------------------------- /npm/darwin-x64/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-darwin-x64` 2 | 3 | This is the **x86_64-apple-darwin** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/darwin-x64/package.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-darwin-x64", 3 | "version": "2.2.0", 4 | "os": [ 5 | "darwin" 6 | ], 7 | "cpu": [ 8 | "x64" 9 | ], 10 | "main": "blst.darwin-x64.node", 11 | "files": [ 12 | "blst.darwin-x64.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | } 31 | } -------------------------------------------------------------------------------- /npm/linux-arm64-gnu/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-linux-arm64-gnu` 2 | 3 | This is the **aarch64-unknown-linux-gnu** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/linux-arm64-gnu/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-linux-arm64-gnu", 3 | "version": "2.2.0", 4 | "os": [ 5 | "linux" 6 | ], 7 | "cpu": [ 8 | "arm64" 9 | ], 10 | "main": "blst.linux-arm64-gnu.node", 11 | "files": [ 12 | "blst.linux-arm64-gnu.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | }, 31 | "libc": [ 32 | "glibc" 33 | ] 34 | } 
-------------------------------------------------------------------------------- /npm/linux-arm64-musl/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-linux-arm64-musl` 2 | 3 | This is the **aarch64-unknown-linux-musl** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/linux-arm64-musl/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-linux-arm64-musl", 3 | "version": "2.2.0", 4 | "os": [ 5 | "linux" 6 | ], 7 | "cpu": [ 8 | "arm64" 9 | ], 10 | "main": "blst.linux-arm64-musl.node", 11 | "files": [ 12 | "blst.linux-arm64-musl.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | }, 31 | "libc": [ 32 | "musl" 33 | ] 34 | } -------------------------------------------------------------------------------- /npm/linux-x64-gnu/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-linux-x64-gnu` 2 | 3 | This is the **x86_64-unknown-linux-gnu** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/linux-x64-gnu/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-linux-x64-gnu", 3 | "version": "2.2.0", 4 | "os": [ 5 | "linux" 6 | ], 7 | "cpu": [ 8 | "x64" 9 | ], 10 | "main": "blst.linux-x64-gnu.node", 11 | "files": [ 12 | "blst.linux-x64-gnu.node" 13 | ], 14 | "description": "Typescript wrapper 
for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | }, 31 | "libc": [ 32 | "glibc" 33 | ] 34 | } -------------------------------------------------------------------------------- /npm/linux-x64-musl/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-linux-x64-musl` 2 | 3 | This is the **x86_64-unknown-linux-musl** binary for `@chainsafe/blst` 4 | -------------------------------------------------------------------------------- /npm/linux-x64-musl/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-linux-x64-musl", 3 | "version": "2.2.0", 4 | "os": [ 5 | "linux" 6 | ], 7 | "cpu": [ 8 | "x64" 9 | ], 10 | "main": "blst.linux-x64-musl.node", 11 | "files": [ 12 | "blst.linux-x64-musl.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | }, 31 | "libc": [ 32 | "musl" 33 | ] 34 | } -------------------------------------------------------------------------------- /npm/win32-x64-msvc/README.md: -------------------------------------------------------------------------------- 1 | # `@chainsafe/blst-win32-x64-msvc` 2 | 3 | This is the **x86_64-pc-windows-msvc** binary for `@chainsafe/blst` 4 | 
-------------------------------------------------------------------------------- /npm/win32-x64-msvc/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst-win32-x64-msvc", 3 | "version": "2.2.0", 4 | "os": [ 5 | "win32" 6 | ], 7 | "cpu": [ 8 | "x64" 9 | ], 10 | "main": "blst.win32-x64-msvc.node", 11 | "files": [ 12 | "blst.win32-x64-msvc.node" 13 | ], 14 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 15 | "keywords": [ 16 | "bls", 17 | "bls12-381", 18 | "blst", 19 | "crypto", 20 | "ethereum", 21 | "napi" 22 | ], 23 | "license": "Apache-2.0", 24 | "engines": { 25 | "node": ">= 16" 26 | }, 27 | "repository": { 28 | "type": "git", 29 | "url": "https://github.com/ChainSafe/blst-ts" 30 | } 31 | } -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@chainsafe/blst", 3 | "version": "2.2.0", 4 | "description": "Typescript wrapper for supranational/blst native bindings, a highly performant BLS12-381 signature library", 5 | "scripts": { 6 | "artifacts": "napi artifacts", 7 | "build": "napi build --platform --release", 8 | "build:debug": "napi build --platform", 9 | "build:fuzz": "tsc --project tsconfig.fuzz.json", 10 | "postbuild:fuzz": "cp *.node fuzz-tests", 11 | "clean": "rimraf target cargo.lock *.node", 12 | "download-spec-tests": "node -r ts-node/register test/spec/downloadTests.ts", 13 | "lint": "npm run lint:rs && npm run lint:ts", 14 | "lint:rs": "cargo fmt --check", 15 | "lint:ts": "eslint --color --ext .js,.mjs,.cjs,.ts test/", 16 | "prepublishOnly": "napi prepublish -t npm", 17 | "test": "yarn test:unit && yarn test:spec", 18 | "test:fuzz": "ts-node test/fuzz/fuzz.test.ts", 19 | "test:memory": "node -r ts-node/register --expose-gc test/memory/memory.test.ts", 20 | 
"test:perf": "node -r ts-node/register node_modules/.bin/benchmark --config .benchrc.yaml test/perf/*.test.ts", 21 | "test:spec": "mocha test/spec/**/*.test.ts", 22 | "test:unit": "mocha test/unit/**/*.test.ts", 23 | "universal": "napi universal", 24 | "version": "napi version" 25 | }, 26 | "main": "index.js", 27 | "type": "commonjs", 28 | "types": "index.d.ts", 29 | "files": [ 30 | "index.js", 31 | "index.d.ts" 32 | ], 33 | "keywords": [ 34 | "bls", 35 | "bls12-381", 36 | "blst", 37 | "crypto", 38 | "ethereum", 39 | "napi" 40 | ], 41 | "repository": { 42 | "type": "git", 43 | "url": "https://github.com/ChainSafe/blst-ts" 44 | }, 45 | "napi": { 46 | "name": "blst", 47 | "triples": { 48 | "additional": [ 49 | "aarch64-apple-darwin", 50 | "aarch64-unknown-linux-gnu", 51 | "aarch64-unknown-linux-musl", 52 | "x86_64-unknown-linux-musl" 53 | ] 54 | } 55 | }, 56 | "license": "Apache-2.0", 57 | "devDependencies": { 58 | "@dapplion/benchmark": "^0.2.4", 59 | "@napi-rs/cli": "^2.18.3", 60 | "@types/chai": "^4.3.16", 61 | "@types/js-yaml": "^4.0.9", 62 | "@types/mocha": "^10.0.7", 63 | "@types/node": "^20.14.9", 64 | "@types/tar": "^6.1.13", 65 | "@typescript-eslint/eslint-plugin": "^7.15.0", 66 | "@typescript-eslint/parser": "^7.15.0", 67 | "chai": "^4.3.4", 68 | "eslint": "^8.57.0", 69 | "eslint-plugin-import": "^2.29.1", 70 | "eslint-plugin-node": "^11.1.0", 71 | "eslint-plugin-prettier": "^5.1.3", 72 | "js-yaml": "^4.1.0", 73 | "mocha": "^8.3.2", 74 | "prettier": "^3.3.2", 75 | "rimraf": "^5.0.8", 76 | "tar": "^7.4.0", 77 | "ts-node": "^9.1.1", 78 | "typescript": "^5.5.3" 79 | }, 80 | "engines": { 81 | "node": ">= 16" 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | tab_spaces = 2 2 | edition = "2021" 3 | -------------------------------------------------------------------------------- /test/__fixtures__/index.ts: 
-------------------------------------------------------------------------------- 1 | import {fromHex, getFilledUint8, getTestSet, sullyUint8Array} from "../utils"; 2 | 3 | export const invalidInputs: [string, any][] = [ 4 | ["boolean", true], 5 | ["number", 2], 6 | ["bigint", BigInt("2")], 7 | ["symbol", Symbol("foo")], 8 | ["null", null], 9 | ["undefined", undefined], 10 | ["object", {foo: "bar"}], 11 | ["proxy", new Proxy({foo: "bar"}, {})], 12 | ["date", new Date("1982-03-24T16:00:00-06:00")], 13 | [ 14 | "function", 15 | function () { 16 | /* no-op */ 17 | }, 18 | ], 19 | ["NaN", NaN], 20 | ["promise", Promise.resolve()], 21 | ["Uint16Array", new Uint16Array()], 22 | ["Uint32Array", new Uint32Array()], 23 | ["Map", new Map()], 24 | ["Set", new Set()], 25 | ]; 26 | 27 | export const KEY_MATERIAL = getFilledUint8(32, "123"); 28 | export const SECRET_KEY_BYTES = Uint8Array.from( 29 | Buffer.from("5620799c63c92bb7912122070f7ebb6ddd53bdf9aa63e7a7bffc177f03d14f68", "hex") 30 | ); 31 | 32 | export const validPublicKey = { 33 | keygen: "********************************", // Must be at least 32 bytes 34 | uncompressed: fromHex( 35 | "0ae7e5822ba97ab07877ea318e747499da648b27302414f9d0b9bb7e3646d248be90c9fdaddfdb93485a6e9334f0109301f36856007e1bc875ab1b00dbf47f9ead16c5562d889d8b270002ade81e78d473204fcb51ede8659bce3d95c67903bc" 36 | ), 37 | compressed: fromHex( 38 | "8ae7e5822ba97ab07877ea318e747499da648b27302414f9d0b9bb7e3646d248be90c9fdaddfdb93485a6e9334f01093" 39 | ), 40 | }; 41 | export const badPublicKey = Uint8Array.from( 42 | Buffer.from([ 43 | ...Uint8Array.prototype.slice.call(getTestSet().pk.toBytes(false), 8), 44 | ...Buffer.from("0123456789abcdef", "hex"), 45 | ]) 46 | ); 47 | 48 | export const G1_POINT_AT_INFINITY = 49 | "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; 50 | 51 | export const G2_POINT_AT_INFINITY = Buffer.from( 52 | "c000000000000000000000000000000000000000000000000000000000000000" + 53 | 
"0000000000000000000000000000000000000000000000000000000000000000" + 54 | "0000000000000000000000000000000000000000000000000000000000000000", 55 | "hex" 56 | ); 57 | 58 | export const validSignature = { 59 | keygen: "********************************", // Must be at least 32 bytes 60 | uncompressed: fromHex( 61 | "057565542eaa01ef2b910bf0eaba4d98a1e5b8b79cc425db08f8780732d0ea9bc85fc6175f272b2344bb27bc572ebf14022e52689dcedfccf44a00e5bd1aa59db44517217d6b0f21b372169ee761938c28914ddcb9663de54db288e760a8e14f0f465dc9f94edd3ea43442840e4ef6aeb51d1f77e8e5c5a0fadfb46f186f4644899c7cbefd6ead2b138b030b2914b748051cbab5d38fceb8bea84973ac08d1db5436f177dbcb11d9b7bbb39b6dc32047472f573c64be1d28fd848716c2844f88" 62 | ), 63 | compressed: fromHex( 64 | "a57565542eaa01ef2b910bf0eaba4d98a1e5b8b79cc425db08f8780732d0ea9bc85fc6175f272b2344bb27bc572ebf14022e52689dcedfccf44a00e5bd1aa59db44517217d6b0f21b372169ee761938c28914ddcb9663de54db288e760a8e14f" 65 | ), 66 | }; 67 | 68 | export const badSignature = sullyUint8Array(getTestSet().sig.toBytes(false)); 69 | -------------------------------------------------------------------------------- /test/fuzz/exec.ts: -------------------------------------------------------------------------------- 1 | import {exec as EXEC, ExecOptions, ChildProcess, PromiseWithChild} from "node:child_process"; 2 | 3 | export interface ExecPromiseOptions extends ExecOptions { 4 | pipeInput?: boolean; 5 | } 6 | 7 | const defaultOptions: ExecPromiseOptions = { 8 | timeout: 3 * 60 * 1000, // ms 9 | maxBuffer: 10e6, // bytes 10 | pipeInput: false, 11 | }; 12 | 13 | export function exec( 14 | command: string, 15 | logToConsole = true, 16 | execOptions: ExecPromiseOptions = {} 17 | ): PromiseWithChild { 18 | const options = {...defaultOptions, ...execOptions}; 19 | 20 | let child!: ChildProcess; 21 | const promise = new Promise((resolve, reject) => { 22 | const chunks: Buffer[] = []; 23 | function bufferOutput(data: string): void { 24 | chunks.push(Buffer.from(data)); 25 | } 26 | 
function stdoutHandler(data: string): void { 27 | process.stdout.write(data); 28 | } 29 | function stderrHandler(data: string): void { 30 | process.stderr.write(data); 31 | } 32 | 33 | child = EXEC(command, options, (err) => { 34 | child.stdout?.removeAllListeners("data"); 35 | child.stderr?.removeAllListeners("data"); 36 | const output = Buffer.concat(chunks).toString("utf8"); 37 | if (err) { 38 | return reject(err); 39 | } 40 | return resolve(output); 41 | }); 42 | 43 | if (child.stdin && options.pipeInput) { 44 | process.stdin.pipe(child.stdin); 45 | } 46 | child.stdout?.on("data", logToConsole ? stdoutHandler : bufferOutput); 47 | child.stderr?.on("data", logToConsole ? stderrHandler : bufferOutput); 48 | 49 | child.on("exit", () => { 50 | return resolve(Buffer.concat(chunks).toString("utf8")); 51 | }); 52 | }) as PromiseWithChild; 53 | 54 | promise.child = child; 55 | return promise; 56 | } 57 | -------------------------------------------------------------------------------- /test/fuzz/fuzz.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | import fs from "fs"; 3 | import {resolve} from "path"; 4 | import {PromiseWithChild, execSync} from "child_process"; 5 | import {testCases} from "./testCases"; 6 | import {exec} from "./exec"; 7 | 8 | /** 9 | * Fuzz testing framework 10 | * 11 | * jazzer.js is the fuzzer that will run the test cases but it is limited to 12 | * fuzzing a single case at a time. This script parallelizes the fuzzing to cut 13 | * down on test time. 14 | * 15 | * Each test case is run in its own child process. This is because the fuzzer 16 | * must be run from the command line. It plugs into the process itself to 17 | * check for segfault crashes which cannot be done from the application layer so 18 | * the library must be the entrance and it starts the node instance to run the 19 | * test cases. 
20 | */ 21 | 22 | const ROOT_DIR = resolve(__dirname, "..", ".."); 23 | // timeout for all fuzz tests 24 | const TEST_TIMEOUT_IN_MINUTES = 20; 25 | 26 | if (!fs.existsSync(resolve(ROOT_DIR, "fuzz-tests", "test", "fuzz", "fuzzTarget.js"))) { 27 | throw new Error("fuzzTarget.js not found. Run `yarn build:fuzz` to generate the fuzzing framework files"); 28 | } 29 | 30 | if (!fs.existsSync(resolve(ROOT_DIR, "node_modules", ".bin", "jazzer"))) { 31 | execSync("npm i --no-package-lock --no-save @jazzer.js/core", {stdio: "inherit"}); 32 | } 33 | 34 | /** 35 | * Setup of graceful exit for the child processes. When this script is run by 36 | * the test:fuzz command it will be possible to ctrl+c to exit the script 37 | */ 38 | const testingProcesses: PromiseWithChild[] = []; 39 | async function exit(): Promise { 40 | for (const testProcess of testingProcesses) { 41 | if (testProcess.child) { 42 | testProcess.child.kill("SIGINT"); 43 | } 44 | } 45 | process.removeAllListeners("exit"); 46 | process.removeAllListeners("SIGINT"); 47 | clearTimeout(timeout); 48 | } 49 | // exit on hung processes 50 | const timeout = setTimeout(exit, TEST_TIMEOUT_IN_MINUTES * 60 * 1000); 51 | // catch uncaught exceptions, then exit gracefully to generate coverage reports 52 | process.on("exit", exit); 53 | // catch ctrl+c and exit gracefully to generate coverage reports 54 | process.on("SIGINT", exit); 55 | 56 | /** 57 | * Makes directories for test cases if they do not exist 58 | */ 59 | function makeDirs(...paths: string[]): void { 60 | for (const path of paths) { 61 | if (!fs.existsSync(path)) { 62 | fs.mkdirSync(path, {recursive: true}); 63 | } 64 | } 65 | } 66 | 67 | /** 68 | * Test cases are loaded from a separate file. 
The testCases are loaded both by 69 | * the main process and by the child process that runs the test case to ensure 70 | * synchronization 71 | */ 72 | for (const testCase of testCases) { 73 | /** 74 | * corpusDir provides a baseline for the fuzzer so it knows what inputs have 75 | * already been tested to broaden the search space 76 | */ 77 | const corpusDir = resolve(ROOT_DIR, "fuzz-tests", "corpus", testCase.name); 78 | const coverageDir = resolve(ROOT_DIR, "fuzz-tests", "coverage", testCase.name); 79 | makeDirs(corpusDir, coverageDir); 80 | 81 | const cmd = [ 82 | /** 83 | * because the fuzzer library is what calls the node instance it was not possible 84 | * to pass the test case to the node instance directly. Instead, the test case 85 | * is passed as an environment variable 86 | */ 87 | `FUZZ_TEST_CASE=${testCase.name}`, 88 | "node_modules/.bin/jazzer", 89 | "fuzz-tests/test/fuzz/fuzzTarget", 90 | corpusDir, 91 | "--includes build", 92 | "--includes prebuild", 93 | "--includes src", 94 | "--includes fuzz-tests/lib", 95 | "--includes fuzz-tests/test", 96 | "--includes fuzz-tests/utils", 97 | "--excludes node_modules", 98 | "--mode fuzzing", 99 | "--timeout 1000", // individual test timeout. 
each run is max 1 second 100 | "--sync false", 101 | "--coverage true", 102 | `--coverage_directory ${coverageDir}`, 103 | "--coverage_reporters lcov", 104 | // "--", 105 | // "-max_total_time=10", 106 | ].join(" "); 107 | 108 | console.log(`Running fuzz test: ${testCase.name}`); 109 | testingProcesses.push(exec(cmd, true, {cwd: ROOT_DIR})); 110 | } 111 | 112 | // eslint-disable-next-line @typescript-eslint/no-floating-promises 113 | Promise.allSettled(testingProcesses) 114 | .then((results) => { 115 | console.log("All fuzz tests completed"); 116 | for (const result of results) { 117 | if (result.status === "rejected") { 118 | console.error(result.reason); 119 | } 120 | } 121 | }) 122 | .finally(() => process.exit()); 123 | -------------------------------------------------------------------------------- /test/fuzz/fuzzTarget.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable @typescript-eslint/no-unsafe-return */ 2 | import {testCases} from "./testCases"; 3 | 4 | const TEST_CASE = process.env.FUZZ_TEST_CASE; 5 | const testCase = testCases.find(({name}) => name === TEST_CASE); 6 | if (!testCase) { 7 | throw new Error(`Unknown test case: ${TEST_CASE}`); 8 | } 9 | 10 | export async function fuzz(data: Buffer): Promise { 11 | try { 12 | const response = testCase?.target(data); 13 | if (response instanceof Promise) { 14 | return await response; 15 | } 16 | return response; 17 | } catch (e: unknown) { 18 | if (e instanceof Error && testCase?.expectedErrors.includes(e.message)) { 19 | return; 20 | } 21 | throw new Error(`in ${testCase?.name}: ${(e as Error).message}`); 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /test/fuzz/testCases.ts: -------------------------------------------------------------------------------- 1 | import {verify, SecretKey, PublicKey, Signature} from "../../index.js"; 2 | 3 | export interface FuzzTestCase { 4 | name: string; 5 | target: 
(data: Buffer) => any; 6 | expectedErrors: string[]; 7 | } 8 | 9 | export const testCases: FuzzTestCase[] = [ 10 | { 11 | name: "SecretKey.fromKeygen", 12 | target: (data: Buffer) => { 13 | return SecretKey.fromKeygen(data); 14 | }, 15 | expectedErrors: ["Invalid encoding"], 16 | }, 17 | { 18 | name: "SecretKey.deserialize", 19 | target: (data: Buffer) => { 20 | return SecretKey.fromBytes(data); 21 | }, 22 | expectedErrors: ["Invalid encoding"], 23 | }, 24 | { 25 | name: "secretKey.sign", 26 | target: (data: Buffer) => { 27 | return SecretKey.fromKeygen(Buffer.alloc(32), Buffer.from("*")).sign(data); 28 | }, 29 | expectedErrors: [], 30 | }, 31 | { 32 | name: "PublicKey.deserialize", 33 | target: (data: Buffer) => { 34 | return PublicKey.fromBytes(data); 35 | }, 36 | expectedErrors: ["Invalid encoding", "Point not on curve", "Point not in group", "Public key is infinity"], 37 | }, 38 | { 39 | name: "Signature.deserialize", 40 | target: (data: Buffer) => { 41 | return Signature.fromBytes(data); 42 | }, 43 | expectedErrors: ["Invalid encoding", "Point not on curve", "Point not in group"], 44 | }, 45 | { 46 | name: "verify", 47 | target: (data: Buffer) => { 48 | const secretKey = SecretKey.fromKeygen(Buffer.alloc(32, "*")); 49 | const publicKey = secretKey.toPublicKey(); 50 | const signature = secretKey.sign(data); 51 | return verify(data, publicKey, signature); 52 | }, 53 | expectedErrors: [], 54 | }, 55 | ]; 56 | -------------------------------------------------------------------------------- /test/memory/memory.test.ts: -------------------------------------------------------------------------------- 1 | import {memoryTest} from "../utils/memory/testRunner"; 2 | import * as napi from "../../index.js"; 3 | 4 | const sk = napi.SecretKey.fromKeygen(Buffer.alloc(32, "*&@#")); 5 | const skBytes = sk.toBytes(); 6 | const pk = sk.toPublicKey().toBytes(); 7 | const sig = sk.sign(Buffer.alloc(32, "*&@#")).toBytes(); 8 | 9 | // eslint-disable-next-line 
// Top-level async runner: measures the steady-state memory cost of holding
// many instances of each napi-wrapped type, by repeatedly constructing
// instances from the fixed serialized bytes prepared above.
// eslint-disable-next-line @typescript-eslint/no-floating-promises
(async function runMemoryTests() {
  await memoryTest(
    [
      {
        id: "Napi SecretKey",
        getInstance: () => napi.SecretKey.fromBytes(skBytes),
      },
      {
        id: "Napi PublicKey",
        getInstance: () => napi.PublicKey.fromBytes(pk),
      },
      {
        id: "Napi Signature",
        getInstance: () => napi.Signature.fromBytes(sig),
      },
    ],
    {
      warmUpIterations: 10_000,
      gcDelay: 100, // ms to wait after GC before sampling
      convergeFactor: 0.1 / 100, // stop once per-instance estimate converges to 0.1%
      displayRunInfo: true,
      sampleEvery: 1000,
      maxInstances: 1_000_000, // hard cap if convergence is never reached
      // Native handles live outside the JS heap, so include external and
      // arrayBuffer memory in the measurement, not just heapUsed.
      computeUsedMemory: (usage) => usage.heapUsed + usage.external + usage.arrayBuffers,
    }
  );
})();
// Benchmarks for every SecretKey operation exposed by the bindings:
// keygen, (de)serialization, public-key derivation, and signing.
describe("SecretKey", () => {
  // Fixed 32-byte input key material so keygen timing is deterministic.
  const ikm = Buffer.alloc(32, 1);
  itBench("SecretKey.fromKeygen", () => {
    blst.SecretKey.fromKeygen(ikm);
  });

  itBench("SecretKey serialization", () => {
    napiTestKey.toBytes();
  });

  itBench({
    id: "SecretKey deserialization",
    // Re-serialize before each run so deserialization cost is measured alone.
    beforeEach: () => napiTestKey.toBytes(),
    fn: (serialized) => {
      blst.SecretKey.fromBytes(serialized);
    },
  });

  itBench("SecretKey.toPublicKey", () => {
    napiTestKey.toPublicKey();
  });

  itBench("SecretKey.sign", () => {
    napiTestKey.sign(commonMessage);
  });
});
and validate - ${count} sets`, 23 | before() { 24 | return arrayOfIndexes(0, count - 1).map((i) => getSerializedTestSet(i % 256).sig); 25 | }, 26 | beforeEach: (sigs) => sigs, 27 | fn: (signatures) => { 28 | for (const signature of signatures) { 29 | blst.Signature.fromBytes(signature, true); 30 | } 31 | }, 32 | }); 33 | } 34 | }); 35 | -------------------------------------------------------------------------------- /test/perf/functions.test.ts: -------------------------------------------------------------------------------- 1 | import {itBench} from "@dapplion/benchmark"; 2 | import * as blst from "../../index.js"; 3 | import {arrayOfIndexes, getTestSet, getTestSetSameMessage, getTestSetsSameMessage} from "../utils"; 4 | 5 | describe("functions", () => { 6 | describe("aggregatePublicKeys", () => { 7 | for (const count of [1, 8, 32, 128, 256]) { 8 | itBench({ 9 | id: `aggregatePublicKeys - ${count} sets`, 10 | beforeEach: () => arrayOfIndexes(0, count - 1).map((i) => getTestSet(i).pk), 11 | fn: (publicKeys) => { 12 | blst.aggregatePublicKeys(publicKeys); 13 | }, 14 | }); 15 | } 16 | }); 17 | describe("aggregateSignatures", () => { 18 | for (const count of [1, 8, 32, 128, 256]) { 19 | itBench({ 20 | id: `aggregateSignatures - ${count} sets`, 21 | beforeEach: () => arrayOfIndexes(0, count - 1).map((i) => getTestSet(i).sig), 22 | fn: (signatures) => { 23 | blst.aggregateSignatures(signatures); 24 | }, 25 | }); 26 | } 27 | }); 28 | describe("aggregateWithRandomness", () => { 29 | for (const count of [1, 16, 128, 256, 512, 1024]) { 30 | itBench({ 31 | id: `aggregateWithRandomness - ${count} sets`, 32 | before: () => { 33 | const {sets} = getTestSetsSameMessage(count); 34 | return sets.map((s) => ({ 35 | pk: s.pk, 36 | sig: s.sig.toBytes(), 37 | })); 38 | }, 39 | beforeEach: (sets) => sets, 40 | fn: (sets) => { 41 | blst.aggregateWithRandomness(sets); 42 | }, 43 | }); 44 | } 45 | }); 46 | describe("aggregateVerify", () => { 47 | for (const count of [1, 8, 32, 128, 256]) 
{ 48 | itBench({ 49 | id: `aggregateVerify - ${count} sets`, 50 | beforeEach: () => { 51 | const sets = arrayOfIndexes(0, count - 1) 52 | .map((i) => getTestSet(i)) 53 | .reduce( 54 | (sets, set) => ({ 55 | messages: [...sets.messages, set.msg], 56 | publicKeys: [...sets.publicKeys, set.pk], 57 | signatures: [...sets.signatures, set.sig], 58 | }), 59 | { 60 | messages: [] as Uint8Array[], 61 | publicKeys: [] as blst.PublicKey[], 62 | signatures: [] as blst.Signature[], 63 | } 64 | ); 65 | return { 66 | messages: sets.messages, 67 | publicKeys: sets.publicKeys, 68 | signature: blst.aggregateSignatures(sets.signatures), 69 | }; 70 | }, 71 | fn: ({messages, publicKeys, signature}) => { 72 | blst.aggregateVerify(messages, publicKeys, signature); 73 | }, 74 | }); 75 | } 76 | }); 77 | describe("verifyMultipleAggregateSignatures", () => { 78 | for (const count of [1, 8, 32, 128, 256]) { 79 | itBench({ 80 | id: `verifyMultipleAggregateSignatures - ${count} sets`, 81 | beforeEach: () => arrayOfIndexes(0, count - 1).map((i) => getTestSet(i)), 82 | fn: (sets) => { 83 | blst.verifyMultipleAggregateSignatures(sets); 84 | }, 85 | }); 86 | } 87 | }); 88 | describe("verifyMultipleAggregateSignatures same message", () => { 89 | for (const count of [1, 8, 32, 128, 256]) { 90 | itBench({ 91 | id: `Same message - ${count} sets`, 92 | beforeEach: () => 93 | arrayOfIndexes(0, count - 1) 94 | .map((i) => getTestSetSameMessage(i)) 95 | .map((set) => { 96 | return { 97 | message: set.msg, 98 | secretKey: set.sk, 99 | publicKey: set.pk, 100 | signature: set.sig.toBytes(), 101 | }; 102 | }), 103 | fn: (sets) => { 104 | const aggregatedPubkey = blst.aggregatePublicKeys(sets.map((set) => set.publicKey)); 105 | const aggregatedSignature = blst.aggregateSignatures( 106 | sets.map((set) => { 107 | const sig = blst.Signature.fromBytes(set.signature, true, true); 108 | return sig; 109 | }) 110 | ); 111 | const isValid = blst.verify(sets[0].message, aggregatedPubkey, aggregatedSignature); 112 | if 
(!isValid) throw Error("Invalid"); 113 | }, 114 | }); 115 | } 116 | }); 117 | }); 118 | -------------------------------------------------------------------------------- /test/spec/downloadTests.ts: -------------------------------------------------------------------------------- 1 | import {downloadTests} from "./utils"; 2 | import {ethereumConsensusSpecsTests, blsSpecTests} from "./specTestVersioning"; 3 | 4 | /* eslint-disable no-console */ 5 | 6 | for (const downloadTestOpts of [ethereumConsensusSpecsTests, blsSpecTests]) { 7 | downloadTests(downloadTestOpts, console.log).catch((e: Error) => { 8 | console.error(e); 9 | process.exit(1); 10 | }); 11 | } 12 | -------------------------------------------------------------------------------- /test/spec/functions.ts: -------------------------------------------------------------------------------- 1 | import { 2 | SecretKey, 3 | PublicKey, 4 | Signature, 5 | aggregatePublicKeys, 6 | aggregateSignatures, 7 | verify as VERIFY, 8 | aggregateVerify, 9 | fastAggregateVerify, 10 | verifyMultipleAggregateSignatures, 11 | SignatureSet, 12 | } from "../../index.js"; 13 | import {CodeError, fromHex} from "../utils"; 14 | import {G2_POINT_AT_INFINITY} from "./utils"; 15 | 16 | export const testFnByName: Record any> = { 17 | sign, 18 | eth_aggregate_pubkeys, 19 | aggregate, 20 | verify, 21 | aggregate_verify, 22 | fast_aggregate_verify, 23 | eth_fast_aggregate_verify, 24 | batch_verify, 25 | deserialization_G1, 26 | deserialization_G2, 27 | }; 28 | 29 | function catchBLSTError(e: unknown): boolean { 30 | if ((e as CodeError).code?.startsWith("BLST")) return false; 31 | throw e; 32 | } 33 | 34 | /** 35 | * ``` 36 | * input: List[BLS Signature] -- list of input BLS signatures 37 | * output: BLS Signature -- expected output, single BLS signature or empty. 
38 | * ``` 39 | */ 40 | function aggregate(input: string[]): string | null { 41 | return aggregateSignatures(input.map((hex) => Signature.fromHex(hex))).toHex(); 42 | } 43 | 44 | /** 45 | * ``` 46 | * input: 47 | * pubkeys: List[BLS Pubkey] -- the pubkeys 48 | * messages: List[bytes32] -- the messages 49 | * signature: BLS Signature -- the signature to verify against pubkeys and messages 50 | * output: bool -- true (VALID) or false (INVALID) 51 | * ``` 52 | */ 53 | function aggregate_verify(input: {pubkeys: string[]; messages: string[]; signature: string}): boolean { 54 | const {pubkeys, messages, signature} = input; 55 | try { 56 | return aggregateVerify( 57 | messages.map(fromHex), 58 | pubkeys.map((hex) => PublicKey.fromHex(hex)), 59 | Signature.fromHex(signature) 60 | ); 61 | } catch (e) { 62 | return catchBLSTError(e); 63 | } 64 | } 65 | 66 | /** 67 | * ``` 68 | * input: List[BLS Signature] -- list of input BLS signatures 69 | * output: BLS Signature -- expected output, single BLS signature or empty. 
70 | * ``` 71 | */ 72 | function eth_aggregate_pubkeys(input: string[]): string | null { 73 | return aggregatePublicKeys(input.map((hex) => PublicKey.fromHex(hex, true))).toHex(); 74 | } 75 | 76 | /** 77 | * ``` 78 | * input: 79 | * pubkeys: List[BLS Pubkey] -- list of input BLS pubkeys 80 | * message: bytes32 -- the message 81 | * signature: BLS Signature -- the signature to verify against pubkeys and message 82 | * output: bool -- true (VALID) or false (INVALID) 83 | * ``` 84 | */ 85 | function eth_fast_aggregate_verify(input: {pubkeys: string[]; message: string; signature: string}): boolean { 86 | const {pubkeys, message, signature} = input; 87 | 88 | if (pubkeys.length === 0 && signature === G2_POINT_AT_INFINITY) { 89 | return true; 90 | } 91 | 92 | try { 93 | return fastAggregateVerify( 94 | fromHex(message), 95 | pubkeys.map((hex) => PublicKey.fromHex(hex, true)), 96 | Signature.fromHex(signature) 97 | ); 98 | } catch (e) { 99 | return catchBLSTError(e); 100 | } 101 | } 102 | 103 | /** 104 | * ``` 105 | * input: 106 | * pubkeys: List[BLS Pubkey] -- list of input BLS pubkeys 107 | * message: bytes32 -- the message 108 | * signature: BLS Signature -- the signature to verify against pubkeys and message 109 | * output: bool -- true (VALID) or false (INVALID) 110 | * ``` 111 | */ 112 | function fast_aggregate_verify(input: {pubkeys: string[]; message: string; signature: string}): boolean { 113 | const {pubkeys, message, signature} = input; 114 | 115 | try { 116 | return fastAggregateVerify( 117 | fromHex(message), 118 | pubkeys.map((hex) => PublicKey.fromHex(hex, true)), 119 | Signature.fromHex(signature) 120 | ); 121 | } catch (e) { 122 | return catchBLSTError(e); 123 | } 124 | } 125 | 126 | /** 127 | * input: 128 | * privkey: bytes32 -- the private key used for signing 129 | * message: bytes32 -- input message to sign (a hash) 130 | * output: BLS Signature -- expected output, single BLS signature or empty. 
131 | */ 132 | function sign(input: {privkey: string; message: string}): string | null { 133 | const {privkey, message} = input; 134 | return SecretKey.fromHex(privkey).sign(fromHex(message)).toHex(); 135 | } 136 | 137 | /** 138 | * input: 139 | * pubkey: bytes48 -- the pubkey 140 | * message: bytes32 -- the message 141 | * signature: bytes96 -- the signature to verify against pubkey and message 142 | * output: bool -- VALID or INVALID 143 | */ 144 | function verify(input: {pubkey: string; message: string; signature: string}): boolean { 145 | const {pubkey, message, signature} = input; 146 | try { 147 | return VERIFY(fromHex(message), PublicKey.fromHex(pubkey), Signature.fromHex(signature)); 148 | } catch (e) { 149 | return catchBLSTError(e); 150 | } 151 | } 152 | 153 | /** 154 | * ``` 155 | * input: 156 | * pubkeys: List[bytes48] -- the pubkeys 157 | * messages: List[bytes32] -- the messages 158 | * signatures: List[bytes96] -- the signatures to verify against pubkeys and messages 159 | * output: bool -- VALID or INVALID 160 | * ``` 161 | * https://github.com/ethereum/bls12-381-tests/blob/master/formats/batch_verify.md 162 | */ 163 | function batch_verify(input: {pubkeys: string[]; messages: string[]; signatures: string[]}): boolean | null { 164 | const length = input.pubkeys.length; 165 | if (input.messages.length !== length && input.signatures.length !== length) { 166 | throw new Error("Invalid spec test. Must have same number in each array. 
Check spec yaml file"); 167 | } 168 | const sets: SignatureSet[] = []; 169 | try { 170 | for (let i = 0; i < length; i++) { 171 | sets.push({ 172 | msg: fromHex(input.messages[i]), 173 | pk: PublicKey.fromHex(input.pubkeys[i]), 174 | sig: Signature.fromHex(input.signatures[i]), 175 | }); 176 | } 177 | return verifyMultipleAggregateSignatures(sets); 178 | } catch (e) { 179 | return catchBLSTError(e); 180 | } 181 | } 182 | 183 | /** 184 | * ``` 185 | * input: pubkey: bytes48 -- the pubkey 186 | * output: bool -- VALID or INVALID 187 | * ``` 188 | * https://github.com/ethereum/bls12-381-tests/blob/master/formats/deserialization_G1.md 189 | */ 190 | function deserialization_G1(input: {pubkey: string}): boolean { 191 | try { 192 | PublicKey.fromHex(input.pubkey, true); 193 | return true; 194 | } catch (e) { 195 | return catchBLSTError(e); 196 | } 197 | } 198 | 199 | /** 200 | * ``` 201 | * input: signature: bytes92 -- the signature 202 | * output: bool -- VALID or INVALID 203 | * ``` 204 | * https://github.com/ethereum/bls12-381-tests/blob/master/formats/deserialization_G2.md 205 | */ 206 | function deserialization_G2(input: {signature: string}): boolean { 207 | try { 208 | Signature.fromHex(input.signature, true); 209 | return true; 210 | } catch (e) { 211 | return catchBLSTError(e); 212 | } 213 | } 214 | -------------------------------------------------------------------------------- /test/spec/index.test.ts: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-console */ 2 | import {expect} from "chai"; 3 | import {TestBatchMeta, getTestBatch} from "./utils"; 4 | import {testFnByName} from "./functions"; 5 | 6 | const testLocations: TestBatchMeta[] = [ 7 | {directory: "spec-tests/tests/general/phase0/bls", innerBlsFolder: true}, 8 | {directory: "spec-tests/tests/general/altair/bls", innerBlsFolder: true}, 9 | {directory: "spec-tests-bls", namedYamlFiles: true}, 10 | ]; 11 | 12 | const skippedFunctions: string[] = 
// Registers one mocha describe block per spec-test directory, one nested
// describe per test function, and one `it` per yaml test case. Runs at
// module load so mocha sees the suites synchronously.
(function runTests(): void {
  const batches = testLocations.map(getTestBatch);
  for (const {directory, testGroups: tests} of batches) {
    describe(directory, () => {
      for (const {functionName, testCases} of tests) {
        // Some spec functions (e.g. hash_to_G2) are deliberately not implemented.
        if (skippedFunctions.includes(functionName)) continue;
        describe(functionName, () => {
          const testFn = testFnByName[functionName];
          // Fail inside `before` (not at registration time) so the error is
          // attributed to the right describe block in mocha output.
          before("Must be a known test function", () => {
            if (!testFn) throw Error(`Unknown test function: ${functionName}`);
          });

          for (const {testCaseName, testCaseData} of testCases) {
            if (skippedTestCaseNames.includes(testCaseName)) {
              continue;
            }
            if (process.env.DEBUG) {
              console.log(testCaseData);
            }
            it(testCaseName, () => {
              // Spec convention: a null `output` means the call must throw;
              // otherwise the result must deep-equal the expected output.
              if (testCaseData.output === null) {
                // eslint-disable-next-line @typescript-eslint/no-unsafe-return
                expect(() => testFn(testCaseData.input)).to.throw();
              } else {
                expect(testFn(testCaseData.input)).to.deep.equal(testCaseData.output);
              }
            });
          }
        });
      }
    });
  }
})();
5 | // 6 | // This file is used to generate the cache ID for spec tests download in Github Actions CI 7 | // It's path is hardcoded in: `.github/workflows/test-spec.yml` 8 | // 9 | // The contents of this file MUST include the URL, version and target path, and nothing else. 10 | 11 | // Target directory is the host package root: '/spec-tests' 12 | 13 | export const ethereumConsensusSpecsTests: DownloadTestsOptions = { 14 | specVersion: "v1.4.0", 15 | // Target directory is the host package root: 'packages/*/spec-tests' 16 | outputDir: join(__dirname, "../../spec-tests"), 17 | specTestsRepoUrl: "https://github.com/ethereum/consensus-spec-tests", 18 | testsToDownload: ["general"], 19 | }; 20 | 21 | export const blsSpecTests: DownloadTestsOptions = { 22 | specVersion: "v0.1.2", 23 | // Target directory is the host package root: 'packages/*/spec-tests-bls' 24 | outputDir: join(__dirname, "../../spec-tests-bls"), 25 | specTestsRepoUrl: "https://github.com/ethereum/bls12-381-tests", 26 | testsToDownload: ["bls_tests_yaml"], 27 | }; 28 | -------------------------------------------------------------------------------- /test/spec/utils.ts: -------------------------------------------------------------------------------- 1 | import fs from "node:fs"; 2 | import path from "node:path"; 3 | import stream from "node:stream"; 4 | import type {ReadableStream} from "node:stream/web"; 5 | import * as tar from "tar"; 6 | import jsYaml from "js-yaml"; 7 | 8 | const REPO_ROOT = path.resolve(__dirname, "..", ".."); 9 | export const SPEC_TEST_REPO_URL = "https://github.com/ethereum/consensus-spec-tests"; 10 | 11 | export const G2_POINT_AT_INFINITY = 12 | "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; 13 | export const G1_POINT_AT_INFINITY = 14 | 
"0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"; 15 | 16 | // Examples of parsed YAML 17 | // { 18 | // input: [ 19 | // '0x91347bccf740d859038fcdcaf233eeceb2a436bcaaee9b2aa3bfb70efe29dfb2677562ccbea1c8e061fb9971b0753c240622fab78489ce96768259fc01360346da5b9f579e5da0d941e4c6ba18a0e64906082375394f337fa1af2b7127b0d121', 20 | // '0x9674e2228034527f4c083206032b020310face156d4a4685e2fcaec2f6f3665aa635d90347b6ce124eb879266b1e801d185de36a0a289b85e9039662634f2eea1e02e670bc7ab849d006a70b2f93b84597558a05b879c8d445f387a5d5b653df', 21 | // '0xae82747ddeefe4fd64cf9cedb9b04ae3e8a43420cd255e3c7cd06a8d88b7c7f8638543719981c5d16fa3527c468c25f0026704a6951bde891360c7e8d12ddee0559004ccdbe6046b55bae1b257ee97f7cdb955773d7cf29adf3ccbb9975e4eb9' 22 | // ], 23 | // output: '0x9712c3edd73a209c742b8250759db12549b3eaf43b5ca61376d9f30e2747dbcf842d8b2ac0901d2a093713e20284a7670fcf6954e9ab93de991bb9b313e664785a075fc285806fa5224c82bde146561b446ccfc706a64b8579513cfc4ff1d930' 24 | // } 25 | // 26 | // { 27 | // input: ['0x000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000'], 28 | // output: null 29 | // } 30 | // 31 | // { 32 | // input: ..., 33 | // output: false 34 | // } 35 | export interface TestCaseData { 36 | input: unknown; 37 | output: unknown; 38 | } 39 | 40 | export interface TestCase { 41 | testCaseName: string; 42 | testCaseData: TestCaseData; 43 | } 44 | 45 | export interface TestGroup { 46 | functionName: string; 47 | directory: string; 48 | testCases: TestCase[]; 49 | } 50 | 51 | export interface TestBatchMeta { 52 | directory: string; 53 | innerBlsFolder?: boolean; 54 | namedYamlFiles?: boolean; 55 | } 56 | 57 | export interface TestBatch { 58 | directory: string; 59 | testGroups: TestGroup[]; 60 | } 61 | 62 | function getTestCasesWithDataYaml(testDirectory: string): TestCase[] { 63 | const testCases: TestCase[] = []; 64 | for (const testCaseName of fs.readdirSync(testDirectory)) { 65 | 
const testCaseDir = path.resolve(testDirectory, testCaseName); 66 | const yamlPath = path.resolve(testCaseDir, "data.yaml"); 67 | if (!fs.existsSync(yamlPath)) { 68 | throw new Error(`Missing yaml data for ${testCaseDir}`); 69 | } 70 | testCases.push({ 71 | testCaseName, 72 | testCaseData: jsYaml.load(fs.readFileSync(yamlPath, "utf8")) as { 73 | input: unknown; 74 | output: unknown; 75 | }, 76 | }); 77 | } 78 | return testCases; 79 | } 80 | 81 | function getTestCasesWithNamedYaml(testDirectory: string): TestCase[] { 82 | const testCases: TestCase[] = []; 83 | for (const testCaseYaml of fs.readdirSync(testDirectory)) { 84 | const [testCaseName] = testCaseYaml.split("."); 85 | const yamlPath = path.resolve(testDirectory, testCaseYaml); 86 | if (!fs.existsSync(yamlPath)) { 87 | throw new Error(`Missing yaml data for ${testCaseName}`); 88 | } 89 | testCases.push({ 90 | testCaseName, 91 | testCaseData: jsYaml.load(fs.readFileSync(yamlPath, "utf8")) as { 92 | input: unknown; 93 | output: unknown; 94 | }, 95 | }); 96 | } 97 | return testCases; 98 | } 99 | 100 | export function getTestBatch({directory, innerBlsFolder, namedYamlFiles}: TestBatchMeta): TestBatch { 101 | const testBatch: TestBatch = {directory, testGroups: []}; 102 | 103 | const fullDirPath = path.resolve(REPO_ROOT, directory); 104 | for (const functionName of fs.readdirSync(fullDirPath)) { 105 | const pathSegments = [fullDirPath, functionName]; 106 | if (innerBlsFolder) pathSegments.push("bls"); 107 | const testDirectory = path.resolve(...pathSegments); 108 | if (!fs.statSync(testDirectory).isDirectory()) { 109 | continue; 110 | } 111 | const testGroup: TestGroup = { 112 | functionName, 113 | directory, 114 | testCases: namedYamlFiles ? 
getTestCasesWithNamedYaml(testDirectory) : getTestCasesWithDataYaml(testDirectory), 115 | }; 116 | testBatch.testGroups.push(testGroup); 117 | } 118 | 119 | return testBatch; 120 | } 121 | 122 | const logEmpty = (): void => {}; 123 | 124 | export type DownloadTestsOptions = { 125 | specVersion: string; 126 | outputDir: string; 127 | /** Root Github URL `https://github.com/ethereum/consensus-spec-tests` */ 128 | specTestsRepoUrl: string; 129 | /** Release files names to download without prefix `["general", "mainnet", "minimal"]` */ 130 | testsToDownload: string[]; 131 | }; 132 | 133 | /** 134 | * Generic Github release downloader. 135 | * Used by spec tests and SlashingProtectionInterchangeTest 136 | */ 137 | export async function downloadTests( 138 | {specVersion, specTestsRepoUrl, outputDir, testsToDownload}: DownloadTestsOptions, 139 | log: (msg: string) => void = logEmpty 140 | ): Promise { 141 | log(`outputDir = ${outputDir}`); 142 | 143 | // Use version.txt as a flag to prevent re-downloading the tests 144 | const versionFile = path.join(outputDir, "version.txt"); 145 | const existingVersion = fs.existsSync(versionFile) && fs.readFileSync(versionFile, "utf8").trim(); 146 | 147 | if (existingVersion === specVersion) { 148 | return log(`version ${specVersion} already downloaded`); 149 | } else { 150 | log(`Downloading new version ${specVersion}`); 151 | } 152 | 153 | if (fs.existsSync(outputDir)) { 154 | log(`Cleaning existing version ${existingVersion} at ${outputDir}`); 155 | fs.rmSync(outputDir, {recursive: true, force: true}); 156 | } 157 | 158 | fs.mkdirSync(outputDir, {recursive: true}); 159 | 160 | await Promise.all( 161 | testsToDownload.map(async (test) => { 162 | const url = `${specTestsRepoUrl ?? 
SPEC_TEST_REPO_URL}/releases/download/${specVersion}/${test}.tar.gz`; 163 | const fileName = url.split("/").pop(); 164 | const filePath = path.resolve(outputDir, String(fileName)); 165 | const {body, ok, headers} = await fetch(url); 166 | if (!ok || !body) { 167 | throw new Error(`Failed to download ${url}`); 168 | } 169 | 170 | const totalSize = headers.get("content-length"); 171 | log(`Downloading ${url} - ${totalSize} bytes`); 172 | 173 | await stream.promises.finished( 174 | stream.Readable.fromWeb(body as ReadableStream).pipe(fs.createWriteStream(filePath)) 175 | ); 176 | 177 | log(`Downloaded ${url}`); 178 | 179 | await tar.x({ 180 | file: filePath, 181 | cwd: outputDir, 182 | }); 183 | }) 184 | ); 185 | 186 | fs.writeFileSync(versionFile, specVersion); 187 | } 188 | -------------------------------------------------------------------------------- /test/unit/PublicKey.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {PUBLIC_KEY_LENGTH_COMPRESSED, PUBLIC_KEY_LENGTH_UNCOMPRESSED, PublicKey, SecretKey} from "../../index.js"; 3 | import {CodeError, expectEqualHex, expectNotEqualHex, sullyUint8Array} from "../utils"; 4 | import {validPublicKey, SECRET_KEY_BYTES, invalidInputs, G1_POINT_AT_INFINITY} from "../__fixtures__"; 5 | 6 | describe("PublicKey", () => { 7 | it("should exist", () => { 8 | expect(PublicKey).to.exist; 9 | }); 10 | describe("constructors", () => { 11 | describe("new PublicKey()", () => { 12 | it("should have a private constructor", () => { 13 | // eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-return 14 | expect(() => new (PublicKey as any)("foo-bar-baz")).to.throw( 15 | "Class contains no `constructor`, can not new it!" 
16 | ); 17 | }); 18 | }); 19 | describe("deserialize", () => { 20 | it("should only take 48 or 96 bytes", () => { 21 | expect(() => PublicKey.fromBytes(Buffer.alloc(32, "*"))).to.throw("Invalid encoding"); 22 | }); 23 | it("should take uncompressed byte arrays", () => { 24 | expectEqualHex(PublicKey.fromBytes(validPublicKey.uncompressed).toBytes(), validPublicKey.compressed); 25 | }); 26 | it("should take compressed byte arrays", () => { 27 | expectEqualHex(PublicKey.fromBytes(validPublicKey.compressed).toBytes(), validPublicKey.compressed); 28 | }); 29 | describe("argument validation", () => { 30 | for (const [type, invalid] of invalidInputs) { 31 | it(`should throw on invalid pkBytes type: ${type}`, () => { 32 | expect(() => PublicKey.fromBytes(invalid)).to.throw(); 33 | }); 34 | } 35 | it("should throw incorrect length pkBytes", () => { 36 | expect(() => PublicKey.fromBytes(Buffer.alloc(12, "*"))).to.throw("Invalid encoding"); 37 | }); 38 | }); 39 | it("should throw on invalid key", () => { 40 | try { 41 | PublicKey.fromBytes(sullyUint8Array(validPublicKey.compressed), true); 42 | expect.fail("Did not throw error for badPublicKey"); 43 | } catch (e) { 44 | expect((e as CodeError).code === "BLST_POINT_NOT_ON_CURVE" || (e as CodeError).code === "BLST_BAD_ENCODING") 45 | .to.be.true; 46 | } 47 | }); 48 | it("should throw on zero key", () => { 49 | expect(() => PublicKey.fromBytes(Buffer.from(G1_POINT_AT_INFINITY))).to.throw("Invalid encoding"); 50 | }); 51 | }); 52 | }); 53 | describe("methods", () => { 54 | describe("toBytes", () => { 55 | const sk = SecretKey.fromBytes(SECRET_KEY_BYTES); 56 | const pk = sk.toPublicKey(); 57 | it("should toBytes the key to Uint8Array", () => { 58 | expect(pk.toBytes()).to.be.instanceof(Uint8Array); 59 | }); 60 | it("should default to compressed serialization", () => { 61 | expectEqualHex(pk.toBytes(), pk.toBytes(true)); 62 | expectNotEqualHex(pk.toBytes(), pk.toBytes(false)); 63 | }); 64 | it("should serialize compressed to the 
correct length", () => { 65 | expect(pk.toBytes(true)).to.have.lengthOf(PUBLIC_KEY_LENGTH_COMPRESSED); 66 | }); 67 | it("should serialize uncompressed to the correct length", () => { 68 | expect(pk.toBytes(false)).to.have.lengthOf(PUBLIC_KEY_LENGTH_UNCOMPRESSED); 69 | }); 70 | }); 71 | describe("toHex", () => { 72 | it("should toHex string correctly", () => { 73 | const key = PublicKey.fromBytes(validPublicKey.compressed); 74 | expectEqualHex(key.toHex(true), validPublicKey.compressed); 75 | }); 76 | }); 77 | describe("keyValidate()", () => { 78 | it("should not throw on valid public key", () => { 79 | const pk = PublicKey.fromBytes(validPublicKey.uncompressed); 80 | expect(pk.keyValidate()).to.be.undefined; 81 | }); 82 | }); 83 | }); 84 | }); 85 | -------------------------------------------------------------------------------- /test/unit/SecretKey.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {PublicKey, SECRET_KEY_LENGTH, SecretKey, Signature} from "../../index.js"; 3 | import {KEY_MATERIAL, SECRET_KEY_BYTES, invalidInputs} from "../__fixtures__"; 4 | import {expectEqualHex, expectNotEqualHex} from "../utils"; 5 | 6 | describe("SecretKey", () => { 7 | it("should exist", () => { 8 | expect(SecretKey).to.exist; 9 | }); 10 | describe("constructors", () => { 11 | describe("new SecretKey()", () => { 12 | it("should have a private constructor", () => { 13 | // eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-return 14 | expect(() => new (SecretKey as any)("foo-bar-baz")).to.throw( 15 | "Class contains no `constructor`, can not new it!" 
16 | ); 17 | }); 18 | }); 19 | describe("SecretKey.fromKeygen", () => { 20 | it("should create an instance from Uint8Array ikm", () => { 21 | expect(SecretKey.fromKeygen(KEY_MATERIAL)).to.be.instanceOf(SecretKey); 22 | }); 23 | it("should create the same key from the same ikm", () => { 24 | expectEqualHex(SecretKey.fromKeygen(KEY_MATERIAL).toBytes(), SecretKey.fromKeygen(KEY_MATERIAL).toBytes()); 25 | }); 26 | it("should take a second 'info' argument", () => { 27 | expectNotEqualHex( 28 | SecretKey.fromKeygen(KEY_MATERIAL, Buffer.from("some fancy info")).toBytes(), 29 | SecretKey.fromKeygen(KEY_MATERIAL).toBytes() 30 | ); 31 | }); 32 | describe("argument validation", () => { 33 | const validInfoTypes = ["undefined", "null", "string"]; 34 | for (const [type, invalid] of invalidInputs) { 35 | it(`should throw on invalid ikm type: ${type}`, () => { 36 | expect(() => SecretKey.fromKeygen(invalid)).to.throw(); 37 | }); 38 | if (!validInfoTypes.includes(type)) { 39 | it(`should throw on invalid info type: ${type}`, () => { 40 | expect(() => SecretKey.fromKeygen(KEY_MATERIAL, invalid)).to.throw(); 41 | }); 42 | } 43 | } 44 | it("should throw incorrect length ikm", () => { 45 | expect(() => SecretKey.fromKeygen(Buffer.alloc(12, "*"))).to.throw("Invalid encoding"); 46 | }); 47 | }); 48 | }); 49 | describe("SecretKey.fromBytes", () => { 50 | it("should create an instance", () => { 51 | expect(SecretKey.fromBytes(SECRET_KEY_BYTES)).to.be.instanceOf(SecretKey); 52 | }); 53 | describe("argument validation", () => { 54 | for (const [type, invalid] of invalidInputs) { 55 | it(`should throw on invalid ikm type: ${type}`, () => { 56 | expect(() => SecretKey.fromBytes(invalid)).to.throw(); 57 | }); 58 | } 59 | it("should throw incorrect length ikm", () => { 60 | expect(() => SecretKey.fromBytes(Buffer.alloc(12, "*"))).to.throw("Invalid encoding"); 61 | }); 62 | }); 63 | }); 64 | }); 65 | describe("instance methods", () => { 66 | let key: SecretKey; 67 | beforeEach(() => { 68 | key = 
SecretKey.fromKeygen(KEY_MATERIAL); 69 | }); 70 | describe("toBytes", () => { 71 | it("should toBytes the key to Uint8Array", () => { 72 | expect(key.toBytes()).to.be.instanceof(Uint8Array); 73 | }); 74 | it("should be the correct length", () => { 75 | expect(key.toBytes().length).to.equal(SECRET_KEY_LENGTH); 76 | }); 77 | it("should reconstruct the same key", () => { 78 | const serialized = key.toBytes(); 79 | expectEqualHex(SecretKey.fromBytes(serialized).toBytes(), serialized); 80 | }); 81 | }); 82 | describe("toHex", () => { 83 | it("should toHex string correctly", () => { 84 | const key = SecretKey.fromBytes(SECRET_KEY_BYTES); 85 | expectEqualHex(key.toHex(), SECRET_KEY_BYTES); 86 | }); 87 | }); 88 | describe("toPublicKey", () => { 89 | it("should create a valid PublicKey", () => { 90 | const pk = key.toPublicKey(); 91 | expect(pk).to.be.instanceOf(PublicKey); 92 | expect(pk.keyValidate()).to.be.undefined; 93 | }); 94 | it("should return the same PublicKey from the same SecretKey", () => { 95 | const sk = SecretKey.fromBytes(SECRET_KEY_BYTES); 96 | const pk1 = sk.toPublicKey().toBytes(); 97 | const pk2 = sk.toPublicKey().toBytes(); 98 | expectEqualHex(pk1, pk2); 99 | }); 100 | }); 101 | describe("sign", () => { 102 | it("should create a valid Signature", () => { 103 | const sig = SecretKey.fromKeygen(KEY_MATERIAL).sign(Buffer.from("some fancy message")); 104 | expect(sig).to.be.instanceOf(Signature); 105 | expect(sig.sigValidate()).to.be.undefined; 106 | }); 107 | }); 108 | }); 109 | }); 110 | -------------------------------------------------------------------------------- /test/unit/Signature.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {SIGNATURE_LENGTH_COMPRESSED, SIGNATURE_LENGTH_UNCOMPRESSED, SecretKey, Signature} from "../../"; 3 | import {expectEqualHex, expectNotEqualHex, sullyUint8Array} from "../utils"; 4 | import {KEY_MATERIAL, invalidInputs, validSignature} 
from "../__fixtures__"; 5 | 6 | describe("Signature", () => { 7 | it("should exist", () => { 8 | expect(Signature).to.exist; 9 | }); 10 | describe("constructor", () => { 11 | it("should have a private new Signature()", () => { 12 | // eslint-disable-next-line @typescript-eslint/no-unsafe-call, @typescript-eslint/no-unsafe-return 13 | expect(() => new (Signature as any)()).to.throw("Class contains no `constructor`, can not new it!"); 14 | }); 15 | describe("Signature.fromBytes()", () => { 16 | it("should take uncompressed byte arrays", () => { 17 | expectEqualHex(Signature.fromBytes(validSignature.uncompressed).toBytes(), validSignature.compressed); 18 | }); 19 | it("should take compressed byte arrays", () => { 20 | expectEqualHex(Signature.fromBytes(validSignature.compressed).toBytes(), validSignature.compressed); 21 | }); 22 | describe("argument validation", () => { 23 | for (const [type, invalid] of invalidInputs) { 24 | it(`should throw on invalid pkBytes type: ${type}`, () => { 25 | expect(() => Signature.fromBytes(invalid)).to.throw(); 26 | }); 27 | } 28 | it("should only take 96 or 192 bytes", () => { 29 | expect(() => Signature.fromBytes(Buffer.alloc(32, "*"))).to.throw("Invalid encoding"); 30 | }); 31 | }); 32 | it("should throw on invalid key", () => { 33 | expect(() => Signature.fromBytes(sullyUint8Array(validSignature.compressed))).to.throw("Invalid encoding"); 34 | }); 35 | }); 36 | }); 37 | describe("methods", () => { 38 | describe("toBytes", () => { 39 | const sig = SecretKey.fromKeygen(KEY_MATERIAL).sign(Buffer.from("some fancy message")); 40 | it("should toBytes the signature to Uint8Array", () => { 41 | expect(sig.toBytes()).to.be.instanceof(Uint8Array); 42 | }); 43 | it("should default to compressed serialization", () => { 44 | expectEqualHex(sig.toBytes(), sig.toBytes(true)); 45 | expectNotEqualHex(sig.toBytes(), sig.toBytes(false)); 46 | }); 47 | it("should serialize compressed to the correct length", () => { 48 | 
expect(sig.toBytes(true)).to.have.lengthOf(SIGNATURE_LENGTH_COMPRESSED); 49 | }); 50 | it("should serialize uncompressed to the correct length", () => { 51 | expect(sig.toBytes(false)).to.have.lengthOf(SIGNATURE_LENGTH_UNCOMPRESSED); 52 | }); 53 | }); 54 | describe("toHex", () => { 55 | it("should toHex string correctly", () => { 56 | const key = Signature.fromBytes(validSignature.compressed); 57 | expectEqualHex(key.toHex(true), validSignature.compressed); 58 | }); 59 | }); 60 | describe("sigValidate()", () => { 61 | it("should return undefined for valid", () => { 62 | const sig = Signature.fromBytes(validSignature.compressed); 63 | expect(sig.sigValidate()).to.be.undefined; 64 | }); 65 | it("should throw for invalid", () => { 66 | const pkSeed = Signature.fromBytes(validSignature.compressed); 67 | const sig = Signature.fromBytes(Uint8Array.from([...pkSeed.toBytes().subarray(0, 94), ...Buffer.from("a1")])); 68 | expect(() => sig.sigValidate()).to.throw("Point not in group"); 69 | }); 70 | }); 71 | }); 72 | }); 73 | -------------------------------------------------------------------------------- /test/unit/aggregatePublicKeys.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {aggregatePublicKeys, PublicKey} from "../../index.js"; 3 | import {isEqualBytes, getTestSets, CodeError} from "../utils"; 4 | import {badPublicKey} from "../__fixtures__"; 5 | 6 | describe("Aggregate Public Keys", () => { 7 | const sets = getTestSets(10); 8 | const keys = sets.map(({pk}) => pk); 9 | 10 | describe("aggregatePublicKeys()", () => { 11 | it("should return a PublicKey", () => { 12 | const agg = aggregatePublicKeys(keys); 13 | expect(agg).to.be.instanceOf(PublicKey); 14 | }); 15 | it("should be able to keyValidate PublicKey", () => { 16 | const agg = aggregatePublicKeys(keys); 17 | expect(agg.keyValidate()).to.be.undefined; 18 | }); 19 | it("should throw for invalid PublicKey", function () { 20 | try 
{ 21 | aggregatePublicKeys(keys.concat(PublicKey.fromBytes(badPublicKey)), true); 22 | expect.fail("Did not throw error for badPublicKey"); 23 | } catch (e) { 24 | const code = (e as CodeError).code ?? ""; 25 | expect(code.includes("BLST"), `${e}`).to.be.true; 26 | expect( 27 | code.includes("BLST_POINT_NOT_ON_CURVE") || 28 | code.includes("BLST_POINT_NOT_IN_GROUP") || 29 | code.includes("BLST_BAD_ENCODING") 30 | ).to.be.true; 31 | // expect((e as Error).message.endsWith("Invalid key at index 10")).to.be.true; 32 | } 33 | }); 34 | it("should return a key that is not in the keys array", () => { 35 | const agg = aggregatePublicKeys(keys); 36 | const serialized = agg.toBytes(); 37 | expect(keys.find((key) => isEqualBytes(key.toBytes(), serialized))).to.be.undefined; 38 | }); 39 | }); 40 | }); 41 | -------------------------------------------------------------------------------- /test/unit/aggregateSignatures.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {aggregateSignatures, Signature} from "../../index.js"; 3 | import {isEqualBytes, getTestSets, CodeError} from "../utils"; 4 | import {badSignature} from "../__fixtures__"; 5 | 6 | describe("Aggregate Signatures", () => { 7 | const sets = getTestSets(10); 8 | const signatures = sets.map(({sig}) => sig); 9 | 10 | describe("aggregateSignatures()", () => { 11 | it("should return a Signature", () => { 12 | const agg = aggregateSignatures(signatures); 13 | expect(agg).to.be.instanceOf(Signature); 14 | }); 15 | it("should be able to keyValidate Signature", () => { 16 | const agg = aggregateSignatures(signatures); 17 | expect(agg.sigValidate()).to.be.undefined; 18 | }); 19 | it("should throw for invalid Signature", () => { 20 | try { 21 | aggregateSignatures(signatures.concat(Signature.fromBytes(badSignature)), true); 22 | } catch (e) { 23 | expect((e as CodeError).code.startsWith("BLST")).to.be.true; 24 | expect( 25 | (e as 
CodeError).code.includes("BLST_POINT_NOT_ON_CURVE") || 26 | (e as CodeError).code.includes("BLST_BAD_ENCODING") 27 | ).to.be.true; 28 | // expect((e as Error).message.endsWith("Invalid signature at index 10")).to.be.true; 29 | } 30 | }); 31 | it("should return a key that is not in the keys array", () => { 32 | const agg = aggregateSignatures(signatures); 33 | const serialized = agg.toBytes(); 34 | expect(signatures.find((sig) => isEqualBytes(sig.toBytes(), serialized))).to.be.undefined; 35 | }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /test/unit/aggregateWithRandomness.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import { 3 | aggregatePublicKeys, 4 | aggregateSerializedSignatures, 5 | aggregateWithRandomness, 6 | asyncAggregateWithRandomness, 7 | PublicKey, 8 | Signature, 9 | verify, 10 | verifyMultipleAggregateSignatures, 11 | } from "../../index.js"; 12 | import {expectNotEqualHex, getTestSet, getTestSetsSameMessage} from "../utils"; 13 | import {G1_POINT_AT_INFINITY, G2_POINT_AT_INFINITY} from "../__fixtures__"; 14 | 15 | describe("Aggregate With Randomness", () => { 16 | const sameMessageSets = getTestSetsSameMessage(10); 17 | const msg = sameMessageSets.msg; 18 | const sets = sameMessageSets.sets.map((s) => ({ 19 | pk: s.pk, 20 | sig: s.sig.toBytes(), 21 | })); 22 | const randomSet = getTestSet(20); 23 | const infinityPublicKey = Buffer.from(G1_POINT_AT_INFINITY, "hex"); 24 | 25 | before(() => { 26 | // make sure sets are valid before starting 27 | expect(() => PublicKey.fromBytes(infinityPublicKey).keyValidate()).to.throw("Public key is infinity"); 28 | expect(verify(msg, sets[0].pk, Signature.fromBytes(sets[0].sig))).to.be.true; 29 | expect(verifyMultipleAggregateSignatures(sets.map((s) => ({msg, pk: s.pk, sig: Signature.fromBytes(s.sig)})))).to.be 30 | .true; 31 | expectNotEqualHex(msg, randomSet.msg); 32 | 
expect(verify(randomSet.msg, randomSet.pk, randomSet.sig)).to.be.true; 33 | expect(verifyMultipleAggregateSignatures([randomSet])).to.be.true; 34 | }); 35 | 36 | describe("aggregateWithRandomness()", () => { 37 | it("should not accept an empty array argument", () => { 38 | try { 39 | aggregateWithRandomness([]); 40 | expect.fail("aggregateWithRandomness with empty list should throw"); 41 | } catch (e) { 42 | expect((e as any).code).to.equal("BLST_AGGR_TYPE_MISMATCH"); 43 | } 44 | }); 45 | it("should accept an array of {pk: PublicKey, sig: Uint8Array}", () => { 46 | expect(() => aggregateWithRandomness([{pk: sets[0].pk, sig: sets[0].sig}])).not.to.throw(); 47 | // invalid publicKey property name 48 | expect(() => aggregateWithRandomness([{publicKey: sets[0].pk, sig: sets[0].sig} as any])).to.throw( 49 | "Missing field `pk`" 50 | ); 51 | // // invalid signature property name 52 | expect(() => aggregateWithRandomness([{pk: sets[0].pk, signature: sets[0].sig} as any])).to.throw( 53 | "Missing field `sig`" 54 | ); 55 | // // invalid publicKey property value 56 | expect(() => aggregateWithRandomness([{pk: 1 as any, sig: sets[0].sig}])).to.throw(); 57 | // // invalid signature property value 58 | expect(() => aggregateWithRandomness([{pk: sets[0].pk, sig: "bar" as any}])).to.throw(); 59 | }); 60 | it("should throw for invalid serialized", () => { 61 | expect(() => 62 | aggregateWithRandomness( 63 | sets.concat({ 64 | pk: sets[0].pk, 65 | //TODO: (@matthewkeil) this throws error "Public key is infinity" not signature because there is only one blst error 66 | sig: G2_POINT_AT_INFINITY, 67 | } as any) 68 | ) 69 | ).to.throw(); 70 | }); 71 | it("should return a {pk: PublicKey, sig: Signature} object", () => { 72 | const agg = aggregateWithRandomness(sets); 73 | expect(agg).to.be.instanceOf(Object); 74 | 75 | expect(agg).to.haveOwnProperty("pk"); 76 | expect(agg.pk).to.be.instanceOf(PublicKey); 77 | expect(() => agg.pk.keyValidate()).not.to.throw(); 78 | 79 | 
expect(agg).to.haveOwnProperty("sig");
      expect(agg.sig).to.be.instanceOf(Signature);
      expect(() => agg.sig.sigValidate()).not.to.throw();
    });
    it("should add randomness to aggregated publicKey", () => {
      // Randomized aggregation must not equal plain aggregation of the same keys.
      const withoutRandomness = aggregatePublicKeys(sets.map(({pk}) => pk));
      const withRandomness = aggregateWithRandomness(sets).pk;
      expectNotEqualHex(withRandomness, withoutRandomness);
    });
    it("should add randomness to aggregated signature", () => {
      const withoutRandomness = aggregateSerializedSignatures(sets.map(({sig}) => sig));
      const withRandomness = aggregateWithRandomness(sets).sig;
      expectNotEqualHex(withRandomness, withoutRandomness);
    });
    it("should produce verifiable set", () => {
      const {pk, sig} = aggregateWithRandomness(sets);
      // FIX: bare expect(...) makes no assertion; assert the verification result.
      expect(verify(msg, pk, sig)).to.be.true;
    });
    it("should not validate for different message", async () => {
      const {pk, sig} = aggregateWithRandomness(sets);
      expect(verify(randomSet.msg, pk, sig)).to.be.false;
    });
    it("should not validate included key/sig for different message", async () => {
      const {pk, sig} = aggregateWithRandomness([...sets, {pk: randomSet.pk, sig: randomSet.sig.toBytes()}]);
      expect(verify(msg, pk, sig)).to.be.false;
    });
  });
  describe("asyncAggregateWithRandomness()", () => {
    it("should not accept an empty array argument", async () => {
      try {
        await asyncAggregateWithRandomness([]);
        expect.fail("asyncAggregateWithRandomness with empty list should throw");
      } catch (e) {
        expect((e as any).code).to.equal("BLST_AGGR_TYPE_MISMATCH");
      }
    });
    describe("should accept an array of {pk: PublicKey, sig: Uint8Array}", () => {
      it("should handle valid case", () => {
        // NOTE(review): argument validation in the native binding appears to
        // throw synchronously, which is what .to.throw can observe — confirm.
        expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, sig: sets[0].sig}])).not.to.throw();
      });
      it("should handle invalid publicKey property name", () => {
expect(() => asyncAggregateWithRandomness([{publicKey: sets[0].pk, sig: sets[0].sig} as any])).to.throw( 121 | "Missing field `pk`" 122 | ); 123 | }); 124 | it("should handle invalid publicKey property value", () => { 125 | expect(() => asyncAggregateWithRandomness([{pk: 1 as any, sig: sets[0].sig}])).to.throw(); 126 | }); 127 | it("should handle invalid signature property name", () => { 128 | expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, signature: sets[0].sig} as any])).to.throw( 129 | "Missing field `sig`" 130 | ); 131 | }); 132 | it("should handle invalid signature property value", () => { 133 | expect(() => asyncAggregateWithRandomness([{pk: sets[0].pk, sig: "bar" as any}])).to.throw(); 134 | }); 135 | }); 136 | it("should throw for invalid serialized", async () => { 137 | try { 138 | await asyncAggregateWithRandomness( 139 | sets.concat({ 140 | pk: sets[0].pk, 141 | //TODO: (@matthewkeil) this throws error "Public key is infinity" not signature because there is only one blst error 142 | sig: G2_POINT_AT_INFINITY, 143 | } as any) 144 | ); 145 | expect.fail("should not get here"); 146 | } catch (err) { 147 | expect((err as Error).message).to.contain("Public key is infinity"); 148 | } 149 | }); 150 | it("should return a {pk: PublicKey, sig: Signature} object", async () => { 151 | const aggPromise = asyncAggregateWithRandomness(sets); 152 | expect(aggPromise).to.be.instanceOf(Promise); 153 | const agg = await aggPromise; 154 | expect(agg).to.be.instanceOf(Object); 155 | 156 | expect(agg).to.haveOwnProperty("pk"); 157 | expect(agg.pk).to.be.instanceOf(PublicKey); 158 | expect(() => agg.pk.keyValidate()).not.to.throw(); 159 | 160 | expect(agg).to.haveOwnProperty("sig"); 161 | expect(agg.sig).to.be.instanceOf(Signature); 162 | expect(() => agg.sig.sigValidate()).not.to.throw(); 163 | }); 164 | it("should add randomness to aggregated publicKey", async () => { 165 | const withoutRandomness = aggregatePublicKeys(sets.map(({pk}) => pk)); 166 | const 
withRandomness = await asyncAggregateWithRandomness(sets);
      expectNotEqualHex(withRandomness.pk, withoutRandomness);
    });
    it("should add randomness to aggregated signature", async () => {
      const withoutRandomness = aggregateSerializedSignatures(sets.map(({sig}) => sig));
      const withRandomness = await asyncAggregateWithRandomness(sets);
      expectNotEqualHex(withRandomness.sig, withoutRandomness);
    });
    it("should produce verifiable set", async () => {
      const {pk, sig} = await asyncAggregateWithRandomness(sets);
      // FIX: bare expect(...) makes no assertion; assert the verification result.
      expect(verify(msg, pk, sig)).to.be.true;
    });
    it("should not validate for different message", async () => {
      const {pk, sig} = await asyncAggregateWithRandomness(sets);
      expect(verify(randomSet.msg, pk, sig)).to.be.false;
    });
    it("should not validate included key/sig for different message", async () => {
      const {pk, sig} = await asyncAggregateWithRandomness([...sets, {pk: randomSet.pk, sig: randomSet.sig.toBytes()}]);
      expect(verify(msg, pk, sig)).to.be.false;
    });
  });
});

--------------------------------------------------------------------------------
/test/unit/bindings.test.ts:
--------------------------------------------------------------------------------

import {expect} from "chai";
import * as bindings from "../..";

describe("bindings", () => {
  describe("exports", () => {
    // Track every export: each test below deletes the names it accounts for,
    // and the after() hook asserts nothing unexpected remains.
    const exports = new Set(Object.keys(bindings));
    exports.delete("path");
    exports.delete("default");

    const expectedFunctions = [
      "aggregatePublicKeys",
      "aggregateSignatures",
      "aggregateSerializedPublicKeys",
      "aggregateSerializedSignatures",
      "aggregateWithRandomness",
      "asyncAggregateWithRandomness",
      "verify",
      "aggregateVerify",
      "fastAggregateVerify",
      "verifyMultipleAggregateSignatures",
    ];
    const expectedClasses = ["PublicKey", "SecretKey", "Signature"];
    const expectedConstants = [
24 | "SECRET_KEY_LENGTH", 25 | "PUBLIC_KEY_LENGTH_COMPRESSED", 26 | "PUBLIC_KEY_LENGTH_UNCOMPRESSED", 27 | "SIGNATURE_LENGTH_COMPRESSED", 28 | "SIGNATURE_LENGTH_UNCOMPRESSED", 29 | ]; 30 | after(() => { 31 | expect(exports.size).to.equal(0); 32 | }); 33 | it("should export all the expected functions", () => { 34 | for (const expected of expectedFunctions) { 35 | if (!exports.has(expected)) { 36 | throw new Error(`Missing export: ${expected}`); 37 | } 38 | exports.delete(expected); 39 | } 40 | }); 41 | it("should export all the expected classes", () => { 42 | for (const expected of expectedClasses) { 43 | if (!exports.has(expected)) { 44 | throw new Error(`Missing export: ${expected}`); 45 | } 46 | exports.delete(expected); 47 | } 48 | }); 49 | it("should export all the expected constants", () => { 50 | for (const expected of expectedConstants) { 51 | if (!exports.has(expected)) { 52 | throw new Error(`Missing export: ${expected}`); 53 | } 54 | exports.delete(expected); 55 | } 56 | }); 57 | }); 58 | describe("constants", () => { 59 | const { 60 | SECRET_KEY_LENGTH, 61 | PUBLIC_KEY_LENGTH_COMPRESSED, 62 | PUBLIC_KEY_LENGTH_UNCOMPRESSED, 63 | SIGNATURE_LENGTH_COMPRESSED, 64 | SIGNATURE_LENGTH_UNCOMPRESSED, 65 | } = bindings; 66 | it("SECRET_KEY_LENGTH", () => { 67 | expect(SECRET_KEY_LENGTH).to.be.a("number"); 68 | }); 69 | it("PUBLIC_KEY_LENGTH_UNCOMPRESSED", () => { 70 | expect(PUBLIC_KEY_LENGTH_UNCOMPRESSED).to.be.a("number"); 71 | }); 72 | it("PUBLIC_KEY_LENGTH_COMPRESSED", () => { 73 | expect(PUBLIC_KEY_LENGTH_COMPRESSED).to.be.a("number"); 74 | }); 75 | it("SIGNATURE_LENGTH_COMPRESSED", () => { 76 | expect(SIGNATURE_LENGTH_COMPRESSED).to.be.a("number"); 77 | }); 78 | it("SIGNATURE_LENGTH_UNCOMPRESSED", () => { 79 | expect(SIGNATURE_LENGTH_UNCOMPRESSED).to.be.a("number"); 80 | }); 81 | }); 82 | }); 83 | -------------------------------------------------------------------------------- /test/unit/utils.test.ts: 
-------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {verify} from "../../index.js"; 3 | import {arrayOfIndexes, chunkifyMaximizeChunkSize, getTestSet} from "../utils"; 4 | 5 | describe("utils", () => { 6 | describe("helpers", () => { 7 | it("should build valid test sets", () => { 8 | const set = getTestSet(); 9 | expect(verify(set.msg, set.pk, set.sig)).to.be.true; 10 | }); 11 | }); 12 | describe("chunkifyMaximizeChunkSize", () => { 13 | const minPerChunk = 3; 14 | const testCases = [ 15 | [[0]], 16 | [[0, 1]], 17 | [[0, 1, 2]], 18 | [[0, 1, 2, 3]], 19 | [[0, 1, 2, 3, 4]], 20 | [ 21 | [0, 1, 2], 22 | [3, 4, 5], 23 | ], 24 | [ 25 | [0, 1, 2, 3], 26 | [4, 5, 6], 27 | ], 28 | [ 29 | [0, 1, 2, 3], 30 | [4, 5, 6, 7], 31 | ], 32 | ]; 33 | 34 | for (const [i, testCase] of testCases.entries()) { 35 | it(`array len ${i + 1}`, () => { 36 | const arr = arrayOfIndexes(0, i); 37 | const chunks = chunkifyMaximizeChunkSize(arr, minPerChunk); 38 | expect(chunks).to.deep.equal(testCase); 39 | }); 40 | } 41 | }); 42 | }); 43 | -------------------------------------------------------------------------------- /test/unit/verify.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {aggregateVerify, fastAggregateVerify, verify} from "../../index.js"; 3 | import {sullyUint8Array, getTestSet} from "../utils"; 4 | import {TestSet} from "../utils/types"; 5 | 6 | describe("Verify", () => { 7 | let testSet: TestSet; 8 | before(() => { 9 | testSet = getTestSet(); 10 | }); 11 | describe("verify", () => { 12 | it("should return a boolean", () => { 13 | expect(verify(testSet.msg, testSet.pk, testSet.sig)).to.be.a("boolean"); 14 | }); 15 | describe("should default to false", () => { 16 | it("should handle invalid message", () => { 17 | expect(verify(sullyUint8Array(testSet.msg), testSet.pk, testSet.sig)).to.be.false; 18 | }); 19 | }); 20 | 
it("should return true for valid sets", () => { 21 | expect(verify(testSet.msg, testSet.pk, testSet.sig)).to.be.true; 22 | }); 23 | }); 24 | }); 25 | 26 | describe("Aggregate Verify", () => { 27 | let testSet: TestSet; 28 | before(() => { 29 | testSet = getTestSet(); 30 | }); 31 | describe("aggregateVerify", () => { 32 | it("should return a boolean", () => { 33 | expect(aggregateVerify([testSet.msg], [testSet.pk], testSet.sig)).to.be.a("boolean"); 34 | }); 35 | describe("should default to false", () => { 36 | it("should handle invalid message", () => { 37 | expect(aggregateVerify([sullyUint8Array(testSet.msg)], [testSet.pk], testSet.sig)).to.be.false; 38 | }); 39 | }); 40 | it("should return true for valid sets", () => { 41 | expect(aggregateVerify([testSet.msg], [testSet.pk], testSet.sig)).to.be.true; 42 | }); 43 | }); 44 | }); 45 | 46 | describe("Fast Aggregate Verify", () => { 47 | let testSet: TestSet; 48 | before(() => { 49 | testSet = getTestSet(); 50 | }); 51 | describe("fastAggregateVerify", () => { 52 | it("should return a boolean", () => { 53 | expect(fastAggregateVerify(testSet.msg, [testSet.pk], testSet.sig)).to.be.a("boolean"); 54 | }); 55 | describe("should default to false", () => { 56 | it("should handle invalid message", () => { 57 | expect(fastAggregateVerify(sullyUint8Array(testSet.msg), [testSet.pk], testSet.sig)).to.be.false; 58 | }); 59 | }); 60 | it("should return true for valid sets", () => { 61 | expect(fastAggregateVerify(testSet.msg, [testSet.pk], testSet.sig)).to.be.true; 62 | }); 63 | }); 64 | }); 65 | -------------------------------------------------------------------------------- /test/unit/verifyMultipleAggregateSignatures.test.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {verifyMultipleAggregateSignatures} from "../../index.js"; 3 | import {getTestSet, getTestSets} from "../utils"; 4 | 5 | describe("Verify Multiple Aggregate Signatures", () => { 6 | 
describe("verifyMultipleAggregateSignatures", () => { 7 | it("should return a boolean", () => { 8 | expect(verifyMultipleAggregateSignatures([])).to.be.a("boolean"); 9 | }); 10 | it("should default to false", () => { 11 | expect(verifyMultipleAggregateSignatures([])).to.be.false; 12 | }); 13 | it("should return true for valid sets", () => { 14 | expect(verifyMultipleAggregateSignatures(getTestSets(6))).to.be.true; 15 | }); 16 | it("should return false for invalid sets", () => { 17 | const sets = getTestSets(6); 18 | const randomSet = getTestSet(20); 19 | sets[0].sig = randomSet.sig; 20 | expect(verifyMultipleAggregateSignatures(sets)).to.be.false; 21 | }); 22 | }); 23 | }); 24 | -------------------------------------------------------------------------------- /test/utils/helpers.ts: -------------------------------------------------------------------------------- 1 | import {expect} from "chai"; 2 | import {BufferLike, InstanceTestCases} from "./types"; 3 | 4 | function toHexString(bytes: BufferLike): string { 5 | if (typeof bytes === "string") return bytes; 6 | if (bytes instanceof Buffer) return bytes.toString("hex"); 7 | if (bytes instanceof Uint8Array) return Buffer.from(bytes).toString("hex"); 8 | if (typeof bytes.toBytes === "function") return Buffer.from(bytes.toBytes()).toString("hex"); 9 | throw Error("toHexString only accepts BufferLike types"); 10 | } 11 | 12 | export function toHex(bytes: BufferLike): string { 13 | const hex = toHexString(bytes); 14 | if (hex.startsWith("0x")) return hex; 15 | return "0x" + hex; 16 | } 17 | 18 | export function fromHex(hexString: string): Buffer { 19 | if (hexString.startsWith("0x")) hexString = hexString.slice(2); 20 | return Buffer.from(hexString, "hex"); 21 | } 22 | 23 | export function isEqualBytes(value: BufferLike, expected: BufferLike): boolean { 24 | return toHex(value) === toHex(expected); 25 | } 26 | 27 | export function expectEqualHex(value: BufferLike, expected: BufferLike): void { 28 | 
expect(toHex(value)).to.equal(toHex(expected)); 29 | } 30 | 31 | export function expectNotEqualHex(value: BufferLike, expected: BufferLike): void { 32 | expect(toHex(value)).to.not.equal(toHex(expected)); 33 | } 34 | 35 | export function getFilledUint8(length: number, fillWith: string | number | Buffer = "*"): Uint8Array { 36 | return Uint8Array.from(Buffer.alloc(length, fillWith)); 37 | } 38 | 39 | export function sullyUint8Array(bytes: Uint8Array): Uint8Array { 40 | return Uint8Array.from( 41 | Buffer.from([...Uint8Array.prototype.slice.call(bytes, 8), ...Buffer.from("0123456789abcdef", "hex")]) 42 | ); 43 | } 44 | 45 | export function arrayOfIndexes(start: number, end: number): number[] { 46 | const arr: number[] = []; 47 | for (let i = start; i <= end; i++) arr.push(i); 48 | return arr; 49 | } 50 | 51 | export function shuffle(array: T[]): T[] { 52 | let currentIndex = array.length, 53 | randomIndex; 54 | 55 | while (currentIndex != 0) { 56 | randomIndex = Math.floor(Math.random() * currentIndex); 57 | currentIndex--; 58 | 59 | [array[currentIndex], array[randomIndex]] = [array[randomIndex], array[currentIndex]]; 60 | } 61 | 62 | return array; 63 | } 64 | 65 | export function chunkifyMaximizeChunkSize(arr: T[], minPerChunk: number): T[][] { 66 | const chunkCount = Math.floor(arr.length / minPerChunk); 67 | if (chunkCount <= 1) { 68 | return [arr]; 69 | } 70 | 71 | // Prefer less chunks of bigger size 72 | const perChunk = Math.ceil(arr.length / chunkCount); 73 | const arrArr: T[][] = []; 74 | 75 | for (let i = 0; i < arr.length; i += perChunk) { 76 | arrArr.push(arr.slice(i, i + perChunk)); 77 | } 78 | 79 | return arrArr; 80 | } 81 | 82 | /** 83 | * Enforce tests for all instance methods and run them 84 | */ 85 | export function runInstanceTestCases( 86 | instanceTestCases: InstanceTestCases, 87 | getInstance: () => InstanceType 88 | ): void { 89 | for (const [key, testCases] of Object.entries(instanceTestCases)) { 90 | const methodKey = key as keyof 
InstanceType; 91 | for (const testCase of testCases) { 92 | it(`${String(methodKey)}: ${testCase.id || ""}`, () => { 93 | // Get a new fresh instance for this test 94 | const instance = testCase.instance || getInstance(); 95 | if (typeof instance[methodKey] !== "function") throw Error(`Method ${String(methodKey)} does not exist`); 96 | const res = (instance[methodKey] as (...args: any) => any)(...testCase.args); 97 | if (!res) { 98 | // OK 99 | } else if (res.serialize || res instanceof Uint8Array) { 100 | expectEqualHex(res, testCase.res); 101 | } else { 102 | expect(res).to.deep.equal(testCase.res); 103 | } 104 | }); 105 | } 106 | } 107 | } 108 | -------------------------------------------------------------------------------- /test/utils/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./memory"; 2 | export * from "./helpers"; 3 | export * from "./testSets"; 4 | export * from "./types"; 5 | -------------------------------------------------------------------------------- /test/utils/memory/index.ts: -------------------------------------------------------------------------------- 1 | export * from "./types"; 2 | export * from "./testRunner"; 3 | -------------------------------------------------------------------------------- /test/utils/memory/testRunner.ts: -------------------------------------------------------------------------------- 1 | import {MemoryAtInstance, MemoryTestOptions, MemoryTestResult, MemoryUsageResults, RegressionResults} from "./types"; 2 | 3 | /** 4 | * From https://github.com/simple-statistics/simple-statistics/blob/d0d177baf74976a2421638bce98ab028c5afb537/src/linear_regression.js 5 | * 6 | * [Simple linear regression](http://en.wikipedia.org/wiki/Simple_linear_regression) 7 | * is a simple way to find a fitted line between a set of coordinates. 8 | * This algorithm finds the slope and y-intercept of a regression line 9 | * using the least sum of squares. 
10 | * 11 | * @param data an array of two-element of arrays, 12 | * like `[[0, 1], [2, 3]]` 13 | * @returns object containing slope and intersect of regression line 14 | * @example 15 | * linearRegression([[0, 0], [1, 1]]); // => { slope: 1, yIntercept: 0 } 16 | */ 17 | function linearRegression(coordinates: MemoryUsageResults): RegressionResults { 18 | // Store data length in a local variable to reduce 19 | // repeated object property lookups 20 | const dataLength = coordinates.length; 21 | 22 | //if there's only one point, arbitrarily choose a slope of 0 23 | //and a y-intercept of whatever the y of the initial point is 24 | if (dataLength === 1) { 25 | return { 26 | slope: 0, 27 | yIntercept: coordinates[0][1], 28 | }; 29 | } 30 | 31 | // Initialize our sums and scope the `slope` and `yIntercept` 32 | // variables that define the line. 33 | let sumX = 0; 34 | let sumY = 0; 35 | let sumXX = 0; 36 | let sumXY = 0; 37 | 38 | // Gather the sum of all x values, the sum of all 39 | // y values, and the sum of x^2 and (x*y) for each 40 | // value. 41 | // 42 | // In math notation, these would be SS_x, SS_y, SS_xx, and SS_xy 43 | for (let i = 0; i < dataLength; i++) { 44 | const [x, y] = coordinates[i]; 45 | sumX += x; 46 | sumY += y; 47 | sumXX += x * x; 48 | sumXY += x * y; 49 | } 50 | 51 | const slope = (dataLength * sumXY - sumX * sumY) / (dataLength * sumXX - sumX * sumX); 52 | const yIntercept = sumY / dataLength - (slope * sumX) / dataLength; 53 | 54 | return { 55 | slope, 56 | yIntercept, 57 | }; 58 | } 59 | 60 | /** 61 | * Run the garbage collector twice. 
Optionally can add a delay to allow 62 | * other promise resolution to occur before the second run of the GC 63 | * 64 | * @param gcDelay - Optional delay, in milliseconds, before 2nd run of GC 65 | */ 66 | async function runGarbageCollector(gcDelay = 100): Promise { 67 | global.gc?.(); 68 | if (gcDelay && gcDelay !== 0) { 69 | await new Promise((r) => setTimeout(r, gcDelay)); 70 | } 71 | global.gc?.(); 72 | } 73 | 74 | const MAX_SAMPLES = 10000; 75 | /** 76 | * When is a good time to stop the benchmark? A naive answer is after N 77 | * milliseconds or M runs. This code aims to stop the benchmark when the average 78 | * memory growth has converged at a value within a given convergence factor. It 79 | * stores two past values to be able to compute a very rough linear and 80 | * quadratic convergence. 81 | * 82 | * Gets total memory usage allocated by isolate. Can only account for usage the 83 | * isolate knows about and does not take into account RSS. Runs garbage collector 84 | * and waits for several manual passes to ensure all temporary objects are 85 | * collected before size is estimated. 
86 | */ 87 | async function memoryTestRunner({ 88 | gcDelay = 100, 89 | sampleEvery = 1000, 90 | warmUpIterations = 100, 91 | maxInstances = Infinity, 92 | convergeFactor = 0.2 / 100, // 0.2% 93 | maxRssBytes, 94 | maxHeapBytes, 95 | getInstance, 96 | computeUsedMemory = (usage) => usage.heapUsed + usage.external + usage.arrayBuffers, 97 | }: MemoryTestOptions): Promise { 98 | // Array to store references to created instances (to prevent garbage collection) 99 | const refs: T[] = []; 100 | 101 | // Pre-Allocate array to store memory data at each sample point 102 | const memoryData: MemoryAtInstance[] = new Array(MAX_SAMPLES); 103 | for (let k = 0; k < MAX_SAMPLES; k++) { 104 | memoryData[k] = [0, 0, 0]; 105 | } 106 | 107 | // Preallocations to not skew memory usage 108 | let sampleIndex = 0; 109 | let i = 0; 110 | 111 | // Previous slope values for convergence calculation (2 prior and prior to current) 112 | let prevM0 = 0; 113 | let prevM1 = 0; 114 | 115 | // Warm-up phase to stabilize Isolate and testing framework memory usage 116 | for (let j = 0; j < warmUpIterations; j++) { 117 | getInstance(); 118 | } 119 | await runGarbageCollector(gcDelay); 120 | let memoryUsage = process.memoryUsage(); 121 | const startingUsage = computeUsedMemory(memoryUsage); 122 | const startingRss = memoryUsage.rss; 123 | 124 | // All variable beyond here must be pre-allocated or temporary to not affect 125 | // results 126 | loop: for (i = 0; i < maxInstances; i++) { 127 | refs.push(getInstance()); 128 | 129 | if (i % sampleEvery === 0) { 130 | await runGarbageCollector(gcDelay); 131 | memoryUsage = process.memoryUsage(); 132 | // do not create new tuple array, just reset inner tuple values 133 | memoryData[sampleIndex][0] = i; 134 | memoryData[sampleIndex][1] = computeUsedMemory(memoryUsage); 135 | memoryData[sampleIndex][2] = memoryUsage.rss; 136 | sampleIndex++; 137 | 138 | if ( 139 | sampleIndex >= MAX_SAMPLES || 140 | (maxRssBytes && memoryUsage.rss > maxRssBytes) || 141 | 
(maxHeapBytes && memoryUsage.heapTotal > maxHeapBytes) 142 | ) { 143 | break loop; 144 | } 145 | 146 | if (memoryData.length > 1) { 147 | const {slope} = linearRegression(memoryData.slice(0, sampleIndex)); 148 | 149 | // Compute convergence (1st order + 2nd order) 150 | const a = prevM0; 151 | const b = prevM1; 152 | const c = slope; 153 | 154 | /** 155 | * Approx linear convergence: Absolute difference between the current 156 | * and previous-to-previous slope. It measures how much the slope is 157 | * changing in a linear fashion. 158 | */ 159 | const convergenceLinear = Math.abs(c - a); 160 | 161 | /** 162 | * Approx quadratic convergence: Absolute difference between the 163 | * previous slope and the average of current and previous-to-previous 164 | * slopes. It gives an indication of the curvature or the rate of change 165 | * of the slope itself. 166 | */ 167 | const convergenceQuadratic = Math.abs(b - (a + c) / 2); 168 | 169 | /** 170 | * Take the greater of both to ensure both linear and quadratic 171 | * convergence are below the convergeFactor 172 | */ 173 | const convergence = Math.max(convergenceLinear, convergenceQuadratic) / a; 174 | 175 | /** 176 | * Stop the benchmark if the rate of change of memory usage has 177 | * stabilized sufficiently 178 | */ 179 | if (convergence < convergeFactor) { 180 | break loop; 181 | } 182 | 183 | prevM0 = prevM1; 184 | prevM1 = slope; 185 | } 186 | } 187 | } 188 | 189 | const numberOfSamples = sampleIndex - 1; 190 | const [instancesCreated, endingUsage, endingRss] = memoryData[numberOfSamples]; 191 | 192 | return { 193 | averageBytesPerInstance: linearRegression(memoryData.slice(0, sampleIndex)).slope, 194 | numberOfSamples, 195 | instancesCreated, 196 | totalMemoryAllocated: endingUsage - startingUsage, 197 | rssAllocated: endingRss - startingRss, 198 | }; 199 | } 200 | 201 | function stringifyResultByOrderOfMagnitude(bytes: number): string { 202 | let val = bytes / 1e9; 203 | if (val > 1) return `${val.toFixed(2)} 
gb`; 204 | val = bytes / 1e6; 205 | if (val > 1) return `${val.toFixed(2)} mb`; 206 | val = bytes / 1e3; 207 | if (val > 1) return `${val.toFixed(2)} kb`; 208 | return `${Math.ceil(bytes)} b`; 209 | } 210 | 211 | function formatRunResult(test: MemoryTestOptions, result: MemoryTestResult, titlePadding?: number): string { 212 | const title = titlePadding ? test.id.padEnd(titlePadding) : test.id; 213 | const segments = [title, `${stringifyResultByOrderOfMagnitude(result.averageBytesPerInstance)} / instance`]; 214 | if (test.displayRunInfo) { 215 | segments.push(`${result.totalMemoryAllocated} allocated by ${result.instancesCreated} instances`); 216 | segments.push(`${result.rssAllocated} rss allocated`); 217 | } 218 | return segments.join(" - "); 219 | } 220 | 221 | export async function memoryTest( 222 | testCases: MemoryTestOptions[], 223 | options?: Partial> 224 | ): Promise { 225 | if (global.gc === undefined) { 226 | throw Error("Must enable global.gc with --expose-gc flag when starting node"); 227 | } 228 | const longestId = Math.max(...testCases.map(({id}) => id.length)); 229 | 230 | for (const testRun of testCases) { 231 | const test = {...options, ...testRun}; 232 | const result = await memoryTestRunner(test); 233 | // eslint-disable-next-line no-console 234 | console.log(formatRunResult(test, result, longestId)); 235 | } 236 | } 237 | -------------------------------------------------------------------------------- /test/utils/memory/types.ts: -------------------------------------------------------------------------------- 1 | export interface RegressionResults { 2 | slope: number; 3 | yIntercept: number; 4 | } 5 | 6 | export type UsageComputation = (memoryUsage: NodeJS.MemoryUsage) => number; 7 | 8 | export type GetInstanceFunction = () => T; 9 | 10 | export interface MemoryUsage extends NodeJS.MemoryUsage { 11 | computed: number; 12 | } 13 | 14 | export interface MemoryTestResult { 15 | averageBytesPerInstance: number; 16 | instancesCreated: number; 17 | 
totalMemoryAllocated: number; 18 | numberOfSamples: number; 19 | rssAllocated: number; 20 | } 21 | 22 | export type MemoryTestOptions = { 23 | /** 24 | * Name of the test run 25 | */ 26 | id: string; 27 | 28 | /** 29 | * Allocation function for a single instance of the object to be tested 30 | */ 31 | getInstance: GetInstanceFunction; 32 | 33 | /** 34 | * How to compute the total memory usage. Defaults to 35 | * `heapUsed + external + arrayBuffers` 36 | */ 37 | computeUsedMemory?: UsageComputation; 38 | 39 | /** 40 | * 41 | */ 42 | rounds?: number; 43 | 44 | /** 45 | * 46 | */ 47 | instancesPerRound?: number; 48 | 49 | /** 50 | * Sample memory usage every `sampleEvery` instances balancing detail against 51 | * overhead. Optimal value depends on the object's memory footprint and test scope 52 | */ 53 | sampleEvery?: number; 54 | 55 | /** 56 | * Stop if `process.memoryUsage().rss > maxRssBytes`. 57 | */ 58 | maxRssBytes?: number; 59 | 60 | /** 61 | * Stop if `process.memoryUsage().heapTotal > maxHeapBytes`. 62 | */ 63 | maxHeapBytes?: number; 64 | 65 | /** 66 | * Stop after creating `maxInstances` instances. 67 | */ 68 | maxInstances?: number; 69 | 70 | /** 71 | * 72 | */ 73 | displayRunInfo?: boolean; 74 | 75 | /** 76 | * 77 | */ 78 | convergeFactor?: number; 79 | 80 | /** 81 | * Number of instances to create and garbage collect before 82 | * starting measurement 83 | */ 84 | warmUpIterations?: number; 85 | 86 | /** 87 | * Optional delay, in milliseconds, for garbage collection to be run 88 | * for the second time 89 | */ 90 | gcDelay?: number; 91 | }; 92 | 93 | /** 94 | * Used for linear regression calculation. MemoryAtInstance[0] is the instance 95 | * count and MemoryAtInstance[1] is the memory usage at that instance count. 
96 | */ 97 | export type MemoryAtInstance = [number, number, number]; 98 | export type MemoryUsageResults = MemoryAtInstance[]; 99 | -------------------------------------------------------------------------------- /test/utils/testSets.ts: -------------------------------------------------------------------------------- 1 | import crypto from "crypto"; 2 | import {SECRET_KEY_LENGTH, SecretKey, Signature} from "../../index.js"; 3 | import {TestSet, SerializedSet, SameMessageTestSets} from "./types"; 4 | import {arrayOfIndexes} from "./helpers"; 5 | 6 | const DEFAULT_TEST_MESSAGE = Uint8Array.from(Buffer.from("test-message")); 7 | 8 | export function buildTestSetFromMessage(msg: Uint8Array = DEFAULT_TEST_MESSAGE): TestSet { 9 | const sk = SecretKey.fromKeygen(crypto.randomBytes(SECRET_KEY_LENGTH)); 10 | const pk = sk.toPublicKey(); 11 | const sig = sk.sign(msg); 12 | try { 13 | pk.keyValidate(); 14 | } catch { 15 | console.log(">>>\n>>>\n>>> Invalid Key Found in a TestSet\n>>>\n>>>"); 16 | return buildTestSetFromMessage(msg); 17 | } 18 | try { 19 | sig.sigValidate(); 20 | } catch { 21 | console.log(">>>\n>>>\n>>> Invalid Signature Found in a TestSet\n>>>\n>>>"); 22 | return buildTestSetFromMessage(msg); 23 | } 24 | return { 25 | msg, 26 | sk, 27 | pk, 28 | sig, 29 | }; 30 | } 31 | 32 | const testSets = new Map(); 33 | function buildTestSet(i: number): TestSet { 34 | const message = crypto.randomBytes(32); 35 | const set = buildTestSetFromMessage(message); 36 | testSets.set(i, set); 37 | return set; 38 | } 39 | 40 | export function getTestSet(i: number = 0): TestSet { 41 | const set = testSets.get(i); 42 | if (set) { 43 | return set; 44 | } 45 | return buildTestSet(i); 46 | } 47 | 48 | export function getTestSets(count: number): TestSet[] { 49 | return arrayOfIndexes(0, count - 1).map(getTestSet); 50 | } 51 | 52 | export const commonMessage = crypto.randomBytes(32); 53 | 54 | const commonMessageSignatures = new Map(); 55 | export function getTestSetSameMessage(i: number 
= 1): TestSet { 56 | const set = getTestSet(i); 57 | let sig = commonMessageSignatures.get(i); 58 | if (!sig) { 59 | sig = set.sk.sign(commonMessage); 60 | commonMessageSignatures.set(i, sig); 61 | } 62 | return { 63 | msg: commonMessage, 64 | sk: set.sk, 65 | pk: set.pk, 66 | sig, 67 | }; 68 | } 69 | 70 | export function getTestSetsSameMessage(count: number): SameMessageTestSets { 71 | const sets = arrayOfIndexes(0, count - 1).map(getTestSetSameMessage); 72 | return { 73 | msg: sets[0].msg, 74 | sets: sets.map(({sk, pk, sig}) => ({sk, pk, sig})), 75 | }; 76 | } 77 | 78 | const serializedSets = new Map(); 79 | export function getSerializedTestSet(i: number = 1): SerializedSet { 80 | const set = serializedSets.get(i); 81 | if (set) { 82 | return set; 83 | } 84 | const deserialized = getTestSet(i); 85 | const serialized = { 86 | msg: deserialized.msg, 87 | sk: deserialized.sk.toBytes(), 88 | pk: deserialized.pk.toBytes(), 89 | sig: deserialized.sig.toBytes(), 90 | }; 91 | serializedSets.set(i, serialized); 92 | return serialized; 93 | } 94 | -------------------------------------------------------------------------------- /test/utils/types.ts: -------------------------------------------------------------------------------- 1 | import * as bindings from "../../index.js"; 2 | 3 | export type BufferLike = string | Uint8Array | Buffer | bindings.PublicKey | bindings.Signature; 4 | 5 | export interface TestSet { 6 | msg: Uint8Array; 7 | sk: bindings.SecretKey; 8 | pk: bindings.PublicKey; 9 | sig: bindings.Signature; 10 | } 11 | 12 | export interface SameMessageTestSets { 13 | msg: Uint8Array; 14 | sets: { 15 | sk: bindings.SecretKey; 16 | pk: bindings.PublicKey; 17 | sig: bindings.Signature; 18 | }[]; 19 | } 20 | 21 | export type SerializedSet = Record; 22 | 23 | export type SignatureSetArray = bindings.SignatureSet[]; 24 | 25 | /** 26 | * Enforce tests for all instance methods 27 | */ 28 | export type InstanceTestCases = { 29 | [P in keyof Omit]: { 30 | id?: string; 31 | 
instance?: InstanceType; 32 | args: Parameters; 33 | res?: ReturnType; 34 | }[]; 35 | }; 36 | 37 | export type CodeError = { 38 | code: string; 39 | message: string; 40 | }; 41 | -------------------------------------------------------------------------------- /tsconfig.fuzz.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./tsconfig", 3 | "include": [ 4 | "index.js", 5 | "test/fuzz" 6 | ], 7 | "compilerOptions": { 8 | "noEmit": false, 9 | "outDir": "./fuzz-tests", 10 | "module": "CommonJS", 11 | "allowJs": true 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ESNext", 4 | "module": "CommonJS", 5 | "esModuleInterop": true, 6 | 7 | "strict": true, 8 | "alwaysStrict": true, 9 | "strictNullChecks": true, 10 | "strictFunctionTypes": true, 11 | "strictBindCallApply": true, 12 | "strictPropertyInitialization": true, 13 | "noImplicitAny": true, 14 | "noImplicitThis": true, 15 | "noImplicitReturns": true, 16 | "noImplicitOverride": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true, 20 | 21 | "pretty": true, 22 | "sourceMap": true, 23 | "declaration": true, 24 | "declarationMap": true, 25 | }, 26 | } --------------------------------------------------------------------------------