├── .github └── workflows │ ├── check.yml │ └── docs.yml ├── .gitignore ├── LICENSE ├── README.md ├── build.zig ├── build.zig.zon └── src ├── base58 ├── alphabet.zig ├── base58.zig ├── decode.zig ├── encode.zig └── tests.zig ├── bech32 ├── bech32.zig └── hrp.zig ├── bips ├── bip32 │ ├── bip32.zig │ └── key.zig ├── bip39 │ ├── bip39.zig │ ├── language.zig │ └── pbkdf2.zig └── lib.zig ├── hashes ├── hash160.zig ├── lib.zig ├── ripemd160.zig └── siphash24.zig ├── root.zig ├── types ├── CompacSizeUint.zig └── lib.zig └── wif ├── lib.zig └── wif.zig /.github/workflows/check.yml: -------------------------------------------------------------------------------- 1 | name: Zig Test 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | env: 10 | ZIG_VERSION: 0.14.0-dev.1550+4fba7336a 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v3 18 | 19 | - name: Cache Zig 20 | id: cache 21 | uses: actions/cache@v3 22 | with: 23 | path: ~/zig 24 | key: ${{ runner.os }}-zig-${{ env.ZIG_VERSION }} 25 | 26 | - name: Install Zig 27 | if: steps.cache.outputs.cache-hit != 'true' 28 | run: | 29 | wget https://ziglang.org/builds/zig-linux-x86_64-${{ env.ZIG_VERSION }}.tar.xz 30 | tar -xf zig-linux-x86_64-${{ env.ZIG_VERSION }}.tar.xz 31 | mv zig-linux-x86_64-${{ env.ZIG_VERSION }} ~/zig 32 | 33 | - name: Add Zig to PATH 34 | run: echo "${HOME}/zig" >> $GITHUB_PATH 35 | 36 | - name: Cache Zig build artifacts 37 | uses: actions/cache@v3 38 | with: 39 | path: | 40 | zig-cache 41 | ~/.cache/zig 42 | key: ${{ runner.os }}-zig-build-${{ hashFiles('**/*.zig') }} 43 | restore-keys: | 44 | ${{ runner.os }}-zig-build- 45 | 46 | - name: Unit testing 47 | run: zig build test --summary all 48 | -------------------------------------------------------------------------------- /.github/workflows/docs.yml: -------------------------------------------------------------------------------- 1 | name: Generate and Deploy Docs 2 | 3 | on: 4 | 
push: 5 | branches: [main] 6 | 7 | permissions: write-all 8 | 9 | env: 10 | ZIG_VERSION: 0.14.0-dev.1550+4fba7336a 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v3 18 | 19 | - name: Cache Zig 20 | id: cache 21 | uses: actions/cache@v3 22 | with: 23 | path: ~/zig 24 | key: ${{ runner.os }}-zig-${{ env.ZIG_VERSION }} 25 | 26 | - name: Install Zig 27 | if: steps.cache.outputs.cache-hit != 'true' 28 | run: | 29 | wget https://ziglang.org/builds/zig-linux-x86_64-${{ env.ZIG_VERSION }}.tar.xz 30 | tar -xf zig-linux-x86_64-${{ env.ZIG_VERSION }}.tar.xz 31 | mv zig-linux-x86_64-${{ env.ZIG_VERSION }} ~/zig 32 | 33 | - name: Add Zig to PATH 34 | run: echo "${HOME}/zig" >> $GITHUB_PATH 35 | 36 | - name: Cache Zig build artifacts 37 | uses: actions/cache@v3 38 | with: 39 | path: | 40 | zig-cache 41 | ~/.cache/zig 42 | key: ${{ runner.os }}-zig-build-${{ hashFiles('**/*.zig') }} 43 | restore-keys: | 44 | ${{ runner.os }}-zig-build- 45 | 46 | - name: Build Docs 47 | run: zig build docs 48 | 49 | - name: Deploy 50 | if: github.ref == 'refs/heads/main' 51 | uses: peaceiris/actions-gh-pages@v3 52 | with: 53 | github_token: ${{ secrets.GITHUB_TOKEN }} 54 | publish_dir: zig-out/docs 55 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # OS-specific files 2 | **.DS_Store 3 | 4 | # Zig-specific build artifacts 5 | zig-cache/ 6 | .zig-cache/ 7 | zig-out/ 8 | /release/ 9 | /debug/ 10 | /build/ 11 | /build-*/ 12 | /docgen_tmp/ 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Zig Bitcoin Community 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software 
and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # bitcoin-primitives 2 | 3 | Libraries and primitives for Bitcoin, written in Zig. 4 | 5 | # Zig Official Package Manager 6 | 7 | To install `bitcoin-primitives`, you need to run the following command in your root folder with `build.zig`: 8 | 9 | ```bash 10 | zig fetch --save git+https://github.com/zig-bitcoin/bitcoin-primitives#f3af13008b088796697fc656e26d8c2ddf73dc18 11 | ``` 12 | 13 | where `f3af13008b088796697fc656e26d8c2ddf73dc18` is the commit hash. 
14 | 15 | Then, in your `build.zig`, you need to add our module: 16 | 17 | ```zig 18 | const bitcoin_primitives = b.dependency("bitcoin-primitives", .{ 19 | .target = target, 20 | .optimize = optimize, 21 | }); 22 | 23 | exe.root_module.addImport("bitcoin-primitives", bitcoin_primitives.module("bitcoin-primitives")); 24 | ``` 25 | -------------------------------------------------------------------------------- /build.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | 3 | // Although this function looks imperative, note that its job is to 4 | // declaratively construct a build graph that will be executed by an external 5 | // runner. 6 | pub fn build(b: *std.Build) void { 7 | // Standard target options allows the person running `zig build` to choose 8 | // what target to build for. Here we do not override the defaults, which 9 | // means any target is allowed, and the default is native. Other options 10 | // for restricting supported target set are available. 11 | const target = b.standardTargetOptions(.{}); 12 | 13 | // Standard optimization options allow the person running `zig build` to select 14 | // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not 15 | // set a preferred release mode, allowing the user to decide how to optimize. 16 | const optimize = b.standardOptimizeOption(.{}); 17 | 18 | const secp256k1 = b.dependency("secp256k1", .{ 19 | .target = target, 20 | .optimize = optimize, 21 | }); 22 | 23 | const lib = b.addStaticLibrary(.{ 24 | .name = "bitcoin-primitives", 25 | // In this case the main source file is merely a path, however, in more 26 | // complicated build scripts, this could be a generated file. 
27 | .root_source_file = b.path("src/root.zig"), 28 | .target = target, 29 | .optimize = optimize, 30 | }); 31 | lib.root_module.linkLibrary(secp256k1.artifact("libsecp")); 32 | lib.root_module.addImport("secp256k1", secp256k1.module("secp256k1")); 33 | 34 | // This declares intent for the library to be installed into the standard 35 | // location when the user invokes the "install" step (the default step when 36 | // running `zig build`). 37 | b.installArtifact(lib); 38 | 39 | // expose library as module 40 | const module = b.addModule("bitcoin-primitives", .{ 41 | .root_source_file = b.path("src/root.zig"), 42 | .target = target, 43 | .optimize = optimize, 44 | }); 45 | module.linkLibrary(secp256k1.artifact("libsecp")); 46 | module.addImport("secp256k1", secp256k1.module("secp256k1")); 47 | 48 | // Creates a step for unit testing. This only builds the test executable 49 | // but does not run it. 50 | const lib_unit_tests = b.addTest(.{ 51 | .root_source_file = b.path("src/root.zig"), 52 | .target = target, 53 | }); 54 | lib_unit_tests.root_module.addImport("secp256k1", secp256k1.module("secp256k1")); 55 | lib_unit_tests.root_module.linkLibrary(secp256k1.artifact("libsecp")); 56 | 57 | const run_lib_unit_tests = b.addRunArtifact(lib_unit_tests); 58 | 59 | // Similar to creating the run step earlier, this exposes a `test` step to 60 | // the `zig build --help` menu, providing a way for the user to request 61 | // running the unit tests. 
62 | const test_step = b.step("test", "Run unit tests"); 63 | test_step.dependOn(&run_lib_unit_tests.step); 64 | 65 | // Add documentation generation step 66 | const install_docs = b.addInstallDirectory(.{ 67 | .source_dir = lib.getEmittedDocs(), 68 | .install_dir = .prefix, 69 | .install_subdir = "docs", 70 | }); 71 | 72 | const docs_step = b.step("docs", "Generate documentation"); 73 | docs_step.dependOn(&install_docs.step); 74 | 75 | const check_step = b.step("check", "Check step for LSP"); 76 | check_step.dependOn(&run_lib_unit_tests.step); 77 | } 78 | -------------------------------------------------------------------------------- /build.zig.zon: -------------------------------------------------------------------------------- 1 | .{ 2 | // This is the default name used by packages depending on this one. For 3 | // example, when a user runs `zig fetch --save `, this field is used 4 | // as the key in the `dependencies` table. Although the user can choose a 5 | // different name, most users will stick with this provided value. 6 | // 7 | // It is redundant to include "zig" in this name because it is already 8 | // within the Zig package namespace. 9 | .name = "bitcoin-primitives", 10 | 11 | // This is a [Semantic Version](https://semver.org/). 12 | // In a future version of Zig it will be used for package deduplication. 13 | .version = "0.0.0", 14 | 15 | // This field is optional. 16 | // This is currently advisory only; Zig does not yet do anything 17 | // with this value. 18 | //.minimum_zig_version = "0.11.0", 19 | 20 | // This field is optional. 21 | // Each dependency must either provide a `url` and `hash`, or a `path`. 22 | // `zig build --fetch` can be used to fetch all dependencies of a package, recursively. 23 | // Once all dependencies are fetched, `zig build` no longer requires 24 | // internet connectivity. 
25 | .dependencies = .{ 26 | .secp256k1 = .{ 27 | .url = "git+https://github.com/zig-bitcoin/libsecp256k1-zig#d0a17874800d844d4e32a4c8d46646df6af62451", 28 | .hash = "12202cb4edf0cf0a77aaabcb6bc619024a004dcc6decd80e6f4563188280833b749a", 29 | }, 30 | }, 31 | .paths = .{ 32 | "build.zig", 33 | "build.zig.zon", 34 | "src", 35 | // For example... 36 | //"LICENSE", 37 | //"README.md", 38 | }, 39 | } 40 | -------------------------------------------------------------------------------- /src/base58/alphabet.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const expectEqualSlices = std.testing.expectEqualSlices; 3 | const expectError = std.testing.expectError; 4 | 5 | pub const BITCOIN_ALPHABET: [58]u8 = [58]u8{ 6 | '1', 7 | '2', 8 | '3', 9 | '4', 10 | '5', 11 | '6', 12 | '7', 13 | '8', 14 | '9', 15 | 'A', 16 | 'B', 17 | 'C', 18 | 'D', 19 | 'E', 20 | 'F', 21 | 'G', 22 | 'H', 23 | 'J', 24 | 'K', 25 | 'L', 26 | 'M', 27 | 'N', 28 | 'P', 29 | 'Q', 30 | 'R', 31 | 'S', 32 | 'T', 33 | 'U', 34 | 'V', 35 | 'W', 36 | 'X', 37 | 'Y', 38 | 'Z', 39 | 'a', 40 | 'b', 41 | 'c', 42 | 'd', 43 | 'e', 44 | 'f', 45 | 'g', 46 | 'h', 47 | 'i', 48 | 'j', 49 | 'k', 50 | 'm', 51 | 'n', 52 | 'o', 53 | 'p', 54 | 'q', 55 | 'r', 56 | 's', 57 | 't', 58 | 'u', 59 | 'v', 60 | 'w', 61 | 'x', 62 | 'y', 63 | 'z', 64 | }; 65 | 66 | pub const Alphabet = struct { 67 | encode: [58]u8, 68 | decode: [128]u8, 69 | 70 | const Options = struct { alphabet: [58]u8 = BITCOIN_ALPHABET }; 71 | 72 | const Self = @This(); 73 | 74 | pub const DEFAULT = Self.init(.{}) catch unreachable; 75 | 76 | /// Initialize an Alpabet set with options 77 | pub fn init(options: Options) !Self { 78 | var encode = [_]u8{0x00} ** 58; 79 | var decode = [_]u8{0xFF} ** 128; 80 | 81 | for (options.alphabet, 0..) 
|b, i| { 82 | if (b >= 128) { 83 | return error.NonAsciiChar; 84 | } 85 | if (decode[b] != 0xFF) { 86 | return error.DuplicateCharacter; 87 | } 88 | 89 | encode[i] = b; 90 | decode[b] = @intCast(i); 91 | } 92 | 93 | return .{ 94 | .encode = encode, 95 | .decode = decode, 96 | }; 97 | } 98 | }; 99 | 100 | test "Alphabet: verify Bitcoin alphabet" { 101 | try expectEqualSlices( 102 | u8, 103 | &"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz".*, 104 | &(try Alphabet.init(.{})).encode, 105 | ); 106 | } 107 | 108 | test "Alphabet: verify alphabet with non ascii char returns error" { 109 | try expectError( 110 | error.NonAsciiChar, 111 | Alphabet.init(.{ .alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxé".* }), 112 | ); 113 | } 114 | 115 | test "Alphabet: verify alphabet with duplicate char returns error" { 116 | try expectError( 117 | error.DuplicateCharacter, 118 | Alphabet.init(.{ .alphabet = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyy".* }), 119 | ); 120 | } 121 | -------------------------------------------------------------------------------- /src/base58/base58.zig: -------------------------------------------------------------------------------- 1 | pub const Encoder = @import("encode.zig").Encoder; 2 | pub const Decoder = @import("decode.zig").Decoder; 3 | pub const Alphabet = @import("alphabet.zig").Alphabet; 4 | 5 | test { 6 | _ = @import("tests.zig"); 7 | } 8 | -------------------------------------------------------------------------------- /src/base58/decode.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const expectEqualSlices = std.testing.expectEqualSlices; 3 | const Alphabet = @import("./alphabet.zig").Alphabet; 4 | 5 | /// `Base58Encoder` is a structure for encoding byte slices into Base58 format. 6 | pub const Decoder = struct { 7 | const Self = @This(); 8 | 9 | /// Contains the Base58 alphabet used for encoding. 
10 | /// 11 | /// This should be initialized with a valid Base58 character set. 12 | alpha: Alphabet = Alphabet.init(.{}) catch unreachable, 13 | 14 | /// Pass a `allocator` & `encoded` bytes buffer. `decodeAlloc` will allocate a buffer 15 | /// to write into. It may also realloc as needed. Returned value is proper size. 16 | pub fn decodeAlloc(self: *const Self, allocator: std.mem.Allocator, encoded: []const u8) ![]u8 { 17 | var dest = try allocator.alloc(u8, encoded.len); 18 | 19 | const size = try self.decode(encoded, dest); 20 | if (dest.len != size) { 21 | dest = try allocator.realloc(dest, size); 22 | } 23 | 24 | return dest; 25 | } 26 | 27 | /// Pass a `encoded` and a `dest` to write decoded value into. `decode` returns a 28 | /// `usize` indicating how many bytes were written. Sizing/resizing, `dest` buffer is up to the caller. 29 | /// 30 | /// For further information on the Base58 decoding algorithm, see: https://datatracker.ietf.org/doc/html/draft-msporny-base58-03 31 | pub fn decode(self: *const Self, encoded: []const u8, dest: []u8) !usize { 32 | var index: usize = 0; 33 | const zero = self.alpha.encode[0]; 34 | 35 | for (encoded) |c| { 36 | if (c > 127) { 37 | return error.NonAsciiCharacter; 38 | } 39 | 40 | var val: usize = self.alpha.decode[c]; 41 | if (val == 0xFF) { 42 | return error.InvalidCharacter; 43 | } 44 | 45 | for (dest[0..index]) |*byte| { 46 | val += @as(usize, @intCast(byte.*)) * 58; 47 | byte.* = @intCast(val & 0xFF); 48 | val >>= 8; 49 | } 50 | 51 | while (val > 0) { 52 | if (index >= dest.len) { 53 | return error.BufferTooSmall; 54 | } 55 | 56 | dest[index] = @as(u8, @intCast(val)) & 0xFF; 57 | index += 1; 58 | val >>= 8; 59 | } 60 | } 61 | 62 | for (encoded) |c| { 63 | if (c != zero) break; 64 | 65 | dest[index] = 0; 66 | index += 1; 67 | } 68 | 69 | std.mem.reverse(u8, dest[0..index]); 70 | 71 | return index; 72 | } 73 | 74 | /// Decode a base58-encoded string (str) 75 | /// that includes a checksum into a byte 76 | /// is 
successful return decoded otherwise error 77 | pub fn decodeCheckAlloc(decoder: *const Decoder, allocator: std.mem.Allocator, data: []const u8) ![]u8 { 78 | const decoded = try decoder.decodeAlloc(allocator, data); 79 | errdefer allocator.free(decoded); 80 | 81 | if (decoded.len < 4) return error.TooShortError; 82 | 83 | const check_start = decoded.len - 4; 84 | 85 | var hasher = std.crypto.hash.sha2.Sha256.init(.{}); 86 | 87 | hasher.update(decoded[0..check_start]); 88 | const fr = hasher.finalResult(); 89 | 90 | hasher = std.crypto.hash.sha2.Sha256.init(.{}); 91 | hasher.update(&fr); 92 | 93 | const expected = std.mem.readInt(u32, hasher.finalResult()[0..4], .little); 94 | const actual = std.mem.readInt(u32, decoded[check_start..][0..4], .little); 95 | 96 | if (expected != actual) return error.IncorrectChecksum; 97 | 98 | return try allocator.realloc(decoded, check_start); 99 | } 100 | }; 101 | -------------------------------------------------------------------------------- /src/base58/encode.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const expectEqualSlices = std.testing.expectEqualSlices; 3 | const Alphabet = @import("alphabet.zig").Alphabet; 4 | 5 | /// `Base58Encoder` is a structure for encoding byte slices into Base58 format. 6 | pub const Encoder = struct { 7 | const Self = @This(); 8 | 9 | /// Contains the Base58 alphabet used for encoding. 10 | /// 11 | /// This should be initialized with a valid Base58 character set. 12 | alpha: Alphabet = Alphabet.init(.{}) catch unreachable, 13 | 14 | /// Encodes a byte slice into Base58 format. 15 | /// 16 | /// # Parameters 17 | /// 18 | /// * `self`: A pointer to the current instance of `Base58Encoder`. 19 | /// * `source`: A slice of bytes to encode. 20 | /// * `dest`: A slice of bytes where the Base58 encoded result will be stored. 
21 | /// 22 | /// # Description 23 | /// 24 | /// This function encodes the provided byte slice (`source`) into Base58 format and 25 | /// stores the result in the `dest` slice. 26 | /// 27 | /// For further information on the Base58 encoding algorithm, see: https://datatracker.ietf.org/doc/html/draft-msporny-base58-03 28 | pub fn encode(self: *const Self, source: []const u8, dest: []u8) usize { 29 | // Index in the destination slice where the next Base58 character will be written. 30 | var index: usize = 0; 31 | // Count of leading zeros in the input data. 32 | var zero_counter: usize = 0; 33 | 34 | // Count leading zeros in the input source. 35 | // 36 | // This loop increments `zero_counter` as long as leading bytes are zero. 37 | while (zero_counter < source.len and source[zero_counter] == 0) { 38 | zero_counter += 1; 39 | } 40 | 41 | // Process the remaining bytes after leading zeros have been handled. 42 | for (source[zero_counter..]) |val| { 43 | // Initialize carry with the current byte value. 44 | var carry: usize = @intCast(val); 45 | 46 | // Encode carry into Base58 digits, modifying the `dest` slice. 47 | // This loop processes the carry and updates the destination slice accordingly. 48 | for (dest[0..index]) |*byte| { 49 | // Add carry to current byte value (multiplied by 256). 50 | carry += @as(usize, byte.*) << 8; 51 | // Store the Base58 digit in the destination. 52 | byte.* = @truncate(carry % @as(usize, 58)); 53 | // Reduce carry for the next iteration. 54 | carry /= 58; 55 | } 56 | 57 | // Process any remaining carry and add to the `dest` slice. 58 | while (carry > 0) { 59 | // Store the Base58 digit. 60 | dest[index] = @truncate(carry % 58); 61 | // Reduce carry for the next iteration. 62 | carry /= 58; 63 | // Move to the next position in the destination slice. 64 | index += 1; 65 | } 66 | } 67 | 68 | // Calculate the index where the encoded result ends. 
69 | const dest_index = index + zero_counter; 70 | 71 | // Fill in the leading '1's for the leading zeros in the encoded result. 72 | // This loop places the correct number of '1' characters at the beginning of `dest`. 73 | for (dest[index..dest_index]) |*d| { 74 | d.* = self.alpha.encode[0]; 75 | } 76 | 77 | // Map the Base58 digit values to their corresponding characters using the `alpha` alphabet. 78 | for (dest[0..index]) |*val| { 79 | // Convert digit values to Base58 characters. 80 | val.* = self.alpha.encode[val.*]; 81 | } 82 | 83 | // Reverse the `dest` slice to produce the final encoded result. 84 | std.mem.reverse(u8, dest[0..dest_index]); 85 | return dest_index; 86 | } 87 | 88 | /// Pass an `allocator` & `source` bytes buffer. `encodeAlloc` will allocate a buffer 89 | /// to write into. It may also realloc as needed. Returned value is base58 encoded string. 90 | pub fn encodeAlloc(self: *const Self, allocator: std.mem.Allocator, source: []const u8) ![]u8 { 91 | var dest = try allocator.alloc(u8, source.len * 2); 92 | 93 | const size = self.encode(source, dest); 94 | if (dest.len != size) { 95 | dest = try allocator.realloc(dest, size); 96 | } 97 | 98 | return dest; 99 | } 100 | 101 | /// Encodes data using the encoder and appends a 4-byte checksum for integrity checking. 102 | /// 103 | /// This function computes the SHA-256 hash of the input data twice, extracts the first 4 bytes 104 | /// as the checksum, appends it to the data, and then encodes the concatenated result using 105 | /// the provided encoder. The checksum ensures data integrity when decoding. 106 | /// 107 | /// # Parameters 108 | /// 109 | /// - `encoder`: A pointer to the encoder that provides the `encode` method for Base58 or other encoding schemes. 110 | /// - `out`: A slice of bytes to store the final encoded output. It should have enough capacity to hold the 111 | /// encoded result of `data` plus the appended checksum. 
112 | /// - `buf`: A temporary buffer slice used to store the data and checksum before encoding. 113 | /// It must have enough space to hold the original data plus 4 bytes of checksum. 114 | /// - `data`: A constant slice of input bytes to be encoded. The SHA-256 checksum is computed based on this data. 115 | /// 116 | /// # Returns 117 | /// 118 | /// The function returns the number of bytes written to `out`, which represents the length of the final encoded data. 119 | /// 120 | /// # Example 121 | /// 122 | /// ```zig 123 | /// var encoder: Encoder = // initialize encoder 124 | /// var out: [64]u8 = undefined; 125 | /// var buf: [64]u8 = undefined; 126 | /// const data: []const u8 = "some data to encode"; 127 | /// 128 | /// const result_len = encoder.encodeCheck(&out, &buf, data); 129 | /// std.debug.print("Encoded result: {s}\n", .{out[0..result_len]}); 130 | /// ``` 131 | /// 132 | /// # Preconditions 133 | /// 134 | /// - The `out` slice must be large enough to store the encoded result. 135 | /// - The `buf` slice must be large enough to store `data.len + 4` bytes. 136 | /// - The `encoder` should be properly initialized and should implement an `encode` method. 137 | /// 138 | /// # Notes 139 | /// 140 | /// The checksum is calculated as follows: 141 | /// 1. Compute the SHA-256 hash of the input data. 142 | /// 2. Compute the SHA-256 hash of the first hash. 143 | /// 3. Take the first 4 bytes of the second hash and append them to the data. 144 | /// 4. Encode the result using the provided `encoder`. 145 | pub fn encodeCheck(encoder: *const Encoder, out: []u8, buf: []u8, data: []const u8) usize { 146 | var checksum: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined; 147 | 148 | std.crypto.hash.sha2.Sha256.hash(data, &checksum, .{}); 149 | std.crypto.hash.sha2.Sha256.hash(&checksum, &checksum, .{}); 150 | 151 | @memcpy(buf[0..data.len], data); 152 | @memcpy(buf[data.len..][0..4], checksum[0..4]); 153 | 154 | return encoder.encode(buf[0 .. 
data.len + 4], out); 155 | } 156 | 157 | pub fn encodeCheckAlloc(encoder: *const Encoder, allocator: std.mem.Allocator, data: []const u8) ![]u8 { 158 | var hasher = std.crypto.hash.sha2.Sha256.init(.{}); 159 | hasher.update(data); 160 | var checksum = hasher.finalResult(); 161 | 162 | hasher = std.crypto.hash.sha2.Sha256.init(.{}); 163 | hasher.update(&checksum); 164 | checksum = hasher.finalResult(); 165 | 166 | var encoding_data = try allocator.alloc(u8, data.len + 4); 167 | defer allocator.free(encoding_data); 168 | 169 | @memcpy(encoding_data[0..data.len], data); 170 | @memcpy(encoding_data[data.len..], checksum[0..4]); 171 | 172 | return try encoder.encodeAlloc(allocator, encoding_data); 173 | } 174 | }; 175 | 176 | test "encode with check" { 177 | const e = Encoder{}; 178 | const encoded = try e.encodeCheckAlloc(std.testing.allocator, "hello world"); 179 | defer std.testing.allocator.free(encoded); 180 | 181 | var buf: [200]u8 = undefined; 182 | 183 | const encoded_no_alloc_size = e.encodeCheck(buf[0..], buf[100..], "hello world"); 184 | 185 | try std.testing.expectEqualStrings(encoded, buf[0..encoded_no_alloc_size]); 186 | } 187 | 188 | test "Base58Encoder: verify encoding" { 189 | const encoder: Encoder = .{}; 190 | 191 | var buf1: [0]u8 = undefined; 192 | _ = encoder.encode(&[_]u8{}, &buf1); 193 | try expectEqualSlices(u8, "", &buf1); 194 | 195 | var buf2: [2]u8 = undefined; 196 | _ = encoder.encode(&[_]u8{0x61}, &buf2); 197 | try expectEqualSlices(u8, "2g", &buf2); 198 | 199 | var buf3: [4]u8 = undefined; 200 | _ = encoder.encode(&[_]u8{ 0x62, 0x62, 0x62 }, &buf3); 201 | try expectEqualSlices(u8, "a3gV", &buf3); 202 | 203 | var buf4: [4]u8 = undefined; 204 | _ = encoder.encode(&[_]u8{ 0x63, 0x63, 0x63 }, &buf4); 205 | try expectEqualSlices(u8, "aPEr", &buf4); 206 | 207 | var buf5: [13]u8 = undefined; 208 | _ = encoder.encode(&[_]u8{ 0xbf, 0x4f, 0x89, 0x00, 0x1e, 0x67, 0x02, 0x74, 0xdd }, &buf5); 209 | try expectEqualSlices(u8, "3SEo3LWLoPntC", &buf5); 210 
| 211 | var buf6: [5]u8 = undefined; 212 | _ = encoder.encode(&[_]u8{ 0x00, 0x00, 0x01, 0x02, 0x03 }, &buf6); 213 | try expectEqualSlices(u8, "11Ldp", &buf6); 214 | 215 | var buf7: [1]u8 = undefined; 216 | _ = encoder.encode(&[_]u8{0x00}, &buf7); 217 | try expectEqualSlices(u8, "1", &buf7); 218 | 219 | var buf8: [174]u8 = undefined; 220 | _ = encoder.encode(&[_]u8{ 221 | 0x03, 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 222 | 0x44, 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 223 | 0x89, 0x45, 0x28, 0x21, 0xE6, 0x38, 0xD0, 0x13, 0x77, 0xBE, 0x54, 0x66, 0xCF, 0x34, 0xE9, 0x0C, 224 | 0x6C, 0xC0, 0xAC, 0x29, 0xB7, 0xC9, 0x7C, 0x50, 0xDD, 0x3F, 0x84, 0xD5, 0xB5, 0xB5, 0x47, 0x09, 225 | 0x17, 0x92, 0x16, 0xD5, 0xD9, 0x89, 0x79, 0xFB, 0x1B, 0xD1, 0x31, 0x0B, 0xA6, 0x98, 0xDF, 0xB5, 226 | 0xAC, 0x2F, 0xFD, 0x72, 0xDB, 0xD0, 0x1A, 0xDF, 0xB7, 0xB8, 0xE1, 0xAF, 0xED, 0x6A, 0x26, 0x7E, 227 | 0x96, 0xBA, 0x7C, 0x90, 0x45, 0xF1, 0x2C, 0x7F, 0x99, 0x24, 0xA1, 0x99, 0x47, 0xB3, 0x91, 0x6C, 228 | 0xF7, 0x08, 0x01, 0xF2, 0xE2, 0x85, 0x8E, 0xFC, 0x16, 0x63, 0x69, 0x20, 0xD8, 0x71, 0x57, 0x4E, 229 | }, &buf8); 230 | try expectEqualSlices( 231 | u8, 232 | "KeThPkHTv5nsa4576Z47NqEtuSfUcKwv7YeueZ8dquGTDeBpimjGEZ1a7k1FCz8m8FEBcoJZjP5Aui6eKfPjdmGooHKtEPRbVotw6mRxNU3WbLtAH41mea9g8AB9Qe1DAFDReBWa67ZEP6ApWGhw9Dfr2vVXkLXEWj6W8HFApw4DKK", 233 | &buf8, 234 | ); 235 | } 236 | -------------------------------------------------------------------------------- /src/base58/tests.zig: -------------------------------------------------------------------------------- 1 | const DIGITS_OF_PI = [_]u8{ 2 | 0x03, 0x24, 0x3F, 0x6A, 0x88, 0x85, 0xA3, 0x08, 0xD3, 0x13, 0x19, 0x8A, 0x2E, 0x03, 0x70, 0x73, 3 | 0x44, 0xA4, 0x09, 0x38, 0x22, 0x29, 0x9F, 0x31, 0xD0, 0x08, 0x2E, 0xFA, 0x98, 0xEC, 0x4E, 0x6C, 4 | 0x89, 0x45, 0x28, 0x21, 0xE6, 0x38, 0xD0, 0x13, 0x77, 0xBE, 0x54, 0x66, 0xCF, 0x34, 0xE9, 0x0C, 5 | 0x6C, 0xC0, 0xAC, 
0x29, 0xB7, 0xC9, 0x7C, 0x50, 0xDD, 0x3F, 0x84, 0xD5, 0xB5, 0xB5, 0x47, 0x09, 6 | 0x17, 0x92, 0x16, 0xD5, 0xD9, 0x89, 0x79, 0xFB, 0x1B, 0xD1, 0x31, 0x0B, 0xA6, 0x98, 0xDF, 0xB5, 7 | 0xAC, 0x2F, 0xFD, 0x72, 0xDB, 0xD0, 0x1A, 0xDF, 0xB7, 0xB8, 0xE1, 0xAF, 0xED, 0x6A, 0x26, 0x7E, 8 | 0x96, 0xBA, 0x7C, 0x90, 0x45, 0xF1, 0x2C, 0x7F, 0x99, 0x24, 0xA1, 0x99, 0x47, 0xB3, 0x91, 0x6C, 9 | 0xF7, 0x08, 0x01, 0xF2, 0xE2, 0x85, 0x8E, 0xFC, 0x16, 0x63, 0x69, 0x20, 0xD8, 0x71, 0x57, 0x4E, 10 | }; 11 | 12 | // Subset of test cases from https://github.com/cryptocoinjs/base-x/blob/master/test/fixtures.json 13 | pub const TEST_CASES: []const struct { []const u8, []const u8 } = &.{ 14 | .{ &.{}, "" }, 15 | .{ &.{0x61}, "2g" }, 16 | .{ &.{ 0x62, 0x62, 0x62 }, "a3gV" }, 17 | .{ &.{ 0x63, 0x63, 0x63 }, "aPEr" }, 18 | .{ &.{ 0x57, 0x2e, 0x47, 0x94 }, "3EFU7m" }, 19 | .{ &.{ 0x10, 0xc8, 0x51, 0x1e }, "Rt5zm" }, 20 | .{ &.{ 0x51, 0x6b, 0x6f, 0xcd, 0x0f }, "ABnLTmg" }, 21 | .{ 22 | &.{ 0xbf, 0x4f, 0x89, 0x00, 0x1e, 0x67, 0x02, 0x74, 0xdd }, 23 | "3SEo3LWLoPntC", 24 | }, 25 | .{ 26 | &.{ 0xec, 0xac, 0x89, 0xca, 0xd9, 0x39, 0x23, 0xc0, 0x23, 0x21 }, 27 | "EJDM8drfXA6uyA", 28 | }, 29 | .{ 30 | &.{ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, 31 | "1111111111", 32 | }, 33 | .{ 34 | &.{ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff }, 35 | "FPBt6CHo3fovdL", 36 | }, 37 | .{ 38 | &.{ 39 | 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 40 | }, 41 | "NKioeUVktgzXLJ1B3t", 42 | }, 43 | .{ 44 | &.{ 45 | 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 46 | 0xff, 0xff, 47 | }, 48 | "YcVfxkQb6JRzqk5kF2tNLv", 49 | }, 50 | .{ 51 | &.{ 52 | 0x73, 0x69, 0x6d, 0x70, 0x6c, 0x79, 0x20, 0x61, 0x20, 0x6c, 0x6f, 0x6e, 0x67, 0x20, 53 | 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 54 | }, 55 | "2cFupjhnEsSn59qHXstmK2ffpLv2", 56 | }, 57 | .{ 58 | &.{ 59 | 0x00, 0xeb, 0x15, 0x23, 0x1d, 0xfc, 0xeb, 0x60, 0x92, 0x58, 0x86, 0xb6, 0x7d, 
0x06, 60 | 0x52, 0x99, 0x92, 0x59, 0x15, 0xae, 0xb1, 0x72, 0xc0, 0x66, 0x47, 61 | }, 62 | "1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L", 63 | }, 64 | .{ 65 | &.{ 66 | 0x00, 0x3c, 0x17, 0x6e, 0x65, 0x9b, 0xea, 0x0f, 0x29, 0xa3, 0xe9, 0xbf, 0x78, 0x80, 67 | 0xc1, 0x12, 0xb1, 0xb3, 0x1b, 0x4d, 0xc8, 0x26, 0x26, 0x81, 0x87, 68 | }, 69 | "16UjcYNBG9GTK4uq2f7yYEbuifqCzoLMGS", 70 | }, 71 | .{ 72 | &.{ 73 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 74 | 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 75 | 0x00, 0x00, 0x00, 0x00, 76 | }, 77 | "11111111111111111111111111111111", 78 | }, 79 | .{ 80 | &.{ 81 | 0x80, 0x11, 0x84, 0xcd, 0x2c, 0xdd, 0x64, 0x0c, 0xa4, 0x2c, 0xfc, 0x3a, 0x09, 0x1c, 82 | 0x51, 0xd5, 0x49, 0xb2, 0xf0, 0x16, 0xd4, 0x54, 0xb2, 0x77, 0x40, 0x19, 0xc2, 0xb2, 83 | 0xd2, 0xe0, 0x85, 0x29, 0xfd, 0x20, 0x6e, 0xc9, 0x7e, 84 | }, 85 | "5Hx15HFGyep2CfPxsJKe2fXJsCVn5DEiyoeGGF6JZjGbTRnqfiD", 86 | }, 87 | .{ &DIGITS_OF_PI, "KeThPkHTv5nsa4576Z47NqEtuSfUcKwv7YeueZ8dquGTDeBpimjGEZ1a7k1FCz8m8FEBcoJZjP5Aui6eKfPjdmGooHKtEPRbVotw6mRxNU3WbLtAH41mea9g8AB9Qe1DAFDReBWa67ZEP6ApWGhw9Dfr2vVXkLXEWj6W8HFApw4DKK" }, 88 | }; 89 | 90 | pub const CHECK_TEST_CASE: []const struct { []const u8, []const u8 } = &.{ 91 | .{ &.{}, "3QJmnh" }, 92 | .{ &.{0x31}, "6bdbJ1U" }, 93 | .{ &.{0x39}, "7VsrQCP" }, 94 | .{ &.{ 0x2d, 0x31 }, "PWEu9GGN" }, 95 | .{ &.{ 0x31, 0x31 }, "RVnPfpC2" }, 96 | .{ 97 | &.{ 0x31, 0x32, 0x33, 0x34, 0x35, 0x39, 0x38, 0x37, 0x36, 0x30 }, 98 | "K5zqBMZZTzUbAZQgrt4", 99 | }, 100 | .{ 101 | &.{ 102 | 0x00, 0x9b, 0x41, 0x54, 0xbb, 0xf2, 0x03, 0xe4, 0x13, 0x0c, 0x4b, 0x86, 0x25, 0x93, 103 | 0x18, 0xa4, 0x98, 0x75, 0xdd, 0x04, 0x56, 104 | }, 105 | "1F9v11cupBVMpz3CrVfCppv9Rw2xEtU1c6", 106 | }, 107 | .{ 108 | &.{ 109 | 0x53, 0x25, 0xb1, 0xe2, 0x3b, 0x5b, 0x24, 0xf3, 0x47, 0xed, 0x19, 0xde, 0x61, 0x23, 110 | 0x8a, 0xf1, 0x4b, 0xc4, 0x71, 0xca, 0xa1, 0xa7, 0x7a, 0xa5, 0x5d, 0xb2, 0xa7, 0xaf, 111 | 0x7d, 0xaa, 
0x93, 0xaa, 112 | }, 113 | "dctKSXBbv2My3TGGUgTFjkxu1A9JM3Sscd5FydY4dkxnfwA7q", 114 | }, 115 | .{ &DIGITS_OF_PI, "371hJQw3jVfFQtQfQ1NnUFV4Z3i166yKJe3yyPAvJziEfUenJBD8SM6xGFop9cfCDCn4j9HcT9fS73jgGp8XZzYKmSxjxLcxfgETzg4BcDHLgHSynSFDGR5wJ58NkZSv2mVxvqVwG8hqxNFXrWms66ppx45yAjc7dYuBXqCPZ2GatCMmrhuX" }, 116 | }; 117 | 118 | const Encoder = @import("encode.zig").Encoder; 119 | const Decoder = @import("decode.zig").Decoder; 120 | const std = @import("std"); 121 | 122 | test "encode" { 123 | for (TEST_CASES) |test_case| { 124 | var encoder = Encoder{}; 125 | 126 | const encoded = try encoder.encodeAlloc(std.testing.allocator, test_case[0]); 127 | defer std.testing.allocator.free(encoded); 128 | 129 | try std.testing.expectEqualSlices(u8, test_case[1], encoded); 130 | } 131 | } 132 | 133 | test "encode with check" { 134 | var encoder = Encoder{}; 135 | 136 | for (CHECK_TEST_CASE) |test_case| { 137 | const data = try encoder.encodeCheckAlloc(std.testing.allocator, test_case[0]); 138 | defer std.testing.allocator.free(data); 139 | 140 | try std.testing.expectEqualSlices(u8, test_case[1], data); 141 | } 142 | } 143 | 144 | test "decode" { 145 | for (TEST_CASES) |test_case| { 146 | var decoder = Decoder{}; 147 | 148 | const decoded = try decoder.decodeAlloc(std.testing.allocator, test_case[1]); 149 | defer std.testing.allocator.free(decoded); 150 | 151 | try std.testing.expectEqualSlices(u8, test_case[0], decoded); 152 | } 153 | } 154 | 155 | test "decode with check" { 156 | var decoder = Decoder{}; 157 | 158 | for (CHECK_TEST_CASE) |test_case| { 159 | const data = try decoder.decodeCheckAlloc(std.testing.allocator, test_case[1]); 160 | defer std.testing.allocator.free(data); 161 | 162 | try std.testing.expectEqualSlices(u8, test_case[0], data); 163 | } 164 | } 165 | -------------------------------------------------------------------------------- /src/bech32/bech32.zig: -------------------------------------------------------------------------------- 1 | const std = 
@import("std");

/// Letter-case classification of a human-readable part.
const Case = enum {
    upper,
    lower,
    none,
};

/// Check if the HRP is valid. Returns the case of the HRP, if any.
///
/// # Errors
/// * **MixedCase**: If the HRP contains both uppercase and lowercase characters.
/// * **InvalidChar**: If the HRP contains any non-ASCII characters (outside 33..=126).
/// * **InvalidLength**: If the HRP is outside 1..83 characters long.
fn checkHrp(hrp: []const u8) Error!Case {
    if (hrp.len == 0 or hrp.len > 83) return Error.InvalidLength;

    var seen_lower = false;
    var seen_upper = false;

    for (hrp) |c| {
        // Only the printable ASCII subset [33, 126] is permitted.
        if (c < 33 or c > 126) return Error.InvalidChar;

        switch (c) {
            'a'...'z' => seen_lower = true,
            'A'...'Z' => seen_upper = true,
            else => {},
        }

        // Fail as soon as both cases have been observed.
        if (seen_lower and seen_upper) return Error.MixedCase;
    }

    if (seen_upper) return .upper;
    if (seen_lower) return .lower;

    return .none;
}

/// Run the checksum over the expanded HRP followed by the data part and map
/// the polymod remainder onto a known variant (null when it matches neither).
fn verifyChecksum(allocator: std.mem.Allocator, hrp: []const u8, data: []const u5) Error!?Variant {
    var expanded = try hrpExpand(allocator, hrp);
    defer expanded.deinit();

    try expanded.appendSlice(data);

    return Variant.fromRemainder(polymod(expanded.items));
}

/// Expand the HRP for checksum computation: the high bits of every byte,
/// a zero separator, then the low five bits of every byte.
fn hrpExpand(allocator: std.mem.Allocator, hrp: []const u8) Error!std.ArrayList(u5) {
    var out = std.ArrayList(u5).init(allocator);
    errdefer out.deinit();

    for (hrp) |c| try out.append(@truncate(c >> 5));

    try out.append(0);

    for (hrp) |c| try out.append(@truncate(c & 0x1f));

    return out;
}

/// Generator coefficients
const GEN: [5]u32 = .{
    0x3b6a_57b2,
    0x2650_8e6d,
    0x1ea1_19fa,
    0x3d42_33dd,
    0x2a14_62b3,
};

/// BCH checksum over 5-bit groups, as specified in BIP-0173.
fn polymod(values: []const u5) u32 {
    var chk: u32 = 1;

    for (values) |v| {
        // Top 5 bits decide which generator coefficients get folded in.
        const top: u8 = @truncate(chk >> 25);
        chk = (chk & 0x01ff_ffff) << 5 ^ @as(u32, v);

        for (GEN, 0..) |coeff, i| {
            if (std.math.shr(u8, top, i) & 1 == 1) {
                chk ^= coeff;
            }
        }
    }

    return chk;
}

/// Human-readable part and data part separator
const SEP: u8 = '1';

/// Encoding character set. Maps data value -> char
const CHARSET: [32]u8 = .{
    'q', 'p', 'z', 'r', 'y', '9', 'x', '8', // +0
    'g', 'f', '2', 't', 'v', 'd', 'w', '0', // +8
    's', '3', 'j', 'n', '5', '4', 'k', 'h', // +16
    'c', 'e', '6', 'm', 'u', 'a', '7', 'l', // +24
};

/// Reverse character set. Maps ASCII byte -> CHARSET index on [0,31]
const CHARSET_REV: [128]i8 = .{
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1,
    15, -1, 10, 17, 21, 20, 26, 30, 7,  5,  -1, -1, -1, -1, -1, -1, -1, 29, -1, 24, 13, 25, 9,  8,
    23, -1, 18, 22, 31, 27, 19, -1, 1,  0,  3,  16, 11, 28, 12, 14, 6,  4,  2,  -1, -1, -1, -1, -1,
    -1, 29, -1, 24, 13, 25, 9,  8,  23, -1, 18, 22, 31, 27, 19, -1, 1,  0,  3,  16, 11, 28, 12, 14,
    6,  4,  2,  -1, -1, -1, -1, -1,
};

/// Error types for Bech32 encoding / decoding
pub const Error = std.mem.Allocator.Error || error{
    /// String does not contain the separator character
    MissingSeparator,
    /// The checksum does not match the rest of the data
    InvalidChecksum,
    /// The data or human-readable part is too long or too short
    InvalidLength,
    /// Some part of the string contains an invalid character
    InvalidChar,
    /// Some part of the data has an invalid value
    InvalidData,
    /// The bit conversion failed due to a padding issue
    InvalidPadding,
    /// The whole string must be of one case
    MixedCase,
};

/// Checksum constant for the original Bech32 (BIP-0173).
const BECH32_CONST: u32 = 1;
/// Checksum constant for Bech32m (BIP-0350).
const BECH32M_CONST: u32 = 0x2bc8_30a3;
/// Used for encode/decode operations for the two variants of Bech32
pub const Variant = enum {
    /// The original Bech32 described in [BIP-0173](https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki)
    bech32,
    /// The improved Bech32m variant described in [BIP-0350](https://github.com/bitcoin/bips/blob/master/bip-0350.mediawiki)
    bech32m,

    // Produce the variant based on the remainder of the polymod operation
    fn fromRemainder(c: u32) ?Variant {
        return switch (c) {
            BECH32_CONST => .bech32,
            BECH32M_CONST => .bech32m,
            else => null,
        };
    }

    fn constant(self: Variant) u32 {
        return switch (self) {
            .bech32 => BECH32_CONST,
            .bech32m => BECH32M_CONST,
        };
    }
};

/// Decode a bech32 string into the raw HRP and the `u5` data.
fn splitAndDecode(allocator: std.mem.Allocator, s: []const u8) Error!struct { std.ArrayList(u8), std.ArrayList(u5) } {
    // Split at the LAST separator; '1' may legitimately occur inside the HRP.
    const sep_idx = std.mem.lastIndexOfScalar(u8, s, SEP) orelse return Error.MissingSeparator;
    const raw_hrp = s[0..sep_idx];
    const raw_data = s[sep_idx + 1 ..];

    var case = try checkHrp(raw_hrp);

    var hrp_lower = std.ArrayList(u8).init(allocator);
    errdefer hrp_lower.deinit();

    switch (case) {
        .upper => {
            // Normalize an all-uppercase HRP to lowercase in-place.
            try hrp_lower.ensureTotalCapacity(raw_hrp.len);
            hrp_lower.items.len = raw_hrp.len;
            _ = std.ascii.lowerString(hrp_lower.items, raw_hrp);
        },
        // already lowercase
        .lower, .none => {
            try hrp_lower.appendSlice(raw_hrp);
        },
    }

    var data = std.ArrayList(u5).init(allocator);
    errdefer data.deinit();

    var it = std.unicode.Utf8View.initUnchecked(raw_data).iterator();
    // Check data payload
    while (it.nextCodepoint()) |c| {
        // Only check if c is in the ASCII range, all invalid ASCII
        // characters have the value -1 in CHARSET_REV (which covers
        // the whole ASCII range) and will be filtered out later.
        if (c >= 128) return error.InvalidChar;

        // Track the case of the data part; it must agree with the HRP's case.
        switch (c) {
            'a'...'z' => switch (case) {
                .upper => return Error.MixedCase,
                .none => case = .lower,
                .lower => {},
            },
            'A'...'Z' => switch (case) {
                .lower => return Error.MixedCase,
                .none => case = .upper,
                .upper => {},
            },
            else => {},
        }

        // c is < 128 here, and CHARSET_REV covers the full ASCII range.
        const value = CHARSET_REV[c];

        if (!(0 <= value and value <= 31)) return Error.InvalidChar;

        try data.append(@intCast(value));
    }

    return .{ hrp_lower, data };
}

const CHECKSUM_LENGTH: usize = 6;

/// Decode a bech32 string into the raw HRP and the data bytes.
///
/// Returns the HRP in lowercase, the data with the checksum removed, and the encoding.
pub fn decode(allocator: std.mem.Allocator, s: []const u8) Error!struct { std.ArrayList(u8), std.ArrayList(u5), Variant } {
    const hrp_lower, var data = try splitAndDecode(allocator, s);
    errdefer data.deinit();
    errdefer hrp_lower.deinit();

    if (data.items.len < CHECKSUM_LENGTH)
        return Error.InvalidLength;

    if (try verifyChecksum(allocator, hrp_lower.items, data.items)) |variant| {
        // Remove checksum from data payload
        data.items.len -= CHECKSUM_LENGTH;

        return .{ hrp_lower, data, variant };
    }

    return Error.InvalidChecksum;
}

/// Encode a bech32 payload to an [WriteAny].
///
/// # Errors
/// * If [checkHrp] returns an error for the given HRP.
/// # Deviations from standard
/// * No length limits are enforced for the data part
pub fn encodeToFmt(
    allocator: std.mem.Allocator,
    fmt: std.io.AnyWriter,
    hrp: []const u8,
    data: []const u5,
    variant: Variant,
) !void {
    var hrp_lower = try std.ArrayList(u8).initCapacity(allocator, hrp.len);
    defer hrp_lower.deinit();

    hrp_lower.appendSliceAssumeCapacity(hrp);

    // Bech32 output is lowercase regardless of the input HRP's case;
    // checkHrp also rejects invalid or mixed-case HRPs here.
    if (try checkHrp(hrp) == .upper) {
        _ = std.ascii.lowerString(hrp_lower.items, hrp);
    }

    var writer = try Bech32Writer.init(hrp_lower.items, variant, fmt);

    try writer.write(data);
    try writer.finalize();
}

/// Allocationless Bech32 writer that accumulates the checksum data internally and writes them out
/// in the end.
pub const Bech32Writer = struct {
    const Self = @This();

    /// Sink that receives the encoded characters.
    formatter: std.io.AnyWriter,
    /// Running BCH checksum state.
    chk: u32,
    /// Which checksum constant is applied on finalization.
    variant: Variant,

    /// Creates a new writer that can write a bech32 string without allocating itself.
    ///
    /// This is a rather low-level API and doesn't check the HRP or data length for standard
    /// compliance.
    pub fn init(hrp: []const u8, variant: Variant, fmt: std.io.AnyWriter) !Self {
        var self = Self{
            .formatter = fmt,
            .chk = 1,
            .variant = variant,
        };

        // The HRP and separator are emitted immediately; only the data part
        // and the final checksum follow through write()/finalize().
        _ = try self.formatter.write(hrp);
        try self.formatter.writeByte(SEP);

        // Feed the expanded HRP (high bits, zero, low bits) into the checksum.
        for (hrp) |c| {
            self.polymodStep(@truncate(c >> 5));
        }

        self.polymodStep(0);

        for (hrp) |c| {
            self.polymodStep(@truncate(c & 0x1f));
        }

        return self;
    }

    /// Advance the BCH checksum state by one 5-bit group.
    fn polymodStep(self: *Self, v: u5) void {
        const top: u8 = @truncate(self.chk >> 25);

        self.chk = (self.chk & 0x01ff_ffff) << 5 ^ v;

        for (GEN, 0..) |coeff, i| {
            if (std.math.shr(u8, top, i) & 1 == 1) {
                self.chk ^= coeff;
            }
        }
    }

    /// Write out the checksum. Call exactly once, after all data was written.
    pub fn finalize(self: *Self) !void {
        try self.writeChecksum();
    }

    fn writeChecksum(self: *Self) !void {
        // Pad with 6 zeros
        var i: usize = 0;
        while (i < CHECKSUM_LENGTH) : (i += 1) {
            self.polymodStep(0);
        }

        const plm: u32 = self.chk ^ self.variant.constant();

        // Emit the six checksum groups, most significant first.
        i = 0;
        while (i < CHECKSUM_LENGTH) : (i += 1) {
            const group: u8 = @intCast(std.math.shr(u32, plm, 5 * (5 - i)) & 0x1f);

            try self.formatter.writeByte(CHARSET[group]);
        }
    }

    /// Write a `u5` slice
    fn write(self: *Self, data: []const u5) !void {
        for (data) |group| {
            try self.writeU5(group);
        }
    }

    /// Writes a single 5 bit value of the data part
    fn writeU5(self: *Self, data: u5) !void {
        self.polymodStep(data);

        try self.formatter.writeByte(CHARSET[data]);
    }
};

// Encode a bech32 payload to string.
//
// # Errors
// * If [check_hrp] returns an error for the given HRP.
// # Deviations from standard
// * No length limits are enforced for the data part
pub fn encode(allocator: std.mem.Allocator, hrp: []const u8, data: []const u5, variant: Variant) !std.ArrayList(u8) {
    var buf = std.ArrayList(u8).init(allocator);
    errdefer buf.deinit();

    try encodeToFmt(allocator, buf.writer().any(), hrp, data, variant);

    return buf;
}

/// Convert base256 bytes to base32 (`u5`) groups, most significant bits
/// first, zero-padding the final group if needed.
pub fn toBase32(allocator: std.mem.Allocator, d: []const u8) !std.ArrayList(u5) {
    var self = std.ArrayList(u5).init(allocator);
    errdefer self.deinit();

    // Amount of bits left over from last round, stored in buffer.
    var buffer_bits: u32 = 0;
    // Holds all unwritten bits left over from last round. The bits are stored beginning from
    // the most significant bit. E.g. if buffer_bits=3, then the byte with bits a, b and c will
    // look as follows: [a, b, c, 0, 0, 0, 0, 0]
    var buffer: u8 = 0;

    for (d) |b| {
        // Write first u5 if we have to write two u5s this round. That only happens if the
        // buffer holds too many bits, so we don't have to combine buffer bits with new bits
        // from this round's byte.
        if (buffer_bits >= 5) {
            try self.append(@truncate(std.math.shr(u8, buffer & 0b1111_1000, 3)));
            buffer <<= 5;
            buffer_bits -= 5;
        }

        // Combine all bits from buffer with enough bits from this round's byte so that they
        // fill a u5. Save remaining bits from byte to buffer.
        const from_buffer = buffer >> 3;
        const from_byte = std.math.shr(u8, b, 3 + buffer_bits); // buffer_bits <= 4

        try self.append(@truncate(from_buffer | from_byte));
        buffer = std.math.shl(u8, b, 5 - buffer_bits);
        buffer_bits += 3;
    }

    // There can be at most two u5s left in the buffer after processing all bytes, write them.
    if (buffer_bits >= 5) {
        try self.append(@truncate((buffer & 0b1111_1000) >> 3));
        buffer <<= 5;
        buffer_bits -= 5;
    }

    if (buffer_bits != 0) {
        try self.append(@truncate(buffer >> 3));
    }

    return self;
}

/// Encode a bech32 payload without a checksum to an [std.io.AnyWriter].
///
/// The HRP is emitted in lowercase, matching the behavior of [encodeToFmt].
///
/// # Errors
/// * If [checkHrp] returns an error for the given HRP.
/// # Deviations from standard
/// * No length limits are enforced for the data part
pub fn encodeWithoutChecksumToFmt(
    allocator: std.mem.Allocator,
    fmt: std.io.AnyWriter,
    hrp: []const u8,
    data: []const u5,
) !void {
    var hrp_lower = try std.ArrayList(u8).initCapacity(allocator, hrp.len);
    defer hrp_lower.deinit();

    hrp_lower.appendSliceAssumeCapacity(hrp);

    if (try checkHrp(hrp) == .upper) {
        _ = std.ascii.lowerString(hrp_lower.items, hrp);
    }

    // BUG FIX: this previously wrote the ORIGINAL `hrp`, so an uppercase HRP
    // was emitted unchanged and the lowercased copy above was discarded.
    // Write the lowercased HRP instead, consistent with `encodeToFmt`.
    _ = try fmt.write(hrp_lower.items);

    try fmt.writeByte(SEP);

    for (data) |b| {
        try fmt.writeByte(CHARSET[b]);
    }
}

/// Encode a bech32 payload to string without the checksum.
///
/// # Errors
/// * If [checkHrp] returns an error for the given HRP.
/// # Deviations from standard
/// * No length limits are enforced for the data part
pub fn encodeWithoutChecksum(allocator: std.mem.Allocator, hrp: []const u8, data: []const u5) !std.ArrayList(u8) {
    var buf = std.ArrayList(u8).init(allocator);
    errdefer buf.deinit();

    try encodeWithoutChecksumToFmt(allocator, buf.writer().any(), hrp, data);

    return buf;
}

/// Decode a bech32 string into the raw HRP and the data bytes, assuming no checksum.
///
/// Returns the HRP in lowercase and the data.
pub fn decodeWithoutChecksum(allocator: std.mem.Allocator, s: []const u8) Error!struct { std.ArrayList(u8), std.ArrayList(u5) } {
    return splitAndDecode(allocator, s);
}

/// Convert base32 to base256, removes null-padding if present, returns
/// `Err(Error::InvalidPadding)` if padding bits are unequal `0`
pub fn arrayListFromBase32(allocator: std.mem.Allocator, b: []const u5) !std.ArrayList(u8) {
    return convertBits(u5, allocator, b, 5, 8, false);
}

/// Convert between bit sizes
///
/// # Errors
/// * `Error::InvalidData` if any element of `data` is out of range
/// * `Error::InvalidPadding` if `pad == false` and the padding bits are not `0`
///
/// # Panics
/// Function will panic if attempting to convert `from` or `to` a bit size that
/// is 0 or larger than 8 bits.
///
/// # Examples
///
/// ```zig
/// const base5 = try convertBits(u8, allocator, &.{0xff}, 8, 5, true);
/// std.testing.expectEqualSlices(u8, base5.items, &.{0x1f, 0x1c});
/// ```
pub fn convertBits(comptime T: type, allocator: std.mem.Allocator, data: []const T, from: u32, to: u32, pad: bool) !std.ArrayList(u8) {
    if (from > 8 or to > 8 or from == 0 or to == 0) {
        @panic("convert_bits `from` and `to` parameters 0 or greater than 8");
    }

    var out = std.ArrayList(u8).init(allocator);
    errdefer out.deinit();

    // Bit accumulator: incoming groups are shifted in from the right,
    // outgoing groups are peeled off the top whenever enough bits are queued.
    var acc: u32 = 0;
    var queued_bits: u32 = 0;
    const max_out: u32 = std.math.shl(u32, 1, to) - 1;

    for (data) |elem| {
        const v: u32 = @intCast(elem);

        // Reject any element wider than `from` bits.
        if (std.math.shr(u32, v, from) != 0) return error.InvalidData;

        acc = std.math.shl(u32, acc, from) | v;
        queued_bits += from;

        while (queued_bits >= to) {
            queued_bits -= to;
            try out.append(@truncate(std.math.shr(u32, acc, queued_bits) & max_out));
        }
    }

    if (pad) {
        // Flush the remainder, zero-padded on the right, if any bits are left.
        if (queued_bits > 0) {
            try out.append(@truncate(std.math.shl(u32, acc, to - queued_bits) & max_out));
        }
    } else if (queued_bits >= from or (std.math.shl(u32, acc, to - queued_bits) & max_out) != 0) {
        // Without padding, leftover bits must be a strict remainder of zeros.
        return error.InvalidPadding;
    }

    return out;
}

test "encode" {
    try std.testing.expectError(
        error.InvalidLength,
        encode(std.testing.allocator, "", &.{ 1, 2, 3, 4 }, .bech32),
    );
}

test "roundtrip_without_checksum" {
    const hrp = "lnbc";
    const data = try toBase32(std.testing.allocator, "Hello World!");
    defer data.deinit();

    const encoded = try encodeWithoutChecksum(std.testing.allocator, hrp, data.items);
    defer encoded.deinit();

    const decoded_hrp, const decoded_data =
        try decodeWithoutChecksum(std.testing.allocator, encoded.items);
    defer decoded_hrp.deinit();
    defer decoded_data.deinit();

    try std.testing.expectEqualSlices(u8, hrp, decoded_hrp.items);

    try std.testing.expectEqualSlices(u5, data.items, decoded_data.items);
}

test "decode ln" {
    const str = "lnbc10n1pnw2hkzdqqpp5ve584t0cv27hwmy0cx9ca8uwyqyfw9y9dm3r8vus9fv36r2l9yjssp59g4z52329g4z52329g4z52329g4z52329g4z52329g4z52329g4qcqzysc39n3w6zq50y3775yc66mmvt2fe5aa9mzzdnvq5palgyw6j9uu04qnm0g4ftm5ehtm5aulwtuy0hfrtqcdaxcl2r7wz2x4503levlacqswkkem";

    const h, const d, const v = try decode(std.testing.allocator, str);
    _ = v; // autofix
    defer h.deinit();
    defer d.deinit();
}

test "test_hrp_case_decode" {
    const hrp, const data, const variant = try decode(std.testing.allocator, "hrp1qqqq40atq3");
    defer hrp.deinit();
    defer data.deinit();

    var expected_data = try toBase32(std.testing.allocator, &.{ 0x00, 0x00 });
    defer expected_data.deinit();

    try std.testing.expectEqual(.bech32, variant);
    try std.testing.expectEqualSlices(u8, "hrp",
hrp.items); 560 | try std.testing.expectEqualSlices(u5, expected_data.items, data.items); 561 | } 562 | 563 | test "test_hrp_case" { 564 | var data = try toBase32(std.testing.allocator, &.{ 0x00, 0x00 }); 565 | defer data.deinit(); 566 | 567 | // Tests for issue with HRP case checking being ignored for encoding 568 | const encoded = try encode(std.testing.allocator, "HRP", data.items, .bech32); 569 | defer encoded.deinit(); 570 | 571 | try std.testing.expectEqualSlices(u8, "hrp1qqqq40atq3", encoded.items); 572 | } 573 | -------------------------------------------------------------------------------- /src/bech32/hrp.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const expect = std.testing.expect; 3 | const expectEqualSlices = std.testing.expectEqualSlices; 4 | const expectError = std.testing.expectError; 5 | const expectEqualStrings = std.testing.expectEqualStrings; 6 | 7 | /// The human readable part of a bech32 address is limited to 83 US-ASCII characters. 8 | const MAX_HRP_LEN: usize = 83; 9 | 10 | /// The minimum ASCII value for a valid character in the human readable part. 11 | const MIN_ASCII: u8 = 33; 12 | 13 | /// The maximum ASCII value for a valid character in the human readable part. 14 | const MAX_ASCII: u8 = 126; 15 | 16 | /// The human-readable part (HRP) for the Bitcoin mainnet. 17 | /// 18 | /// This corresponds to `bc` prefix. 19 | /// 20 | /// Example: 21 | /// - Mainnet P2WPKH: bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4 22 | const BC: Hrp = .{ 23 | .buf = [_]u8{ 24 | 98, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30 | 0, 0, 0, 0, 0, 31 | }, 32 | .size = 2, 33 | }; 34 | 35 | /// The human-readable part (HRP) for Bitcoin testnet networks (testnet and signet). 
36 | /// 37 | /// This corresponds to `tb` prefix. 38 | /// 39 | /// Example: 40 | /// - Testnet P2WPKH: tb1qw508d6qejxtdg4y5r3zarvary0c5xw7kxpjzsx 41 | const TB: Hrp = .{ 42 | .buf = [_]u8{ 43 | 116, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 46 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 49 | 0, 0, 0, 0, 0, 50 | }, 51 | .size = 2, 52 | }; 53 | 54 | /// The human-readable part (HRP) for the Bitcoin regtest network. 55 | /// 56 | /// This corresponds to `bcrt` prefix. 57 | const BCRT: Hrp = .{ 58 | .buf = [_]u8{ 59 | 98, 99, 114, 116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 62 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64 | 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65 | 0, 0, 0, 0, 0, 66 | }, 67 | .size = 4, 68 | }; 69 | 70 | /// Various errors that can occur during HRP processing. 71 | /// 72 | /// These errors help in validating and debugging issues with bech32 address formatting. 73 | const HrpError = error{ 74 | /// This error occurs when the provided human-readable part (HRP) is empty. 75 | /// 76 | /// A valid HRP must contain at least one character to comply with the 77 | /// specification, and this error signals the absence of such a character. 78 | EmptyHrp, 79 | 80 | /// This error is returned when the HRP exceeds the maximum allowed length. 81 | /// 82 | /// According to the specification, the HRP must not exceed 83 characters. 83 | /// This error occurs if the HRP length is greater than the allowed limit. 84 | TooLongHrp, 85 | 86 | /// This error occurs when a non-ASCII character is found in the HRP. 87 | /// 88 | /// The HRP is restricted to US-ASCII characters (values between 33 and 126). 89 | /// This error indicates that a character outside of this range was encountered. 
90 | NonAsciiChar, 91 | 92 | /// This error is returned when the HRP contains a character that falls 93 | /// outside the valid ASCII range for a bech32-encoded string. 94 | /// 95 | /// The HRP should only contain characters within the range of 33 to 126. 96 | /// If a character falls outside of this range, this error is triggered. 97 | InvalidAsciiByte, 98 | 99 | /// This error occurs when the HRP contains both uppercase and lowercase letters. 100 | /// 101 | /// To ensure consistency and compatibility, the HRP must either be fully 102 | /// lowercase or fully uppercase. Mixing of cases is not allowed, and this 103 | /// error indicates a violation of that rule. 104 | MixedCaseHrp, 105 | }; 106 | 107 | /// Represents the human-readable part (HRP) of a Bech32 encoded string. 108 | /// 109 | /// The HRP is the prefix of a Bech32 string that is used to convey contextual information about the 110 | /// encoded data (such as the blockchain network or address type). 111 | /// 112 | /// The Bech32 specification, defined in [BIP-173](https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki), 113 | /// restricts the HRP to 1-83 characters from the US-ASCII set, with the characters required to be in the range 114 | /// of ASCII values 33-126. 115 | /// Furthermore, HRP must not be mixed-case (i.e., it cannot contain both uppercase and lowercase letters). 116 | pub const Hrp = struct { 117 | const Self = @This(); 118 | 119 | /// ASCII buffer for the human readable part. 120 | /// 121 | /// This buffer stores the validated HRP string characters and is initialized to zeros. 122 | /// The size of the buffer is fixed at `MAX_HRP_LEN` (83 bytes) to match the Bech32 specification. 123 | /// 124 | /// # Guarantee: 125 | /// The buffer ensures that no mixed-case characters are stored in the HRP (all characters will either be lowercase 126 | /// or uppercase, but not both). 
127 | buf: [MAX_HRP_LEN]u8 = [_]u8{0} ** MAX_HRP_LEN, 128 | /// The number of characters currently stored in the HRP. 129 | /// 130 | /// This value tracks how many bytes from the buffer are actively used for the HRP. It will always be 131 | /// less than or equal to `MAX_HRP_LEN`. 132 | size: usize = 0, 133 | 134 | /// Parses and validates a human-readable part (HRP) according to the Bech32 specification. 135 | /// 136 | /// This function checks that the provided `hrp` string is valid according to the Bech32 rules as defined in 137 | /// BIP-173. The HRP must: 138 | /// 139 | /// - Be between 1 and 83 characters in length. 140 | /// - Contain only valid US-ASCII characters within the range [33-126]. 141 | /// - Not be mixed-case, meaning it must either be all lowercase or all uppercase, but not both. 142 | /// 143 | /// # Parameters: 144 | /// - `hrp`: A byte slice representing the HRP string to validate. 145 | /// 146 | /// # Returns: 147 | /// - Returns an `Hrp` struct if the HRP is valid. 148 | /// - Returns an error if the HRP is empty, too long, contains invalid characters, or is mixed-case. 149 | pub fn parse(hrp: []const u8) HrpError!Self { 150 | // Check if the provided HRP is empty, as an HRP must contain at least one character. 151 | if (hrp.len == 0) 152 | return HrpError.EmptyHrp; 153 | 154 | // Check if the HRP exceeds the maximum allowed length of 83 characters, returning an error if it does. 155 | if (hrp.len > MAX_HRP_LEN) 156 | return HrpError.TooLongHrp; 157 | 158 | // Create a new instance of the `Hrp` struct, initializing the buffer with zeros and size with 0. 159 | var new = Self{}; 160 | 161 | // Flags to detect if there are any lowercase or uppercase letters in the HRP. 162 | var has_lower = false; 163 | var has_upper = false; 164 | 165 | // Loop through each character of the HRP by its index `i` and character `c`. 166 | for (hrp, 0..) |c, i| { 167 | 168 | // Check if the current character is a valid ASCII character (0-127). 
169 | if (!std.ascii.isAscii(c)) 170 | return HrpError.NonAsciiChar; 171 | 172 | // Ensure that the character is within the valid range of ASCII values for Bech32 (33-126). 173 | // Characters outside this range are invalid. 174 | if (c < MIN_ASCII or c > MAX_ASCII) 175 | return HrpError.InvalidAsciiByte; 176 | 177 | // If the character is lowercase, ensure that no uppercase characters have been encountered so far. 178 | // If an uppercase character was already found, return a `MixedCaseHrp` error. 179 | if (std.ascii.isLower(c)) { 180 | if (has_upper) 181 | return HrpError.MixedCaseHrp; 182 | 183 | // Mark that a lowercase letter has been found. 184 | has_lower = true; 185 | } else if (std.ascii.isUpper(c)) { 186 | // If the character is uppercase, ensure that no lowercase characters have been encountered. 187 | // If a lowercase character was already found, return a `MixedCaseHrp` error. 188 | if (has_lower) 189 | return HrpError.MixedCaseHrp; 190 | 191 | // Mark that an uppercase letter has been found. 192 | has_upper = true; 193 | } 194 | 195 | // Store the valid character into the buffer at the current index. 196 | new.buf[i] = c; 197 | // Increment the size of the HRP by 1 to account for the newly added character. 198 | new.size += 1; 199 | } 200 | 201 | // Return the constructed and validated `Hrp` instance. 202 | return new; 203 | } 204 | 205 | /// Converts the human-readable part (HRP) to a lowercase representation. 206 | pub fn toLowerCase(self: *const Self, output: []u8) []const u8 { 207 | std.debug.assert(output.len >= self.size); 208 | 209 | // Loop through each character of the HRP and convert it to lowercase. 210 | for (self.buf[0..self.size], 0..) |b, i| { 211 | output[i] = std.ascii.toLower(b); 212 | } 213 | 214 | return output[0..self.size]; 215 | } 216 | 217 | /// Converts the human-readable part (HRP) to bytes. 
218 | pub fn asBytes(self: *const Self) []const u8 { 219 | return self.buf[0..self.size]; 220 | } 221 | 222 | /// Checks whether two HRPs are equal. 223 | pub fn eql(self: *const Self, rhs: *const Self) bool { 224 | // If the HRPs have different sizes, they are not equal. 225 | if (self.size != rhs.size) return false; 226 | 227 | // Create buffers to store the lowercase versions of the HRPs. 228 | var buf_lhs: [MAX_HRP_LEN]u8 = undefined; 229 | var buf_rhs: [MAX_HRP_LEN]u8 = undefined; 230 | 231 | // Convert both HRPs to lowercase. 232 | const l = self.toLowerCase(&buf_lhs); 233 | const r = rhs.toLowerCase(&buf_rhs); 234 | 235 | // Compare each byte of the lowercase HRPs for equality. 236 | for (l, r) |a, b| 237 | if (a != b) return false; 238 | 239 | return true; 240 | } 241 | 242 | /// Checks whether a given Segwit address is valid on either the mainnet or testnet. 243 | /// 244 | /// A Segwit address must follow the Bech32 encoding format, with the human-readable 245 | /// part "bc" for mainnet or "tb" for testnet. This function combines the logic of 246 | /// validating an address on both networks. 247 | /// 248 | /// # Returns 249 | /// - `true` if the Segwit address is valid on either the mainnet or testnet. 250 | /// - `false` otherwise. 251 | /// 252 | /// # Segwit Address Requirements: 253 | /// - The human-readable part must be "bc" (mainnet) or "tb" (testnet). 254 | /// - The witness program must follow the rules outlined in BIP141. 255 | pub fn isValidSegwit(self: *const Self) bool { 256 | return self.isValidOnMainnet() or self.isValidOnTestnet(); 257 | } 258 | 259 | /// Checks whether a given Segwit address is valid on the Bitcoin mainnet. 260 | /// 261 | /// Segwit addresses on the mainnet use the human-readable part "bc". This function 262 | /// verifies that the provided address corresponds to the mainnet format. 263 | /// 264 | /// # Returns 265 | /// - `true` if the Segwit address is valid on the mainnet (with the "bc" prefix). 
266 | /// - `false` otherwise. 267 | pub fn isValidOnMainnet(self: *const Self) bool { 268 | return self.eql(&BC); 269 | } 270 | 271 | /// Checks whether a given Segwit address is valid on the Bitcoin testnet. 272 | /// 273 | /// Segwit addresses on the testnet use the human-readable part "tb". This function 274 | /// verifies that the provided address corresponds to the testnet format. 275 | /// 276 | /// # Returns 277 | /// - `true` if the Segwit address is valid on the testnet (with the "tb" prefix). 278 | /// - `false` otherwise. 279 | pub fn isValidOnTestnet(self: *const Self) bool { 280 | return self.eql(&TB); 281 | } 282 | 283 | /// Checks whether a given Segwit address is valid on the Bitcoin signet. 284 | /// 285 | /// Segwit addresses on signet also use the human-readable part "tb", similar to 286 | /// testnet addresses. This function verifies that the provided address corresponds 287 | /// to the signet format. 288 | /// 289 | /// # Returns 290 | /// - `true` if the Segwit address is valid on signet (with the "tb" prefix). 291 | /// - `false` otherwise. 292 | pub fn isValidOnSignet(self: *const Self) bool { 293 | return self.eql(&TB); 294 | } 295 | 296 | /// Checks whether a given Segwit address is valid on the Bitcoin regtest network. 297 | /// 298 | /// Segwit addresses on the regtest network use the human-readable part "bcrt". 299 | /// This function verifies that the provided address corresponds to the regtest 300 | /// format. 301 | /// 302 | /// # Returns 303 | /// - `true` if the Segwit address is valid on regtest (with the "bcrt" prefix). 304 | /// - `false` otherwise. 305 | pub fn isValidOnRegtest(self: *const Self) bool { 306 | return self.eql(&BCRT); 307 | } 308 | }; 309 | 310 | test "Hrp: check parse is ok" { 311 | // Some valid human readable parts. 
    //
    // Taken from https://github.com/rust-bitcoin/rust-bech32/blob/master/src/primitives/hrp.rs
    const cases = [_][]const u8{
        "a",
        "A",
        "abcdefg",
        "ABCDEFG",
        "abc123def",
        "ABC123DEF",
        "!\"#$%&'()*+,-./",
        "1234567890",
    };

    // Go through all the test cases.
    for (cases) |c| {
        // Check that the human readable part is parsed correctly.
        const hrp = try Hrp.parse(c);
        // Check that the human readable part is correctly stored in the buffer.
        try expectEqualSlices(u8, c, hrp.buf[0..hrp.size]);
        // Check that the remaining buffer is zeroed.
        for (hrp.buf[hrp.size..]) |b| try expect(b == 0);
        // Check that the size is correct.
        try expect(hrp.size == c.len);
    }
}

test "Hrp: mixed case Hrp should fail parsing" {
    // A human readable part that contains both uppercase and lowercase characters.
    const case = "has-capitals-aAbB";
    // Attempt to parse the mixed-case HRP, expecting a `MixedCaseHrp` error.
    try expectError(HrpError.MixedCaseHrp, Hrp.parse(case));
}

test "Hrp: empty Hrp should fail parsing" {
    // An empty human readable part.
    const case = "";
    // Attempt to parse the empty HRP, expecting an `EmptyHrp` error.
    try expectError(HrpError.EmptyHrp, Hrp.parse(case));
}

test "Hrp: Hrp with non ASCII character should fail parsing" {
    // A human readable part that contains non-ASCII characters.
    const case = "has-value-out-of-range-∈∈∈∈∈∈∈∈";
    // Attempt to parse the HRP with non-ASCII characters, expecting a `NonAsciiChar` error.
    try expectError(HrpError.NonAsciiChar, Hrp.parse(case));
}

test "Hrp: Hrp with too many characters should fail parsing" {
    // A human readable part that exceeds the maximum allowed length.
    const case = "toolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolongtoolong";
    // Attempt to parse the HRP that is too long, expecting a `TooLongHrp` error.
    try expectError(HrpError.TooLongHrp, Hrp.parse(case));
}

test "Hrp: Hrp with invalid ASCII byte should fail parsing" {
    // A human readable part that contains ASCII bytes outside the valid HRP range.
    const case = "has spaces in it";
    // Attempt to parse the HRP with invalid characters, expecting an `InvalidAsciiByte` error.
    try expectError(HrpError.InvalidAsciiByte, Hrp.parse(case));
}

test "Hrp: Hrp to lower case" {
    // Some valid human readable parts.
    const cases = [_][]const u8{
        "a",
        "A",
        "abcdefg",
        "ABCDEFG",
        "abc123def",
        "ABC123DEF",
        "!\"#$%&'()*+,-./",
        "1234567890",
    };

    // The expected results for the human readable parts in lowercase.
    const expected_results = [_][]const u8{
        "a",
        "a",
        "abcdefg",
        "abcdefg",
        "abc123def",
        "abc123def",
        "!\"#$%&'()*+,-./",
        "1234567890",
    };

    // Go through all the test cases.
    for (cases, expected_results) |case, expected| {
        // Parse the human readable part.
        const hrp = try Hrp.parse(case);
        var buf: [MAX_HRP_LEN]u8 = undefined;

        // Convert the human readable part to lowercase.
        try expectEqualStrings(expected, hrp.toLowerCase(&buf));
    }
}

test "Hrp: as bytes should return the proper bytes" {
    // Some valid human readable parts.
    const cases = [_][]const u8{
        "a",
        "A",
        "abcdefg",
        "ABCDEFG",
        "abc123def",
        "ABC123DEF",
        "!\"#$%&'()*+,-./",
        "1234567890",
    };

    // Go through all the test cases.
    for (cases) |case| {
        // Parse the human readable part.
425 | const hrp = try Hrp.parse(case); 426 | // Convert the human readable part to lowercase. 427 | try expectEqualSlices(u8, case, hrp.asBytes()); 428 | } 429 | } 430 | 431 | test "Hrp: ensure eql function works properly" { 432 | // Parse two human readable parts which are equal. 433 | const lhs1 = try Hrp.parse("!\"#$%&'()*+,-./"); 434 | const rhs1 = try Hrp.parse("!\"#$%&'()*+,-./"); 435 | // Assert that the two human readable parts are equal. 436 | try expect(lhs1.eql(&rhs1)); 437 | 438 | // Generate another human readable part which is different. 439 | const rhs2 = try Hrp.parse("!\"#$%&'()*+,-.a"); 440 | // Assert that the two human readable parts are not equal. 441 | try expect(!lhs1.eql(&rhs2)); 442 | 443 | // Generate another human readable part with a different size. 444 | const rhs3 = try Hrp.parse("!\"#$%&'()*+,-."); 445 | // Assert that the two human readable parts are not equal (different size). 446 | try expect(!lhs1.eql(&rhs3)); 447 | 448 | // Parse two human readable parts which are equal, but with different case. 449 | const lhs_case_insensitive = try Hrp.parse("abcdefg"); 450 | const rhs_case_insensitive = try Hrp.parse("ABCDEFG"); 451 | // Assert that the two human readable parts are equal. 452 | try expect(lhs_case_insensitive.eql(&rhs_case_insensitive)); 453 | } 454 | 455 | test "Hrp: ensure constants are properly setup" { 456 | try expect(BC.eql(&(try Hrp.parse("bc")))); 457 | try expect(TB.eql(&(try Hrp.parse("tb")))); 458 | try expect(BCRT.eql(&(try Hrp.parse("bcrt")))); 459 | } 460 | -------------------------------------------------------------------------------- /src/bips/bip32/bip32.zig: -------------------------------------------------------------------------------- 1 | //! BIP32 implementation. 2 | //! 3 | //! Implementation of BIP32 hierarchical deterministic wallets, as defined 4 | //! at . 5 | //! 
const Ripemd160 = @import("../../hashes/lib.zig").Ripemd160;
const secp256k1 = @import("secp256k1");
const Secp256k1NumberOfPoints = 115792089237316195423570985008687907852837564279074904382605163141518161494337;
const key_lib = @import("key.zig");

const base58 = @import("../../base58/base58.zig");

const std = @import("std");
const Hmac = std.crypto.auth.hmac.sha2.HmacSha512;

/// The Bitcoin networks a key can be used on.
pub const Network = enum { MAINNET, TESTNET, REGTEST, SIMNET };

/// Version bytes prepended to a serialized extended private key.
pub const SerializedPrivateKeyVersion = enum(u32) {
    MAINNET = 0x0488aDe4,
    TESTNET = 0x04358394,
    SEGWIT_MAINNET = 0x04b2430c,
    SEGWIT_TESTNET = 0x045f18bc,
};

/// Version bytes prepended to a serialized extended public key.
pub const SerializedPublicKeyVersion = enum(u32) {
    MAINNET = 0x0488b21e,
    TESTNET = 0x043587cf,
    SEGWIT_MAINNET = 0x04b24746,
    SEGWIT_TESTNET = 0x045f1cf6,
};

/// A chain code
pub const ChainCode = struct {
    inner: [32]u8,

    /// Builds a chain code from the right half of a 64-byte HMAC-SHA512 output.
    fn fromHmac(hmac: [64]u8) ChainCode {
        return .{ .inner = hmac[32..].* };
    }
};

/// A fingerprint
pub const Fingerprint = struct {
    inner: [4]u8,
};

/// Extended private key
pub const ExtendedPrivKey = struct {
    /// The network this key is to be used on
    network: Network,
    /// How many derivations this key is from the master (which is 0)
    depth: u8,
    /// Fingerprint of the parent key (0 for master)
    parent_fingerprint: Fingerprint,
    /// Child number of the key used to derive from parent (0 for master)
    child_number: ChildNumber,
    /// Private key
    private_key: secp256k1.SecretKey,
    /// Chain code
    chain_code: ChainCode,

    /// Parses a Base58Check-encoded extended private key string.
    pub fn fromStr(allocator: std.mem.Allocator, s: []const u8) !ExtendedPrivKey {
        const decoder = base58.Decoder{};
        const payload = try decoder.decodeCheckAlloc(allocator, s);
        defer allocator.free(payload);

        // A serialized extended key is always exactly 78 bytes long.
        if (payload.len != 78) return error.InvalidLength;

        return try decode(payload);
    }

    /// Serializes this key and encodes it as a Base58Check string.
    /// Caller owns the returned memory.
    pub fn toStr(self: ExtendedPrivKey, allocator: std.mem.Allocator) ![]const u8 {
        const raw = self.encode();
        const encoder = base58.Encoder{};

        return encoder.encodeCheckAlloc(allocator, &raw);
    }

    /// Extended private key binary encoding according to BIP 32
    pub fn encode(self: ExtendedPrivKey) [78]u8 {
        var out = [_]u8{0} ** 78;

        // Version bytes: mainnet gets 0x0488ADE4 (xprv); every other network
        // is serialized with the testnet version 0x04358394 (tprv).
        out[0..4].* = switch (self.network) {
            .MAINNET => .{ 0x04, 0x88, 0xAD, 0xE4 },
            else => .{ 0x04, 0x35, 0x83, 0x94 },
        };

        out[4] = self.depth;
        out[5..9].* = self.parent_fingerprint.inner;

        // Child number, big-endian, hardened bit included.
        var child_bytes: [4]u8 = undefined;
        std.mem.writeInt(u32, &child_bytes, self.child_number.toU32(), .big);

        out[9..13].* = child_bytes;
        out[13..45].* = self.chain_code.inner;
        // The 33-byte key field is a zero byte followed by the 32-byte secret.
        out[45] = 0;
        out[46..78].* = self.private_key.data;
        return out;
    }

    /// Construct a new master key from a seed value
    pub fn initMaster(network: Network, seed: []const u8) !ExtendedPrivKey {
        // HMAC-SHA512 keyed with the string "Bitcoin seed", per BIP 32.
        var mac = Hmac.init("Bitcoin seed");
        mac.update(seed);
        var digest: [Hmac.mac_length]u8 = undefined;

        mac.final(&digest);

        return ExtendedPrivKey{
            .network = network,
            .depth = 0,
            .parent_fingerprint = .{ .inner = .{ 0, 0, 0, 0 } },
            .child_number = try ChildNumber.fromNormalIdx(0),
            // Left half of the digest is the master secret key,
            // right half becomes the master chain code.
            .private_key = try secp256k1.SecretKey.fromSlice(digest[0..32]),
            .chain_code = ChainCode.fromHmac(digest),
        };
    }

    /// Constructs ECDSA compressed private key matching internal secret key representation.
    pub fn toPrivateKey(self: ExtendedPrivKey) key_lib.PrivateKey {
        return .{
            .compressed = true,
            .network = self.network,
            .inner = self.private_key,
        };
    }

    /// Constructs BIP340 keypair for Schnorr signatures and Taproot use matching the internal
    /// secret key representation.
    pub fn toKeypair(self: ExtendedPrivKey, secp: secp256k1.Secp256k1) secp256k1.KeyPair {
        return secp256k1.KeyPair.fromSecretKey(&secp, &self.private_key) catch @panic("BIP32 internal private key representation is broken");
    }

    /// Private->Private child key derivation (CKDpriv in BIP 32 terms).
    pub fn ckdPriv(
        self: ExtendedPrivKey,
        secp: secp256k1.Secp256k1,
        i: ChildNumber,
    ) !ExtendedPrivKey {
        // The HMAC is keyed with this key's chain code.
        var hmac_engine = Hmac.init(self.chain_code.inner[0..]);
        switch (i) {
            .normal => {
                // Non-hardened key: compute public data and use that
                hmac_engine.update(&self.private_key.publicKey(secp).serialize());
            },
            .hardened => {
                // Hardened key: use only secret data to prevent public derivation
                hmac_engine.update(&.{0});
                hmac_engine.update(self.private_key.data[0..]);
            },
        }

        // Append the child index (hardened bit included) in big-endian order.
        const i_u32 = i.toU32();
        var buf: [4]u8 = undefined;

        std.mem.writeInt(u32, &buf, i_u32, .big);

        hmac_engine.update(&buf);

        var hmac_result: [Hmac.mac_length]u8 = undefined;

        hmac_engine.final(&hmac_result);

        // Child secret key = parse256(IL) + k_parent (mod n); the left HMAC half
        // is the tweak, the right half becomes the child chain code.
        const sk = secp256k1.SecretKey.fromSlice(hmac_result[0..32]) catch @panic("statistically impossible to hit");
        const tweaked = sk.addTweak(secp256k1.Scalar.fromSecretKey(self.private_key)) catch @panic("statistically impossible to hit");

        return .{
            .network = self.network,
            .depth = self.depth + 1,
            .parent_fingerprint = self.fingerprint(secp),
            .child_number = i,
            .private_key = tweaked,
            .chain_code = ChainCode.fromHmac(hmac_result),
        };
    }

    /// Attempts to derive an extended private key from a path.
    ///
    /// The `path` argument is a slice of `ChildNumber`s which are derived in
    /// order, starting from this key.
    pub fn derivePriv(
        self: ExtendedPrivKey,
        secp: secp256k1.Secp256k1,
        path: []const ChildNumber,
    ) !ExtendedPrivKey {
        var sk = self;
        for (path) |cnum| {
            sk = try sk.ckdPriv(secp, cnum);
        }

        return sk;
    }

    /// Returns the HASH160 of the public key belonging to the xpriv
    pub fn identifier(self: ExtendedPrivKey, secp: secp256k1.Secp256k1) XpubIdentifier {
        return ExtendedPubKey.fromPrivateKey(secp, self).identifier();
    }

    /// Returns the first four bytes of the identifier
    pub fn fingerprint(self: ExtendedPrivKey, secp: secp256k1.Secp256k1) Fingerprint {
        return .{ .inner = self.identifier(secp).inner[0..4].* };
    }

    /// Decoding extended private key from binary data according to BIP 32
    pub fn decode(data: []const u8) !ExtendedPrivKey {
        if (data.len != 78) {
            return error.WrongExtendedKeyLength;
        }

        // Version bytes select the network (0x0488ADE4 = xprv, 0x04358394 = tprv).
        const network = if (std.mem.eql(u8, data[0..4], &.{ 0x04, 0x88, 0xAD, 0xE4 }))
            Network.MAINNET
        else if (std.mem.eql(u8, data[0..4], &.{ 0x04, 0x35, 0x83, 0x94 }))
            Network.TESTNET
        else
            return error.UnknownVersion;

        return .{
            .network = network,
            .depth = data[4],
            .parent_fingerprint = .{ .inner = data[5..9].* },
            .child_number = ChildNumber.fromU32(std.mem.readInt(u32, data[9..13], .big)),
            .chain_code = .{ .inner = data[13..45].* },
            // Byte 45 is the zero padding before the secret key; it is not inspected here.
            .private_key = try secp256k1.SecretKey.fromSlice(data[46..78]),
        };
    }
};

/// Extended public key
pub const ExtendedPubKey = struct {
    /// The network this key is to be used on
    network: Network,
    /// How many derivations this key is from the master (which is 0)
    depth: u8,
    /// Fingerprint of the parent key
233 | parent_fingerprint: Fingerprint, 234 | /// Child number of the key used to derive from parent (0 for master) 235 | child_number: ChildNumber, 236 | /// Public key 237 | public_key: secp256k1.PublicKey, 238 | /// Chain code 239 | chain_code: ChainCode, 240 | 241 | pub fn fromStr(allocator: std.mem.Allocator, s: []const u8) !ExtendedPubKey { 242 | const decoder = base58.Decoder{}; 243 | const decoded = try decoder.decodeCheckAlloc(allocator, s); 244 | defer allocator.free(decoded); 245 | 246 | if (decoded.len != 78) return error.InvalidLength; 247 | 248 | return try decode(decoded); 249 | } 250 | 251 | pub fn toStr(self: ExtendedPubKey, allocator: std.mem.Allocator) ![]const u8 { 252 | const encoder = base58.Encoder{}; 253 | return try encoder.encodeCheckAlloc(allocator, &self.encode()); 254 | } 255 | 256 | /// Extended public key binary encoding according to BIP 32 257 | pub fn encode(self: ExtendedPubKey) [78]u8 { 258 | var ret = [_]u8{0} ** 78; 259 | 260 | ret[0..4].* = switch (self.network) { 261 | .MAINNET => .{ 0x04, 0x88, 0xB2, 0x1E }, 262 | else => .{ 0x04, 0x35, 0x87, 0xCF }, 263 | }; 264 | 265 | ret[4] = self.depth; 266 | ret[5..9].* = self.parent_fingerprint.inner; 267 | 268 | var buf: [4]u8 = undefined; 269 | std.mem.writeInt(u32, &buf, self.child_number.toU32(), .big); 270 | 271 | ret[9..13].* = buf; 272 | ret[13..45].* = self.chain_code.inner; 273 | ret[45..78].* = self.public_key.serialize(); 274 | return ret; 275 | } 276 | 277 | pub fn decode(data: []const u8) !ExtendedPubKey { 278 | if (data.len != 78) { 279 | return error.WrongExtendedKeyLength; 280 | } 281 | 282 | const network = if (std.mem.eql(u8, data[0..4], &.{ 0x04, 0x88, 0xB2, 0x1E })) 283 | Network.MAINNET 284 | else if (std.mem.eql(u8, data[0..4], &.{ 0x04, 0x35, 0x87, 0xCF })) 285 | Network.TESTNET 286 | else 287 | return error.UnknownVersion; 288 | 289 | return .{ 290 | .network = network, 291 | .depth = data[4], 292 | .parent_fingerprint = .{ .inner = data[5..9].* }, 293 | 
.child_number = ChildNumber.fromU32(std.mem.readInt(u32, data[9..13], .big)), 294 | .chain_code = .{ .inner = data[13..45].* }, 295 | .public_key = try secp256k1.PublicKey.fromSlice(data[45..78]), 296 | }; 297 | } 298 | 299 | /// Derives a public key from a private key 300 | pub fn fromPrivateKey( 301 | secp: secp256k1.Secp256k1, 302 | sk: ExtendedPrivKey, 303 | ) ExtendedPubKey { 304 | return .{ 305 | .network = sk.network, 306 | .depth = sk.depth, 307 | .parent_fingerprint = sk.parent_fingerprint, 308 | .child_number = sk.child_number, 309 | .public_key = sk.private_key.publicKey(secp), 310 | .chain_code = sk.chain_code, 311 | }; 312 | } 313 | 314 | /// Attempts to derive an extended public key from a path. 315 | /// 316 | /// The `path` argument can be any type implementing `AsRef`, such as `DerivationPath`, for instance. 317 | pub fn derivePub( 318 | self: ExtendedPubKey, 319 | secp: secp256k1.Secp256k1, 320 | path: []ChildNumber, 321 | ) !ExtendedPubKey { 322 | var pk = self; 323 | for (path) |cnum| { 324 | pk = try pk.ckdPub(secp, cnum); 325 | } 326 | 327 | return pk; 328 | } 329 | 330 | /// Compute the scalar tweak added to this key to get a child key 331 | pub fn ckdPubTweak( 332 | self: ExtendedPubKey, 333 | i: ChildNumber, 334 | ) !struct { secp256k1.SecretKey, ChainCode } { 335 | switch (i) { 336 | .hardened => return error.CannotDeriveFromHardenedKey, 337 | .normal => |n| { 338 | var hmac_engine = Hmac.init(&self.chain_code.inner); 339 | 340 | hmac_engine.update(&self.public_key.serialize()); 341 | 342 | var buf: [4]u8 = undefined; 343 | std.mem.writeInt(u32, &buf, n, .big); 344 | 345 | hmac_engine.update(&buf); 346 | var hmac_result: [Hmac.mac_length]u8 = undefined; 347 | hmac_engine.final(&hmac_result); 348 | 349 | const private_key = try secp256k1.SecretKey.fromSlice(hmac_result[0..32]); 350 | const chain_code = ChainCode.fromHmac(hmac_result); 351 | 352 | return .{ private_key, chain_code }; 353 | }, 354 | } 355 | } 356 | 357 | /// Public->Public 
child key derivation 358 | pub fn ckdPub( 359 | self: ExtendedPubKey, 360 | secp: secp256k1.Secp256k1, 361 | i: ChildNumber, 362 | ) !ExtendedPubKey { 363 | const sk, const chain_code = try self.ckdPubTweak(i); 364 | 365 | const tweaked = try self.public_key.addExpTweak(secp, secp256k1.Scalar.fromSecretKey(sk)); 366 | 367 | return .{ 368 | .network = self.network, 369 | .depth = self.depth + 1, 370 | .parent_fingerprint = self.fingerprint(), 371 | .child_number = i, 372 | .public_key = tweaked, 373 | .chain_code = chain_code, 374 | }; 375 | } 376 | 377 | /// Returns the HASH160 of the chaincode 378 | pub fn identifier(self: ExtendedPubKey) XpubIdentifier { 379 | return .{ .inner = hash160(&self.public_key.serialize()) }; 380 | } 381 | 382 | /// Returns the first four bytes of the identifier 383 | pub fn fingerprint(self: ExtendedPubKey) Fingerprint { 384 | return .{ .inner = self.identifier().inner[0..4].* }; 385 | } 386 | }; 387 | 388 | fn hash160(data: []const u8) [Ripemd160.digest_length]u8 { 389 | var hasher256 = std.crypto.hash.sha2.Sha256.init(.{}); 390 | hasher256.update(data); 391 | 392 | var out256: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined; 393 | hasher256.final(&out256); 394 | 395 | var hasher = Ripemd160.init(.{}); 396 | hasher.update(&out256); 397 | 398 | var out: [Ripemd160.digest_length]u8 = undefined; 399 | hasher.final(&out); 400 | return out; 401 | } 402 | 403 | pub const XpubIdentifier = struct { 404 | inner: [Ripemd160.digest_length]u8, 405 | }; 406 | 407 | /// A child number for a derived key 408 | pub const ChildNumber = union(enum) { 409 | /// Non-hardened key 410 | /// Key index, within [0, 2^31 - 1] 411 | normal: u32, 412 | /// Hardened key 413 | /// Key index, within [0, 2^31 - 1] 414 | hardened: u32, 415 | 416 | pub fn fromStr(inp: []const u8) !ChildNumber { 417 | const is_hardened = (inp[inp.len - 1] == '\'' or inp[inp.len - 1] == 'h'); 418 | 419 | if (is_hardened) return try fromHardenedIdx(try std.fmt.parseInt(u32, 
inp[0 .. inp.len - 1], 10)) else return try fromNormalIdx(try std.fmt.parseInt(u32, inp, 10));
    }

    /// Builds a [`Normal`] child number; fails with `error.InvalidChildNumber`
    /// unless the index lies within [0, 2^31 - 1].
    ///
    /// [`Normal`]: #variant.Normal
    pub fn fromNormalIdx(index: u32) !ChildNumber {
        // A normal index must not have the hardened (top) bit set.
        if ((index & (1 << 31)) != 0) return error.InvalidChildNumber;
        return .{ .normal = index };
    }

    /// Builds a [`Hardened`] child number; fails with `error.InvalidChildNumber`
    /// unless the index lies within [0, 2^31 - 1].
    ///
    /// [`Hardened`]: #variant.Hardened
    pub fn fromHardenedIdx(index: u32) !ChildNumber {
        // The hardened bit is added during serialization, not stored in the index.
        if (index & (1 << 31) != 0) return error.InvalidChildNumber;
        return .{ .hardened = index };
    }

    /// Returns `true` if the child number is a [`Normal`] value.
    ///
    /// [`Normal`]: #variant.Normal
    pub fn isNormal(self: ChildNumber) bool {
        return !self.isHardened();
    }

    /// Returns `true` if the child number is a [`Hardened`] value.
    ///
    /// [`Hardened`]: #variant.Hardened
    pub fn isHardened(self: ChildNumber) bool {
        switch (self) {
            .hardened => return true,
            .normal => return false,
        }
    }

    /// Returns the child number that is a single increment from this one,
    /// staying within the same (normal/hardened) range.
    pub fn increment(self: ChildNumber) !ChildNumber {
        switch (self) {
            .hardened => |idx| return try fromHardenedIdx(idx + 1),
            .normal => |idx| return try fromNormalIdx(idx + 1),
        }
    }

    /// Interprets a raw serialized index: the top bit selects hardened.
    fn fromU32(number: u32) ChildNumber {
        if (number & (1 << 31) == 0) return .{ .normal = number };
        return .{ .hardened = number ^ (1 << 31) };
    }

    /// Raw serialized index: the hardened bit is folded back into the value.
    fn toU32(self: ChildNumber) u32 {
        switch (self) {
            .normal => |index| return index,
            .hardened => |index| return index | (1 << 31),
        }
    }
};

/// Derives `path` from a fresh master key built from `seed`, both privately
/// and publicly, and checks every result against the expected Base58Check
/// strings, including round-tripping through encode/decode.
fn testPath(
    secp: secp256k1.Secp256k1,
    network: Network,
    seed: []const u8,
    path: []ChildNumber,
    expected_sk: []const u8,
    expected_pk: []const u8,
) !void {
    var sk = try ExtendedPrivKey.initMaster(network, seed);
    var pk = ExtendedPubKey.fromPrivateKey(secp, sk);

    // Check derivation convenience method for ExtendedPrivKey
    {
        const derived_sk_str = try (try sk.derivePriv(secp, path)).toStr(std.testing.allocator);
        defer std.testing.allocator.free(derived_sk_str);

        try std.testing.expectEqualSlices(
            u8,
            derived_sk_str,
            expected_sk,
        );
    }

    // Check derivation convenience method for ExtendedPubKey, should error
    // appropriately if any ChildNumber is hardened
    for (path) |cnum| {
        if (cnum.isHardened()) {
            try std.testing.expectError(error.CannotDeriveFromHardenedKey, pk.derivePub(secp, path));
            break;
        }
    } else {
        const derived_pk_str = try (try pk.derivePub(secp, path)).toStr(std.testing.allocator);
        defer std.testing.allocator.free(derived_pk_str);

        try std.testing.expectEqualSlices(u8, derived_pk_str, expected_pk);
    }

    // Derive keys, checking hardened and non-hardened derivation one-by-one
    for (path) |num| {
        sk = try sk.ckdPriv(secp, num);
        switch (num) {
            .normal => {
                // Public derivation of a normal child must agree with the
                // public key computed from the privately-derived child.
                const pk2 = try pk.ckdPub(secp, num);
                pk = ExtendedPubKey.fromPrivateKey(secp, sk);
                try std.testing.expectEqualDeep(pk, pk2);
            },
            .hardened => {
                try std.testing.expectError(error.CannotDeriveFromHardenedKey, pk.ckdPub(secp, num));
                pk = ExtendedPubKey.fromPrivateKey(secp, sk);
            },
        }
    }

    // Check result against expected base58
    const sk_str = try sk.toStr(std.testing.allocator);
    defer std.testing.allocator.free(sk_str);
    try std.testing.expectEqualSlices(u8, sk_str, expected_sk);

    const pk_str = try pk.toStr(std.testing.allocator);
    defer std.testing.allocator.free(pk_str);
    try std.testing.expectEqualSlices(u8, pk_str, expected_pk);

    // Check decoded base58 against result
    const decoded_sk = try ExtendedPrivKey.fromStr(std.testing.allocator, expected_sk);
    const decoded_pk = try ExtendedPubKey.fromStr(std.testing.allocator, expected_pk);

    try std.testing.expectEqualDeep(decoded_sk, sk);
    try std.testing.expectEqualDeep(decoded_pk, pk);
}

/// Parses a derivation path string such as "m/0h/1" into a list of child
/// numbers. Caller owns the returned list.
fn derivatePathFromStr(path: []const u8, allocator: std.mem.Allocator) !std.ArrayList(ChildNumber) {
    // "", "m" and "m/" all denote the (empty) master path.
    if (path.len == 0 or (path.len == 1 and path[0] == 'm') or (path.len == 2 and path[0] == 'm' and path[1] == '/')) return std.ArrayList(ChildNumber).init(allocator);

    var rest = path;

    // Strip the leading "m/" marker, if present.
    if (std.mem.startsWith(u8, path, "m/")) rest = path[2..];

    var segments = std.mem.splitScalar(u8, rest, '/');

    var list = std.ArrayList(ChildNumber).init(allocator);
    errdefer list.deinit();

    while (segments.next()) |segment| {
        try list.append(try ChildNumber.fromStr(segment));
    }

    return list;
}

test "schnorr_broken_privkey_ffs" {
    // Xpriv having secret key set to all 0xFF's
    const xpriv_str = "xprv9s21ZrQH143K24Mfq5zL5MhWK9hUhhGbd45hLXo2Pq2oqzMMo63oStZzFAzHGBP2UuGCqWLTAPLcMtD9y5gkZ6Eq3Rjuahrv17fENZ3QzxW";
    try std.testing.expectError(error.InvalidSecretKey, ExtendedPrivKey.fromStr(std.testing.allocator, xpriv_str));
}

// BIP32 test vector 1 from the BIP 32 specification.
test "vector_1" {
    var secp = secp256k1.Secp256k1.genNew();
    defer secp.deinit();

    var buf: [100]u8 = undefined;

    const seed = try std.fmt.hexToBytes(&buf, "000102030405060708090a0b0c0d0e0f");
    // derivation path, expected_sk , expected_pk
    const testSuite: []const struct { Network, []const u8, []const u8, []const u8 } = &.{
        .{
            .MAINNET,
            "m",
            "xprv9s21ZrQH143K3QTDL4LXw2F7HEK3wJUD2nW2nRk4stbPy6cq3jPPqjiChkVvvNKmPGJxWUtg6LnF5kejMRNNU3TGtRBeJgk33yuGBxrMPHi",
            "xpub661MyMwAqRbcFtXgS5sYJABqqG9YLmC4Q1Rdap9gSE8NqtwybGhePY2gZ29ESFjqJoCu1Rupje8YtGqsefD265TMg7usUDFdp6W1EGMcet8",
        },
        .{
            .MAINNET,
            "m/0h",
            "xprv9uHRZZhk6KAJC1avXpDAp4MDc3sQKNxDiPvvkX8Br5ngLNv1TxvUxt4cV1rGL5hj6KCesnDYUhd7oWgT11eZG7XnxHrnYeSvkzY7d2bhkJ7",
            "xpub68Gmy5EdvgibQVfPdqkBBCHxA5htiqg55crXYuXoQRKfDBFA1WEjWgP6LHhwBZeNK1VTsfTFUHCdrfp1bgwQ9xv5ski8PX9rL2dZXvgGDnw",
        },
        .{
            .MAINNET,
            "m/0h/1",
            "xprv9wTYmMFdV23N2TdNG573QoEsfRrWKQgWeibmLntzniatZvR9BmLnvSxqu53Kw1UmYPxLgboyZQaXwTCg8MSY3H2EU4pWcQDnRnrVA1xe8fs",
            "xpub6ASuArnXKPbfEwhqN6e3mwBcDTgzisQN1wXN9BJcM47sSikHjJf3UFHKkNAWbWMiGj7Wf5uMash7SyYq527Hqck2AxYysAA7xmALppuCkwQ",
        },
        .{
            .MAINNET,
            "m/0h/1/2h",
            "xprv9z4pot5VBttmtdRTWfWQmoH1taj2axGVzFqSb8C9xaxKymcFzXBDptWmT7FwuEzG3ryjH4ktypQSAewRiNMjANTtpgP4mLTj34bhnZX7UiM",
            "xpub6D4BDPcP2GT577Vvch3R8wDkScZWzQzMMUm3PWbmWvVJrZwQY4VUNgqFJPMM3No2dFDFGTsxxpG5uJh7n7epu4trkrX7x7DogT5Uv6fcLW5",
        },
        .{
            .MAINNET,
            "m/0h/1/2h/2",
            "xprvA2JDeKCSNNZky6uBCviVfJSKyQ1mDYahRjijr5idH2WwLsEd4Hsb2Tyh8RfQMuPh7f7RtyzTtdrbdqqsunu5Mm3wDvUAKRHSC34sJ7in334",
            "xpub6FHa3pjLCk84BayeJxFW2SP4XRrFd1JYnxeLeU8EqN3vDfZmbqBqaGJAyiLjTAwm6ZLRQUMv1ZACTj37sR62cfN7fe5JnJ7dh8zL4fiyLHV",
        },
        .{
            .MAINNET,
            "m/0h/1/2h/2/1000000000",
            "xprvA41z7zogVVwxVSgdKUHDy1SKmdb533PjDz7J6N6mV6uS3ze1ai8FHa8kmHScGpWmj4WggLyQjgPie1rFSruoUihUZREPSL39UNdE3BBDu76",
            "xpub6H1LXWLaKsWFhvm6RVpEL9P4KfRZSW7abD2ttkWP3SSQvnyA8FSVqNTEcYFgJS2UaFcxupHiYkro49S8yGasTvXEYBVPamhGW6cFJodrTHy",
        },
    };

    for (testSuite, 0..) |suite, idx| {
        errdefer {
            std.log.warn("suite failed n={d} : {any}", .{ idx + 1, suite });
        }

        const path = try derivatePathFromStr(suite[1], std.testing.allocator);
        defer path.deinit();

        try testPath(secp, .MAINNET, seed, path.items, suite[2], suite[3]);
    }
}

// BIP32 test vector 2 from the BIP 32 specification.
test "vector_2" {
    var secp = secp256k1.Secp256k1.genNew();
    defer secp.deinit();

    var buf: [100]u8 = undefined;

    const seed = try std.fmt.hexToBytes(&buf, "fffcf9f6f3f0edeae7e4e1dedbd8d5d2cfccc9c6c3c0bdbab7b4b1aeaba8a5a29f9c999693908d8a8784817e7b7875726f6c696663605d5a5754514e4b484542");
    // derivation path, expected_sk , expected_pk
    const testSuite: []const struct { Network, []const u8, []const u8, []const u8 } = &.{
        .{
            .MAINNET,
            "m",
            "xprv9s21ZrQH143K31xYSDQpPDxsXRTUcvj2iNHm5NUtrGiGG5e2DtALGdso3pGz6ssrdK4PFmM8NSpSBHNqPqm55Qn3LqFtT2emdEXVYsCzC2U",
            "xpub661MyMwAqRbcFW31YEwpkMuc5THy2PSt5bDMsktWQcFF8syAmRUapSCGu8ED9W6oDMSgv6Zz8idoc4a6mr8BDzTJY47LJhkJ8UB7WEGuduB",
        },
        .{
            .MAINNET,
            "m/0",
            "xprv9vHkqa6EV4sPZHYqZznhT2NPtPCjKuDKGY38FBWLvgaDx45zo9WQRUT3dKYnjwih2yJD9mkrocEZXo1ex8G81dwSM1fwqWpWkeS3v86pgKt",
            "xpub69H7F5d8KSRgmmdJg2KhpAK8SR3DjMwAdkxj3ZuxV27CprR9LgpeyGmXUbC6wb7ERfvrnKZjXoUmmDznezpbZb7ap6r1D3tgFxHmwMkQTPH",
        },
        .{
            .MAINNET,
            "m/0/2147483647h",
            "xprv9wSp6B7kry3Vj9m1zSnLvN3xH8RdsPP1Mh7fAaR7aRLcQMKTR2vidYEeEg2mUCTAwCd6vnxVrcjfy2kRgVsFawNzmjuHc2YmYRmagcEPdU9",
            "xpub6ASAVgeehLbnwdqV6UKMHVzgqAG8Gr6riv3Fxxpj8ksbH9ebxaEyBLZ85ySDhKiLDBrQSARLq1uNRts8RuJiHjaDMBU4Zn9h8LZNnBC5y4a",
        },
        .{
            .MAINNET,
            "m/0/2147483647h/1",
            "xprv9zFnWC6h2cLgpmSA46vutJzBcfJ8yaJGg8cX1e5StJh45BBciYTRXSd25UEPVuesF9yog62tGAQtHjXajPPdbRCHuWS6T8XA2ECKADdw4Ef",
            "xpub6DF8uhdarytz3FWdA8TvFSvvAh8dP3283MY7p2V4SeE2wyWmG5mg5EwVvmdMVCQcoNJxGoWaU9DCWh89LojfZ537wTfunKau47EL2dhHKon",
        },
        .{
            .MAINNET,
            "m/0/2147483647h/1/2147483646h",
            "xprvA1RpRA33e1JQ7ifknakTFpgNXPmW2YvmhqLQYMmrj4xJXXWYpDPS3xz7iAxn8L39njGVyuoseXzU6rcxFLJ8HFsTjSyQbLYnMpCqE2VbFWc",
            "xpub6ERApfZwUNrhLCkDtcHTcxd75RbzS1ed54G1LkBUHQVHQKqhMkhgbmJbZRkrgZw4koxb5JaHWkY4ALHY2grBGRjaDMzQLcgJvLJuZZvRcEL",
        },
        .{
            .MAINNET,
            "m/0/2147483647h/1/2147483646h/2",
            "xprvA2nrNbFZABcdryreWet9Ea4LvTJcGsqrMzxHx98MMrotbir7yrKCEXw7nadnHM8Dq38EGfSh6dqA9QWTyefMLEcBYJUuekgW4BYPJcr9E7j",
            "xpub6FnCn6nSzZAw5Tw7cgR9bi15UV96gLZhjDstkXXxvCLsUXBGXPdSnLFbdpq8p9HmGsApME5hQTZ3emM2rnY5agb9rXpVGyy3bdW6EEgAtqt",
        },
    };

    for (testSuite, 0..) |suite, idx| {
        errdefer {
            std.log.warn("suite failed n={d} : {any}", .{ idx + 1, suite });
        }

        const path = try derivatePathFromStr(suite[1], std.testing.allocator);
        defer path.deinit();

        try testPath(secp, .MAINNET, seed, path.items, suite[2], suite[3]);
    }
}

// BIP32 test vector 3 from the BIP 32 specification.
test "vector_3" {
    var secp = secp256k1.Secp256k1.genNew();
    defer secp.deinit();

    var buf: [100]u8 = undefined;

    const seed = try std.fmt.hexToBytes(&buf, "4b381541583be4423346c643850da4b320e46a87ae3d2a4e6da11eba819cd4acba45d239319ac14f863b8d5ab5a0d0c64d2e8a1e7d1457df2e5a3c51c73235be");

    const path_1 = try derivatePathFromStr("m", std.testing.allocator);
    defer path_1.deinit();

    // m
    try testPath(secp, .MAINNET, seed, path_1.items, "xprv9s21ZrQH143K25QhxbucbDDuQ4naNntJRi4KUfWT7xo4EKsHt2QJDu7KXp1A3u7Bi1j8ph3EGsZ9Xvz9dGuVrtHHs7pXeTzjuxBrCmmhgC6", "xpub661MyMwAqRbcEZVB4dScxMAdx6d4nFc9nvyvH3v4gJL378CSRZiYmhRoP7mBy6gSPSCYk6SzXPTf3ND1cZAceL7SfJ1Z3GC8vBgp2epUt13");

    // m/0h
    const path_2 = try derivatePathFromStr("m/0h", std.testing.allocator);
    defer path_2.deinit();

    try testPath(secp, .MAINNET, seed, path_2.items, "xprv9uPDJpEQgRQfDcW7BkF7eTya6RPxXeJCqCJGHuCJ4GiRVLzkTXBAJMu2qaMWPrS7AANYqdq6vcBcBUdJCVVFceUvJFjaPdGZ2y9WACViL4L", "xpub68NZiKmJWnxxS6aaHmn81bvJeTESw724CRDs6HbuccFQN9Ku14VQrADWgqbhhTHBaohPX4CjNLf9fq9MYo6oDaPPLPxSb7gwQN3ih19Zm4Y");
}

// Round-trip check for the Base58Check encoder/decoder used by the key
// serialization code above.
test "base58_check_decode_encode" {
    const encoder = base58.Encoder{};
    const encoded = try encoder.encodeCheckAlloc(std.testing.allocator, "test");
    defer std.testing.allocator.free(encoded);

    const decoder = base58.Decoder{};
    const decoded = try decoder.decodeCheckAlloc(std.testing.allocator, encoded);
    defer std.testing.allocator.free(decoded);

    try std.testing.expectEqualSlices(u8, decoded, "test");
}
--------------------------------------------------------------------------------
/src/bips/bip32/key.zig:
--------------------------------------------------------------------------------
const secp256k1 = @import("secp256k1");
const Network = @import("bip32.zig").Network;

/// A Bitcoin ECDSA private key
pub const PrivateKey = struct {
    /// Whether this private key should be serialized as compressed
    compressed: bool,
    /// The network on which this key should be used
    network: Network,
    /// The actual ECDSA key
    inner: secp256k1.SecretKey,
};
--------------------------------------------------------------------------------
/src/bips/bip39/bip39.zig:
--------------------------------------------------------------------------------
//! # BIP39 Mnemonic Codes
//!
//! https://github.com/bitcoin/bips/blob/master/bip-0039.mediawiki
//!
5 | const std = @import("std"); 6 | const language = @import("language.zig"); 7 | const pbkdf2 = @import("pbkdf2.zig"); 8 | 9 | /// The minimum number of words in a mnemonic. 10 | const MIN_NB_WORDS: usize = 12; 11 | 12 | /// The maximum number of words in a mnemonic. 13 | const MAX_NB_WORDS: usize = 24; 14 | 15 | /// The index used to indicate the mnemonic ended. 16 | const EOF: u16 = std.math.maxInt(u16); 17 | 18 | /// A mnemonic code. 19 | /// 20 | /// The language is not auto-detected: callers pass it explicitly to 21 | /// [parseInNormalized], which consults only that language's word list. 22 | /// 23 | /// 24 | /// Supported number of words are 12, 15, 18, 21, and 24. 25 | pub const Mnemonic = struct { 26 | /// The language of the mnemonic. 27 | lang: language.Language, 28 | /// The indices of the words. 29 | /// Mnemonics with less than the max nb of words are terminated with EOF. 30 | words: [MAX_NB_WORDS]u16, 31 | 32 | /// Parse a mnemonic in normalized UTF8 in the given language. 33 | pub fn parseInNormalized(lang: language.Language, s: []const u8) !Mnemonic { 34 | var it = std.mem.splitScalar(u8, s, ' '); 35 | var nb_words: usize = 0; 36 | // First pass: count the words so the count can be validated up front. 37 | while (it.next()) |_| nb_words += 1; 38 | it.reset(); 39 | 40 | if (isInvalidWordCount(nb_words)) { 41 | return error.BadWordCount; 42 | } 43 | 44 | // Here we will store the eventual words. 45 | var words = [_]u16{EOF} ** MAX_NB_WORDS; 46 | 47 | // And here we keep track of the bits to calculate and validate the checksum. 48 | // We only use `nb_words * 11` elements in this array. 49 | var bits = [_]bool{false} ** (MAX_NB_WORDS * 11); 50 | 51 | { 52 | var i: usize = 0; 53 | while (it.next()) |word| { 54 | const idx = lang.findWord(word) orelse return error.UnknownWord; 55 | 56 | words[i] = idx; 57 | // Unpack the 11-bit word index into the bit buffer, MSB first. 58 | for (0..11) |j| { 59 | bits[i * 11 + j] = std.math.shr(u16, idx, 10 - j) & 1 == 1; 60 | } 61 | i += 1; 62 | } 63 | } 64 | 65 | // Verify the checksum.
66 | // We only use `nb_words / 3 * 4` elements in this array. 67 | 68 | var entropy = [_]u8{0} ** (MAX_NB_WORDS / 3 * 4); 69 | const nb_bytes_entropy = nb_words / 3 * 4; 70 | for (0..nb_bytes_entropy) |i| { 71 | for (0..8) |j| { 72 | if (bits[i * 8 + j]) { 73 | entropy[i] += std.math.shl(u8, 1, 7 - j); 74 | } 75 | } 76 | } 77 | 78 | var hasher = std.crypto.hash.sha2.Sha256.init(.{}); 79 | hasher.update(entropy[0..nb_bytes_entropy]); 80 | 81 | const check = hasher.finalResult(); 82 | // Compare the trailing checksum bits of the mnemonic against the SHA-256 of the entropy. 83 | for (0..nb_bytes_entropy / 4) |i| { 84 | if (bits[8 * nb_bytes_entropy + i] != ((check[i / 8] & (std.math.shl(usize, 1, 7 - (i % 8)))) > 0)) { 85 | return error.InvalidChecksum; 86 | } 87 | } 88 | 89 | return .{ 90 | .lang = lang, 91 | .words = words, 92 | }; 93 | } 94 | 95 | /// Convert to seed bytes with a passphrase in normalized UTF8. 96 | pub fn toSeedNormalized(self: Mnemonic, normalized_passphrase: []const u8) ![64]u8 { 97 | const PBKDF2_ROUNDS: usize = 2048; 98 | const PBKDF2_BYTES: usize = 64; 99 | 100 | var seed = [_]u8{0} ** PBKDF2_BYTES; 101 | 102 | pbkdf2.pbkdf2((try self.getWords()).slice(), normalized_passphrase, PBKDF2_ROUNDS, &seed); 103 | return seed; 104 | } 105 | 106 | /// Returns a bounded array of the [Mnemonic] word indices. 107 | /// 108 | pub fn wordIndices(self: Mnemonic) !std.BoundedArray(u16, MAX_NB_WORDS) { 109 | var result = try std.BoundedArray(u16, MAX_NB_WORDS).init(0); 110 | 111 | for (self.words) |w| { 112 | if (w != EOF) { 113 | result.appendAssumeCapacity(w); 114 | continue; 115 | } 116 | 117 | break; 118 | } 119 | 120 | return result; 121 | } 122 | 123 | /// Returns the words of the [Mnemonic] as a bounded array of string slices.
124 | /// 125 | /// # Examples 126 | /// 127 | /// Basic usage: 128 | /// 129 | /// ``` 130 | /// const bip39 = @import("bip39"); 131 | /// 132 | /// const mnemonic = try bip39.Mnemonic.fromEntropyIn(.english, &([_]u8{0} ** 32)); 133 | /// for ((try mnemonic.getWords()).slice()) |word| { 134 | ///     std.log.debug("word: {s}", .{word}); 135 | /// } 136 | /// ``` 137 | pub fn getWords(self: Mnemonic) !std.BoundedArray([]const u8, MAX_NB_WORDS) { 138 | const list = self.lang.wordList(); 139 | const word_indices = try self.wordIndices(); 140 | 141 | var result = try std.BoundedArray([]const u8, MAX_NB_WORDS).init(0); 142 | 143 | for (word_indices.slice()) |i| { 144 | result.appendAssumeCapacity(list[i]); 145 | } 146 | 147 | return result; 148 | } 149 | 150 | /// Create a new [Mnemonic] in the specified language from the given entropy. 151 | /// Entropy must be a multiple of 32 bits (4 bytes) and 128-256 bits in length. 152 | pub fn fromEntropyIn(lang: language.Language, entropy: []const u8) !Mnemonic { 153 | const MAX_ENTROPY_BITS: usize = 256; 154 | const MIN_ENTROPY_BITS: usize = 128; 155 | const MAX_CHECKSUM_BITS: usize = 8; 156 | 157 | const nb_bytes = entropy.len; 158 | const nb_bits = nb_bytes * 8; 159 | 160 | if (nb_bits % 32 != 0) { 161 | return error.BadEntropyBitCount; 162 | } 163 | 164 | if (nb_bits < MIN_ENTROPY_BITS or nb_bits > MAX_ENTROPY_BITS) { 165 | return error.BadEntropyBitCount; 166 | } 167 | // SHA-256 of the entropy supplies the checksum bits appended to the mnemonic. 168 | const check = v: { 169 | var out: [std.crypto.hash.sha2.Sha256.digest_length]u8 = undefined; 170 | std.crypto.hash.sha2.Sha256.hash(entropy, &out, .{}); 171 | break :v out; 172 | }; 173 | 174 | var bits = [_]bool{false} ** (MAX_ENTROPY_BITS + MAX_CHECKSUM_BITS); 175 | 176 | for (0..nb_bytes) |i| { 177 | for (0..8) |j| { 178 | bits[i * 8 + j] = (entropy[i] & (std.math.shl(usize, 1, 7 - j))) > 0; 179 | } 180 | } 181 | 182 | for (0..nb_bytes / 4) |i| { 183 | bits[8 * nb_bytes + i] = (check[i / 8] & (std.math.shl(usize, 1, 7 - (i % 8)))) > 0; 184 | } 185 | 186 | var words = [_]u16{EOF} **
MAX_NB_WORDS; 187 | const nb_words = nb_bytes * 3 / 4; 188 | for (0..nb_words) |i| { 189 | var idx: u16 = 0; 190 | for (0..11) |j| { 191 | if (bits[i * 11 + j]) { 192 | idx += std.math.shl(u16, 1, @as(u16, @truncate(10 - j))); 193 | } 194 | } 195 | 196 | words[i] = idx; 197 | } 198 | 199 | return .{ 200 | .lang = lang, 201 | .words = words, 202 | }; 203 | } 204 | }; 205 | 206 | fn isInvalidWordCount(word_count: usize) bool { 207 | return word_count < MIN_NB_WORDS or word_count % 3 != 0 or word_count > MAX_NB_WORDS; 208 | } 209 | 210 | test "english_vectors" { 211 | // These vectors are tuples of 212 | // (entropy, mnemonic, seed) 213 | 214 | const test_vectors = [_]struct { []const u8, []const u8, []const u8 }{ 215 | .{ 216 | "00000000000000000000000000000000", 217 | "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about", 218 | "c55257c360c07c72029aebc1b53c05ed0362ada38ead3e3e9efa3708e53495531f09a6987599d18264c1e1c92f2cf141630c7a3c4ab7c81b2f001698e7463b04", 219 | }, 220 | .{ 221 | "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", 222 | "legal winner thank year wave sausage worth useful legal winner thank yellow", 223 | "2e8905819b8723fe2c1d161860e5ee1830318dbf49a83bd451cfb8440c28bd6fa457fe1296106559a3c80937a1c1069be3a3a5bd381ee6260e8d9739fce1f607", 224 | }, 225 | .{ 226 | "80808080808080808080808080808080", 227 | "letter advice cage absurd amount doctor acoustic avoid letter advice cage above", 228 | "d71de856f81a8acc65e6fc851a38d4d7ec216fd0796d0a6827a3ad6ed5511a30fa280f12eb2e47ed2ac03b5c462a0358d18d69fe4f985ec81778c1b370b652a8", 229 | }, 230 | .{ 231 | "ffffffffffffffffffffffffffffffff", 232 | "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo wrong", 233 | "ac27495480225222079d7be181583751e86f571027b0497b5b5d11218e0a8a13332572917f0f8e5a589620c6f15b11c61dee327651a14c34e18231052e48c069", 234 | }, 235 | .{ 236 | "000000000000000000000000000000000000000000000000", 237 | "abandon abandon abandon abandon abandon abandon abandon abandon abandon 
abandon abandon abandon abandon abandon abandon abandon abandon agent", 238 | "035895f2f481b1b0f01fcf8c289c794660b289981a78f8106447707fdd9666ca06da5a9a565181599b79f53b844d8a71dd9f439c52a3d7b3e8a79c906ac845fa", 239 | }, 240 | .{ 241 | "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", 242 | "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal will", 243 | "f2b94508732bcbacbcc020faefecfc89feafa6649a5491b8c952cede496c214a0c7b3c392d168748f2d4a612bada0753b52a1c7ac53c1e93abd5c6320b9e95dd", 244 | }, 245 | .{ 246 | "808080808080808080808080808080808080808080808080", 247 | "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter always", 248 | "107d7c02a5aa6f38c58083ff74f04c607c2d2c0ecc55501dadd72d025b751bc27fe913ffb796f841c49b1d33b610cf0e91d3aa239027f5e99fe4ce9e5088cd65", 249 | }, 250 | .{ 251 | "ffffffffffffffffffffffffffffffffffffffffffffffff", 252 | "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo when", 253 | "0cd6e5d827bb62eb8fc1e262254223817fd068a74b5b449cc2f667c3f1f985a76379b43348d952e2265b4cd129090758b3e3c2c49103b5051aac2eaeb890a528", 254 | }, 255 | .{ 256 | "0000000000000000000000000000000000000000000000000000000000000000", 257 | "abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon art", 258 | "bda85446c68413707090a52022edd26a1c9462295029f2e60cd7c4f2bbd3097170af7a4d73245cafa9c3cca8d561a7c3de6f5d4a10be8ed2a5e608d68f92fcc8", 259 | }, 260 | .{ 261 | "7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f7f", 262 | "legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth useful legal winner thank year wave sausage worth title", 263 | 
"bc09fca1804f7e69da93c2f2028eb238c227f2e9dda30cd63699232578480a4021b146ad717fbb7e451ce9eb835f43620bf5c514db0f8add49f5d121449d3e87", 264 | }, 265 | .{ 266 | "8080808080808080808080808080808080808080808080808080808080808080", 267 | "letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic avoid letter advice cage absurd amount doctor acoustic bless", 268 | "c0c519bd0e91a2ed54357d9d1ebef6f5af218a153624cf4f2da911a0ed8f7a09e2ef61af0aca007096df430022f7a2b6fb91661a9589097069720d015e4e982f", 269 | }, 270 | .{ 271 | "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", 272 | "zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo zoo vote", 273 | "dd48c104698c30cfe2b6142103248622fb7bb0ff692eebb00089b32d22484e1613912f0a5b694407be899ffd31ed3992c456cdf60f5d4564b8ba3f05a69890ad", 274 | }, 275 | .{ 276 | "9e885d952ad362caeb4efe34a8e91bd2", 277 | "ozone drill grab fiber curtain grace pudding thank cruise elder eight picnic", 278 | "274ddc525802f7c828d8ef7ddbcdc5304e87ac3535913611fbbfa986d0c9e5476c91689f9c8a54fd55bd38606aa6a8595ad213d4c9c9f9aca3fb217069a41028", 279 | }, 280 | .{ 281 | "6610b25967cdcca9d59875f5cb50b0ea75433311869e930b", 282 | "gravity machine north sort system female filter attitude volume fold club stay feature office ecology stable narrow fog", 283 | "628c3827a8823298ee685db84f55caa34b5cc195a778e52d45f59bcf75aba68e4d7590e101dc414bc1bbd5737666fbbef35d1f1903953b66624f910feef245ac", 284 | }, 285 | .{ 286 | "68a79eaca2324873eacc50cb9c6eca8cc68ea5d936f98787c60c7ebc74e6ce7c", 287 | "hamster diagram private dutch cause delay private meat slide toddler razor book happy fancy gospel tennis maple dilemma loan word shrug inflict delay length", 288 | "64c87cde7e12ecf6704ab95bb1408bef047c22db4cc7491c4271d170a1b213d20b385bc1588d9c7b38f1b39d415665b8a9030c9ec653d75e65f847d8fc1fc440", 289 | }, 290 | .{ 291 | "c0ba5a8e914111210f2bd131f3d5e08d", 292 | "scheme spot photo card baby mountain 
device kick cradle pact join borrow", 293 | "ea725895aaae8d4c1cf682c1bfd2d358d52ed9f0f0591131b559e2724bb234fca05aa9c02c57407e04ee9dc3b454aa63fbff483a8b11de949624b9f1831a9612", 294 | }, 295 | .{ 296 | "6d9be1ee6ebd27a258115aad99b7317b9c8d28b6d76431c3", 297 | "horn tenant knee talent sponsor spell gate clip pulse soap slush warm silver nephew swap uncle crack brave", 298 | "fd579828af3da1d32544ce4db5c73d53fc8acc4ddb1e3b251a31179cdb71e853c56d2fcb11aed39898ce6c34b10b5382772db8796e52837b54468aeb312cfc3d", 299 | }, 300 | .{ 301 | "9f6a2878b2520799a44ef18bc7df394e7061a224d2c33cd015b157d746869863", 302 | "panda eyebrow bullet gorilla call smoke muffin taste mesh discover soft ostrich alcohol speed nation flash devote level hobby quick inner drive ghost inside", 303 | "72be8e052fc4919d2adf28d5306b5474b0069df35b02303de8c1729c9538dbb6fc2d731d5f832193cd9fb6aeecbc469594a70e3dd50811b5067f3b88b28c3e8d", 304 | }, 305 | .{ 306 | "23db8160a31d3e0dca3688ed941adbf3", 307 | "cat swing flag economy stadium alone churn speed unique patch report train", 308 | "deb5f45449e615feff5640f2e49f933ff51895de3b4381832b3139941c57b59205a42480c52175b6efcffaa58a2503887c1e8b363a707256bdd2b587b46541f5", 309 | }, 310 | .{ 311 | "8197a4a47f0425faeaa69deebc05ca29c0a5b5cc76ceacc0", 312 | "light rule cinnamon wrap drastic word pride squirrel upgrade then income fatal apart sustain crack supply proud access", 313 | "4cbdff1ca2db800fd61cae72a57475fdc6bab03e441fd63f96dabd1f183ef5b782925f00105f318309a7e9c3ea6967c7801e46c8a58082674c860a37b93eda02", 314 | }, 315 | .{ 316 | "066dca1a2bb7e8a1db2832148ce9933eea0f3ac9548d793112d9a95c9407efad", 317 | "all hour make first leader extend hole alien behind guard gospel lava path output census museum junior mass reopen famous sing advance salt reform", 318 | "26e975ec644423f4a4c4f4215ef09b4bd7ef924e85d1d17c4cf3f136c2863cf6df0a475045652c57eb5fb41513ca2a2d67722b77e954b4b3fc11f7590449191d", 319 | }, 320 | .{ 321 | "f30f8c1da665478f49b001d94c5fc452", 322 | "vessel ladder alter 
error federal sibling chat ability sun glass valve picture", 323 | "2aaa9242daafcee6aa9d7269f17d4efe271e1b9a529178d7dc139cd18747090bf9d60295d0ce74309a78852a9caadf0af48aae1c6253839624076224374bc63f", 324 | }, 325 | .{ 326 | "c10ec20dc3cd9f652c7fac2f1230f7a3c828389a14392f05", 327 | "scissors invite lock maple supreme raw rapid void congress muscle digital elegant little brisk hair mango congress clump", 328 | "7b4a10be9d98e6cba265566db7f136718e1398c71cb581e1b2f464cac1ceedf4f3e274dc270003c670ad8d02c4558b2f8e39edea2775c9e232c7cb798b069e88", 329 | }, 330 | .{ 331 | "f585c11aec520db57dd353c69554b21a89b20fb0650966fa0a9d6f74fd989d8f", 332 | "void come effort suffer camp survey warrior heavy shoot primary clutch crush open amazing screen patrol group space point ten exist slush involve unfold", 333 | "01f5bced59dec48e362f2c45b5de68b9fd6c92c6634f44d6d40aab69056506f0e35524a518034ddc1192e1dacd32c1ed3eaa3c3b131c88ed8e7e54c49a5d0998", 334 | }, 335 | }; 336 | 337 | var buf: [300]u8 = undefined; 338 | 339 | for (test_vectors) |vector| { 340 | const entropy = try std.fmt.hexToBytes(&buf, vector[0]); 341 | const mn = try Mnemonic.fromEntropyIn(.english, entropy); 342 | const mn1 = try Mnemonic.parseInNormalized(.english, vector[1]); 343 | 344 | try std.testing.expectEqualDeep(mn, mn1); 345 | 346 | const seed = try std.fmt.hexToBytes(buf[100..], vector[2]); 347 | 348 | const seeded = try mn.toSeedNormalized("TREZOR"); 349 | 350 | try std.testing.expectEqualSlices(u8, &seeded, seed); 351 | } 352 | } 353 | -------------------------------------------------------------------------------- /src/bips/bip39/language.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | 3 | /// The maximum number of languages enabled. 4 | pub const MAX_NB_LANGUAGES: usize = 9; 5 | 6 | pub const Language = enum { 7 | /// The English language. 
8 | english, 9 | 10 | pub inline fn wordList(self: Language) *const [2048][]const u8 { 11 | return switch (self) { 12 | inline .english => return &ENGLISH_WORDS, 13 | }; 14 | } 15 | 16 | /// Returns true if all words in the list are guaranteed to 17 | /// only be in this list and not in any other. 18 | pub inline fn uniqueWords(self: Language) bool { 19 | return switch (self) { 20 | inline .english => false, // NOTE(review): conservative default; confirm against other BIP-39 word lists if more languages are added. 21 | }; 22 | } 23 | 24 | /// Get words from the word list that start with the given prefix. 25 | pub fn wordsByPrefix(self: Language, prefix: []const u8) ?[]const []const u8 { 26 | // The word list is ordered lexicographically, so words that share a 27 | // prefix follow each other. A linear scan finds the first match; a 28 | // `binary_search` is avoided because a generic comparison need not 29 | // match the byte-value ordering of the list for every language. 30 | var start_from: usize = 0; 31 | var count: usize = 0; 32 | 33 | const word_list = self.wordList(); 34 | for (word_list.*, 0..) |w, idx| { 35 | if (std.mem.startsWith(u8, w, prefix)) { 36 | count = 1; 37 | start_from = idx; 38 | // Count the run of adjacent words that also carry the prefix. 39 | for (idx + 1..2048) |i| { 40 | if (!std.mem.startsWith(u8, word_list.*[i], prefix)) break; 41 | 42 | count += 1; 43 | } 44 | break; 45 | } 46 | } 47 | 48 | if (count == 0) return null; 49 | 50 | return word_list[start_from .. start_from + count]; 51 | } 52 | 53 | /// Get the index of the word in the word list. 54 | pub inline fn findWord(self: Language, word: []const u8) ?u16 { 55 | for (self.wordList(), 0..)
|w, i| { 56 | if (std.mem.eql(u8, w, word)) return @truncate(i); 57 | } 58 | 59 | return null; 60 | } 61 | }; 62 | 63 | const ENGLISH_WORDS: [2048][]const u8 = .{ 64 | "abandon", 65 | "ability", 66 | "able", 67 | "about", 68 | "above", 69 | "absent", 70 | "absorb", 71 | "abstract", 72 | "absurd", 73 | "abuse", 74 | "access", 75 | "accident", 76 | "account", 77 | "accuse", 78 | "achieve", 79 | "acid", 80 | "acoustic", 81 | "acquire", 82 | "across", 83 | "act", 84 | "action", 85 | "actor", 86 | "actress", 87 | "actual", 88 | "adapt", 89 | "add", 90 | "addict", 91 | "address", 92 | "adjust", 93 | "admit", 94 | "adult", 95 | "advance", 96 | "advice", 97 | "aerobic", 98 | "affair", 99 | "afford", 100 | "afraid", 101 | "again", 102 | "age", 103 | "agent", 104 | "agree", 105 | "ahead", 106 | "aim", 107 | "air", 108 | "airport", 109 | "aisle", 110 | "alarm", 111 | "album", 112 | "alcohol", 113 | "alert", 114 | "alien", 115 | "all", 116 | "alley", 117 | "allow", 118 | "almost", 119 | "alone", 120 | "alpha", 121 | "already", 122 | "also", 123 | "alter", 124 | "always", 125 | "amateur", 126 | "amazing", 127 | "among", 128 | "amount", 129 | "amused", 130 | "analyst", 131 | "anchor", 132 | "ancient", 133 | "anger", 134 | "angle", 135 | "angry", 136 | "animal", 137 | "ankle", 138 | "announce", 139 | "annual", 140 | "another", 141 | "answer", 142 | "antenna", 143 | "antique", 144 | "anxiety", 145 | "any", 146 | "apart", 147 | "apology", 148 | "appear", 149 | "apple", 150 | "approve", 151 | "april", 152 | "arch", 153 | "arctic", 154 | "area", 155 | "arena", 156 | "argue", 157 | "arm", 158 | "armed", 159 | "armor", 160 | "army", 161 | "around", 162 | "arrange", 163 | "arrest", 164 | "arrive", 165 | "arrow", 166 | "art", 167 | "artefact", 168 | "artist", 169 | "artwork", 170 | "ask", 171 | "aspect", 172 | "assault", 173 | "asset", 174 | "assist", 175 | "assume", 176 | "asthma", 177 | "athlete", 178 | "atom", 179 | "attack", 180 | "attend", 181 | "attitude", 182 | "attract", 183 | 
"auction", 184 | "audit", 185 | "august", 186 | "aunt", 187 | "author", 188 | "auto", 189 | "autumn", 190 | "average", 191 | "avocado", 192 | "avoid", 193 | "awake", 194 | "aware", 195 | "away", 196 | "awesome", 197 | "awful", 198 | "awkward", 199 | "axis", 200 | "baby", 201 | "bachelor", 202 | "bacon", 203 | "badge", 204 | "bag", 205 | "balance", 206 | "balcony", 207 | "ball", 208 | "bamboo", 209 | "banana", 210 | "banner", 211 | "bar", 212 | "barely", 213 | "bargain", 214 | "barrel", 215 | "base", 216 | "basic", 217 | "basket", 218 | "battle", 219 | "beach", 220 | "bean", 221 | "beauty", 222 | "because", 223 | "become", 224 | "beef", 225 | "before", 226 | "begin", 227 | "behave", 228 | "behind", 229 | "believe", 230 | "below", 231 | "belt", 232 | "bench", 233 | "benefit", 234 | "best", 235 | "betray", 236 | "better", 237 | "between", 238 | "beyond", 239 | "bicycle", 240 | "bid", 241 | "bike", 242 | "bind", 243 | "biology", 244 | "bird", 245 | "birth", 246 | "bitter", 247 | "black", 248 | "blade", 249 | "blame", 250 | "blanket", 251 | "blast", 252 | "bleak", 253 | "bless", 254 | "blind", 255 | "blood", 256 | "blossom", 257 | "blouse", 258 | "blue", 259 | "blur", 260 | "blush", 261 | "board", 262 | "boat", 263 | "body", 264 | "boil", 265 | "bomb", 266 | "bone", 267 | "bonus", 268 | "book", 269 | "boost", 270 | "border", 271 | "boring", 272 | "borrow", 273 | "boss", 274 | "bottom", 275 | "bounce", 276 | "box", 277 | "boy", 278 | "bracket", 279 | "brain", 280 | "brand", 281 | "brass", 282 | "brave", 283 | "bread", 284 | "breeze", 285 | "brick", 286 | "bridge", 287 | "brief", 288 | "bright", 289 | "bring", 290 | "brisk", 291 | "broccoli", 292 | "broken", 293 | "bronze", 294 | "broom", 295 | "brother", 296 | "brown", 297 | "brush", 298 | "bubble", 299 | "buddy", 300 | "budget", 301 | "buffalo", 302 | "build", 303 | "bulb", 304 | "bulk", 305 | "bullet", 306 | "bundle", 307 | "bunker", 308 | "burden", 309 | "burger", 310 | "burst", 311 | "bus", 312 | "business", 313 | 
"busy", 314 | "butter", 315 | "buyer", 316 | "buzz", 317 | "cabbage", 318 | "cabin", 319 | "cable", 320 | "cactus", 321 | "cage", 322 | "cake", 323 | "call", 324 | "calm", 325 | "camera", 326 | "camp", 327 | "can", 328 | "canal", 329 | "cancel", 330 | "candy", 331 | "cannon", 332 | "canoe", 333 | "canvas", 334 | "canyon", 335 | "capable", 336 | "capital", 337 | "captain", 338 | "car", 339 | "carbon", 340 | "card", 341 | "cargo", 342 | "carpet", 343 | "carry", 344 | "cart", 345 | "case", 346 | "cash", 347 | "casino", 348 | "castle", 349 | "casual", 350 | "cat", 351 | "catalog", 352 | "catch", 353 | "category", 354 | "cattle", 355 | "caught", 356 | "cause", 357 | "caution", 358 | "cave", 359 | "ceiling", 360 | "celery", 361 | "cement", 362 | "census", 363 | "century", 364 | "cereal", 365 | "certain", 366 | "chair", 367 | "chalk", 368 | "champion", 369 | "change", 370 | "chaos", 371 | "chapter", 372 | "charge", 373 | "chase", 374 | "chat", 375 | "cheap", 376 | "check", 377 | "cheese", 378 | "chef", 379 | "cherry", 380 | "chest", 381 | "chicken", 382 | "chief", 383 | "child", 384 | "chimney", 385 | "choice", 386 | "choose", 387 | "chronic", 388 | "chuckle", 389 | "chunk", 390 | "churn", 391 | "cigar", 392 | "cinnamon", 393 | "circle", 394 | "citizen", 395 | "city", 396 | "civil", 397 | "claim", 398 | "clap", 399 | "clarify", 400 | "claw", 401 | "clay", 402 | "clean", 403 | "clerk", 404 | "clever", 405 | "click", 406 | "client", 407 | "cliff", 408 | "climb", 409 | "clinic", 410 | "clip", 411 | "clock", 412 | "clog", 413 | "close", 414 | "cloth", 415 | "cloud", 416 | "clown", 417 | "club", 418 | "clump", 419 | "cluster", 420 | "clutch", 421 | "coach", 422 | "coast", 423 | "coconut", 424 | "code", 425 | "coffee", 426 | "coil", 427 | "coin", 428 | "collect", 429 | "color", 430 | "column", 431 | "combine", 432 | "come", 433 | "comfort", 434 | "comic", 435 | "common", 436 | "company", 437 | "concert", 438 | "conduct", 439 | "confirm", 440 | "congress", 441 | "connect", 442 | 
"consider", 443 | "control", 444 | "convince", 445 | "cook", 446 | "cool", 447 | "copper", 448 | "copy", 449 | "coral", 450 | "core", 451 | "corn", 452 | "correct", 453 | "cost", 454 | "cotton", 455 | "couch", 456 | "country", 457 | "couple", 458 | "course", 459 | "cousin", 460 | "cover", 461 | "coyote", 462 | "crack", 463 | "cradle", 464 | "craft", 465 | "cram", 466 | "crane", 467 | "crash", 468 | "crater", 469 | "crawl", 470 | "crazy", 471 | "cream", 472 | "credit", 473 | "creek", 474 | "crew", 475 | "cricket", 476 | "crime", 477 | "crisp", 478 | "critic", 479 | "crop", 480 | "cross", 481 | "crouch", 482 | "crowd", 483 | "crucial", 484 | "cruel", 485 | "cruise", 486 | "crumble", 487 | "crunch", 488 | "crush", 489 | "cry", 490 | "crystal", 491 | "cube", 492 | "culture", 493 | "cup", 494 | "cupboard", 495 | "curious", 496 | "current", 497 | "curtain", 498 | "curve", 499 | "cushion", 500 | "custom", 501 | "cute", 502 | "cycle", 503 | "dad", 504 | "damage", 505 | "damp", 506 | "dance", 507 | "danger", 508 | "daring", 509 | "dash", 510 | "daughter", 511 | "dawn", 512 | "day", 513 | "deal", 514 | "debate", 515 | "debris", 516 | "decade", 517 | "december", 518 | "decide", 519 | "decline", 520 | "decorate", 521 | "decrease", 522 | "deer", 523 | "defense", 524 | "define", 525 | "defy", 526 | "degree", 527 | "delay", 528 | "deliver", 529 | "demand", 530 | "demise", 531 | "denial", 532 | "dentist", 533 | "deny", 534 | "depart", 535 | "depend", 536 | "deposit", 537 | "depth", 538 | "deputy", 539 | "derive", 540 | "describe", 541 | "desert", 542 | "design", 543 | "desk", 544 | "despair", 545 | "destroy", 546 | "detail", 547 | "detect", 548 | "develop", 549 | "device", 550 | "devote", 551 | "diagram", 552 | "dial", 553 | "diamond", 554 | "diary", 555 | "dice", 556 | "diesel", 557 | "diet", 558 | "differ", 559 | "digital", 560 | "dignity", 561 | "dilemma", 562 | "dinner", 563 | "dinosaur", 564 | "direct", 565 | "dirt", 566 | "disagree", 567 | "discover", 568 | "disease", 569 | 
"dish", 570 | "dismiss", 571 | "disorder", 572 | "display", 573 | "distance", 574 | "divert", 575 | "divide", 576 | "divorce", 577 | "dizzy", 578 | "doctor", 579 | "document", 580 | "dog", 581 | "doll", 582 | "dolphin", 583 | "domain", 584 | "donate", 585 | "donkey", 586 | "donor", 587 | "door", 588 | "dose", 589 | "double", 590 | "dove", 591 | "draft", 592 | "dragon", 593 | "drama", 594 | "drastic", 595 | "draw", 596 | "dream", 597 | "dress", 598 | "drift", 599 | "drill", 600 | "drink", 601 | "drip", 602 | "drive", 603 | "drop", 604 | "drum", 605 | "dry", 606 | "duck", 607 | "dumb", 608 | "dune", 609 | "during", 610 | "dust", 611 | "dutch", 612 | "duty", 613 | "dwarf", 614 | "dynamic", 615 | "eager", 616 | "eagle", 617 | "early", 618 | "earn", 619 | "earth", 620 | "easily", 621 | "east", 622 | "easy", 623 | "echo", 624 | "ecology", 625 | "economy", 626 | "edge", 627 | "edit", 628 | "educate", 629 | "effort", 630 | "egg", 631 | "eight", 632 | "either", 633 | "elbow", 634 | "elder", 635 | "electric", 636 | "elegant", 637 | "element", 638 | "elephant", 639 | "elevator", 640 | "elite", 641 | "else", 642 | "embark", 643 | "embody", 644 | "embrace", 645 | "emerge", 646 | "emotion", 647 | "employ", 648 | "empower", 649 | "empty", 650 | "enable", 651 | "enact", 652 | "end", 653 | "endless", 654 | "endorse", 655 | "enemy", 656 | "energy", 657 | "enforce", 658 | "engage", 659 | "engine", 660 | "enhance", 661 | "enjoy", 662 | "enlist", 663 | "enough", 664 | "enrich", 665 | "enroll", 666 | "ensure", 667 | "enter", 668 | "entire", 669 | "entry", 670 | "envelope", 671 | "episode", 672 | "equal", 673 | "equip", 674 | "era", 675 | "erase", 676 | "erode", 677 | "erosion", 678 | "error", 679 | "erupt", 680 | "escape", 681 | "essay", 682 | "essence", 683 | "estate", 684 | "eternal", 685 | "ethics", 686 | "evidence", 687 | "evil", 688 | "evoke", 689 | "evolve", 690 | "exact", 691 | "example", 692 | "excess", 693 | "exchange", 694 | "excite", 695 | "exclude", 696 | "excuse", 697 | 
"execute", 698 | "exercise", 699 | "exhaust", 700 | "exhibit", 701 | "exile", 702 | "exist", 703 | "exit", 704 | "exotic", 705 | "expand", 706 | "expect", 707 | "expire", 708 | "explain", 709 | "expose", 710 | "express", 711 | "extend", 712 | "extra", 713 | "eye", 714 | "eyebrow", 715 | "fabric", 716 | "face", 717 | "faculty", 718 | "fade", 719 | "faint", 720 | "faith", 721 | "fall", 722 | "false", 723 | "fame", 724 | "family", 725 | "famous", 726 | "fan", 727 | "fancy", 728 | "fantasy", 729 | "farm", 730 | "fashion", 731 | "fat", 732 | "fatal", 733 | "father", 734 | "fatigue", 735 | "fault", 736 | "favorite", 737 | "feature", 738 | "february", 739 | "federal", 740 | "fee", 741 | "feed", 742 | "feel", 743 | "female", 744 | "fence", 745 | "festival", 746 | "fetch", 747 | "fever", 748 | "few", 749 | "fiber", 750 | "fiction", 751 | "field", 752 | "figure", 753 | "file", 754 | "film", 755 | "filter", 756 | "final", 757 | "find", 758 | "fine", 759 | "finger", 760 | "finish", 761 | "fire", 762 | "firm", 763 | "first", 764 | "fiscal", 765 | "fish", 766 | "fit", 767 | "fitness", 768 | "fix", 769 | "flag", 770 | "flame", 771 | "flash", 772 | "flat", 773 | "flavor", 774 | "flee", 775 | "flight", 776 | "flip", 777 | "float", 778 | "flock", 779 | "floor", 780 | "flower", 781 | "fluid", 782 | "flush", 783 | "fly", 784 | "foam", 785 | "focus", 786 | "fog", 787 | "foil", 788 | "fold", 789 | "follow", 790 | "food", 791 | "foot", 792 | "force", 793 | "forest", 794 | "forget", 795 | "fork", 796 | "fortune", 797 | "forum", 798 | "forward", 799 | "fossil", 800 | "foster", 801 | "found", 802 | "fox", 803 | "fragile", 804 | "frame", 805 | "frequent", 806 | "fresh", 807 | "friend", 808 | "fringe", 809 | "frog", 810 | "front", 811 | "frost", 812 | "frown", 813 | "frozen", 814 | "fruit", 815 | "fuel", 816 | "fun", 817 | "funny", 818 | "furnace", 819 | "fury", 820 | "future", 821 | "gadget", 822 | "gain", 823 | "galaxy", 824 | "gallery", 825 | "game", 826 | "gap", 827 | "garage", 828 | 
"garbage", 829 | "garden", 830 | "garlic", 831 | "garment", 832 | "gas", 833 | "gasp", 834 | "gate", 835 | "gather", 836 | "gauge", 837 | "gaze", 838 | "general", 839 | "genius", 840 | "genre", 841 | "gentle", 842 | "genuine", 843 | "gesture", 844 | "ghost", 845 | "giant", 846 | "gift", 847 | "giggle", 848 | "ginger", 849 | "giraffe", 850 | "girl", 851 | "give", 852 | "glad", 853 | "glance", 854 | "glare", 855 | "glass", 856 | "glide", 857 | "glimpse", 858 | "globe", 859 | "gloom", 860 | "glory", 861 | "glove", 862 | "glow", 863 | "glue", 864 | "goat", 865 | "goddess", 866 | "gold", 867 | "good", 868 | "goose", 869 | "gorilla", 870 | "gospel", 871 | "gossip", 872 | "govern", 873 | "gown", 874 | "grab", 875 | "grace", 876 | "grain", 877 | "grant", 878 | "grape", 879 | "grass", 880 | "gravity", 881 | "great", 882 | "green", 883 | "grid", 884 | "grief", 885 | "grit", 886 | "grocery", 887 | "group", 888 | "grow", 889 | "grunt", 890 | "guard", 891 | "guess", 892 | "guide", 893 | "guilt", 894 | "guitar", 895 | "gun", 896 | "gym", 897 | "habit", 898 | "hair", 899 | "half", 900 | "hammer", 901 | "hamster", 902 | "hand", 903 | "happy", 904 | "harbor", 905 | "hard", 906 | "harsh", 907 | "harvest", 908 | "hat", 909 | "have", 910 | "hawk", 911 | "hazard", 912 | "head", 913 | "health", 914 | "heart", 915 | "heavy", 916 | "hedgehog", 917 | "height", 918 | "hello", 919 | "helmet", 920 | "help", 921 | "hen", 922 | "hero", 923 | "hidden", 924 | "high", 925 | "hill", 926 | "hint", 927 | "hip", 928 | "hire", 929 | "history", 930 | "hobby", 931 | "hockey", 932 | "hold", 933 | "hole", 934 | "holiday", 935 | "hollow", 936 | "home", 937 | "honey", 938 | "hood", 939 | "hope", 940 | "horn", 941 | "horror", 942 | "horse", 943 | "hospital", 944 | "host", 945 | "hotel", 946 | "hour", 947 | "hover", 948 | "hub", 949 | "huge", 950 | "human", 951 | "humble", 952 | "humor", 953 | "hundred", 954 | "hungry", 955 | "hunt", 956 | "hurdle", 957 | "hurry", 958 | "hurt", 959 | "husband", 960 | "hybrid", 
961 | "ice", 962 | "icon", 963 | "idea", 964 | "identify", 965 | "idle", 966 | "ignore", 967 | "ill", 968 | "illegal", 969 | "illness", 970 | "image", 971 | "imitate", 972 | "immense", 973 | "immune", 974 | "impact", 975 | "impose", 976 | "improve", 977 | "impulse", 978 | "inch", 979 | "include", 980 | "income", 981 | "increase", 982 | "index", 983 | "indicate", 984 | "indoor", 985 | "industry", 986 | "infant", 987 | "inflict", 988 | "inform", 989 | "inhale", 990 | "inherit", 991 | "initial", 992 | "inject", 993 | "injury", 994 | "inmate", 995 | "inner", 996 | "innocent", 997 | "input", 998 | "inquiry", 999 | "insane", 1000 | "insect", 1001 | "inside", 1002 | "inspire", 1003 | "install", 1004 | "intact", 1005 | "interest", 1006 | "into", 1007 | "invest", 1008 | "invite", 1009 | "involve", 1010 | "iron", 1011 | "island", 1012 | "isolate", 1013 | "issue", 1014 | "item", 1015 | "ivory", 1016 | "jacket", 1017 | "jaguar", 1018 | "jar", 1019 | "jazz", 1020 | "jealous", 1021 | "jeans", 1022 | "jelly", 1023 | "jewel", 1024 | "job", 1025 | "join", 1026 | "joke", 1027 | "journey", 1028 | "joy", 1029 | "judge", 1030 | "juice", 1031 | "jump", 1032 | "jungle", 1033 | "junior", 1034 | "junk", 1035 | "just", 1036 | "kangaroo", 1037 | "keen", 1038 | "keep", 1039 | "ketchup", 1040 | "key", 1041 | "kick", 1042 | "kid", 1043 | "kidney", 1044 | "kind", 1045 | "kingdom", 1046 | "kiss", 1047 | "kit", 1048 | "kitchen", 1049 | "kite", 1050 | "kitten", 1051 | "kiwi", 1052 | "knee", 1053 | "knife", 1054 | "knock", 1055 | "know", 1056 | "lab", 1057 | "label", 1058 | "labor", 1059 | "ladder", 1060 | "lady", 1061 | "lake", 1062 | "lamp", 1063 | "language", 1064 | "laptop", 1065 | "large", 1066 | "later", 1067 | "latin", 1068 | "laugh", 1069 | "laundry", 1070 | "lava", 1071 | "law", 1072 | "lawn", 1073 | "lawsuit", 1074 | "layer", 1075 | "lazy", 1076 | "leader", 1077 | "leaf", 1078 | "learn", 1079 | "leave", 1080 | "lecture", 1081 | "left", 1082 | "leg", 1083 | "legal", 1084 | "legend", 1085 | 
"leisure", 1086 | "lemon", 1087 | "lend", 1088 | "length", 1089 | "lens", 1090 | "leopard", 1091 | "lesson", 1092 | "letter", 1093 | "level", 1094 | "liar", 1095 | "liberty", 1096 | "library", 1097 | "license", 1098 | "life", 1099 | "lift", 1100 | "light", 1101 | "like", 1102 | "limb", 1103 | "limit", 1104 | "link", 1105 | "lion", 1106 | "liquid", 1107 | "list", 1108 | "little", 1109 | "live", 1110 | "lizard", 1111 | "load", 1112 | "loan", 1113 | "lobster", 1114 | "local", 1115 | "lock", 1116 | "logic", 1117 | "lonely", 1118 | "long", 1119 | "loop", 1120 | "lottery", 1121 | "loud", 1122 | "lounge", 1123 | "love", 1124 | "loyal", 1125 | "lucky", 1126 | "luggage", 1127 | "lumber", 1128 | "lunar", 1129 | "lunch", 1130 | "luxury", 1131 | "lyrics", 1132 | "machine", 1133 | "mad", 1134 | "magic", 1135 | "magnet", 1136 | "maid", 1137 | "mail", 1138 | "main", 1139 | "major", 1140 | "make", 1141 | "mammal", 1142 | "man", 1143 | "manage", 1144 | "mandate", 1145 | "mango", 1146 | "mansion", 1147 | "manual", 1148 | "maple", 1149 | "marble", 1150 | "march", 1151 | "margin", 1152 | "marine", 1153 | "market", 1154 | "marriage", 1155 | "mask", 1156 | "mass", 1157 | "master", 1158 | "match", 1159 | "material", 1160 | "math", 1161 | "matrix", 1162 | "matter", 1163 | "maximum", 1164 | "maze", 1165 | "meadow", 1166 | "mean", 1167 | "measure", 1168 | "meat", 1169 | "mechanic", 1170 | "medal", 1171 | "media", 1172 | "melody", 1173 | "melt", 1174 | "member", 1175 | "memory", 1176 | "mention", 1177 | "menu", 1178 | "mercy", 1179 | "merge", 1180 | "merit", 1181 | "merry", 1182 | "mesh", 1183 | "message", 1184 | "metal", 1185 | "method", 1186 | "middle", 1187 | "midnight", 1188 | "milk", 1189 | "million", 1190 | "mimic", 1191 | "mind", 1192 | "minimum", 1193 | "minor", 1194 | "minute", 1195 | "miracle", 1196 | "mirror", 1197 | "misery", 1198 | "miss", 1199 | "mistake", 1200 | "mix", 1201 | "mixed", 1202 | "mixture", 1203 | "mobile", 1204 | "model", 1205 | "modify", 1206 | "mom", 1207 | 
"moment", 1208 | "monitor", 1209 | "monkey", 1210 | "monster", 1211 | "month", 1212 | "moon", 1213 | "moral", 1214 | "more", 1215 | "morning", 1216 | "mosquito", 1217 | "mother", 1218 | "motion", 1219 | "motor", 1220 | "mountain", 1221 | "mouse", 1222 | "move", 1223 | "movie", 1224 | "much", 1225 | "muffin", 1226 | "mule", 1227 | "multiply", 1228 | "muscle", 1229 | "museum", 1230 | "mushroom", 1231 | "music", 1232 | "must", 1233 | "mutual", 1234 | "myself", 1235 | "mystery", 1236 | "myth", 1237 | "naive", 1238 | "name", 1239 | "napkin", 1240 | "narrow", 1241 | "nasty", 1242 | "nation", 1243 | "nature", 1244 | "near", 1245 | "neck", 1246 | "need", 1247 | "negative", 1248 | "neglect", 1249 | "neither", 1250 | "nephew", 1251 | "nerve", 1252 | "nest", 1253 | "net", 1254 | "network", 1255 | "neutral", 1256 | "never", 1257 | "news", 1258 | "next", 1259 | "nice", 1260 | "night", 1261 | "noble", 1262 | "noise", 1263 | "nominee", 1264 | "noodle", 1265 | "normal", 1266 | "north", 1267 | "nose", 1268 | "notable", 1269 | "note", 1270 | "nothing", 1271 | "notice", 1272 | "novel", 1273 | "now", 1274 | "nuclear", 1275 | "number", 1276 | "nurse", 1277 | "nut", 1278 | "oak", 1279 | "obey", 1280 | "object", 1281 | "oblige", 1282 | "obscure", 1283 | "observe", 1284 | "obtain", 1285 | "obvious", 1286 | "occur", 1287 | "ocean", 1288 | "october", 1289 | "odor", 1290 | "off", 1291 | "offer", 1292 | "office", 1293 | "often", 1294 | "oil", 1295 | "okay", 1296 | "old", 1297 | "olive", 1298 | "olympic", 1299 | "omit", 1300 | "once", 1301 | "one", 1302 | "onion", 1303 | "online", 1304 | "only", 1305 | "open", 1306 | "opera", 1307 | "opinion", 1308 | "oppose", 1309 | "option", 1310 | "orange", 1311 | "orbit", 1312 | "orchard", 1313 | "order", 1314 | "ordinary", 1315 | "organ", 1316 | "orient", 1317 | "original", 1318 | "orphan", 1319 | "ostrich", 1320 | "other", 1321 | "outdoor", 1322 | "outer", 1323 | "output", 1324 | "outside", 1325 | "oval", 1326 | "oven", 1327 | "over", 1328 | "own", 1329 
| "owner", 1330 | "oxygen", 1331 | "oyster", 1332 | "ozone", 1333 | "pact", 1334 | "paddle", 1335 | "page", 1336 | "pair", 1337 | "palace", 1338 | "palm", 1339 | "panda", 1340 | "panel", 1341 | "panic", 1342 | "panther", 1343 | "paper", 1344 | "parade", 1345 | "parent", 1346 | "park", 1347 | "parrot", 1348 | "party", 1349 | "pass", 1350 | "patch", 1351 | "path", 1352 | "patient", 1353 | "patrol", 1354 | "pattern", 1355 | "pause", 1356 | "pave", 1357 | "payment", 1358 | "peace", 1359 | "peanut", 1360 | "pear", 1361 | "peasant", 1362 | "pelican", 1363 | "pen", 1364 | "penalty", 1365 | "pencil", 1366 | "people", 1367 | "pepper", 1368 | "perfect", 1369 | "permit", 1370 | "person", 1371 | "pet", 1372 | "phone", 1373 | "photo", 1374 | "phrase", 1375 | "physical", 1376 | "piano", 1377 | "picnic", 1378 | "picture", 1379 | "piece", 1380 | "pig", 1381 | "pigeon", 1382 | "pill", 1383 | "pilot", 1384 | "pink", 1385 | "pioneer", 1386 | "pipe", 1387 | "pistol", 1388 | "pitch", 1389 | "pizza", 1390 | "place", 1391 | "planet", 1392 | "plastic", 1393 | "plate", 1394 | "play", 1395 | "please", 1396 | "pledge", 1397 | "pluck", 1398 | "plug", 1399 | "plunge", 1400 | "poem", 1401 | "poet", 1402 | "point", 1403 | "polar", 1404 | "pole", 1405 | "police", 1406 | "pond", 1407 | "pony", 1408 | "pool", 1409 | "popular", 1410 | "portion", 1411 | "position", 1412 | "possible", 1413 | "post", 1414 | "potato", 1415 | "pottery", 1416 | "poverty", 1417 | "powder", 1418 | "power", 1419 | "practice", 1420 | "praise", 1421 | "predict", 1422 | "prefer", 1423 | "prepare", 1424 | "present", 1425 | "pretty", 1426 | "prevent", 1427 | "price", 1428 | "pride", 1429 | "primary", 1430 | "print", 1431 | "priority", 1432 | "prison", 1433 | "private", 1434 | "prize", 1435 | "problem", 1436 | "process", 1437 | "produce", 1438 | "profit", 1439 | "program", 1440 | "project", 1441 | "promote", 1442 | "proof", 1443 | "property", 1444 | "prosper", 1445 | "protect", 1446 | "proud", 1447 | "provide", 1448 | "public", 
1449 | "pudding", 1450 | "pull", 1451 | "pulp", 1452 | "pulse", 1453 | "pumpkin", 1454 | "punch", 1455 | "pupil", 1456 | "puppy", 1457 | "purchase", 1458 | "purity", 1459 | "purpose", 1460 | "purse", 1461 | "push", 1462 | "put", 1463 | "puzzle", 1464 | "pyramid", 1465 | "quality", 1466 | "quantum", 1467 | "quarter", 1468 | "question", 1469 | "quick", 1470 | "quit", 1471 | "quiz", 1472 | "quote", 1473 | "rabbit", 1474 | "raccoon", 1475 | "race", 1476 | "rack", 1477 | "radar", 1478 | "radio", 1479 | "rail", 1480 | "rain", 1481 | "raise", 1482 | "rally", 1483 | "ramp", 1484 | "ranch", 1485 | "random", 1486 | "range", 1487 | "rapid", 1488 | "rare", 1489 | "rate", 1490 | "rather", 1491 | "raven", 1492 | "raw", 1493 | "razor", 1494 | "ready", 1495 | "real", 1496 | "reason", 1497 | "rebel", 1498 | "rebuild", 1499 | "recall", 1500 | "receive", 1501 | "recipe", 1502 | "record", 1503 | "recycle", 1504 | "reduce", 1505 | "reflect", 1506 | "reform", 1507 | "refuse", 1508 | "region", 1509 | "regret", 1510 | "regular", 1511 | "reject", 1512 | "relax", 1513 | "release", 1514 | "relief", 1515 | "rely", 1516 | "remain", 1517 | "remember", 1518 | "remind", 1519 | "remove", 1520 | "render", 1521 | "renew", 1522 | "rent", 1523 | "reopen", 1524 | "repair", 1525 | "repeat", 1526 | "replace", 1527 | "report", 1528 | "require", 1529 | "rescue", 1530 | "resemble", 1531 | "resist", 1532 | "resource", 1533 | "response", 1534 | "result", 1535 | "retire", 1536 | "retreat", 1537 | "return", 1538 | "reunion", 1539 | "reveal", 1540 | "review", 1541 | "reward", 1542 | "rhythm", 1543 | "rib", 1544 | "ribbon", 1545 | "rice", 1546 | "rich", 1547 | "ride", 1548 | "ridge", 1549 | "rifle", 1550 | "right", 1551 | "rigid", 1552 | "ring", 1553 | "riot", 1554 | "ripple", 1555 | "risk", 1556 | "ritual", 1557 | "rival", 1558 | "river", 1559 | "road", 1560 | "roast", 1561 | "robot", 1562 | "robust", 1563 | "rocket", 1564 | "romance", 1565 | "roof", 1566 | "rookie", 1567 | "room", 1568 | "rose", 1569 | 
"rotate", 1570 | "rough", 1571 | "round", 1572 | "route", 1573 | "royal", 1574 | "rubber", 1575 | "rude", 1576 | "rug", 1577 | "rule", 1578 | "run", 1579 | "runway", 1580 | "rural", 1581 | "sad", 1582 | "saddle", 1583 | "sadness", 1584 | "safe", 1585 | "sail", 1586 | "salad", 1587 | "salmon", 1588 | "salon", 1589 | "salt", 1590 | "salute", 1591 | "same", 1592 | "sample", 1593 | "sand", 1594 | "satisfy", 1595 | "satoshi", 1596 | "sauce", 1597 | "sausage", 1598 | "save", 1599 | "say", 1600 | "scale", 1601 | "scan", 1602 | "scare", 1603 | "scatter", 1604 | "scene", 1605 | "scheme", 1606 | "school", 1607 | "science", 1608 | "scissors", 1609 | "scorpion", 1610 | "scout", 1611 | "scrap", 1612 | "screen", 1613 | "script", 1614 | "scrub", 1615 | "sea", 1616 | "search", 1617 | "season", 1618 | "seat", 1619 | "second", 1620 | "secret", 1621 | "section", 1622 | "security", 1623 | "seed", 1624 | "seek", 1625 | "segment", 1626 | "select", 1627 | "sell", 1628 | "seminar", 1629 | "senior", 1630 | "sense", 1631 | "sentence", 1632 | "series", 1633 | "service", 1634 | "session", 1635 | "settle", 1636 | "setup", 1637 | "seven", 1638 | "shadow", 1639 | "shaft", 1640 | "shallow", 1641 | "share", 1642 | "shed", 1643 | "shell", 1644 | "sheriff", 1645 | "shield", 1646 | "shift", 1647 | "shine", 1648 | "ship", 1649 | "shiver", 1650 | "shock", 1651 | "shoe", 1652 | "shoot", 1653 | "shop", 1654 | "short", 1655 | "shoulder", 1656 | "shove", 1657 | "shrimp", 1658 | "shrug", 1659 | "shuffle", 1660 | "shy", 1661 | "sibling", 1662 | "sick", 1663 | "side", 1664 | "siege", 1665 | "sight", 1666 | "sign", 1667 | "silent", 1668 | "silk", 1669 | "silly", 1670 | "silver", 1671 | "similar", 1672 | "simple", 1673 | "since", 1674 | "sing", 1675 | "siren", 1676 | "sister", 1677 | "situate", 1678 | "six", 1679 | "size", 1680 | "skate", 1681 | "sketch", 1682 | "ski", 1683 | "skill", 1684 | "skin", 1685 | "skirt", 1686 | "skull", 1687 | "slab", 1688 | "slam", 1689 | "sleep", 1690 | "slender", 1691 | "slice", 
1692 | "slide", 1693 | "slight", 1694 | "slim", 1695 | "slogan", 1696 | "slot", 1697 | "slow", 1698 | "slush", 1699 | "small", 1700 | "smart", 1701 | "smile", 1702 | "smoke", 1703 | "smooth", 1704 | "snack", 1705 | "snake", 1706 | "snap", 1707 | "sniff", 1708 | "snow", 1709 | "soap", 1710 | "soccer", 1711 | "social", 1712 | "sock", 1713 | "soda", 1714 | "soft", 1715 | "solar", 1716 | "soldier", 1717 | "solid", 1718 | "solution", 1719 | "solve", 1720 | "someone", 1721 | "song", 1722 | "soon", 1723 | "sorry", 1724 | "sort", 1725 | "soul", 1726 | "sound", 1727 | "soup", 1728 | "source", 1729 | "south", 1730 | "space", 1731 | "spare", 1732 | "spatial", 1733 | "spawn", 1734 | "speak", 1735 | "special", 1736 | "speed", 1737 | "spell", 1738 | "spend", 1739 | "sphere", 1740 | "spice", 1741 | "spider", 1742 | "spike", 1743 | "spin", 1744 | "spirit", 1745 | "split", 1746 | "spoil", 1747 | "sponsor", 1748 | "spoon", 1749 | "sport", 1750 | "spot", 1751 | "spray", 1752 | "spread", 1753 | "spring", 1754 | "spy", 1755 | "square", 1756 | "squeeze", 1757 | "squirrel", 1758 | "stable", 1759 | "stadium", 1760 | "staff", 1761 | "stage", 1762 | "stairs", 1763 | "stamp", 1764 | "stand", 1765 | "start", 1766 | "state", 1767 | "stay", 1768 | "steak", 1769 | "steel", 1770 | "stem", 1771 | "step", 1772 | "stereo", 1773 | "stick", 1774 | "still", 1775 | "sting", 1776 | "stock", 1777 | "stomach", 1778 | "stone", 1779 | "stool", 1780 | "story", 1781 | "stove", 1782 | "strategy", 1783 | "street", 1784 | "strike", 1785 | "strong", 1786 | "struggle", 1787 | "student", 1788 | "stuff", 1789 | "stumble", 1790 | "style", 1791 | "subject", 1792 | "submit", 1793 | "subway", 1794 | "success", 1795 | "such", 1796 | "sudden", 1797 | "suffer", 1798 | "sugar", 1799 | "suggest", 1800 | "suit", 1801 | "summer", 1802 | "sun", 1803 | "sunny", 1804 | "sunset", 1805 | "super", 1806 | "supply", 1807 | "supreme", 1808 | "sure", 1809 | "surface", 1810 | "surge", 1811 | "surprise", 1812 | "surround", 1813 | "survey", 
1814 | "suspect", 1815 | "sustain", 1816 | "swallow", 1817 | "swamp", 1818 | "swap", 1819 | "swarm", 1820 | "swear", 1821 | "sweet", 1822 | "swift", 1823 | "swim", 1824 | "swing", 1825 | "switch", 1826 | "sword", 1827 | "symbol", 1828 | "symptom", 1829 | "syrup", 1830 | "system", 1831 | "table", 1832 | "tackle", 1833 | "tag", 1834 | "tail", 1835 | "talent", 1836 | "talk", 1837 | "tank", 1838 | "tape", 1839 | "target", 1840 | "task", 1841 | "taste", 1842 | "tattoo", 1843 | "taxi", 1844 | "teach", 1845 | "team", 1846 | "tell", 1847 | "ten", 1848 | "tenant", 1849 | "tennis", 1850 | "tent", 1851 | "term", 1852 | "test", 1853 | "text", 1854 | "thank", 1855 | "that", 1856 | "theme", 1857 | "then", 1858 | "theory", 1859 | "there", 1860 | "they", 1861 | "thing", 1862 | "this", 1863 | "thought", 1864 | "three", 1865 | "thrive", 1866 | "throw", 1867 | "thumb", 1868 | "thunder", 1869 | "ticket", 1870 | "tide", 1871 | "tiger", 1872 | "tilt", 1873 | "timber", 1874 | "time", 1875 | "tiny", 1876 | "tip", 1877 | "tired", 1878 | "tissue", 1879 | "title", 1880 | "toast", 1881 | "tobacco", 1882 | "today", 1883 | "toddler", 1884 | "toe", 1885 | "together", 1886 | "toilet", 1887 | "token", 1888 | "tomato", 1889 | "tomorrow", 1890 | "tone", 1891 | "tongue", 1892 | "tonight", 1893 | "tool", 1894 | "tooth", 1895 | "top", 1896 | "topic", 1897 | "topple", 1898 | "torch", 1899 | "tornado", 1900 | "tortoise", 1901 | "toss", 1902 | "total", 1903 | "tourist", 1904 | "toward", 1905 | "tower", 1906 | "town", 1907 | "toy", 1908 | "track", 1909 | "trade", 1910 | "traffic", 1911 | "tragic", 1912 | "train", 1913 | "transfer", 1914 | "trap", 1915 | "trash", 1916 | "travel", 1917 | "tray", 1918 | "treat", 1919 | "tree", 1920 | "trend", 1921 | "trial", 1922 | "tribe", 1923 | "trick", 1924 | "trigger", 1925 | "trim", 1926 | "trip", 1927 | "trophy", 1928 | "trouble", 1929 | "truck", 1930 | "true", 1931 | "truly", 1932 | "trumpet", 1933 | "trust", 1934 | "truth", 1935 | "try", 1936 | "tube", 1937 | 
"tuition", 1938 | "tumble", 1939 | "tuna", 1940 | "tunnel", 1941 | "turkey", 1942 | "turn", 1943 | "turtle", 1944 | "twelve", 1945 | "twenty", 1946 | "twice", 1947 | "twin", 1948 | "twist", 1949 | "two", 1950 | "type", 1951 | "typical", 1952 | "ugly", 1953 | "umbrella", 1954 | "unable", 1955 | "unaware", 1956 | "uncle", 1957 | "uncover", 1958 | "under", 1959 | "undo", 1960 | "unfair", 1961 | "unfold", 1962 | "unhappy", 1963 | "uniform", 1964 | "unique", 1965 | "unit", 1966 | "universe", 1967 | "unknown", 1968 | "unlock", 1969 | "until", 1970 | "unusual", 1971 | "unveil", 1972 | "update", 1973 | "upgrade", 1974 | "uphold", 1975 | "upon", 1976 | "upper", 1977 | "upset", 1978 | "urban", 1979 | "urge", 1980 | "usage", 1981 | "use", 1982 | "used", 1983 | "useful", 1984 | "useless", 1985 | "usual", 1986 | "utility", 1987 | "vacant", 1988 | "vacuum", 1989 | "vague", 1990 | "valid", 1991 | "valley", 1992 | "valve", 1993 | "van", 1994 | "vanish", 1995 | "vapor", 1996 | "various", 1997 | "vast", 1998 | "vault", 1999 | "vehicle", 2000 | "velvet", 2001 | "vendor", 2002 | "venture", 2003 | "venue", 2004 | "verb", 2005 | "verify", 2006 | "version", 2007 | "very", 2008 | "vessel", 2009 | "veteran", 2010 | "viable", 2011 | "vibrant", 2012 | "vicious", 2013 | "victory", 2014 | "video", 2015 | "view", 2016 | "village", 2017 | "vintage", 2018 | "violin", 2019 | "virtual", 2020 | "virus", 2021 | "visa", 2022 | "visit", 2023 | "visual", 2024 | "vital", 2025 | "vivid", 2026 | "vocal", 2027 | "voice", 2028 | "void", 2029 | "volcano", 2030 | "volume", 2031 | "vote", 2032 | "voyage", 2033 | "wage", 2034 | "wagon", 2035 | "wait", 2036 | "walk", 2037 | "wall", 2038 | "walnut", 2039 | "want", 2040 | "warfare", 2041 | "warm", 2042 | "warrior", 2043 | "wash", 2044 | "wasp", 2045 | "waste", 2046 | "water", 2047 | "wave", 2048 | "way", 2049 | "wealth", 2050 | "weapon", 2051 | "wear", 2052 | "weasel", 2053 | "weather", 2054 | "web", 2055 | "wedding", 2056 | "weekend", 2057 | "weird", 2058 | 
"welcome", 2059 | "west", 2060 | "wet", 2061 | "whale", 2062 | "what", 2063 | "wheat", 2064 | "wheel", 2065 | "when", 2066 | "where", 2067 | "whip", 2068 | "whisper", 2069 | "wide", 2070 | "width", 2071 | "wife", 2072 | "wild", 2073 | "will", 2074 | "win", 2075 | "window", 2076 | "wine", 2077 | "wing", 2078 | "wink", 2079 | "winner", 2080 | "winter", 2081 | "wire", 2082 | "wisdom", 2083 | "wise", 2084 | "wish", 2085 | "witness", 2086 | "wolf", 2087 | "woman", 2088 | "wonder", 2089 | "wood", 2090 | "wool", 2091 | "word", 2092 | "work", 2093 | "world", 2094 | "worry", 2095 | "worth", 2096 | "wrap", 2097 | "wreck", 2098 | "wrestle", 2099 | "wrist", 2100 | "write", 2101 | "wrong", 2102 | "yard", 2103 | "year", 2104 | "yellow", 2105 | "you", 2106 | "young", 2107 | "youth", 2108 | "zebra", 2109 | "zero", 2110 | "zone", 2111 | "zoo", 2112 | }; 2113 | 2114 | test "words_by_prefix" { 2115 | const lang: Language = .english; 2116 | 2117 | var res = lang.wordsByPrefix("woo") orelse @panic("not expect"); 2118 | try std.testing.expectEqualSlices([]const u8, &.{ "wood", "wool" }, res); 2119 | 2120 | res = lang.wordsByPrefix("") orelse @panic("not expect"); 2121 | try std.testing.expectEqual(res.len, 2048); 2122 | 2123 | try std.testing.expect(lang.wordsByPrefix("woof") == null); 2124 | } 2125 | -------------------------------------------------------------------------------- /src/bips/bip39/pbkdf2.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const SALT_PREFIX = "mnemonic"; 3 | 4 | const Hmac = std.crypto.auth.hmac.sha2.HmacSha512; 5 | const Sha512 = std.crypto.hash.sha2.Sha512; 6 | 7 | /// Calculate the binary size of the mnemonic. 
fn mnemonicByteLen(mnemonic: []const []const u8) usize {
    var len: usize = 0;
    for (0.., mnemonic) |i, word| {
        // One separator space before every word except the first.
        if (i > 0) {
            len += 1;
        }
        len += word.len;
    }
    return len;
}

/// Write the mnemonic in binary form (words joined by single spaces)
/// into the hash engine.
fn mnemonicWriteInto(mnemonic: []const []const u8, engine: *Sha512) void {
    for (0.., mnemonic) |i, word| {
        if (i > 0) {
            engine.update(" ");
        }
        engine.update(word);
    }
}

/// Create an HMAC engine keyed by the space-joined mnemonic.
/// We need a special method because we can't allocate a new byte
/// vector for the entire serialized mnemonic.
fn createHmacEngine(mnemonic: []const []const u8) Hmac {
    // Inner code is borrowed from the bitcoin_hashes::hmac::HmacEngine::new method.
    var ipad = [_]u8{0x36} ** 128;
    var opad = [_]u8{0x5c} ** 128;

    var iengine = Sha512.init(.{});

    if (mnemonicByteLen(mnemonic) > Sha512.block_length) {
        // HMAC keys longer than the block size are replaced by their hash.
        const hash = v: {
            var engine = Sha512.init(.{});
            mnemonicWriteInto(mnemonic, &engine);
            var final: [Sha512.digest_length]u8 = undefined;
            engine.final(&final);
            break :v final;
        };

        for (ipad[0..64], hash) |*b_i, b_h| {
            b_i.* = b_i.* ^ b_h;
        }

        for (opad[0..64], hash) |*b_o, b_h| {
            b_o.* = b_o.* ^ b_h;
        }
    } else {
        // Key fits in one block: XOR the serialized mnemonic directly into
        // the pads without materializing the joined string.
        var cursor: usize = 0;
        for (0.., mnemonic) |i, word| {
            if (i > 0) {
                ipad[cursor] ^= ' ';
                opad[cursor] ^= ' ';
                cursor += 1;
            }

            const min_len = @min(ipad.len - cursor, word.len);
            for (ipad[cursor .. cursor + min_len], word[0..min_len]) |*b_i, b_h| {
                b_i.* = b_i.* ^ b_h;
            }

            for (opad[cursor .. cursor + min_len], word[0..min_len]) |*b_o, b_h| {
                b_o.* = b_o.* ^ b_h;
            }

            cursor += word.len;
            // Guaranteed by the branch condition; mnemonicByteLen is broken otherwise.
            std.debug.assert(cursor <= Sha512.block_length);
        }
    }

    iengine.update(ipad[0..Sha512.block_length]);

    return Hmac{
        .o_key_pad = opad[0..Sha512.block_length].*,
        .hash = iengine,
    };
}

/// XOR `salt` into `res` in place. `salt` must be at least as long as `res`.
inline fn xor(res: []u8, salt: []const u8) void {
    // length mismatch in xor
    std.debug.assert(salt.len >= res.len);
    for (res, salt[0..res.len]) |*a, b| {
        a.* = a.* ^ b;
    }
}

/// PBKDF2-HMAC-SHA512 (RFC 2898) keyed by the mnemonic, salted with
/// `SALT_PREFIX ++ unprefixed_salt`, running `c` iterations per output
/// block. BIP-39 uses c = 2048 and a 64-byte `res` (the seed).
pub fn pbkdf2(mnemonic: []const []const u8, unprefixed_salt: []const u8, c: usize, res: []u8) void {
    const prf = createHmacEngine(mnemonic);
    @memset(res, 0);

    var i: usize = 0;
    while (i < res.len) : ({
        i += Sha512.digest_length;
    }) {
        const chunk_end = @min(res.len, i + Sha512.digest_length);
        const chunk: []u8 = res[i..chunk_end];

        // U_1 = PRF(key, salt || INT(block_index)). The appended integer is
        // the 1-based *block number*, not the byte offset: the previous code
        // wrote `i + 1` (offset + 1), yielding 1, 65, 129, ... and corrupting
        // every block after the first for outputs longer than 64 bytes.
        // (BIP-39 seeds are exactly 64 bytes, so they were unaffected.)
        var salt = v: {
            var prfc = prf;
            prfc.update(SALT_PREFIX);
            prfc.update(unprefixed_salt);

            var buf: [4]u8 = undefined;
            const block_index: u32 = @intCast(i / Sha512.digest_length + 1);
            std.mem.writeInt(u32, &buf, block_index, .big);
            prfc.update(&buf);

            var salt: [Hmac.mac_length]u8 = undefined;
            prfc.final(&salt);

            xor(chunk, &salt);
            break :v salt;
        };

        // U_2 .. U_c: iterate the PRF, folding every round into the chunk.
        for (1..c) |_| {
            var prfc = prf;
            prfc.update(&salt);
            prfc.final(&salt);
            xor(chunk, &salt);
        }
    }
}
--------------------------------------------------------------------------------
/src/bips/lib.zig:
-------------------------------------------------------------------------------- 1 | pub const bip39 = @import("bip39/bip39.zig"); 2 | pub const bip32 = @import("bip32/bip32.zig"); 3 | -------------------------------------------------------------------------------- /src/hashes/hash160.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const testing = std.testing; 3 | const Sha256 = std.crypto.hash.sha2.Sha256; 4 | const Ripemd160 = @import("ripemd160.zig").Ripemd160; 5 | 6 | pub const Hash160 = struct { 7 | const Self = @This(); 8 | pub const digest_length = 20; 9 | pub const Options = struct {}; 10 | 11 | pub fn init(_: Options) Self { 12 | return .{}; 13 | } 14 | 15 | pub inline fn hash(b: []const u8, out: *[digest_length]u8, _: Options) void { 16 | var sha_out: [Sha256.digest_length]u8 = undefined; 17 | 18 | // Step 1: Compute SHA256 hash 19 | Sha256.hash(b, &sha_out, .{}); 20 | 21 | // Step 2: Compute RIPEMD160 of the SHA256 result 22 | Ripemd160.hash(&sha_out, out, .{}); 23 | } 24 | }; 25 | 26 | // Testing Hash160 against known vectors 27 | test "hash160 vectors" { 28 | const test_cases = [_]struct { 29 | input: []const u8, 30 | expected: []const u8, 31 | }{ 32 | .{ .input = "hello", .expected = "b6a9c8c230722b7c748331a8b450f05566dc7d0f" }, 33 | .{ .input = "blockchain", .expected = "755f6f4af6e11c5cf642f0ed6ecda89d8619cee7" }, 34 | .{ .input = "abc", .expected = "bb1be98c142444d7a56aa3981c3942a978e4dc33" }, 35 | .{ .input = "bitcoin", .expected = "6b2904910f9b40b2244eed93a7b8d992b22f8d32" }, 36 | }; 37 | 38 | for (test_cases) |case| { 39 | errdefer { 40 | std.log.err("test case failed, case = {s}", .{std.json.fmt(case, .{})}); 41 | } 42 | var expected_output: [Hash160.digest_length]u8 = undefined; 43 | _ = try std.fmt.hexToBytes(&expected_output, case.expected); 44 | 45 | var actual_output: [Hash160.digest_length]u8 = undefined; 46 | 47 | Hash160.hash(case.input, &actual_output, .{}); 48 | 
try testing.expectEqualSlices(u8, &expected_output, &actual_output); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/hashes/lib.zig: -------------------------------------------------------------------------------- 1 | pub const Ripemd160 = @import("ripemd160.zig").Ripemd160; 2 | pub const Hash160 = @import("hash160.zig").Hash160; 3 | -------------------------------------------------------------------------------- /src/hashes/ripemd160.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const testing = std.testing; 3 | const mem = std.mem; 4 | 5 | pub const Ripemd160 = struct { 6 | const Self = @This(); 7 | pub const block_length = 64; 8 | pub const digest_length = 20; 9 | pub const Options = struct {}; 10 | 11 | s: [5]u32, 12 | // Streaming Cache 13 | buf: [64]u8 = undefined, 14 | buf_len: u8 = 0, 15 | total_len: u64 = 0, 16 | 17 | pub fn init(options: Options) Self { 18 | _ = options; 19 | return Self{ 20 | .s = [_]u32{ 21 | 0x67452301, 22 | 0xEFCDAB89, 23 | 0x98BADCFE, 24 | 0x10325476, 25 | 0xC3D2E1F0, 26 | }, 27 | }; 28 | } 29 | 30 | pub fn update(d: *Self, b: []const u8) void { 31 | var off: usize = 0; 32 | 33 | // Partial buffer exists from previous update. Copy into buffer then hash. 34 | if (d.buf_len != 0 and d.buf_len + b.len >= 64) { 35 | off += 64 - d.buf_len; 36 | @memcpy(d.buf[d.buf_len..][0..off], b[0..off]); 37 | 38 | d.round(&d.buf); 39 | d.buf_len = 0; 40 | } 41 | 42 | // Full middle blocks. 43 | while (off + 64 <= b.len) : (off += 64) { 44 | d.round(b[off..][0..64]); 45 | } 46 | 47 | // Copy any remainder for next pass. 48 | const b_slice = b[off..]; 49 | @memcpy(d.buf[d.buf_len..][0..b_slice.len], b_slice); 50 | d.buf_len += @as(u8, @intCast(b[off..].len)); 51 | 52 | d.total_len += b.len; 53 | } 54 | 55 | fn blockToWords(block: *const [block_length]u8) [16]u32 { 56 | var words: [16]u32 = undefined; 57 | for (words, 0..) 
|_, i| { 58 | // zig fmt: off 59 | words[i] = 0; 60 | words[i] |= (@as(u32, block[i * 4 + 3]) << 24); 61 | words[i] |= (@as(u32, block[i * 4 + 2]) << 16); 62 | words[i] |= (@as(u32, block[i * 4 + 1]) << 8); 63 | words[i] |= (@as(u32, block[i * 4 + 0]) << 0); 64 | // zig fmt: on 65 | } 66 | return words; 67 | } 68 | 69 | fn func(j: usize, x: u32, y: u32, z: u32) u32 { 70 | return switch (j) { 71 | // f(j, x, y, z) = x XOR y XOR z (0 <= j <= 15) 72 | 0...15 => x ^ y ^ z, 73 | // f(j, x, y, z) = (x AND y) OR (NOT(x) AND z) (16 <= j <= 31) 74 | 16...31 => (x & y) | (~x & z), 75 | // f(j, x, y, z) = (x OR NOT(y)) XOR z (32 <= j <= 47) 76 | 32...47 => (x | ~y) ^ z, 77 | // f(j, x, y, z) = (x AND z) OR (y AND NOT(z)) (48 <= j <= 63) 78 | 48...63 => (x & z) | (y & ~z), 79 | // f(j, x, y, z) = x XOR (y OR NOT(z)) (64 <= j <= 79) 80 | // !!! omg xor and or 64 81 | 64...79 => x ^ (y | ~z), 82 | else => unreachable, 83 | }; 84 | } 85 | 86 | fn round(d: *Self, b: *const [block_length]u8) void { 87 | var leftA = d.s[0]; 88 | var leftB = d.s[1]; 89 | var leftC = d.s[2]; 90 | var leftD = d.s[3]; 91 | var leftE = d.s[4]; 92 | 93 | var rightA = d.s[0]; 94 | var rightB = d.s[1]; 95 | var rightC = d.s[2]; 96 | var rightD = d.s[3]; 97 | var rightE = d.s[4]; 98 | 99 | const words: [16]u32 = blockToWords(b); 100 | var tmp: u32 = undefined; 101 | var j: usize = 0; 102 | while (j < 80) : (j += 1) { 103 | // zig fmt: off 104 | tmp = std.math.rotl(u32, leftA 105 | +% func(j, leftB, leftC, leftD) 106 | +% words[left_selecting_words[j]] 107 | +% left_K[j / 16], 108 | left_tmp_shift_amount[j]) +% leftE; 109 | // zig fmt: on 110 | leftA = leftE; 111 | leftE = leftD; 112 | leftD = std.math.rotl(u32, leftC, 10); 113 | leftC = leftB; 114 | leftB = tmp; 115 | 116 | // zig fmt: off 117 | tmp = std.math.rotl(u32, rightA 118 | +% func(79 - j, rightB, rightC, rightD) 119 | +% words[right_selecting_words[j]] 120 | +% right_K[j / 16], 121 | right_tmp_shift_amount[j]) +% rightE; 122 | // zig fmt: on 123 | 
rightA = rightE; 124 | rightE = rightD; 125 | rightD = std.math.rotl(u32, rightC, 10); 126 | rightC = rightB; 127 | rightB = tmp; 128 | } 129 | 130 | tmp = d.s[1] +% leftC +% rightD; 131 | d.s[1] = d.s[2] +% leftD +% rightE; 132 | d.s[2] = d.s[3] +% leftE +% rightA; 133 | d.s[3] = d.s[4] +% leftA +% rightB; 134 | d.s[4] = d.s[0] +% leftB +% rightC; 135 | d.s[0] = tmp; 136 | } 137 | 138 | pub fn final(d: *Self, out: *[digest_length]u8) void { 139 | // The buffer here will never be completely full. 140 | @memset(d.buf[d.buf_len..], 0); 141 | 142 | // Append padding bits. 143 | d.buf[d.buf_len] = 0x80; 144 | d.buf_len += 1; 145 | 146 | // > 448 mod 512 so need to add an extra round to wrap around. 147 | if (64 - d.buf_len < 8) { 148 | d.round(d.buf[0..]); 149 | @memset(d.buf[0..], 0); 150 | } 151 | 152 | // Append message length in more simple way 153 | const len = (d.total_len * 8); 154 | mem.writeInt(u64, d.buf[56..64], len, .little); 155 | 156 | d.round(d.buf[0..]); 157 | 158 | for (d.s, 0..) 
|s, j| { 159 | mem.writeInt(u32, out[4 * j ..][0..4], s, .little); 160 | } 161 | } 162 | 163 | pub fn hash(b: []const u8, out: *[digest_length]u8, options: Options) void { 164 | var d = Ripemd160.init(options); 165 | d.update(b); 166 | d.final(out); 167 | } 168 | }; 169 | 170 | test "test vectors" { 171 | const input = [_][]const u8{ 172 | "", 173 | "a", 174 | "abc", 175 | "message digest", 176 | "abcdefghijklmnopqrstuvwxyz", 177 | "abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq", 178 | "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", 179 | "1234567890" ** 8, 180 | "a" ** 1000000, 181 | }; 182 | const output = [_][]const u8{ 183 | "9c1185a5c5e9fc54612808977ee8f548b2258d31", 184 | "0bdc9d2d256b3ee9daae347be6f4dc835a467ffe", 185 | "8eb208f7e05d987a9b044a8e98c6b087f15a0bfc", 186 | "5d0689ef49d2fae572b881b123a85ffa21595f36", 187 | "f71c27109c692c1b56bbdceb5b9d2865b3708dbc", 188 | "12a053384a9c0c88e405a06c27dcf49ada62eb2b", 189 | "b0e20b6e3116640286ed3a87a5713079b21f5189", 190 | "9b752e45573d4b39f4dbd3323cab82bf63326bfb", 191 | "52783243c1697bdbe16d37f97f68f08325dc1528", 192 | }; 193 | for (0..input.len) |i| { 194 | var expected_output: [Ripemd160.digest_length]u8 = undefined; 195 | _ = try std.fmt.hexToBytes(&expected_output, output[i]); 196 | var actual_output: [Ripemd160.digest_length]u8 = undefined; 197 | Ripemd160.hash(input[i], &actual_output, .{}); 198 | try testing.expectEqualSlices(u8, &expected_output, &actual_output); 199 | } 200 | } 201 | 202 | test "streaming" { 203 | var h = Ripemd160.init(.{}); 204 | var out: [Ripemd160.digest_length]u8 = undefined; 205 | h.final(&out); 206 | try testing.expectEqualSlices(u8, &[_]u8{ 207 | 0x9c, 0x11, 0x85, 0xa5, 0xc5, 0xe9, 0xfc, 0x54, 0x61, 0x28, 208 | 0x08, 0x97, 0x7e, 0xe8, 0xf5, 0x48, 0xb2, 0x25, 0x8d, 0x31, 209 | }, &out); 210 | 211 | h = Ripemd160.init(.{}); 212 | h.update("abc"); 213 | h.final(&out); 214 | try testing.expectEqualSlices(u8, &[_]u8{ 215 | 0x8e, 0xb2, 0x08, 0xf7, 0xe0, 
0x5d, 0x98, 0x7a, 0x9b, 0x04, 216 | 0x4a, 0x8e, 0x98, 0xc6, 0xb0, 0x87, 0xf1, 0x5a, 0x0b, 0xfc, 217 | }, &out); 218 | 219 | h = Ripemd160.init(.{}); 220 | h.update("a"); 221 | h.update("b"); 222 | h.update("c"); 223 | h.final(&out); 224 | try testing.expectEqualSlices(u8, &[_]u8{ 225 | 0x8e, 0xb2, 0x08, 0xf7, 0xe0, 0x5d, 0x98, 0x7a, 0x9b, 0x04, 226 | 0x4a, 0x8e, 0x98, 0xc6, 0xb0, 0x87, 0xf1, 0x5a, 0x0b, 0xfc, 227 | }, &out); 228 | } 229 | 230 | const left_selecting_words = [80]u32{ 231 | 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 232 | 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, 233 | 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, 234 | 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, 235 | 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13, 236 | }; 237 | 238 | const right_selecting_words = [80]u32{ 239 | 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, 240 | 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, 241 | 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, 242 | 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, 243 | 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11, 244 | }; 245 | 246 | const left_tmp_shift_amount = [80]u32{ 247 | 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, 248 | 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, 249 | 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, 250 | 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, 251 | 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6, 252 | }; 253 | 254 | const right_tmp_shift_amount = [80]u32{ 255 | 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, 256 | 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, 257 | 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, 258 | 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, 259 | 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11, 260 | }; 261 | 262 | const left_K = [5]u32{ 0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E }; 263 | 
const right_K = [5]u32{ 0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000 }; 264 | -------------------------------------------------------------------------------- /src/hashes/siphash24.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const testing = std.testing; 3 | const mem = std.mem; 4 | const math = std.math; 5 | const assert = std.debug.assert; 6 | const SipHash64 = std.hash.SipHash64; 7 | const SipHash128 = std.hash.SipHash128; 8 | 9 | // Adapted from https://github.com/ziglang/zig/blob/master/lib/std/crypto/siphash.zig 10 | 11 | pub fn SipHash24(comptime T: type) type { 12 | const c_rounds = 2; 13 | const d_rounds = 4; 14 | return switch (T) { 15 | u64 => SipHash64(c_rounds, d_rounds), 16 | u128 => SipHash128(c_rounds, d_rounds), 17 | else => @compileError("Type must be u64 or u128, got " ++ @typeName(T)), 18 | }; 19 | } 20 | 21 | // Test vectors from reference implementation. 22 | // https://github.com/veorq/SipHash/blob/master/vectors.h 23 | const test_key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"; 24 | 25 | test "siphash64-2-4 sanity" { 26 | const vectors = [_][8]u8{ 27 | "\x31\x0e\x0e\xdd\x47\xdb\x6f\x72".*, // "" 28 | "\xfd\x67\xdc\x93\xc5\x39\xf8\x74".*, // "\x00" 29 | "\x5a\x4f\xa9\xd9\x09\x80\x6c\x0d".*, // "\x00\x01" ... 
etc 30 | "\x2d\x7e\xfb\xd7\x96\x66\x67\x85".*, 31 | "\xb7\x87\x71\x27\xe0\x94\x27\xcf".*, 32 | "\x8d\xa6\x99\xcd\x64\x55\x76\x18".*, 33 | "\xce\xe3\xfe\x58\x6e\x46\xc9\xcb".*, 34 | "\x37\xd1\x01\x8b\xf5\x00\x02\xab".*, 35 | "\x62\x24\x93\x9a\x79\xf5\xf5\x93".*, 36 | "\xb0\xe4\xa9\x0b\xdf\x82\x00\x9e".*, 37 | "\xf3\xb9\xdd\x94\xc5\xbb\x5d\x7a".*, 38 | "\xa7\xad\x6b\x22\x46\x2f\xb3\xf4".*, 39 | "\xfb\xe5\x0e\x86\xbc\x8f\x1e\x75".*, 40 | "\x90\x3d\x84\xc0\x27\x56\xea\x14".*, 41 | "\xee\xf2\x7a\x8e\x90\xca\x23\xf7".*, 42 | "\xe5\x45\xbe\x49\x61\xca\x29\xa1".*, 43 | "\xdb\x9b\xc2\x57\x7f\xcc\x2a\x3f".*, 44 | "\x94\x47\xbe\x2c\xf5\xe9\x9a\x69".*, 45 | "\x9c\xd3\x8d\x96\xf0\xb3\xc1\x4b".*, 46 | "\xbd\x61\x79\xa7\x1d\xc9\x6d\xbb".*, 47 | "\x98\xee\xa2\x1a\xf2\x5c\xd6\xbe".*, 48 | "\xc7\x67\x3b\x2e\xb0\xcb\xf2\xd0".*, 49 | "\x88\x3e\xa3\xe3\x95\x67\x53\x93".*, 50 | "\xc8\xce\x5c\xcd\x8c\x03\x0c\xa8".*, 51 | "\x94\xaf\x49\xf6\xc6\x50\xad\xb8".*, 52 | "\xea\xb8\x85\x8a\xde\x92\xe1\xbc".*, 53 | "\xf3\x15\xbb\x5b\xb8\x35\xd8\x17".*, 54 | "\xad\xcf\x6b\x07\x63\x61\x2e\x2f".*, 55 | "\xa5\xc9\x1d\xa7\xac\xaa\x4d\xde".*, 56 | "\x71\x65\x95\x87\x66\x50\xa2\xa6".*, 57 | "\x28\xef\x49\x5c\x53\xa3\x87\xad".*, 58 | "\x42\xc3\x41\xd8\xfa\x92\xd8\x32".*, 59 | "\xce\x7c\xf2\x72\x2f\x51\x27\x71".*, 60 | "\xe3\x78\x59\xf9\x46\x23\xf3\xa7".*, 61 | "\x38\x12\x05\xbb\x1a\xb0\xe0\x12".*, 62 | "\xae\x97\xa1\x0f\xd4\x34\xe0\x15".*, 63 | "\xb4\xa3\x15\x08\xbe\xff\x4d\x31".*, 64 | "\x81\x39\x62\x29\xf0\x90\x79\x02".*, 65 | "\x4d\x0c\xf4\x9e\xe5\xd4\xdc\xca".*, 66 | "\x5c\x73\x33\x6a\x76\xd8\xbf\x9a".*, 67 | "\xd0\xa7\x04\x53\x6b\xa9\x3e\x0e".*, 68 | "\x92\x59\x58\xfc\xd6\x42\x0c\xad".*, 69 | "\xa9\x15\xc2\x9b\xc8\x06\x73\x18".*, 70 | "\x95\x2b\x79\xf3\xbc\x0a\xa6\xd4".*, 71 | "\xf2\x1d\xf2\xe4\x1d\x45\x35\xf9".*, 72 | "\x87\x57\x75\x19\x04\x8f\x53\xa9".*, 73 | "\x10\xa5\x6c\xf5\xdf\xcd\x9a\xdb".*, 74 | "\xeb\x75\x09\x5c\xcd\x98\x6c\xd0".*, 75 | "\x51\xa9\xcb\x9e\xcb\xa3\x12\xe6".*, 76 | 
"\x96\xaf\xad\xfc\x2c\xe6\x66\xc7".*, 77 | "\x72\xfe\x52\x97\x5a\x43\x64\xee".*, 78 | "\x5a\x16\x45\xb2\x76\xd5\x92\xa1".*, 79 | "\xb2\x74\xcb\x8e\xbf\x87\x87\x0a".*, 80 | "\x6f\x9b\xb4\x20\x3d\xe7\xb3\x81".*, 81 | "\xea\xec\xb2\xa3\x0b\x22\xa8\x7f".*, 82 | "\x99\x24\xa4\x3c\xc1\x31\x57\x24".*, 83 | "\xbd\x83\x8d\x3a\xaf\xbf\x8d\xb7".*, 84 | "\x0b\x1a\x2a\x32\x65\xd5\x1a\xea".*, 85 | "\x13\x50\x79\xa3\x23\x1c\xe6\x60".*, 86 | "\x93\x2b\x28\x46\xe4\xd7\x06\x66".*, 87 | "\xe1\x91\x5f\x5c\xb1\xec\xa4\x6c".*, 88 | "\xf3\x25\x96\x5c\xa1\x6d\x62\x9f".*, 89 | "\x57\x5f\xf2\x8e\x60\x38\x1b\xe5".*, 90 | "\x72\x45\x06\xeb\x4c\x32\x8a\x95".*, 91 | }; 92 | 93 | const siphash = SipHash24(u64); 94 | 95 | var buffer: [64]u8 = undefined; 96 | for (vectors, 0..) |vector, i| { 97 | buffer[i] = @as(u8, @intCast(i)); 98 | 99 | var out: [siphash.mac_length]u8 = undefined; 100 | siphash.create(&out, buffer[0..i], test_key); 101 | try testing.expectEqual(out, vector); 102 | } 103 | } 104 | 105 | test "siphash128-2-4 sanity" { 106 | const vectors = [_][16]u8{ 107 | "\xa3\x81\x7f\x04\xba\x25\xa8\xe6\x6d\xf6\x72\x14\xc7\x55\x02\x93".*, 108 | "\xda\x87\xc1\xd8\x6b\x99\xaf\x44\x34\x76\x59\x11\x9b\x22\xfc\x45".*, 109 | "\x81\x77\x22\x8d\xa4\xa4\x5d\xc7\xfc\xa3\x8b\xde\xf6\x0a\xff\xe4".*, 110 | "\x9c\x70\xb6\x0c\x52\x67\xa9\x4e\x5f\x33\xb6\xb0\x29\x85\xed\x51".*, 111 | "\xf8\x81\x64\xc1\x2d\x9c\x8f\xaf\x7d\x0f\x6e\x7c\x7b\xcd\x55\x79".*, 112 | "\x13\x68\x87\x59\x80\x77\x6f\x88\x54\x52\x7a\x07\x69\x0e\x96\x27".*, 113 | "\x14\xee\xca\x33\x8b\x20\x86\x13\x48\x5e\xa0\x30\x8f\xd7\xa1\x5e".*, 114 | "\xa1\xf1\xeb\xbe\xd8\xdb\xc1\x53\xc0\xb8\x4a\xa6\x1f\xf0\x82\x39".*, 115 | "\x3b\x62\xa9\xba\x62\x58\xf5\x61\x0f\x83\xe2\x64\xf3\x14\x97\xb4".*, 116 | "\x26\x44\x99\x06\x0a\xd9\xba\xab\xc4\x7f\x8b\x02\xbb\x6d\x71\xed".*, 117 | "\x00\x11\x0d\xc3\x78\x14\x69\x56\xc9\x54\x47\xd3\xf3\xd0\xfb\xba".*, 118 | "\x01\x51\xc5\x68\x38\x6b\x66\x77\xa2\xb4\xdc\x6f\x81\xe5\xdc\x18".*, 119 | 
"\xd6\x26\xb2\x66\x90\x5e\xf3\x58\x82\x63\x4d\xf6\x85\x32\xc1\x25".*, 120 | "\x98\x69\xe2\x47\xe9\xc0\x8b\x10\xd0\x29\x93\x4f\xc4\xb9\x52\xf7".*, 121 | "\x31\xfc\xef\xac\x66\xd7\xde\x9c\x7e\xc7\x48\x5f\xe4\x49\x49\x02".*, 122 | "\x54\x93\xe9\x99\x33\xb0\xa8\x11\x7e\x08\xec\x0f\x97\xcf\xc3\xd9".*, 123 | "\x6e\xe2\xa4\xca\x67\xb0\x54\xbb\xfd\x33\x15\xbf\x85\x23\x05\x77".*, 124 | "\x47\x3d\x06\xe8\x73\x8d\xb8\x98\x54\xc0\x66\xc4\x7a\xe4\x77\x40".*, 125 | "\xa4\x26\xe5\xe4\x23\xbf\x48\x85\x29\x4d\xa4\x81\xfe\xae\xf7\x23".*, 126 | "\x78\x01\x77\x31\xcf\x65\xfa\xb0\x74\xd5\x20\x89\x52\x51\x2e\xb1".*, 127 | "\x9e\x25\xfc\x83\x3f\x22\x90\x73\x3e\x93\x44\xa5\xe8\x38\x39\xeb".*, 128 | "\x56\x8e\x49\x5a\xbe\x52\x5a\x21\x8a\x22\x14\xcd\x3e\x07\x1d\x12".*, 129 | "\x4a\x29\xb5\x45\x52\xd1\x6b\x9a\x46\x9c\x10\x52\x8e\xff\x0a\xae".*, 130 | "\xc9\xd1\x84\xdd\xd5\xa9\xf5\xe0\xcf\x8c\xe2\x9a\x9a\xbf\x69\x1c".*, 131 | "\x2d\xb4\x79\xae\x78\xbd\x50\xd8\x88\x2a\x8a\x17\x8a\x61\x32\xad".*, 132 | "\x8e\xce\x5f\x04\x2d\x5e\x44\x7b\x50\x51\xb9\xea\xcb\x8d\x8f\x6f".*, 133 | "\x9c\x0b\x53\xb4\xb3\xc3\x07\xe8\x7e\xae\xe0\x86\x78\x14\x1f\x66".*, 134 | "\xab\xf2\x48\xaf\x69\xa6\xea\xe4\xbf\xd3\xeb\x2f\x12\x9e\xeb\x94".*, 135 | "\x06\x64\xda\x16\x68\x57\x4b\x88\xb9\x35\xf3\x02\x73\x58\xae\xf4".*, 136 | "\xaa\x4b\x9d\xc4\xbf\x33\x7d\xe9\x0c\xd4\xfd\x3c\x46\x7c\x6a\xb7".*, 137 | "\xea\x5c\x7f\x47\x1f\xaf\x6b\xde\x2b\x1a\xd7\xd4\x68\x6d\x22\x87".*, 138 | "\x29\x39\xb0\x18\x32\x23\xfa\xfc\x17\x23\xde\x4f\x52\xc4\x3d\x35".*, 139 | "\x7c\x39\x56\xca\x5e\xea\xfc\x3e\x36\x3e\x9d\x55\x65\x46\xeb\x68".*, 140 | "\x77\xc6\x07\x71\x46\xf0\x1c\x32\xb6\xb6\x9d\x5f\x4e\xa9\xff\xcf".*, 141 | "\x37\xa6\x98\x6c\xb8\x84\x7e\xdf\x09\x25\xf0\xf1\x30\x9b\x54\xde".*, 142 | "\xa7\x05\xf0\xe6\x9d\xa9\xa8\xf9\x07\x24\x1a\x2e\x92\x3c\x8c\xc8".*, 143 | "\x3d\xc4\x7d\x1f\x29\xc4\x48\x46\x1e\x9e\x76\xed\x90\x4f\x67\x11".*, 144 | "\x0d\x62\xbf\x01\xe6\xfc\x0e\x1a\x0d\x3c\x47\x51\xc5\xd3\x69\x2b".*, 145 | 
"\x8c\x03\x46\x8b\xca\x7c\x66\x9e\xe4\xfd\x5e\x08\x4b\xbe\xe7\xb5".*, 146 | "\x52\x8a\x5b\xb9\x3b\xaf\x2c\x9c\x44\x73\xcc\xe5\xd0\xd2\x2b\xd9".*, 147 | "\xdf\x6a\x30\x1e\x95\xc9\x5d\xad\x97\xae\x0c\xc8\xc6\x91\x3b\xd8".*, 148 | "\x80\x11\x89\x90\x2c\x85\x7f\x39\xe7\x35\x91\x28\x5e\x70\xb6\xdb".*, 149 | "\xe6\x17\x34\x6a\xc9\xc2\x31\xbb\x36\x50\xae\x34\xcc\xca\x0c\x5b".*, 150 | "\x27\xd9\x34\x37\xef\xb7\x21\xaa\x40\x18\x21\xdc\xec\x5a\xdf\x89".*, 151 | "\x89\x23\x7d\x9d\xed\x9c\x5e\x78\xd8\xb1\xc9\xb1\x66\xcc\x73\x42".*, 152 | "\x4a\x6d\x80\x91\xbf\x5e\x7d\x65\x11\x89\xfa\x94\xa2\x50\xb1\x4c".*, 153 | "\x0e\x33\xf9\x60\x55\xe7\xae\x89\x3f\xfc\x0e\x3d\xcf\x49\x29\x02".*, 154 | "\xe6\x1c\x43\x2b\x72\x0b\x19\xd1\x8e\xc8\xd8\x4b\xdc\x63\x15\x1b".*, 155 | "\xf7\xe5\xae\xf5\x49\xf7\x82\xcf\x37\x90\x55\xa6\x08\x26\x9b\x16".*, 156 | "\x43\x8d\x03\x0f\xd0\xb7\xa5\x4f\xa8\x37\xf2\xad\x20\x1a\x64\x03".*, 157 | "\xa5\x90\xd3\xee\x4f\xbf\x04\xe3\x24\x7e\x0d\x27\xf2\x86\x42\x3f".*, 158 | "\x5f\xe2\xc1\xa1\x72\xfe\x93\xc4\xb1\x5c\xd3\x7c\xae\xf9\xf5\x38".*, 159 | "\x2c\x97\x32\x5c\xbd\x06\xb3\x6e\xb2\x13\x3d\xd0\x8b\x3a\x01\x7c".*, 160 | "\x92\xc8\x14\x22\x7a\x6b\xca\x94\x9f\xf0\x65\x9f\x00\x2a\xd3\x9e".*, 161 | "\xdc\xe8\x50\x11\x0b\xd8\x32\x8c\xfb\xd5\x08\x41\xd6\x91\x1d\x87".*, 162 | "\x67\xf1\x49\x84\xc7\xda\x79\x12\x48\xe3\x2b\xb5\x92\x25\x83\xda".*, 163 | "\x19\x38\xf2\xcf\x72\xd5\x4e\xe9\x7e\x94\x16\x6f\xa9\x1d\x2a\x36".*, 164 | "\x74\x48\x1e\x96\x46\xed\x49\xfe\x0f\x62\x24\x30\x16\x04\x69\x8e".*, 165 | "\x57\xfc\xa5\xde\x98\xa9\xd6\xd8\x00\x64\x38\xd0\x58\x3d\x8a\x1d".*, 166 | "\x9f\xec\xde\x1c\xef\xdc\x1c\xbe\xd4\x76\x36\x74\xd9\x57\x53\x59".*, 167 | "\xe3\x04\x0c\x00\xeb\x28\xf1\x53\x66\xca\x73\xcb\xd8\x72\xe7\x40".*, 168 | "\x76\x97\x00\x9a\x6a\x83\x1d\xfe\xcc\xa9\x1c\x59\x93\x67\x0f\x7a".*, 169 | "\x58\x53\x54\x23\x21\xf5\x67\xa0\x05\xd5\x47\xa4\xf0\x47\x59\xbd".*, 170 | "\x51\x50\xd1\x77\x2f\x50\x83\x4a\x50\x3e\x06\x9a\x97\x3f\xbd\x7c".*, 171 | }; 172 | 173 | const 
siphash = SipHash24(u128); 174 | 175 | var buffer: [64]u8 = undefined; 176 | for (vectors, 0..) |vector, i| { 177 | buffer[i] = @as(u8, @intCast(i)); 178 | 179 | var out: [siphash.mac_length]u8 = undefined; 180 | siphash.create(&out, buffer[0..i], test_key[0..]); 181 | try testing.expectEqual(out, vector); 182 | } 183 | } 184 | -------------------------------------------------------------------------------- /src/root.zig: -------------------------------------------------------------------------------- 1 | pub const base58 = @import("base58/encode.zig"); 2 | pub const bech32 = @import("bech32/bech32.zig"); 3 | pub const bips = @import("bips/lib.zig"); 4 | pub const hashes = @import("hashes/lib.zig"); 5 | pub const secp256k1 = @import("secp256k1"); 6 | pub const types = @import("types/lib.zig"); 7 | pub const wif = @import("wif/wif.zig"); 8 | 9 | test { 10 | const std = @import("std"); 11 | std.testing.log_level = .warn; 12 | std.testing.refAllDeclsRecursive(@This()); 13 | } 14 | -------------------------------------------------------------------------------- /src/types/CompacSizeUint.zig: -------------------------------------------------------------------------------- 1 | //! CompactSize Unsigned Integer 2 | //! 3 | //! A wrapper arround an `u64` exposing the en/decoding methods. 4 | //! 5 | //! * Specifications: 6 | //! https://btcinformation.org/en/developer-reference#compactsize-unsigned-integers 7 | //! 8 | //! * Implementation details: 9 | //! This implementation accounts for system endianness, and will work correctly on both big and little endian system. 
const Self = @This();

const std = @import("std");
const io = std.io;
const native_endian = @import("builtin").target.cpu.arch.endian();

/// The wrapped integer value.
inner: u64,

/// Wraps `inner` in a CompactSize value.
pub inline fn new(inner: u64) Self {
    return .{ .inner = inner };
}

/// Returns the wrapped `u64`.
pub inline fn value(self: Self) u64 {
    return self.inner;
}

/// Number of bytes the encoded form occupies: 1, 3, 5 or 9,
/// depending on the magnitude of the value.
pub fn hint_encoded_len(self: Self) usize {
    const v = self.value();
    if (v <= 252) return 1;
    if (v <= 0xffff) return 3;
    if (v <= 0xffffffff) return 5;
    return 9;
}

/// Encodes the inner value into a freshly allocated buffer.
///
/// The caller is responsible for freeing the returned memory.
pub fn encode(self: Self, allocator: std.mem.Allocator) std.mem.Allocator.Error![]u8 {
    const out = try allocator.alloc(u8, self.hint_encoded_len());
    self.encodeToSlice(out);
    return out;
}

/// Encodes the inner value into a slice
///
/// dest.len must be >= self.hint_encoded_len().
pub fn encodeToSlice(self: Self, dest: []u8) void {
    const v = self.value();
    // Documented contract: dest.len >= hint_encoded_len(). Make the
    // violation explicit in safe builds instead of a raw bounds panic.
    std.debug.assert(dest.len >= self.hint_encoded_len());

    if (v <= 252) {
        dest[0] = @intCast(v);
    } else if (v <= 0xffff) {
        dest[0] = 0xfd;
        std.mem.writeInt(u16, dest[1..3], @intCast(v), .little);
    } else if (v <= 0xffffffff) {
        dest[0] = 0xfe;
        std.mem.writeInt(u32, dest[1..5], @intCast(v), .little);
    } else {
        dest[0] = 0xff;
        std.mem.writeInt(u64, dest[1..9], @intCast(v), .little);
    }
}

/// Encodes the inner value into the writer.
pub fn encodeToWriter(self: Self, w: anytype) !void {
    comptime {
        if (!std.meta.hasFn(@TypeOf(w), "writeByte")) @compileError("Expects w to have fn 'writeByte'.");
        if (!std.meta.hasFn(@TypeOf(w), "writeInt")) @compileError("Expects w to have fn 'writeInt'.");
    }

    const val = self.value();

    if (val <= 252) {
        try w.writeInt(u8, @intCast(val), .little);
    } else if (val <= 0xffff) {
        try w.writeByte(0xfd);
        try w.writeInt(u16, @intCast(val), .little);
    } else if (val <= 0xffffffff) {
        try w.writeByte(0xfe);
        try w.writeInt(u32, @intCast(val), .little);
    } else {
        try w.writeByte(0xff);
        try w.writeInt(u64, @intCast(val), .little);
    }
}

pub const DecodeCompactSizeUintError = error{
    EmptyInput,
    InputTooShort,
};

/// Parses an encoded u64 as a CompactSizeUint from a byte slice.
///
/// Note: non-canonical encodings (e.g. 0xfd followed by a value < 253) are
/// accepted, matching the previous behavior.
pub fn decodeSlice(input: []const u8) DecodeCompactSizeUintError!Self {
    if (input.len == 0) return error.EmptyInput;

    const num_len: usize = switch (input[0]) {
        0xff => 8,
        0xfe => 4,
        0xfd => 2,
        else => return .{ .inner = input[0] },
    };
    if (input.len < num_len + 1) return error.InputTooShort;

    var buffer = [_]u8{0} ** 8;
    @memcpy(buffer[0..num_len], input[1 .. num_len + 1]);

    // The wire format is little-endian; std.mem.readInt performs the
    // byte-order conversion correctly on both little- and big-endian hosts.
    // (The previous code applied `@byteSwap` to a byte *array* and discarded
    // the result — `@byteSwap` requires an integer operand and returns a
    // value, so big-endian decoding was broken.)
    return .{ .inner = std.mem.readInt(u64, &buffer, .little) };
}

/// Parses an encoded u64 as a CompactSizeUint from a reader.
pub fn decodeReader(r: anytype) !Self {
    comptime {
        if (!std.meta.hasFn(@TypeOf(r), "readByte")) @compileError("Expects r to have fn 'readByte'.");
        if (!std.meta.hasFn(@TypeOf(r), "readNoEof")) @compileError("Expects r to have fn 'readNoEof'.");
    }

    const first_byte = try r.readByte();
    const num_len: usize = switch (first_byte) {
        0xff => 8,
        0xfe => 4,
        0xfd => 2,
        else => return .{ .inner = first_byte },
    };

    var buffer = std.mem.zeroes([8]u8);
    try r.readNoEof(buffer[0..num_len]);

    // Same endianness fix as decodeSlice: readInt handles host byte order.
    return .{ .inner = std.mem.readInt(u64, &buffer, .little) };
}

// TESTS

test "ok_full_flow_for_key_values" {
    const values = [_]u64{ 0, 252, 0xffff, 0xffffffff, std.math.maxInt(u64) };
    const zeroed_buffer = [_]u8{0} ** 9;
    var buffer = [_]u8{0} ** 9;
    const allocator = std.testing.allocator;

    for (values) |num| {
        const compact = Self.new(num);
        // encode
        {
            const encoding = try compact.encode(allocator);
            defer allocator.free(encoding);
            const decoded = try Self.decodeSlice(encoding);
            try std.testing.expectEqual(decoded.value(), num);
        }
        // encode_to_slice
        {
            @memcpy(buffer[0..], zeroed_buffer[0..]);
            const buf = buffer[9 - compact.hint_encoded_len() ..];
            compact.encodeToSlice(buf);
            const decoded = try Self.decodeSlice(buf);
            try std.testing.expectEqual(decoded.value(), num);
        }
        // encode_to_writer
        {
            @memcpy(buffer[0..], zeroed_buffer[0..]);
            var fbs = std.io.fixedBufferStream(&buffer);
            const writer = fbs.writer();
            const reader = fbs.reader();
            try compact.encodeToWriter(writer);
            fbs.reset();
            const decoded = try Self.decodeReader(reader);
            try std.testing.expectEqual(decoded.value(), num);
        }
    }
}

test "ok_full_flow_for_1k_random_values" {
    const rand = std.crypto.random;
    const zeroed_buffer = [_]u8{0} ** 9;
    var buffer = [_]u8{0} ** 9;
    const allocator = std.testing.allocator;

    for (0..1000) |_| {
        const num = rand.int(u64);

        const compact = Self.new(num);

        // encode
        {
            const encoding = try compact.encode(allocator);
            defer allocator.free(encoding);
            const decoded = try Self.decodeSlice(encoding);
            try std.testing.expectEqual(decoded.value(), num);
        }
        // encode_to_slice
        {
            @memcpy(buffer[0..], zeroed_buffer[0..]);
            const buf = buffer[9 - compact.hint_encoded_len() ..];
            compact.encodeToSlice(buf);
            const decoded = try Self.decodeSlice(buf);
            try std.testing.expectEqual(decoded.value(), num);
        }
        // encode_to_writer
        {
            @memcpy(buffer[0..], zeroed_buffer[0..]);
            var fbs = std.io.fixedBufferStream(&buffer);
            const writer = fbs.writer();
            const reader = fbs.reader();
            try compact.encodeToWriter(writer);
            fbs.reset();
            const decoded = try Self.decodeReader(reader);
            try std.testing.expectEqual(decoded.value(), num);
        }
    }
}

test "ko_decode_slice" {
    var input = [_]u8{0} ** 10;

    input[0] = 0xff;
    try std.testing.expectError(error.InputTooShort, Self.decodeSlice(input[0..8]));
    _ = try Self.decodeSlice(input[0..9]);
    _ = try Self.decodeSlice(input[0..]);

    input[0] = 0xfe;
    try std.testing.expectError(error.InputTooShort, Self.decodeSlice(input[0..4]));
    _ = try Self.decodeSlice(input[0..5]);
    _ = try Self.decodeSlice(input[0..]);

    input[0] = 0xfd;
    try std.testing.expectError(error.InputTooShort, Self.decodeSlice(input[0..2]));
    _ = try Self.decodeSlice(input[0..3]);
    _ = try Self.decodeSlice(input[0..]);

    input[0] = 0xfc;
    try std.testing.expectError(error.EmptyInput, Self.decodeSlice(input[0..0]));
    _ = try Self.decodeSlice(input[0..1]);
    _ = try Self.decodeSlice(input[0..]);
}

test "ko_encode_when_oom" {
    const allocator = std.testing.failing_allocator;

    const num = Self.new(42);

    try std.testing.expectError(error.OutOfMemory, num.encode(allocator));
}
--------------------------------------------------------------------------------
/src/types/lib.zig:
--------------------------------------------------------------------------------
// NOTE(review): "CompatSizeUint" looks like a typo for "CompactSizeUint";
// renaming would break downstream users, so it is left as-is.
pub const CompatSizeUint = @import("./CompacSizeUint.zig");
--------------------------------------------------------------------------------
/src/wif/lib.zig:
--------------------------------------------------------------------------------
pub const Wif = @import("wif.zig").WIF;
--------------------------------------------------------------------------------
/src/wif/wif.zig:
--------------------------------------------------------------------------------
const PrivateKey = @import("../bips/bip32/key.zig").PrivateKey;
const Network = @import("../bips/bip32/bip32.zig").Network;
const std = @import("std");
const Base58Encoder = @import("../base58/base58.zig").Encoder;
const Base58Decoder = @import("../base58/base58.zig").Decoder;
const secp256k1 = @import("secp256k1");

/// WIF as defined in https://en.bitcoin.it/wiki/Wallet_import_format
pub const WIF_PREFIX_MAINNET: u8 = 0x80;
pub const WIF_PREFIX_TESTNET: u8 = 0xef;
pub const WIF_COMPRESSED_FLAG: u8 = 0x01;

pub const WIFDecodeError = error{ InvalidNetwork, InvalidChecksum };

pub const WIF
= struct {
    const Self = @This();

    /// Raw bytes backing this WIF.
    ///
    /// NOTE(review): the two constructors disagree about what this holds:
    /// `fromPrivateKey` stores the base58-ENCODED string, while `fromString`
    /// stores the DECODED payload (prefix + key [+ flag] + checksum).
    /// `toString` is only meaningful on the former and `toPrivateKey` only on
    /// the latter — confirm the intended canonical representation.
    ///
    /// NOTE(review): both constructors return a slice into a function-local
    /// buffer, so `inner` dangles once the constructor returns; fixing this
    /// requires changing the field to a fixed-size array + length (a breaking
    /// change), so it is flagged here rather than changed.
    inner: []u8,

    /// First four bytes of SHA-256(SHA-256(payload)) — the base58check checksum.
    fn checksum(payload: []const u8) [4]u8 {
        const Sha256 = std.crypto.hash.sha2.Sha256;
        var first: [Sha256.digest_length]u8 = undefined;
        Sha256.hash(payload, &first, .{});
        var second: [Sha256.digest_length]u8 = undefined;
        Sha256.hash(&first, &second, .{});
        return second[0..4].*;
    }

    /// Serializes a private key to its WIF form: base58 of
    /// prefix + 32-byte key [+ compression flag] + 4 checksum bytes.
    pub fn fromPrivateKey(private_key: PrivateKey) !Self {
        const max_size = 1 + 32 + 1 + 4; // prefix + key + compressed flag + checksum
        var actual_size: u8 = max_size - 1;
        if (private_key.compressed) {
            actual_size += 1;
        }
        var buf = [_]u8{0} ** max_size;

        // Network prefix byte.
        buf[0] = if (private_key.network == Network.MAINNET)
            WIF_PREFIX_MAINNET
        else
            WIF_PREFIX_TESTNET;

        @memcpy(buf[1..33], private_key.inner.data[0..32]);

        if (private_key.compressed) {
            buf[33] = WIF_COMPRESSED_FLAG;
        }

        const check = checksum(buf[0 .. actual_size - 4]);
        @memcpy(buf[actual_size - 4 .. actual_size], check[0..]);

        // base58 encode (max WIF length is 52 characters).
        const encoder = Base58Encoder{};
        var encode_buf = [_]u8{0} ** 52;
        const encode_size = encoder.encode(buf[0..actual_size], &encode_buf);
        return WIF{
            .inner = encode_buf[0..encode_size],
        };
    }

    /// Returns the stored bytes. Only meaningful for a WIF created by
    /// `fromPrivateKey`, where `inner` is the base58 string.
    pub fn toString(self: WIF) []u8 {
        return self.inner;
    }

    /// Parses a base58check-encoded WIF string, verifying its checksum.
    pub fn fromString(wif: []const u8) !WIF {
        // decode base58
        const decoder = Base58Decoder{};

        var decoded = [_]u8{0} ** 38; // max decoded len is 38
        const decode_size = try decoder.decode(wif, &decoded);

        // Too short to even contain a prefix and a 4-byte checksum.
        if (decode_size < 5) return WIFDecodeError.InvalidChecksum;

        const check = checksum(decoded[0 .. decode_size - 4]);
        if (!std.mem.eql(u8, decoded[decode_size - 4 .. decode_size], check[0..])) {
            return WIFDecodeError.InvalidChecksum;
        }

        return WIF{
            .inner = decoded[0..decode_size],
        };
    }

    /// Reconstructs the private key from a DECODED WIF payload
    /// (i.e. a `WIF` produced by `fromString`).
    pub fn toPrivateKey(self: WIF) !PrivateKey {
        const network = switch (self.inner[0]) {
            WIF_PREFIX_MAINNET => Network.MAINNET,
            WIF_PREFIX_TESTNET => Network.TESTNET,
            else => return WIFDecodeError.InvalidNetwork,
        };

        // 38 bytes = prefix + key + compression flag + checksum; 37 bytes is
        // the uncompressed layout without the flag. Length is the reliable
        // signal: the previous `inner[33] == 0x01` check misfired whenever an
        // uncompressed key's first checksum byte happened to be 0x01.
        const compressed = self.inner.len == 38;

        const data: [32]u8 = self.inner[1..33].*; // secp256k1.SecretKey.fromSlice has some weird effect on the array
        return PrivateKey{ .network = network, .compressed = compressed, .inner = try secp256k1.SecretKey.fromSlice(&data) };
    }
};

test "WIF with compressed private key" {
    const privateKey = PrivateKey{ .network = Network.MAINNET, .compressed = true, .inner = try secp256k1.SecretKey.fromString("7bea4d472aa93e49321bbde5db88b126b9435482e1f39d84664530a5f40408cd") };
    const wif = try WIF.fromPrivateKey(privateKey);
    const expected = "L1NawHPsZVHsnW4DUBC7K36LzXfcsLck85fMSoEGyT4LMZv9xSjD";
    const actual = wif.toString();
    try std.testing.expectEqualSlices(u8, expected[0..], actual[0..]);

    const got_wif = try WIF.fromString(expected);
    const got_privateKey = try got_wif.toPrivateKey();
    try std.testing.expectEqualDeep(privateKey, got_privateKey);
}

test "WIF with uncompressed private key 2" {
    const privateKey = PrivateKey{ .network = Network.MAINNET, .compressed = false, .inner = try secp256k1.SecretKey.fromString("0C28FCA386C7A227600B2FE50B7CAE11EC86D3BF1FBE471BE89827E19D72AA1D") };
    const wif = try WIF.fromPrivateKey(privateKey);
    const expected = "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ";
    const actual = wif.toString();
    try std.testing.expectEqualSlices(u8, expected[0..], actual[0..]);

    const got_wif = try WIF.fromString(expected);
    const got_privateKey = try got_wif.toPrivateKey();
    try std.testing.expectEqualDeep(privateKey, got_privateKey);
}

test "WIF with uncompressed private key" {
    const privateKey = PrivateKey{ .network = Network.MAINNET, .compressed = false, .inner = try secp256k1.SecretKey.fromString("46605abb568e1566834e7ee57e271964534d8fc3b23ca5f546b081ad7e233671") };
    const wif = try WIF.fromPrivateKey(privateKey);
    const expected = "5JMHFZHuMcVnqVBARmg3jW3LMxdB6qbJtesC5xhXRji6wabvbWu";
    const actual = wif.toString();
    try std.testing.expectEqualSlices(u8, expected[0..], actual[0..]);

    const got_wif = try WIF.fromString(expected);
    const got_privateKey = try got_wif.toPrivateKey();
    try std.testing.expectEqualDeep(privateKey, got_privateKey);
}

test "WIF with compressed testnet private key" {
    const privateKey = PrivateKey{ .network = Network.TESTNET, .compressed = true, .inner = try secp256k1.SecretKey.fromString("46605abb568e1566834e7ee57e271964534d8fc3b23ca5f546b081ad7e233671") };
    const wif = try WIF.fromPrivateKey(privateKey);
    const expected = "cPwWCAXTX3NLUSq7zjzURugN5jp5FDa832H13KJNoJARUPsaTJ9G";
    const actual = wif.toString();
    try std.testing.expectEqualSlices(u8, expected[0..], actual[0..]);

    const got_wif = try WIF.fromString(expected);
    const got_privateKey = try got_wif.toPrivateKey();
    try std.testing.expectEqualDeep(privateKey, got_privateKey);
}

test "WIF with uncompressed testnet private key" {
    const privateKey = PrivateKey{ .network = Network.TESTNET, .compressed = false, .inner = try secp256k1.SecretKey.fromString("46605abb568e1566834e7ee57e271964534d8fc3b23ca5f546b081ad7e233671") };
    const wif = try WIF.fromPrivateKey(privateKey);
    const expected = "927uqJ7SwqZvoYgT47Zxc6bJ1cytG18WEbj9Ab42mUT9icaLVhF";
    const actual = wif.toString();
    try std.testing.expectEqualSlices(u8, expected[0..], actual[0..]);

    const got_wif = try WIF.fromString(expected);
    const got_privateKey = try got_wif.toPrivateKey();
    try std.testing.expectEqualDeep(privateKey, got_privateKey);
}