├── .github └── workflows │ ├── libsec.yml │ └── python.yml ├── .gitignore ├── LICENSE.md ├── MANIFEST.in ├── README.md ├── buidl ├── __init__.py ├── bcur.py ├── bech32.py ├── bip39_words.txt ├── blinding.py ├── block.py ├── bloomfilter.py ├── cecc.py ├── chash.py ├── compactfilter.py ├── descriptor.py ├── ecc.py ├── hash.py ├── hd.py ├── helper.py ├── libsec.h ├── libsec_build.py ├── libsec_status.py ├── merkleblock.py ├── mnemonic.py ├── network.py ├── op.py ├── pbkdf2.py ├── pecc.py ├── phash.py ├── psbt.py ├── psbt_helper.py ├── script.py ├── shamir.py ├── siphash.py ├── slip39_words.txt ├── taproot.py ├── test │ ├── __init__.py │ ├── conftest.py │ ├── test_bcur.py │ ├── test_bech32.py │ ├── test_blinding.py │ ├── test_block.py │ ├── test_bloomfilter.py │ ├── test_compactfilter.py │ ├── test_descriptor.py │ ├── test_ecc.py │ ├── test_hash.py │ ├── test_hd.py │ ├── test_helper.py │ ├── test_merkleblock.py │ ├── test_mnemonic.py │ ├── test_musig.py │ ├── test_network.py │ ├── test_op.py │ ├── test_pecc.py │ ├── test_psbt.py │ ├── test_psbt_helper.py │ ├── test_schnorr.py │ ├── test_script.py │ ├── test_shamir.py │ ├── test_taproot.py │ ├── test_timelock.py │ ├── test_tx.py │ └── tx.cache ├── timelock.py ├── tx.py └── witness.py ├── clean.sh ├── docs └── multiwallet.md ├── multiwallet.py ├── requirements-libsec.txt ├── requirements-test.txt ├── run_tests.sh ├── setup.cfg ├── setup.py ├── singlesweep.py ├── test_multiwallet.py ├── test_singlesweep.py └── update_pypi.sh /.github/workflows/libsec.yml: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-python 2 | 3 | name: fast libsec 4 | on: push 5 | jobs: 6 | 7 | all-tests: 8 | # Super fast but only Ubuntu 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | matrix: 12 | python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] 13 | # Downgrading to 20.04 to avoid this openssl bug 14 | # https://github.com/bitcoin/bitcoin/issues/23710 15 | os: [ubuntu-20.04] 16 | steps: 17 | - uses: actions/checkout@v3 18 | - uses: actions/setup-python@v4 19 | with: 20 | python-version: ${{ matrix.python-version }} 21 | cache: 'pip' 22 | cache-dependency-path: '**/requirements*.txt' 23 | - name: Install python dependencies 24 | run: | 25 | pip install -r requirements-test.txt && pip install -r requirements-libsec.txt 26 | - name: pytest python unit tests, excluding slowest ones 27 | # all of the tests excluded here get run below once libsec is installed 28 | run: | 29 | pytest -vv --durations=0 -k "not musig and not psbt and not descriptor and not hd and not taproot and not script and not schnorr and not blinding and not shamir and not mnemonic" buidl/test 30 | - name: Install secp256k1 dependencies 31 | run: | 32 | sudo apt install -y libffi-dev pkg-config 33 | - name: Install secp256k1 34 | run: | 35 | # https://github.com/bitcoin-core/secp256k1/issues/542 36 | time git clone --depth 1 https://github.com/bitcoin-core/secp256k1 && cd secp256k1 && time ./autogen.sh && time ./configure --prefix=/usr --enable-module-extrakeys --enable-module-schnorrsig --enable-experimental && time make && time sudo make install 37 | - name: Build libsec 38 | run: | 39 | python3 -m pip install --editable . && cd buidl && python3 libsec_build.py && cd .. 
&& python3 -c "from buidl import *; print('success') if is_libsec_enabled() else print('LIBSEC INSTALL FAIL')" 40 | - name: libsec ALL unit tests 41 | run: | 42 | pytest -vv --durations=0 buidl/test 43 | - name: Lint with flake8 44 | run: | 45 | # stop the build if there are Python syntax errors or undefined names 46 | flake8 . --count --statistics 47 | - name: Lint with black 48 | run: | 49 | black . --diff --check --exclude='tests_wycheproof_generate.py' 50 | - name: pytest CLI singlesig 51 | run: | 52 | # We retry these 3x if needed, which is a disgusting hack but GH is really buggy for CLI apps 53 | pytest -vv test_singlesweep.py || pytest -vv test_singlesweep.py || pytest -vv test_singlesweep.py 54 | - name: pytest CLI multisig 55 | run: | 56 | # We retry these 3x if needed, which is a disgusting hack but GH is really buggy for CLI apps 57 | pytest -vv test_multiwallet.py || pytest -vv test_multiwallet.py || pytest -vv test_multiwallet.py 58 | env: 59 | SKIP_GH_UNRELIABLE_TESTS: True 60 | -------------------------------------------------------------------------------- /.github/workflows/python.yml: -------------------------------------------------------------------------------- 1 | # https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-python 2 | 3 | name: slow python 4 | on: 5 | pull_request: 6 | # These tests are so slow that we only run them when a PR is opened/reopened 7 | # If an existing PR gets new commits, only the libsec tests will run (to manually trigger a full test-suite, close and reopen the github PR) 8 | types: [opened, reopened] 9 | 10 | jobs: 11 | 12 | unit-tests: 13 | runs-on: ${{ matrix.os }} 14 | strategy: 15 | matrix: 16 | # TODO: consider cutting down on python-version/os combos for speed 17 | python-version: [3.6, 3.7, 3.8, 3.9, "3.10"] 18 | os: [ubuntu-20.04, macos-11, macos-12, windows-2019, windows-2022] 19 | # TODO: add ubuntu-22.04 support! 
Something is up with openssl for that 20 | # https://github.com/bitcoin-core/secp256k1/issues/542 21 | exclude: 22 | # Ubuntu 22.04 doesn't come with python 3.6 installed 23 | - os: ubuntu-22.04 24 | python-version: 3.6 25 | steps: 26 | - uses: actions/checkout@v3 27 | - uses: actions/setup-python@v4 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | cache: 'pip' 31 | cache-dependency-path: '**/requirements-test.txt' 32 | - name: Install python dependencies 33 | run: | 34 | pip install -r requirements-test.txt 35 | - name: pytest unit tests in pure python 36 | run: | 37 | pytest -vv --durations=0 buidl/test 38 | 39 | 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | .venv/* 3 | .venv3/* 4 | *.ipynb_checkpoints* 5 | *__pycache__* 6 | bin/ 7 | lib/ 8 | lib64 9 | pyvenv.cfg 10 | share/ 11 | *.orig 12 | *.rej 13 | *.patch 14 | *.coverage 15 | *.swp 16 | *\.c 17 | *\.o 18 | *\.so 19 | *\.swp 20 | .DS_Store 21 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2020 Bitcoin Developers 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include buidl/bip39_words.txt 2 | include buidl/slip39_words.txt 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # `buidl` Bitcoin Library 2 | 3 | [![Python](https://github.com/buidl-bitcoin/buidl-python/actions/workflows/python.yml/badge.svg)](https://github.com/buidl-bitcoin/buidl-python/actions/workflows/python.yml) 4 | 5 | `buidl` is a `python3` bitcoin library with 0 dependencies. 6 | It is easy-to-read, has extensive test coverage, simple to install on airgapped computers (just copy over this directory). 
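A quick taste (a minimal sketch using the bech32 helpers defined in `buidl/bech32.py` further down in this repo; the address is the well-known BIP173 test vector, not a wallet of ours):

```python
from buidl.bech32 import decode_bech32

# Decode a segwit address into (network, witness version, witness program)
network, version, witness_program = decode_bech32(
    "bc1qw508d6qejxtdg4y5r3zarvary0c5xw7kv8f3t4"
)
print(network, version, witness_program.hex())  # mainnet, 0, and a 20-byte hash160
```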
7 | 8 | `buidl` has extensive feature support for the bitcoin blockchain: 9 | * **Trust-minimized** - easy-to-read cryptography implemented in straightforward/way with no third-party dependencies 10 | * **Performant** - optional [secp256k1 library bindings](https://github.com/bitcoin-core/secp256k1) offers a ~100x speedup, see [performance section below](#performance) 11 | * **Wallet tools** for various script types (`p2pkh`, `p2sh`, `p2sh-wrapped-p2wsh`, `p2wsh` and `p2tr` (coming soon) ), compressed/uncompressed pubkeys, address encodings, HD support (BIP32, BIP39, BIP44, seedpicker, etc), PSBT generation/decoding/validation, etc 12 | * **Extensive multisig support**/tooling for output descriptors, receive address validation, change address detection, fee verification, blinding xpubs, PSBTs, [BCUR v0](https://github.com/BlockchainCommons/Research/blob/master/papers/bcr-2020-005-ur.md) (v1 coming soon), airgap signing, etc 13 | * **SPV proofs and compact block filters** (Neutrino) - build powerful trust-minimized apps without scanning the whole blockchain 14 | * **P2P gossip network** - connect directly to the bitcoin network 15 | * **Footgun protection** - from elegantly structured OOP classes to [RFC6979](https://datatracker.ietf.org/doc/html/rfc6979) deterministic k-values, `buidl` is designed to be usable for non-cryptographers 16 | * **0 altcoins** - for maximum readability/focus. The only other supported coins are bitcoin's `testnet` and `signet`, which intentionally have no value. 17 | 18 | *This repository comes with zero guarantees, use at your own risk.* 19 | 20 | ## Installation 21 | 22 | #### Online 23 | ```bash 24 | $ pip3 install buidl --upgrade 25 | ``` 26 | 27 | #### Offline 28 | Download this repo and then run: 29 | ```bash 30 | $ python3 setup.py install 31 | ``` 32 | (alternatively, because `buidl` has no dependencies you can just `cd` into this root directory and call `buidl` without installation) 33 | 34 | ## Multiwallet 35 | `multiwallet` is a stateless CLI multisig PSBT wallet. 36 | Since `buidl` has no dependencies, you can run multiwallet by just `cd`ing to the root directory of this project: 37 | 38 | ```bash 39 | $ python3 multiwallet.py 40 | Welcome to multiwallet... 41 | ``` 42 | 43 | If you have installed `buidl`, you can run `multiwallet.py` from any directory: 44 | ```bash 45 | $ multiwallet.py 46 | Welcome to multiwallet... 47 | ``` 48 | 49 | For more information on installing multiwallet, see [multiwallet.md](docs/multiwallet.md) or check out [this demo](https://twitter.com/mflaxman/status/1321503036724989952). 50 | 51 | `singlesweep.py` works the same way for sweeping out of paper wallets, but is intentionally undocumented. 52 | 53 | ## Tests 54 | 55 | Run tests with `pytest`: 56 | ```bash 57 | $ git clone https://github.com/buidl-bitcoin/buidl-python.git && cd buidl-python 58 | $ pytest -v 59 | ``` 60 | (these will be 1-2 orders of magnitue faster with libsec bindings, see [performance section below](#performance)) 61 | 62 | Run `black`: 63 | ```bash 64 | $ black . --diff --check 65 | ``` 66 | 67 | Run `flake8`: 68 | ```bash 69 | $ flake8 . 70 | ``` 71 | 72 | ## Performance 73 | 74 | You can speed this library up ~100x by using C-bindings to [bitcoin core's `libsecp256k1` library](https://github.com/bitcoin-core/secp256k1). 
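You can confirm from a python shell whether the bindings are active (a small sketch using the same `is_libsec_enabled()` check as the install one-liner further below):

```python
from buidl import is_libsec_enabled

# False on a pure-python install; True once libsecp256k1 and the cffi bindings are built
print(is_libsec_enabled())
```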
75 | 76 | ### `libsecp256k1` Dependency Installation 77 | 78 | 79 | #### Easy (MacOS only) 80 | Use [brew](https://brew.sh/) to install `libsecp256k1`: 81 | ``` 82 | $ brew tap buidl-bitcoin/homebrew-libsecp256k1 83 | $ brew install pkg-config libffi libsecp256k1 84 | ``` 85 | 86 | #### Hard (Linux/Mac) 87 | Compile `libsecp256k1` from scratch with experimental modules enabled to make Schnorr signatures work: 88 | ```bash 89 | $ git clone https://github.com/bitcoin-core/secp256k1 90 | $ cd secp256k1 91 | $ ./autogen.sh 92 | $ ./configure --enable-module-extrakeys --enable-module-schnorrsig --enable-experimental 93 | $ make 94 | $ sudo make install 95 | ``` 96 | 97 | ### `buidl` Installation 98 | 99 | ```bash 100 | $ git clone git@github.com:buidl-bitcoin/buidl-python.git && cd buidl-python && python3 -m pip install -r requirements-libsec.txt && python3 -m pip install --editable . && cd buidl && python3 libsec_build.py && cd .. && python3 -c "from buidl import *; print('success') if is_libsec_enabled() else print('LIBSEC INSTALL FAIL')" 101 | 102 | ``` 103 | -------------------------------------------------------------------------------- /buidl/__init__.py: -------------------------------------------------------------------------------- 1 | # TODO: specify what specifically to import 2 | from .bcur import * 3 | from .bech32 import * 4 | from .blinding import * 5 | from .block import * 6 | from .bloomfilter import * 7 | from .descriptor import * 8 | from .ecc import * 9 | from .hd import * 10 | from .helper import * 11 | from .libsec_status import * 12 | from .merkleblock import * 13 | from .mnemonic import * 14 | from .network import * 15 | from .op import * 16 | from .pbkdf2 import * 17 | from .psbt import * 18 | from .psbt_helper import * 19 | from .script import * 20 | from .shamir import * 21 | from .taproot import * 22 | from .tx import * 23 | from .witness import * 24 | -------------------------------------------------------------------------------- /buidl/bcur.py: -------------------------------------------------------------------------------- 1 | from buidl.bech32 import ( 2 | cbor_encode, 3 | cbor_decode, 4 | bc32encode, 5 | bc32decode, 6 | uses_only_bech32_chars, 7 | ) 8 | from buidl.helper import is_intable 9 | 10 | from binascii import a2b_base64, b2a_base64 11 | from math import ceil 12 | 13 | import hashlib 14 | 15 | 16 | class BCURStringFormatError(RuntimeError): 17 | pass 18 | 19 | 20 | def bcur_encode(data): 21 | """Returns bcur encoded string and checksum""" 22 | cbor = cbor_encode(data) 23 | enc = bc32encode(cbor) 24 | h = hashlib.sha256(cbor).digest() 25 | enc_hash = bc32encode(h) 26 | return enc, enc_hash 27 | 28 | 29 | def bcur_decode(data, checksum=None): 30 | """Returns decoded data, verifies checksum if provided""" 31 | cbor = bc32decode(data) 32 | if checksum is not None: 33 | h = bc32decode(checksum) 34 | calculated_digest = hashlib.sha256(cbor).digest() 35 | if h != calculated_digest: 36 | raise ValueError(f"Calculated digest {calculated_digest} != {h}") 37 | return cbor_decode(cbor) 38 | 39 | 40 | def _parse_bcur_helper(bcur_string): 41 | """ 42 | This parses a bcur string and returns the following (or raises an error): 43 | 44 | payload, checksum, x, y 45 | 46 | Notes: 47 | - Works for both BCURSingle and BCURMulti. 48 | - All entries may be empty except for payload. 49 | - Checksums are not validated here, as checksum validation is different for single vs multi. 
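    - Illustrative example (hypothetical values): "ur:bytes/2of3/<58-char bech32 checksum>/<bech32 payload>" would return (payload, checksum, 2, 3).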
50 | """ 51 | 52 | if type(bcur_string) is not str: 53 | raise BCURStringFormatError( 54 | f"{bcur_string} is of type {type(bcur_string)}, not a string" 55 | ) 56 | 57 | string = bcur_string.lower().strip() 58 | 59 | if not string.startswith("ur:bytes/"): 60 | raise BCURStringFormatError(f"String {string} doesn't start with ur:bytes/") 61 | 62 | bcur_parts = string.split("/") 63 | if len(bcur_parts) == 2: 64 | # Non-animated QR code (just 1 qr, doesn't display 1of1 nor checksum) 65 | _, payload = bcur_parts 66 | checksum, x_int, y_int = None, 1, 1 67 | elif len(bcur_parts) == 3: 68 | # Non-animated QR code (just 1 qr, doesn't display 1of1 but does have checksum) 69 | _, checksum, payload = bcur_parts 70 | x_int, y_int = 1, 1 71 | elif len(bcur_parts) == 4: 72 | # Animated QR code 73 | _, xofy, checksum, payload = bcur_parts 74 | 75 | xofy_parts = xofy.split("of") 76 | if len(xofy_parts) != 2: 77 | raise BCURStringFormatError(f"x-of-y section malformed: {xofy_parts}") 78 | 79 | if not is_intable(xofy_parts[0]) or not is_intable(xofy_parts[1]): 80 | raise BCURStringFormatError( 81 | f"x and y (in x-of-y) must both be integers: {xofy_parts}" 82 | ) 83 | 84 | x_int = int(xofy_parts[0]) 85 | y_int = int(xofy_parts[1]) 86 | 87 | if x_int > y_int: 88 | raise BCURStringFormatError("x must be >= y (in x-of-y): {xofy_parts}") 89 | 90 | else: 91 | raise BCURStringFormatError(f"{string} doesn't have 2-4 slashes") 92 | 93 | if checksum: 94 | if len(checksum) != 58: 95 | raise BCURStringFormatError("Checksum must be 58 chars") 96 | if not uses_only_bech32_chars(checksum): 97 | raise BCURStringFormatError( 98 | f"checksum can only contain bech32 characters: {checksum}" 99 | ) 100 | 101 | if not uses_only_bech32_chars(payload): 102 | raise BCURStringFormatError( 103 | f"Payload can only contain bech32 characters: {payload}" 104 | ) 105 | 106 | return payload, checksum, x_int, y_int 107 | 108 | 109 | class BCURSingle: 110 | def __init__(self, text_b64, encoded=None, checksum=None): 111 | binary_b64 = a2b_base64(text_b64) 112 | enc, enc_hash = bcur_encode(data=binary_b64) 113 | if encoded and encoded != enc: 114 | raise ValueError(f"Calculated encoding {enc} != {encoded}") 115 | 116 | if checksum and checksum != enc_hash: 117 | raise ValueError(f"Calculated checksum {enc_hash} != {checksum}") 118 | 119 | self.text_b64 = text_b64 120 | self.encoded = enc 121 | self.enc_hash = enc_hash 122 | 123 | def __repr__(self): 124 | return self.encode() 125 | 126 | def encode(self, use_checksum=True): 127 | # Single QR, no x-of-y 128 | if use_checksum: 129 | return f"ur:bytes/{self.enc_hash}/{self.encoded}" 130 | else: 131 | return f"ur:bytes/{self.encoded}" 132 | 133 | @classmethod 134 | def parse(cls, to_parse): 135 | """Parses (decodes) a BCURSingle from a single BCUR string""" 136 | 137 | payload, checksum, x, y = _parse_bcur_helper(bcur_string=to_parse) 138 | 139 | if x != 1 or y != 1: 140 | raise BCURStringFormatError( 141 | f"BCURSingle must have x=1 and y=1, instead got x={x} and y={y}" 142 | ) 143 | 144 | # will throw an error if checksum is incorrect 145 | enc = bcur_decode(data=payload, checksum=checksum) 146 | return cls( 147 | text_b64=b2a_base64(enc).strip().decode(), 148 | encoded=payload, 149 | checksum=checksum, 150 | ) 151 | 152 | 153 | class BCURMulti: 154 | def __init__(self, text_b64, encoded=None, checksum=None): 155 | binary_b64 = a2b_base64(text_b64) 156 | enc, enc_hash = bcur_encode(data=binary_b64) 157 | if encoded and encoded != enc: 158 | raise ValueError(f"Calculated encoding {enc} != 
{encoded}") 159 | 160 | if checksum and checksum != enc_hash: 161 | raise ValueError(f"Calculated checksum {enc_hash} != {checksum}") 162 | 163 | self.checksum = checksum 164 | self.encoded = enc 165 | self.text_b64 = text_b64 166 | self.enc_hash = enc_hash 167 | 168 | def __repr__(self): 169 | return f"bcur: {self.checksum}\n{self.text_b64}\n" 170 | 171 | def encode(self, max_size_per_chunk=300, animate=True): 172 | """ 173 | Take some base64 text (i.e. a PSBT string) and encode it into multiple QR codes using Blockchain Commons Uniform Resources. 174 | 175 | If animate=False, then max_size_per_chunk is ignored and this returns a 1of1 with checksum. 176 | 177 | Use parse() to return a BCURMulti object from this encoded result. 178 | 179 | This algorithm makes all the chunks of about equal length. 180 | This makes sure that the last chunk is not (too) different in size which is visually noticeable when animation occurs 181 | Inspired by this JS implementation: 182 | https://github.com/cryptoadvance/specter-desktop/blob/da35e7d88072475746077432710c77f799017eb0/src/cryptoadvance/specter/templates/includes/qr-code.html 183 | """ 184 | 185 | if animate is False: 186 | number_of_chunks = 1 187 | else: 188 | number_of_chunks = ceil(len(self.encoded) / max_size_per_chunk) 189 | 190 | chunk_length = ceil(len(self.encoded) / number_of_chunks) 191 | 192 | # For number_of_chunks == 1 (with no checksum) use BCURSingle 193 | 194 | resulting_chunks = [] 195 | for cnt in range(number_of_chunks): 196 | start_idx = cnt * chunk_length 197 | finish_idx = (cnt + 1) * chunk_length 198 | resulting_chunks.append( 199 | f"ur:bytes/{cnt+1}of{number_of_chunks}/{self.enc_hash}/{self.encoded[start_idx:finish_idx]}" 200 | ) 201 | 202 | return resulting_chunks 203 | 204 | @classmethod 205 | def parse(cls, to_parse): 206 | """Parses a BCURMulti from a list of BCUR strings""" 207 | if type(to_parse) not in (list, tuple): 208 | raise BCURStringFormatError( 209 | f"{to_parse} is of type {type(to_parse)}, not a list/tuple" 210 | ) 211 | 212 | payloads = [] 213 | global_checksum, global_y = "", 0 214 | for cnt, bcur_string in enumerate(to_parse): 215 | entry_payload, entry_checksum, entry_x, entry_y = _parse_bcur_helper( 216 | bcur_string=bcur_string 217 | ) 218 | if cnt + 1 != entry_x: 219 | raise ValueError( 220 | f"BCUR strings not in order: got {entry_x} and was expecting {cnt+1}" 221 | ) 222 | 223 | # Initialize checksum and y (as in x-of-y) on first loop 224 | if cnt == 0: 225 | global_checksum = entry_checksum 226 | global_y = entry_y 227 | 228 | elif entry_checksum != global_checksum: 229 | raise ValueError( 230 | f"Entry {bcur_string} has checksum {entry_checksum} but we're expecting {global_checksum}" 231 | ) 232 | elif entry_y != global_y: 233 | raise ValueError( 234 | f"Entry {bcur_string} wants {entry_y} parts but we're expecting {global_y} parts" 235 | ) 236 | # All checks pass 237 | payloads.append(entry_payload) 238 | 239 | # will throw an error if checksum is incorrect 240 | enc = bcur_decode(data="".join(payloads), checksum=global_checksum) 241 | 242 | return cls(text_b64=b2a_base64(enc).strip().decode(), checksum=global_checksum) 243 | -------------------------------------------------------------------------------- /buidl/bech32.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from io import BytesIO 4 | 5 | from buidl.helper import int_to_big_endian 6 | 7 | BECH32_ALPHABET = "qpzry9x8gf2tvdw0s3jn54khce6mua7l" 8 | GEN = [0x3B6A57B2, 0x26508E6D, 
0x1EA119FA, 0x3D4233DD, 0x2A1462B3] 9 | 10 | BECH32_CHARS_RE = re.compile("^[qpzry9x8gf2tvdw0s3jn54khce6mua7l]*$") 11 | 12 | BECH32M_CONSTANT = 0x2BC830A3 13 | 14 | PREFIX = { 15 | "mainnet": "bc", 16 | "testnet": "tb", 17 | "regtest": "bcrt", 18 | "signet": "tb", 19 | } 20 | NET_FOR_PREFIX = {v: k for k, v in PREFIX.items() if k != "signet"} 21 | 22 | 23 | def uses_only_bech32_chars(string): 24 | return bool(BECH32_CHARS_RE.match(string.lower())) 25 | 26 | 27 | # next four functions are straight from BIP0173: 28 | # https://github.com/bitcoin/bips/blob/master/bip-0173.mediawiki 29 | def bech32_polymod(values): 30 | chk = 1 31 | for v in values: 32 | b = chk >> 25 33 | chk = (chk & 0x1FFFFFF) << 5 ^ v 34 | for i in range(5): 35 | chk ^= GEN[i] if ((b >> i) & 1) else 0 36 | return chk 37 | 38 | 39 | def bech32_hrp_expand(s): 40 | b = s.encode("ascii") 41 | return [x >> 5 for x in b] + [0] + [x & 31 for x in b] 42 | 43 | 44 | def bech32_verify_checksum(hrp, data): 45 | return bech32_polymod(bech32_hrp_expand(hrp) + data) == 1 46 | 47 | 48 | def bech32_create_checksum(hrp, data): 49 | values = bech32_hrp_expand(hrp) + data 50 | polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ 1 51 | return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] 52 | 53 | 54 | # next two functions are straight from BIP0350: 55 | # https://github.com/bitcoin/bips/blob/master/bip-0350.mediawiki 56 | def bech32m_verify_checksum(hrp, data): 57 | return bech32_polymod(bech32_hrp_expand(hrp) + data) == BECH32M_CONSTANT 58 | 59 | 60 | def bech32m_create_checksum(hrp, data): 61 | values = bech32_hrp_expand(hrp) + data 62 | polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ BECH32M_CONSTANT 63 | return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] 64 | 65 | 66 | def group_32(s): 67 | """Convert from 8-bit bytes to 5-bit array of integers""" 68 | result = [] 69 | unused_bits = 0 70 | current = 0 71 | for c in s: 72 | unused_bits += 8 73 | current = (current << 8) + c 74 | while unused_bits > 5: 75 | unused_bits -= 5 76 | result.append(current >> unused_bits) 77 | mask = (1 << unused_bits) - 1 78 | current &= mask 79 | result.append(current << (5 - unused_bits)) 80 | return result 81 | 82 | 83 | def convertbits(data, frombits, tobits, pad=True): 84 | """General power-of-2 base conversion.""" 85 | acc = 0 86 | bits = 0 87 | ret = [] 88 | maxv = (1 << tobits) - 1 89 | max_acc = (1 << (frombits + tobits - 1)) - 1 90 | for value in data: 91 | if value < 0 or (value >> frombits): 92 | return None 93 | acc = ((acc << frombits) | value) & max_acc 94 | bits += frombits 95 | while bits >= tobits: 96 | bits -= tobits 97 | ret.append((acc >> bits) & maxv) 98 | if pad: 99 | if bits: 100 | ret.append((acc << (tobits - bits)) & maxv) 101 | elif bits >= frombits or ((acc << (tobits - bits)) & maxv): 102 | return None 103 | return ret 104 | 105 | 106 | def bc32encode(data: bytes) -> str: 107 | """ 108 | bc32 encoding 109 | see https://github.com/BlockchainCommons/Research/blob/master/papers/bcr-2020-004-bc32.md 110 | """ 111 | dd = convertbits(data, 8, 5) 112 | polymod = bech32_polymod([0] + dd + [0, 0, 0, 0, 0, 0]) ^ 0x3FFFFFFF 113 | chk = [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] 114 | return "".join([BECH32_ALPHABET[d] for d in dd + chk]) 115 | 116 | 117 | def bc32decode(bc32: str) -> bytes: 118 | """ 119 | bc32 decoding 120 | see https://github.com/BlockchainCommons/Research/blob/master/papers/bcr-2020-004-bc32.md 121 | """ 122 | if bc32.lower() != bc32 and bc32.upper() != bc32: 123 | return None 124 | bc32 = 
bc32.lower() 125 | if not all([x in BECH32_ALPHABET for x in bc32]): 126 | return None 127 | res = [BECH32_ALPHABET.find(c) for c in bc32.lower()] 128 | if bech32_polymod([0] + res) != 0x3FFFFFFF: 129 | return None 130 | return bytes(convertbits(res[:-6], 5, 8, False)) 131 | 132 | 133 | def cbor_encode(data): 134 | length = len(data) 135 | if length <= 23: 136 | prefix = bytes([0x40 + length]) 137 | elif length <= 255: 138 | prefix = bytes([0x58, length]) 139 | elif length <= 65535: 140 | prefix = b"\x59" + length.to_bytes(2, "big") 141 | else: 142 | prefix = b"\x60" + length.to_bytes(4, "big") 143 | return prefix + data 144 | 145 | 146 | def cbor_decode(data): 147 | s = BytesIO(data) 148 | b = s.read(1)[0] 149 | if b >= 0x40 and b < 0x58: 150 | length = b - 0x40 151 | return s.read(length) 152 | if b == 0x58: 153 | length = s.read(1)[0] 154 | return s.read(length) 155 | if b == 0x59: 156 | length = int.from_bytes(s.read(2), "big") 157 | return s.read(length) 158 | if b == 0x60: 159 | length = int.from_bytes(s.read(4), "big") 160 | return s.read(length) 161 | return None 162 | 163 | 164 | def encode_bech32(nums): 165 | """Convert from 5-bit array of integers to bech32 format""" 166 | result = "" 167 | for n in nums: 168 | result += BECH32_ALPHABET[n] 169 | return result 170 | 171 | 172 | def encode_bech32_checksum(s, network="mainnet"): 173 | """Convert a segwit ScriptPubKey to a bech32 address""" 174 | prefix = PREFIX.get(network) 175 | if not prefix: 176 | raise ValueError(f"unrecognized network: {network}") 177 | version = s[0] 178 | if version > 0: 179 | version -= 0x50 180 | length = s[1] 181 | data = [version] + group_32(s[2 : 2 + length]) 182 | if version == 0: 183 | checksum = bech32_create_checksum(prefix, data) 184 | else: 185 | checksum = bech32m_create_checksum(prefix, data) 186 | return prefix + "1" + encode_bech32(data + checksum) 187 | 188 | 189 | def decode_bech32(s): 190 | """Returns network, segwit version and the hash from the bech32 address""" 191 | regtest_prefix = PREFIX["regtest"] 192 | if s.startswith(regtest_prefix): 193 | hrp, raw_data = regtest_prefix, s[5:] 194 | else: 195 | hrp, raw_data = s.split("1") 196 | 197 | network = NET_FOR_PREFIX.get(hrp) 198 | if not network: 199 | raise ValueError(f"unknown human readable part: {hrp}") 200 | 201 | data = [BECH32_ALPHABET.index(c) for c in raw_data] 202 | version = data[0] 203 | verify_fnc = bech32_verify_checksum if version == 0 else bech32m_verify_checksum 204 | if not verify_fnc(hrp, data): 205 | raise ValueError(f"bad address: {s}") 206 | number = 0 207 | for digit in data[1:-6]: 208 | number = (number << 5) + digit 209 | num_bytes = (len(data) - 7) * 5 // 8 210 | bits_to_ignore = (len(data) - 7) * 5 % 8 211 | number >>= bits_to_ignore 212 | hash = int_to_big_endian(number, num_bytes) 213 | if num_bytes < 2 or num_bytes > 40: 214 | raise ValueError(f"bytes out of range: {num_bytes}") 215 | return [network, version, hash] 216 | -------------------------------------------------------------------------------- /buidl/blinding.py: -------------------------------------------------------------------------------- 1 | from buidl.hd import HDPublicKey, is_valid_bip32_path 2 | from secrets import randbelow 3 | 4 | 5 | def secure_secret_path(depth=4): 6 | """ 7 | Generate a secure_secret_path for blinding an xpub. 
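    Returns an unhardened path string of the form "m/<index_1>/.../<index_depth>", where each index is drawn from secrets.randbelow(2**31 - 1).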
8 | 9 | Approx entropy by depth: 10 | 11 | for depth in range(1, 10): print(f"{depth}: {31*depth}") 12 | 1: 31 13 | 2: 62 14 | 3: 93 15 | 4: 124 16 | 5: 155 17 | 6: 186 18 | 7: 217 19 | 8: 248 20 | 9: 279 21 | """ 22 | if not isinstance(depth, int): 23 | raise ValueError(f"depth must be an int: {depth}") 24 | if depth >= 32: 25 | raise ValueError( 26 | f"BIP32 requries depth < 256, but this function will not allow you to go anywhere near this high: {depth}" 27 | ) 28 | if depth < 1: 29 | raise ValueError(f"Depth must be > 0: {depth}") 30 | to_return = ["m"] 31 | for _ in range(depth): 32 | # https://bitcoin.stackexchange.com/questions/92056/what-is-the-max-allowed-depth-for-bip32-derivation-paths#comment105756_92057 33 | rand_int = randbelow(2**31 - 1) 34 | to_return.append(str(rand_int)) 35 | return "/".join(to_return) 36 | 37 | 38 | def blind_xpub(starting_xpub, starting_path, secret_path): 39 | """ 40 | Blind a starting_xpub with a given (and unverifiable) path, using a secret path. 41 | 42 | Return the complete (combined) bip32 path, and 43 | """ 44 | 45 | starting_xpub_obj = HDPublicKey.parse(starting_xpub) 46 | # Note that we cannot verify the starting path, so it is essential that at least this safety check is accurate 47 | if starting_xpub_obj.depth != starting_path.count("/"): 48 | raise ValueError( 49 | f"starting_xpub_obj.depth {starting_xpub_obj.depth} != starting_path depth {starting_path.count('/')}" 50 | ) 51 | 52 | # This will automatically use the version byte that was parsed in the previous step 53 | blinded_child_xpub = starting_xpub_obj.traverse(secret_path).xpub() 54 | blinded_full_path = combine_bip32_paths( 55 | first_path=starting_path, second_path=secret_path 56 | ) 57 | return { 58 | "blinded_child_xpub": blinded_child_xpub, 59 | "blinded_full_path": blinded_full_path, 60 | } 61 | 62 | 63 | def combine_bip32_paths(first_path, second_path): 64 | for bip32_path in (first_path, second_path): 65 | if not is_valid_bip32_path(bip32_path): 66 | raise ValueError(f"Invalid bip32 path: {bip32_path}") 67 | 68 | # be forgiving 69 | first_path = first_path.lower().strip().replace("'", "h").replace("//", "/") 70 | second_path = second_path.lower().strip().replace("'", "h").replace("//", "/") 71 | 72 | if first_path == "m": 73 | return second_path 74 | 75 | if second_path == "m": 76 | return first_path 77 | 78 | # Trim of leading "m/" from second path: 79 | return f"{first_path}/{second_path[2:]}" 80 | -------------------------------------------------------------------------------- /buidl/block.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | from buidl.helper import ( 4 | bits_to_target, 5 | hash256, 6 | int_to_little_endian, 7 | little_endian_to_int, 8 | merkle_root, 9 | read_varint, 10 | ) 11 | from buidl.tx import Tx 12 | 13 | 14 | class Block: 15 | command = b"block" 16 | 17 | def __init__( 18 | self, 19 | version, 20 | prev_block, 21 | merkle_root, 22 | timestamp, 23 | bits, 24 | nonce, 25 | txs=None, 26 | tx_hashes=None, 27 | ): 28 | self.version = version 29 | self.prev_block = prev_block 30 | self.merkle_root = merkle_root 31 | self.timestamp = timestamp 32 | self.bits = bits 33 | self.nonce = nonce 34 | self.txs = txs 35 | self.tx_hashes = tx_hashes 36 | self.merkle_tree = None 37 | 38 | def __repr__(self): 39 | return f""" 40 | Version: {self.version} 41 | Previous: {self.prev_block.hex()} 42 | Merkle Root: {self.merkle_root.hex()} 43 | Timestamp: {self.timestamp} 44 | Bits: {self.bits[::-1].hex()} 
45 | Nonce: {self.nonce.hex()} 46 | Num txs: {"unknown" if self.txs is None else len(self.txs)} 47 | """ 48 | 49 | @classmethod 50 | def parse_header(cls, stream=None, hex=None): 51 | """Takes a byte stream and parses block headers. Returns a Block object""" 52 | if hex: 53 | if stream: 54 | raise RuntimeError("One of stream or hex should be defined") 55 | stream = BytesIO(bytes.fromhex(hex)) 56 | # stream.read(n) will read n bytes from the stream 57 | # version - 4 bytes, little endian, interpret as int 58 | version = little_endian_to_int(stream.read(4)) 59 | # prev_block - 32 bytes, little endian (use [::-1] to reverse) 60 | prev_block = stream.read(32)[::-1] 61 | # merkle_root - 32 bytes, little endian (use [::-1] to reverse) 62 | merkle_root = stream.read(32)[::-1] 63 | # timestamp - 4 bytes, little endian, interpret as int 64 | timestamp = little_endian_to_int(stream.read(4)) 65 | # bits - 4 bytes 66 | bits = stream.read(4) 67 | # nonce - 4 bytes 68 | nonce = stream.read(4) 69 | # initialize class 70 | return cls(version, prev_block, merkle_root, timestamp, bits, nonce) 71 | 72 | @classmethod 73 | def parse(cls, s): 74 | """Takes a byte stream and parses a block. Returns a Block object""" 75 | b = cls.parse_header(s) 76 | num_txs = read_varint(s) 77 | b.txs = [] 78 | b.tx_hashes = [] 79 | for _ in range(num_txs): 80 | t = Tx.parse(s) 81 | b.txs.append(t) 82 | b.tx_hashes.append(t.hash()) 83 | return b 84 | 85 | def serialize(self): 86 | """Returns the 80 byte block header""" 87 | # version - 4 bytes, little endian 88 | result = int_to_little_endian(self.version, 4) 89 | # prev_block - 32 bytes, little endian 90 | result += self.prev_block[::-1] 91 | # merkle_root - 32 bytes, little endian 92 | result += self.merkle_root[::-1] 93 | # timestamp - 4 bytes, little endian 94 | result += int_to_little_endian(self.timestamp, 4) 95 | # bits - 4 bytes 96 | result += self.bits 97 | # nonce - 4 bytes 98 | result += self.nonce 99 | return result 100 | 101 | def hash(self): 102 | """Returns the hash256 interpreted little endian of the block""" 103 | # serialize 104 | s = self.serialize() 105 | # hash256 106 | h256 = hash256(s) 107 | # reverse 108 | return h256[::-1] 109 | 110 | def id(self): 111 | """Human-readable hexadecimal of the block hash""" 112 | return self.hash().hex() 113 | 114 | def bip9(self): 115 | """Returns whether this block is signaling readiness for BIP9""" 116 | # BIP9 is signalled if the top 3 bits are 001 117 | # remember version is 32 bytes so right shift 29 (>> 29) and see if 118 | # that is 001 119 | return self.version >> 29 == 0b001 120 | 121 | def bip91(self): 122 | """Returns whether this block is signaling readiness for BIP91""" 123 | # BIP91 is signalled if the 5th bit from the right is 1 124 | # shift 4 bits to the right and see if the last bit is 1 125 | return self.version >> 4 & 1 == 1 126 | 127 | def bip141(self): 128 | """Returns whether this block is signaling readiness for BIP141""" 129 | # BIP91 is signalled if the 2nd bit from the right is 1 130 | # shift 1 bit to the right and see if the last bit is 1 131 | return self.version >> 1 & 1 == 1 132 | 133 | def target(self): 134 | """Returns the proof-of-work target based on the bits""" 135 | return bits_to_target(self.bits) 136 | 137 | def difficulty(self): 138 | """Returns the block difficulty based on the bits""" 139 | # note difficulty is (target of lowest difficulty) / (self's target) 140 | # lowest difficulty has bits that equal 0xffff001d 141 | lowest = 0xFFFF * 256 ** (0x1D - 3) 142 | return lowest / 
self.target() 143 | 144 | def check_pow(self): 145 | """Returns whether this block satisfies proof of work""" 146 | # get the hash256 of the serialization of this block 147 | h256 = hash256(self.serialize()) 148 | # interpret this hash as a little-endian number 149 | proof = little_endian_to_int(h256) 150 | # return whether this integer is less than the target 151 | return proof < self.target() 152 | 153 | def validate_merkle_root(self): 154 | """Gets the merkle root of the tx_hashes and checks that it's 155 | the same as the merkle root of this block. 156 | """ 157 | # reverse all the transaction hashes (self.tx_hashes) 158 | hashes = [h[::-1] for h in self.tx_hashes] 159 | # get the Merkle Root 160 | root = merkle_root(hashes) 161 | # reverse the Merkle Root 162 | # return whether self.merkle root is the same as 163 | # the reverse of the calculated merkle root 164 | return root[::-1] == self.merkle_root 165 | 166 | def get_outpoints(self): 167 | if not self.txs: 168 | return [] 169 | for t in self.txs: 170 | for tx_out in t.tx_outs: 171 | if not tx_out.script_pubkey.has_op_return(): 172 | yield (tx_out.script_pubkey.raw_serialize()) 173 | 174 | 175 | GENESIS_BLOCK_MAINNET_HEX = "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c" 176 | GENESIS_BLOCK_TESTNET_HEX = "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff001d1aa4ae18" 177 | GENESIS_BLOCK_SIGNET_HEX = "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a008f4d5fae77031e8ad22203" 178 | GENESIS_BLOCK_REGTEST_HEX = "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4adae5494dffff7f2002000000" 179 | GENESIS_BLOCK_HEADERS = { 180 | "mainnet": Block.parse_header(hex=GENESIS_BLOCK_MAINNET_HEX), 181 | "testnet": Block.parse_header(hex=GENESIS_BLOCK_TESTNET_HEX), 182 | "signet": Block.parse_header(hex=GENESIS_BLOCK_SIGNET_HEX), 183 | "regtest": Block.parse_header(hex=GENESIS_BLOCK_REGTEST_HEX), 184 | } 185 | GENESIS_BLOCK_HASH = { 186 | "mainnet": GENESIS_BLOCK_HEADERS["mainnet"].hash(), 187 | "testnet": GENESIS_BLOCK_HEADERS["testnet"].hash(), 188 | "signet": GENESIS_BLOCK_HEADERS["signet"].hash(), 189 | "regtest": GENESIS_BLOCK_HEADERS["regtest"].hash(), 190 | } 191 | -------------------------------------------------------------------------------- /buidl/bloomfilter.py: -------------------------------------------------------------------------------- 1 | from buidl.helper import ( 2 | bit_field_to_bytes, 3 | encode_varint, 4 | int_to_byte, 5 | int_to_little_endian, 6 | murmur3, 7 | ) 8 | from buidl.network import GenericMessage 9 | 10 | 11 | BIP37_CONSTANT = 0xFBA4C795 12 | 13 | 14 | class BloomFilter: 15 | def __init__(self, size, function_count, tweak): 16 | self.size = size 17 | self.bit_field = [0] * (size * 8) 18 | self.function_count = function_count 19 | self.tweak = tweak 20 | 21 | def add(self, item): 22 | """Add an item to the filter""" 23 | # iterate self.function_count number of times 24 | for i in range(self.function_count): 25 | # BIP0037 spec seed is i*BIP37_CONSTANT + self.tweak 26 | seed = i * BIP37_CONSTANT + self.tweak 27 | # get the murmur3 hash given that seed 28 | h = murmur3(item, seed=seed) 29 | # set the bit at the hash mod the bitfield size 
(self.size*8) 30 | bit = h % (self.size * 8) 31 | # set the bit field at bit to be 1 32 | self.bit_field[bit] = 1 33 | 34 | def filter_bytes(self): 35 | return bit_field_to_bytes(self.bit_field) 36 | 37 | def filterload(self, flag=1): 38 | """Return a network message whose command is filterload""" 39 | # encode_varint self.size 40 | payload = encode_varint(self.size) 41 | # next is the self.filter_bytes() 42 | payload += self.filter_bytes() 43 | # function count is 4 bytes little endian 44 | payload += int_to_little_endian(self.function_count, 4) 45 | # tweak is 4 bytes little endian 46 | payload += int_to_little_endian(self.tweak, 4) 47 | # flag is 1 byte little endian 48 | payload += int_to_byte(flag) 49 | # return a GenericMessage with b'filterload' as the command 50 | return GenericMessage(b"filterload", payload) 51 | -------------------------------------------------------------------------------- /buidl/chash.py: -------------------------------------------------------------------------------- 1 | from buidl._libsec import ffi, lib 2 | 3 | 4 | GLOBAL_CTX = ffi.gc( 5 | lib.secp256k1_context_create( 6 | lib.SECP256K1_CONTEXT_SIGN | lib.SECP256K1_CONTEXT_VERIFY 7 | ), 8 | lib.secp256k1_context_destroy, 9 | ) 10 | 11 | 12 | def tagged_hash(tag, msg): 13 | result = ffi.new("unsigned char [32]") 14 | tag_length = len(tag) 15 | msg_length = len(msg) 16 | if not lib.secp256k1_tagged_sha256( 17 | GLOBAL_CTX, 18 | result, 19 | tag, 20 | tag_length, 21 | msg, 22 | msg_length, 23 | ): 24 | raise RuntimeError("libsecp256k1 tagged hash problem") 25 | return bytes(ffi.buffer(result, 32)) 26 | 27 | 28 | def hash_aux(msg): 29 | return tagged_hash(b"BIP0340/aux", msg) 30 | 31 | 32 | def hash_challenge(msg): 33 | return tagged_hash(b"BIP0340/challenge", msg) 34 | 35 | 36 | def hash_keyaggcoef(msg): 37 | return tagged_hash(b"KeyAgg coefficient", msg) 38 | 39 | 40 | def hash_keyagglist(msg): 41 | return tagged_hash(b"KeyAgg list", msg) 42 | 43 | 44 | def hash_musignonce(msg): 45 | return tagged_hash(b"MuSig/noncecoef", msg) 46 | 47 | 48 | def hash_nonce(msg): 49 | return tagged_hash(b"BIP0340/nonce", msg) 50 | 51 | 52 | def hash_tapbranch(msg): 53 | return tagged_hash(b"TapBranch", msg) 54 | 55 | 56 | def hash_tapleaf(msg): 57 | return tagged_hash(b"TapLeaf", msg) 58 | 59 | 60 | def hash_tapsighash(msg): 61 | return tagged_hash(b"TapSighash", msg) 62 | 63 | 64 | def hash_taptweak(msg): 65 | return tagged_hash(b"TapTweak", msg) 66 | -------------------------------------------------------------------------------- /buidl/compactfilter.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | from buidl.helper import ( 4 | encode_varint, 5 | hash256, 6 | int_to_little_endian, 7 | read_varint, 8 | read_varstr, 9 | ) 10 | from buidl.siphash import SipHash_2_4 11 | 12 | 13 | BASIC_FILTER_TYPE = 0 14 | GOLOMB_P = 19 15 | GOLOMB_M = int(round(1.497137 * 2**GOLOMB_P)) 16 | 17 | 18 | def _siphash(key, value): 19 | if len(key) != 16: 20 | raise ValueError("Key should be 16 bytes") 21 | sip = SipHash_2_4(key) 22 | sip.update(value) 23 | return sip.hash() 24 | 25 | 26 | def hash_to_range(key, value, f): 27 | """Returns a number between 0 and f-1, uniformly distributed. 
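    (Implemented below as a multiply-then-shift range reduction of the 64-bit hash: (hash * f) >> 64.)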
28 | Uses siphash-2-4.""" 29 | return _siphash(key, value) * f >> 64 30 | 31 | 32 | def hashed_items(key, items): 33 | n = len(items) 34 | f = n * GOLOMB_M 35 | result = [] 36 | for item in items: 37 | result.append(hash_to_range(key, item, f)) 38 | return sorted(result) 39 | 40 | 41 | def encode_golomb(x, p): 42 | """converts a number x to a golomb-encoded array of 0's and 1's""" 43 | # quotient when dividing x by 2^p 44 | q = x >> p 45 | # q 1's and a 0 at the end 46 | result = [1] * q + [0] 47 | # the last p bits of x 48 | result += [x & (1 << (p - i - 1)) > 0 for i in range(p)] 49 | return result 50 | 51 | 52 | def decode_golomb(bits, p): 53 | """converts a golomb-encoded array of 0's and 1's to a number""" 54 | q = 0 55 | while bits[0] != 0: 56 | q += 1 57 | bits.pop(0) 58 | bits.pop(0) 59 | r = 0 60 | for _ in range(p): 61 | r <<= 1 62 | if bits.pop(0) == 1: 63 | r |= 1 64 | return (q << p) + r 65 | 66 | 67 | def pack_bits(bits): 68 | """converts bits to a byte-string""" 69 | num_bytes = len(bits) 70 | bits += [0] * (-num_bytes % 8) 71 | result = 0 72 | for bit in bits: 73 | result <<= 1 74 | if bit: 75 | result |= 1 76 | return result.to_bytes(len(bits) // 8, "big") 77 | 78 | 79 | def unpack_bits(byte_string): 80 | bits = [] 81 | for byte in byte_string: 82 | for _ in range(8): 83 | if byte & 0x80: 84 | bits.append(1) 85 | else: 86 | bits.append(0) 87 | byte <<= 1 88 | return bits 89 | 90 | 91 | def serialize_gcs(sorted_items): 92 | last_value = 0 93 | result = [] 94 | for item in sorted_items: 95 | delta = item - last_value 96 | result += encode_golomb(delta, GOLOMB_P) 97 | last_value = item 98 | return encode_varint(len(sorted_items)) + pack_bits(result) 99 | 100 | 101 | def encode_gcs(key, items): 102 | """Returns the golomb-coded-set byte-string which is the sorted 103 | hashes of the items""" 104 | sorted_items = hashed_items(key, items) 105 | return serialize_gcs(sorted_items) 106 | 107 | 108 | def decode_gcs(key, gcs): 109 | """Returns the sorted hashes of the items from the golomb-coded-set""" 110 | s = BytesIO(gcs) 111 | num_items = read_varint(s) 112 | bits = unpack_bits(s.read()) 113 | items = [] 114 | current = 0 115 | for _ in range(num_items): 116 | delta = decode_golomb(bits, GOLOMB_P) 117 | current += delta 118 | items.append(current) 119 | return items 120 | 121 | 122 | class CompactFilter: 123 | def __init__(self, key, hashes): 124 | self.key = key 125 | self.hashes = set(hashes) 126 | self.f = len(self.hashes) * GOLOMB_M 127 | 128 | def __repr__(self): 129 | result = f"{self.key.hex()}:\n\n" 130 | for h in sorted(list(self.hashes)): 131 | result += f"{h.hex()}\n" 132 | return result 133 | 134 | def __eq__(self, other): 135 | return self.key == other.key and sorted(list(self.hashes)) == sorted( 136 | list(other.hashes) 137 | ) 138 | 139 | @classmethod 140 | def parse(cls, key, filter_bytes): 141 | return cls(key, set(decode_gcs(key, filter_bytes))) 142 | 143 | def hash(self): 144 | return hash256(self.serialize()) 145 | 146 | def serialize(self): 147 | return serialize_gcs(sorted(list(self.hashes))) 148 | 149 | def compute_hash(self, raw_script_pubkey): 150 | return hash_to_range(self.key, raw_script_pubkey, self.f) 151 | 152 | def __contains__(self, script_pubkey): 153 | raw_script_pubkey = script_pubkey.raw_serialize() 154 | return self.compute_hash(raw_script_pubkey) in self.hashes 155 | 156 | 157 | class GetCFiltersMessage: 158 | command = b"getcfilters" 159 | define_network = False 160 | 161 | def __init__(self, filter_type=BASIC_FILTER_TYPE, start_height=1, 
stop_hash=None): 162 | self.filter_type = filter_type 163 | self.start_height = start_height 164 | if stop_hash is None: 165 | raise RuntimeError("A stop hash is required") 166 | self.stop_hash = stop_hash 167 | 168 | def serialize(self): 169 | result = self.filter_type.to_bytes(1, "big") 170 | result += int_to_little_endian(self.start_height, 4) 171 | result += self.stop_hash[::-1] 172 | return result 173 | 174 | 175 | class CFilterMessage: 176 | command = b"cfilter" 177 | define_network = False 178 | 179 | def __init__(self, filter_type, block_hash, filter_bytes): 180 | self.filter_type = filter_type 181 | self.block_hash = block_hash 182 | self.filter_bytes = filter_bytes 183 | self.cf = CompactFilter.parse(block_hash[::-1][:16], filter_bytes) 184 | 185 | def __eq__(self, other): 186 | return ( 187 | self.filter_type == other.filter_type 188 | and self.block_hash == other.block_hash 189 | and self.filter_bytes == other.filter_bytes 190 | ) 191 | 192 | @classmethod 193 | def parse(cls, s): 194 | filter_type = s.read(1)[0] 195 | block_hash = s.read(32)[::-1] 196 | filter_bytes = read_varstr(s) 197 | return cls(filter_type, block_hash, filter_bytes) 198 | 199 | def hash(self): 200 | return hash256(self.filter_bytes) 201 | 202 | def __contains__(self, script_pubkey): 203 | return script_pubkey in self.cf 204 | 205 | 206 | class GetCFHeadersMessage: 207 | command = b"getcfheaders" 208 | define_network = False 209 | 210 | def __init__(self, filter_type=BASIC_FILTER_TYPE, start_height=0, stop_hash=None): 211 | self.filter_type = filter_type 212 | self.start_height = start_height 213 | if stop_hash is None: 214 | raise RuntimeError 215 | self.stop_hash = stop_hash 216 | 217 | def serialize(self): 218 | result = self.filter_type.to_bytes(1, "big") 219 | result += int_to_little_endian(self.start_height, 4) 220 | result += self.stop_hash[::-1] 221 | return result 222 | 223 | 224 | class CFHeadersMessage: 225 | command = b"cfheaders" 226 | define_network = False 227 | 228 | def __init__(self, filter_type, stop_hash, previous_filter_header, filter_hashes): 229 | self.filter_type = filter_type 230 | self.stop_hash = stop_hash 231 | self.previous_filter_header = previous_filter_header 232 | self.filter_hashes = filter_hashes 233 | current = self.previous_filter_header 234 | for filter_hash in self.filter_hashes: 235 | current = hash256(filter_hash + current) 236 | self.last_header = current 237 | 238 | def __repr__(self): 239 | result = f"up to {self.stop_hash.hex()}\nstarting from {self.previous_filter_header.hex()}\n\n" 240 | for fh in self.filter_hashes: 241 | result += f"{fh.hex()}\n" 242 | return result 243 | 244 | @classmethod 245 | def parse(cls, s): 246 | filter_type = s.read(1)[0] 247 | stop_hash = s.read(32)[::-1] 248 | previous_filter_header = s.read(32) 249 | filter_hashes_length = read_varint(s) 250 | filter_hashes = [] 251 | for _ in range(filter_hashes_length): 252 | filter_hashes.append(s.read(32)) 253 | return cls(filter_type, stop_hash, previous_filter_header, filter_hashes) 254 | 255 | 256 | class GetCFCheckPointMessage: 257 | command = b"getcfcheckpt" 258 | define_network = False 259 | 260 | def __init__(self, filter_type=BASIC_FILTER_TYPE, stop_hash=None): 261 | self.filter_type = filter_type 262 | if stop_hash is None: 263 | raise RuntimeError("Need a stop hash") 264 | self.stop_hash = stop_hash 265 | 266 | def serialize(self): 267 | result = self.filter_type.to_bytes(1, "big") 268 | result += self.stop_hash[::-1] 269 | return result 270 | 271 | 272 | class CFCheckPointMessage: 
273 | command = b"cfcheckpt" 274 | define_network = False 275 | 276 | def __init__(self, filter_type, stop_hash, filter_headers): 277 | self.filter_type = filter_type 278 | self.stop_hash = stop_hash 279 | self.filter_headers = filter_headers 280 | 281 | def __repr__(self): 282 | result = f"up to {self.stop_hash.hex()}\n\n" 283 | for fh in self.filter_headers: 284 | result += f"{fh.hex()}\n" 285 | return result 286 | 287 | @classmethod 288 | def parse(cls, s): 289 | filter_type = s.read(1)[0] 290 | stop_hash = s.read(32)[::-1] 291 | filter_headers_length = read_varint(s) 292 | filter_headers = [] 293 | for _ in range(filter_headers_length): 294 | filter_headers.append(s.read(32)) 295 | return cls(filter_type, stop_hash, filter_headers) 296 | -------------------------------------------------------------------------------- /buidl/ecc.py: -------------------------------------------------------------------------------- 1 | try: 2 | from buidl.cecc import * # noqa: F401,F403 3 | except ModuleNotFoundError: 4 | from buidl.pecc import * # noqa: F401,F403 5 | -------------------------------------------------------------------------------- /buidl/hash.py: -------------------------------------------------------------------------------- 1 | try: 2 | from buidl.chash import * # noqa: F401,F403 3 | except ModuleNotFoundError: 4 | from buidl.phash import * # noqa: F401,F403 5 | -------------------------------------------------------------------------------- /buidl/libsec.h: -------------------------------------------------------------------------------- 1 | #define SECP256K1_CONTEXT_VERIFY ... 2 | #define SECP256K1_CONTEXT_SIGN ... 3 | #define SECP256K1_EC_COMPRESSED ... 4 | #define SECP256K1_EC_UNCOMPRESSED ... 5 | 6 | typedef struct secp256k1_context_struct secp256k1_context; 7 | 8 | secp256k1_context* secp256k1_context_create( 9 | unsigned int flags 10 | ); 11 | int secp256k1_context_randomize( 12 | secp256k1_context* ctx, 13 | const unsigned char *seed32 14 | ); 15 | void secp256k1_context_destroy( 16 | secp256k1_context* ctx 17 | ); 18 | 19 | typedef struct { 20 | unsigned char data[64]; 21 | } secp256k1_pubkey; 22 | 23 | int secp256k1_ec_pubkey_parse( 24 | const secp256k1_context* ctx, 25 | secp256k1_pubkey* pubkey, 26 | const unsigned char *input, 27 | size_t inputlen 28 | ); 29 | int secp256k1_ec_pubkey_serialize( 30 | const secp256k1_context* ctx, 31 | unsigned char *output, 32 | size_t *outputlen, 33 | const secp256k1_pubkey* pubkey, 34 | unsigned int flags 35 | ); 36 | int secp256k1_ec_pubkey_tweak_add( 37 | const secp256k1_context* ctx, 38 | secp256k1_pubkey *pubkey, 39 | const unsigned char *tweak 40 | ); 41 | int secp256k1_ec_pubkey_tweak_mul( 42 | const secp256k1_context* ctx, 43 | secp256k1_pubkey *pubkey, 44 | const unsigned char *tweak 45 | ); 46 | int secp256k1_ec_pubkey_combine( 47 | const secp256k1_context* ctx, 48 | secp256k1_pubkey *out, 49 | const secp256k1_pubkey * const * ins, 50 | size_t n 51 | ); 52 | typedef struct { 53 | unsigned char data[64]; 54 | } secp256k1_ecdsa_signature; 55 | 56 | int secp256k1_ecdsa_signature_parse_der( 57 | const secp256k1_context* ctx, 58 | secp256k1_ecdsa_signature* sig, 59 | const unsigned char *input, 60 | size_t inputlen 61 | ); 62 | int secp256k1_ecdsa_signature_serialize_der( 63 | const secp256k1_context* ctx, 64 | unsigned char *output, 65 | size_t *outputlen, 66 | const secp256k1_ecdsa_signature* sig 67 | ); 68 | int secp256k1_ecdsa_verify( 69 | const secp256k1_context* ctx, 70 | const secp256k1_ecdsa_signature *sig, 71 | const unsigned 
char *msg32, 72 | const secp256k1_pubkey *pubkey 73 | ); 74 | 75 | typedef int (*secp256k1_nonce_function)( 76 | unsigned char *nonce32, 77 | const unsigned char *msg32, 78 | const unsigned char *key32, 79 | const unsigned char *algo16, 80 | void *data, 81 | unsigned int attempt 82 | ); 83 | 84 | int secp256k1_ecdsa_sign( 85 | const secp256k1_context* ctx, 86 | secp256k1_ecdsa_signature *sig, 87 | const unsigned char *msg32, 88 | const unsigned char *seckey, 89 | secp256k1_nonce_function noncefp, 90 | const void *ndata 91 | ); 92 | 93 | int secp256k1_tagged_sha256( 94 | const secp256k1_context* ctx, 95 | unsigned char *hash32, 96 | const unsigned char *tag, 97 | size_t taglen, 98 | const unsigned char *msg, 99 | size_t msglen 100 | ); 101 | 102 | typedef struct { 103 | unsigned char data[64]; 104 | } secp256k1_xonly_pubkey; 105 | 106 | int secp256k1_xonly_pubkey_parse( 107 | const secp256k1_context* ctx, 108 | secp256k1_xonly_pubkey* pubkey, 109 | const unsigned char *input32 110 | ); 111 | 112 | int secp256k1_xonly_pubkey_serialize( 113 | const secp256k1_context* ctx, 114 | unsigned char *output32, 115 | const secp256k1_xonly_pubkey* pubkey 116 | ); 117 | 118 | int secp256k1_xonly_pubkey_from_pubkey( 119 | const secp256k1_context* ctx, 120 | secp256k1_xonly_pubkey *xonly_pubkey, 121 | int *pk_parity, 122 | const secp256k1_pubkey *pubkey 123 | ); 124 | 125 | typedef struct { 126 | unsigned char data[96]; 127 | } secp256k1_keypair; 128 | 129 | int secp256k1_keypair_create( 130 | const secp256k1_context* ctx, 131 | secp256k1_keypair *keypair, 132 | const unsigned char *seckey 133 | ); 134 | 135 | int secp256k1_schnorrsig_sign( 136 | const secp256k1_context* ctx, 137 | unsigned char *sig64, 138 | const unsigned char *msg32, 139 | /* Not really void: secp256k1_keypair */ 140 | const void *keypair, 141 | const unsigned char *aux_rand32 142 | ); 143 | 144 | int secp256k1_schnorrsig_verify( 145 | const secp256k1_context* ctx, 146 | const unsigned char *sig64, 147 | const unsigned char *msg, 148 | size_t msglen, 149 | /* Not really void: secp256k1_xonly_pubkey */ 150 | const void *xonly_pubkey 151 | ); 152 | -------------------------------------------------------------------------------- /buidl/libsec_build.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 2 | 3 | from cffi import FFI 4 | 5 | 6 | source = open("libsec.h", "r").read() 7 | 8 | header = """ 9 | #include 10 | #include 11 | #include 12 | """ 13 | 14 | ffi = FFI() 15 | ffi.cdef(source) 16 | ffi.set_source( 17 | "_libsec", 18 | header, 19 | libraries=["secp256k1"], 20 | include_dirs=["/opt/homebrew/Cellar/libsecp256k1/0.1/include"], 21 | ) 22 | ffi.compile(verbose=True) 23 | -------------------------------------------------------------------------------- /buidl/libsec_status.py: -------------------------------------------------------------------------------- 1 | def is_libsec_enabled(): 2 | try: 3 | from buidl import cecc # noqa: F401 4 | 5 | return True 6 | except ModuleNotFoundError: 7 | return False 8 | -------------------------------------------------------------------------------- /buidl/merkleblock.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | from buidl.block import Block 4 | 5 | from buidl.helper import ( 6 | bytes_to_bit_field, 7 | little_endian_to_int, 8 | merkle_parent, 9 | read_varint, 10 | ) 11 | 12 | 13 | class MerkleTree: 14 | def __init__(self, total): 15 | self.total = total 16 | # compute max depth 
math.ceil(math.log(self.total, 2))
17 | self.max_depth = math.ceil(math.log(self.total, 2))
18 | # initialize the nodes property to hold the actual tree
19 | self.nodes = []
20 | # loop over the number of levels (max_depth+1)
21 | for depth in range(self.max_depth + 1):
22 | # the number of items at this depth is
23 | # math.ceil(self.total / 2**(self.max_depth - depth))
24 | num_items = math.ceil(self.total / 2 ** (self.max_depth - depth))
25 | # create this level's hashes list with the right number of items
26 | level_hashes = [None] * num_items
27 | # append this level's hashes to the merkle tree
28 | self.nodes.append(level_hashes)
29 | # set the pointer to the root (depth=0, index=0)
30 | self.current_depth = 0
31 | self.current_index = 0
32 | self.proved_txs = []
33 |
34 | def __repr__(self):
35 | result = []
36 | for depth, level in enumerate(self.nodes):
37 | items = []
38 | for index, h in enumerate(level):
39 | if h is None:
40 | short = "None"
41 | else:
42 | short = "{}...".format(h.hex()[:8])
43 | if depth == self.current_depth and index == self.current_index:
44 | items.append(f"*{short[:-2]}*")
45 | else:
46 | items.append(short)
47 | result.append(", ".join(items))
48 | return "\n".join(result)
49 |
50 | def up(self):
51 | # reduce depth by 1 and halve the index
52 | self.current_depth -= 1
53 | self.current_index //= 2
54 |
55 | def left(self):
56 | # increase depth by 1 and double the index
57 | self.current_depth += 1
58 | self.current_index *= 2
59 |
60 | def right(self):
61 | # increase depth by 1 and double the index + 1
62 | self.current_depth += 1
63 | self.current_index = self.current_index * 2 + 1
64 |
65 | def root(self):
66 | return self.nodes[0][0]
67 |
68 | def set_current_node(self, value):
69 | self.nodes[self.current_depth][self.current_index] = value
70 |
71 | def get_current_node(self):
72 | return self.nodes[self.current_depth][self.current_index]
73 |
74 | def get_left_node(self):
75 | return self.nodes[self.current_depth + 1][self.current_index * 2]
76 |
77 | def get_right_node(self):
78 | return self.nodes[self.current_depth + 1][self.current_index * 2 + 1]
79 |
80 | def is_leaf(self):
81 | return self.current_depth == self.max_depth
82 |
83 | def right_exists(self):
84 | return len(self.nodes[self.current_depth + 1]) > self.current_index * 2 + 1
85 |
86 | def populate_tree(self, flag_bits, hashes):
87 | # populate until we have the root
88 | while self.root() is None:
89 | # if we are a leaf, we know this position's hash
90 | if self.is_leaf():
91 | # get the next bit from flag_bits: flag_bits.pop(0)
92 | flag_bit = flag_bits.pop(0)
93 | # get the current hash from hashes: hashes.pop(0)
94 | current_hash = hashes.pop(0)
95 | # set the current node in the merkle tree to the current hash
96 | self.set_current_node(current_hash)
97 | # if our flag bit is 1, add to the self.proved_txs array
98 | if flag_bit == 1:
99 | self.proved_txs.append(current_hash[::-1])
100 | # go up a level
101 | self.up()
102 | # else
103 | else:
104 | # get the left hash
105 | left_hash = self.get_left_node()
106 | # if we don't have the left hash
107 | if left_hash is None:
108 | # if the next flag bit is 0, the next hash is our current node
109 | if flag_bits.pop(0) == 0:
110 | # set the current node to be the next hash
111 | self.set_current_node(hashes.pop(0))
112 | # sub-tree doesn't need calculation, go up
113 | self.up()
114 | # else
115 | else:
116 | # go to the left node
117 | self.left()
118 | elif self.right_exists():
119 | # get the right hash
120 | right_hash = 
self.get_right_node()
121 | # if we don't have the right hash
122 | if right_hash is None:
123 | # go to the right node
124 | self.right()
125 | # else
126 | else:
127 | # combine the left and right hashes
128 | self.set_current_node(merkle_parent(left_hash, right_hash))
129 | # we've completed this sub-tree, go up
130 | self.up()
131 | # else
132 | else:
133 | # combine the left hash twice
134 | self.set_current_node(merkle_parent(left_hash, left_hash))
135 | # we've completed this sub-tree, go up
136 | self.up()
137 | if len(hashes) != 0:
138 | raise RuntimeError(f"hashes not all consumed {len(hashes)}")
139 | for flag_bit in flag_bits:
140 | if flag_bit != 0:
141 | raise RuntimeError("flag bits not all consumed")
142 |
143 |
144 | class MerkleBlock:
145 | command = b"merkleblock"
146 |
147 | def __init__(self, header, total, hashes, flags):
148 | self.header = header
149 | self.total = total
150 | self.hashes = hashes
151 | self.flags = flags
152 | self.merkle_tree = None
153 |
154 | def __repr__(self):
155 | result = f"{self.total}\n"
156 | for h in self.hashes:
157 | result += "\t{}\n".format(h.hex())
158 | result += "{}".format(self.flags.hex())
159 | return result
160 | def hash(self):
161 | return self.header.hash()
162 |
163 | def id(self):
164 | return self.header.id()
165 |
166 | @classmethod
167 | def parse(cls, s):
168 | """Takes a byte stream and parses a merkle block. Returns a Merkle Block object"""
169 | # s.read(n) will read n bytes from the stream
170 | # header - use Block.parse_header with the stream
171 | header = Block.parse_header(s)
172 | # total number of transactions (4 bytes, little endian)
173 | total = little_endian_to_int(s.read(4))
174 | # number of hashes is a varint
175 | num_txs = read_varint(s)
176 | # initialize the hashes array
177 | hashes = []
178 | # loop through the number of hashes times
179 | for _ in range(num_txs):
180 | # each hash is 32 bytes, little endian
181 | hashes.append(s.read(32)[::-1])
182 | # get the length of the flags field as a varint
183 | flags_length = read_varint(s)
184 | # read the flags field
185 | flags = s.read(flags_length)
186 | # initialize class
187 | return cls(header, total, hashes, flags)
188 |
189 | def is_valid(self):
190 | """Verifies whether the merkle tree information validates to the merkle root"""
191 | # use bytes_to_bit_field on self.flags to get the flag_bits
192 | flag_bits = bytes_to_bit_field(self.flags)
193 | # set hashes to be the reversed hashes of everything in self.hashes
194 | hashes = [h[::-1] for h in self.hashes]
195 | # initialize the merkle tree with self.total
196 | self.merkle_tree = MerkleTree(self.total)
197 | # populate_tree with flag_bits and hashes
198 | self.merkle_tree.populate_tree(flag_bits, hashes)
199 | # check if the computed root [::-1] is the same as the merkle root
200 | return self.merkle_tree.root()[::-1] == self.header.merkle_root
201 |
202 | def proved_txs(self):
203 | """Returns the list of proven transactions from the Merkle block"""
204 | if self.merkle_tree is None:
205 | return []
206 | else:
207 | return self.merkle_tree.proved_txs
208 | -------------------------------------------------------------------------------- /buidl/mnemonic.py: -------------------------------------------------------------------------------- 1 | from os import path
2 | from secrets import randbits
3 | from time import time
4 |
5 | from buidl.helper import big_endian_to_int, int_to_big_endian, sha256
6 |
7 |
8 | class InvalidBIP39Length(Exception):
9 | pass
10 |
11 |
12 | class 
InvalidChecksumWordsError(Exception): 13 | pass 14 | 15 | 16 | def secure_mnemonic(num_bits=256, extra_entropy=0): 17 | """ 18 | Generates a mnemonic phrase using num_bits of entropy 19 | extra_entropy is optional and should not be saved as it is NOT SUFFICIENT to recover your mnemonic. 20 | extra_entropy exists only to prevent 100% reliance on your random number generator. 21 | """ 22 | if num_bits not in (128, 160, 192, 224, 256): 23 | raise ValueError(f"Invalid num_bits: {num_bits}") 24 | if not isinstance(extra_entropy, int): 25 | raise TypeError(f"extra_entropy must be an int: {extra_entropy}") 26 | if extra_entropy < 0: 27 | raise ValueError(f"extra_entropy cannot be negative: {extra_entropy}") 28 | 29 | # if we have more bits than needed, mask so we get what we need 30 | if len(bin(extra_entropy)) > num_bits + 2: 31 | extra_entropy &= (1 << num_bits) - 1 32 | 33 | # For added paranoia, xor current epoch to extra_entropy 34 | # Would use time.time_ns() but that requires python3.7 35 | extra_entropy ^= int(time() * 1_000_000) 36 | 37 | # xor some random bits with the extra_entropy that was passed in 38 | preseed = randbits(num_bits) ^ extra_entropy 39 | # convert the number to big-endian 40 | s = int_to_big_endian(preseed, num_bits // 8) 41 | # convert to mnemonic 42 | mnemonic = bytes_to_mnemonic(s, num_bits) 43 | # sanity check 44 | if mnemonic_to_bytes(mnemonic) != s: 45 | raise RuntimeError("Generated mnemonic does not correspond to random bits") 46 | return mnemonic 47 | 48 | 49 | def mnemonic_to_bytes(mnemonic): 50 | """returns a byte representation of the mnemonic""" 51 | all_bits = 0 52 | words = mnemonic.split() 53 | # check that there are 12, 15, 18, 21 or 24 words 54 | # if not, raise a ValueError 55 | if len(words) not in (12, 15, 18, 21, 24): 56 | raise InvalidBIP39Length( 57 | f"{len(words)} words (you need 12, 15, 18, 21, or 24 words)" 58 | ) 59 | num_words = len(words) 60 | for word in words: 61 | all_bits <<= 11 62 | all_bits += BIP39[word] 63 | num_checksum_bits = num_words // 3 64 | checksum = all_bits & ((1 << num_checksum_bits) - 1) 65 | all_bits >>= num_checksum_bits 66 | num_bytes = (num_words * 11 - num_checksum_bits) // 8 67 | s = int_to_big_endian(all_bits, num_bytes) 68 | computed_checksum = sha256(s)[0] >> (8 - num_checksum_bits) 69 | if checksum != computed_checksum: 70 | raise InvalidChecksumWordsError("Checksum is wrong") 71 | return s 72 | 73 | 74 | def bytes_to_mnemonic(b, num_bits): 75 | """returns a mnemonic given a byte representation""" 76 | if num_bits not in (128, 160, 192, 224, 256): 77 | raise InvalidBIP39Length( 78 | f"{num_bits} bits (you need 128, 160, 192, 224 or 256 bits)" 79 | ) 80 | preseed = big_endian_to_int(b) 81 | # 1 extra bit for checksum is needed per 32 bits 82 | num_checksum_bits = num_bits // 32 83 | # the checksum is the sha256's first n bits. 
At most this is 8 84 | checksum = sha256(b)[0] >> (8 - num_checksum_bits) 85 | # we concatenate the checksum to the preseed 86 | all_bits = (preseed << num_checksum_bits) | checksum 87 | # now we get the mnemonic passphrase 88 | mnemonic = [] 89 | # now group into groups of 11 bits 90 | for _ in range((num_bits + num_checksum_bits) // 11): 91 | # grab the last 11 bits 92 | current = all_bits & ((1 << 11) - 1) 93 | # insert the correct word at the front 94 | mnemonic.insert(0, BIP39[current]) 95 | # shift by 11 bits so we can move to the next set 96 | all_bits >>= 11 97 | # return the mnemonic phrase by putting spaces between 98 | return " ".join(mnemonic) 99 | 100 | 101 | class WordList: 102 | def __init__(self, filename, num_words): 103 | word_file = path.join(path.dirname(__file__), filename) 104 | with open(word_file, "r") as f: 105 | self.words = f.read().split() 106 | if len(self.words) != num_words: 107 | raise ValueError(f"Expected {num_words} but got {len(self.words)}") 108 | self.lookup = {} 109 | for i, word in enumerate(self.words): 110 | # add the word's index in the dict lookup 111 | self.lookup[word] = i 112 | # if the word is more than 4 characters, also keep 113 | # a lookup of just the first 4 characters 114 | if len(word) > 4: 115 | self.lookup[word[:4]] = i 116 | 117 | def __getitem__(self, key): 118 | if isinstance(key, str): 119 | return self.lookup[key] 120 | elif isinstance(key, int): 121 | return self.words[key] 122 | else: 123 | raise KeyError("key needs to be a str or int") 124 | 125 | def __iter__(self): 126 | for word in self.words: 127 | yield word 128 | 129 | def __contains__(self, key): 130 | return key in self.words 131 | 132 | def normalize(self, word): 133 | return self[self[word.lower()]] 134 | 135 | 136 | BIP39 = WordList("bip39_words.txt", 2048) 137 | -------------------------------------------------------------------------------- /buidl/pbkdf2.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | # -*- coding: ascii -*- 3 | ########################################################################### 4 | # pbkdf2 - PKCS#5 v2.0 Password-Based Key Derivation 5 | # 6 | # Copyright (C) 2007-2011 Dwayne C. Litzenberger 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining 9 | # a copy of this software and associated documentation files (the 10 | # "Software"), to deal in the Software without restriction, including 11 | # without limitation the rights to use, copy, modify, merge, publish, 12 | # distribute, sublicense, and/or sell copies of the Software, and to 13 | # permit persons to whom the Software is furnished to do so, subject to 14 | # the following conditions: 15 | # 16 | # The above copyright notice and this permission notice shall be 17 | # included in all copies or substantial portions of the Software. 18 | # 19 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 21 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 23 | # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 24 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 25 | # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
26 | # 27 | # Country of origin: Canada 28 | # 29 | ########################################################################### 30 | # Sample PBKDF2 usage: 31 | # from Crypto.Cipher import AES 32 | # from pbkdf2 import PBKDF2 33 | # import os 34 | # 35 | # salt = os.urandom(8) # 64-bit salt 36 | # key = PBKDF2("This passphrase is a secret.", salt).read(32) # 256-bit key 37 | # iv = os.urandom(16) # 128-bit IV 38 | # cipher = AES.new(key, AES.MODE_CBC, iv) 39 | # ... 40 | # 41 | # Sample crypt() usage: 42 | # from pbkdf2 import crypt 43 | # pwhash = crypt("secret") 44 | # alleged_pw = raw_input("Enter password: ") 45 | # if pwhash == crypt(alleged_pw, pwhash): 46 | # print "Password good" 47 | # else: 48 | # print "Invalid password" 49 | # 50 | ########################################################################### 51 | 52 | __version__ = "1.3" 53 | __all__ = ["PBKDF2", "crypt"] 54 | 55 | from struct import pack 56 | from random import randint 57 | import string 58 | import sys 59 | 60 | try: 61 | # Use PyCrypto (if available). 62 | from Crypto.Hash import HMAC, SHA as SHA1 63 | except ImportError: 64 | # PyCrypto not available. Use the Python standard library. 65 | import hmac as HMAC 66 | 67 | try: 68 | from hashlib import sha1 as SHA1 69 | except ImportError: 70 | # hashlib not available. Use the old sha module. 71 | import sha as SHA1 72 | 73 | # 74 | # Python 2.1 thru 3.2 compatibility 75 | # 76 | 77 | if sys.version_info[0] == 2: 78 | _0xffffffffL = long(1) << 32 79 | 80 | def isunicode(s): 81 | return isinstance(s, unicode) 82 | 83 | def isbytes(s): 84 | return isinstance(s, str) 85 | 86 | def isinteger(n): 87 | return isinstance(n, (int, long)) 88 | 89 | def b(s): 90 | return s 91 | 92 | def binxor(a, b): 93 | return "".join([chr(ord(x) ^ ord(y)) for (x, y) in zip(a, b)]) 94 | 95 | def b64encode(data, chars="+/"): 96 | tt = string.maketrans("+/", chars) 97 | return data.encode("base64").replace("\n", "").translate(tt) 98 | 99 | from binascii import b2a_hex 100 | else: 101 | _0xffffffffL = 0xFFFFFFFF 102 | 103 | def isunicode(s): 104 | return isinstance(s, str) 105 | 106 | def isbytes(s): 107 | return isinstance(s, bytes) 108 | 109 | def isinteger(n): 110 | return isinstance(n, int) 111 | 112 | def callable(obj): 113 | return hasattr(obj, "__call__") 114 | 115 | def b(s): 116 | return s.encode("latin-1") 117 | 118 | def binxor(a, b): 119 | return bytes([x ^ y for (x, y) in zip(a, b)]) 120 | 121 | from base64 import b64encode as _b64encode 122 | 123 | def b64encode(data, chars="+/"): 124 | if isunicode(chars): 125 | return _b64encode(data, chars.encode("utf-8")).decode("utf-8") 126 | else: 127 | return _b64encode(data, chars) 128 | 129 | from binascii import b2a_hex as _b2a_hex 130 | 131 | def b2a_hex(s): 132 | return _b2a_hex(s).decode("us-ascii") 133 | 134 | xrange = range 135 | 136 | 137 | class PBKDF2(object): 138 | """PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation 139 | 140 | This implementation takes a passphrase and a salt (and optionally an 141 | iteration count, a digest module, and a MAC module) and provides a 142 | file-like object from which an arbitrarily-sized key can be read. 143 | 144 | If the passphrase and/or salt are unicode objects, they are encoded as 145 | UTF-8 before they are processed. 146 | 147 | The idea behind PBKDF2 is to derive a cryptographic key from a 148 | passphrase and a salt. 149 | 150 | PBKDF2 may also be used as a strong salted password hash. The 151 | 'crypt' function is provided for that purpose. 
152 | 153 | Remember: Keys generated using PBKDF2 are only as strong as the 154 | passphrases they are derived from. 155 | """ 156 | 157 | def __init__( 158 | self, passphrase, salt, iterations=1000, digestmodule=SHA1, macmodule=HMAC 159 | ): 160 | self.__macmodule = macmodule 161 | self.__digestmodule = digestmodule 162 | self._setup(passphrase, salt, iterations, self._pseudorandom) 163 | 164 | def _pseudorandom(self, key, msg): 165 | """Pseudorandom function. e.g. HMAC-SHA1""" 166 | return self.__macmodule.new( 167 | key=key, msg=msg, digestmod=self.__digestmodule 168 | ).digest() 169 | 170 | def read(self, bytes): 171 | """Read the specified number of key bytes.""" 172 | if self.closed: 173 | raise ValueError("file-like object is closed") 174 | 175 | size = len(self.__buf) 176 | blocks = [self.__buf] 177 | i = self.__blockNum 178 | while size < bytes: 179 | i += 1 180 | if i > _0xffffffffL or i < 1: 181 | # We could return "" here, but 182 | raise OverflowError("derived key too long") 183 | block = self.__f(i) 184 | blocks.append(block) 185 | size += len(block) 186 | buf = b("").join(blocks) 187 | retval = buf[:bytes] 188 | self.__buf = buf[bytes:] 189 | self.__blockNum = i 190 | return retval 191 | 192 | def __f(self, i): 193 | # i must fit within 32 bits 194 | assert 1 <= i <= _0xffffffffL 195 | U = self.__prf(self.__passphrase, self.__salt + pack("!L", i)) 196 | result = U 197 | for j in xrange(2, 1 + self.__iterations): 198 | U = self.__prf(self.__passphrase, U) 199 | result = binxor(result, U) 200 | return result 201 | 202 | def hexread(self, octets): 203 | """Read the specified number of octets. Return them as hexadecimal. 204 | 205 | Note that len(obj.hexread(n)) == 2*n. 206 | """ 207 | return b2a_hex(self.read(octets)) 208 | 209 | def _setup(self, passphrase, salt, iterations, prf): 210 | # Sanity checks: 211 | 212 | # passphrase and salt must be str or unicode (in the latter 213 | # case, we convert to UTF-8) 214 | if isunicode(passphrase): 215 | passphrase = passphrase.encode("UTF-8") 216 | elif not isbytes(passphrase): 217 | raise TypeError("passphrase must be str or unicode") 218 | if isunicode(salt): 219 | salt = salt.encode("UTF-8") 220 | elif not isbytes(salt): 221 | raise TypeError("salt must be str or unicode") 222 | 223 | # iterations must be an integer >= 1 224 | if not isinteger(iterations): 225 | raise TypeError("iterations must be an integer") 226 | if iterations < 1: 227 | raise ValueError("iterations must be at least 1") 228 | 229 | # prf must be callable 230 | if not callable(prf): 231 | raise TypeError("prf must be callable") 232 | 233 | self.__passphrase = passphrase 234 | self.__salt = salt 235 | self.__iterations = iterations 236 | self.__prf = prf 237 | self.__blockNum = 0 238 | self.__buf = b("") 239 | self.closed = False 240 | 241 | def close(self): 242 | """Close the stream.""" 243 | if not self.closed: 244 | del self.__passphrase 245 | del self.__salt 246 | del self.__iterations 247 | del self.__prf 248 | del self.__blockNum 249 | del self.__buf 250 | self.closed = True 251 | 252 | 253 | def crypt(word, salt=None, iterations=None): 254 | """PBKDF2-based unix crypt(3) replacement. 255 | 256 | The number of iterations specified in the salt overrides the 'iterations' 257 | parameter. 258 | 259 | The effective hash length is 192 bits. 260 | """ 261 | 262 | # Generate a (pseudo-)random salt if the user hasn't provided one. 
263 | if salt is None: 264 | salt = _makesalt() 265 | 266 | # salt must be a string or the us-ascii subset of unicode 267 | if isunicode(salt): 268 | salt = salt.encode("us-ascii").decode("us-ascii") 269 | elif isbytes(salt): 270 | salt = salt.decode("us-ascii") 271 | else: 272 | raise TypeError("salt must be a string") 273 | 274 | # word must be a string or unicode (in the latter case, we convert to UTF-8) 275 | if isunicode(word): 276 | word = word.encode("UTF-8") 277 | elif not isbytes(word): 278 | raise TypeError("word must be a string or unicode") 279 | 280 | # Try to extract the real salt and iteration count from the salt 281 | if salt.startswith("$p5k2$"): 282 | (iterations, salt, dummy) = salt.split("$")[2:5] 283 | if iterations == "": 284 | iterations = 400 285 | else: 286 | converted = int(iterations, 16) 287 | if iterations != "%x" % converted: # lowercase hex, minimum digits 288 | raise ValueError("Invalid salt") 289 | iterations = converted 290 | if not (iterations >= 1): 291 | raise ValueError("Invalid salt") 292 | 293 | # Make sure the salt matches the allowed character set 294 | allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./" 295 | for ch in salt: 296 | if ch not in allowed: 297 | raise ValueError("Illegal character %r in salt" % (ch,)) 298 | 299 | if iterations is None or iterations == 400: 300 | iterations = 400 301 | salt = "$p5k2$$" + salt 302 | else: 303 | salt = "$p5k2$%x$%s" % (iterations, salt) 304 | rawhash = PBKDF2(word, salt, iterations).read(24) 305 | return salt + "$" + b64encode(rawhash, "./") 306 | 307 | 308 | # Add crypt as a static method of the PBKDF2 class 309 | # This makes it easier to do "from PBKDF2 import PBKDF2" and still use 310 | # crypt. 311 | PBKDF2.crypt = staticmethod(crypt) 312 | 313 | 314 | def _makesalt(): 315 | """Return a 48-bit pseudorandom salt for crypt(). 316 | 317 | This function is not suitable for generating cryptographic secrets. 
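    It draws from Python's non-cryptographic `random.randint`, so it only
    serves to make hashes unique; for key material or any security-sensitive
    salt, use `os.urandom` as shown in the sample usage at the top of this module.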
318 | """ 319 | binarysalt = b("").join([pack("@H", randint(0, 0xFFFF)) for i in range(3)]) 320 | return b64encode(binarysalt, "./") 321 | 322 | 323 | # vim:set ts=4 sw=4 sts=4 expandtab: 324 | -------------------------------------------------------------------------------- /buidl/phash.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | 3 | 4 | TAG_HASH_CACHE = {} 5 | 6 | 7 | def tagged_hash(tag: bytes, msg: bytes) -> bytes: 8 | if TAG_HASH_CACHE.get(tag) is None: 9 | TAG_HASH_CACHE[tag] = hashlib.sha256(tag).digest() * 2 10 | return hashlib.sha256(TAG_HASH_CACHE[tag] + msg).digest() 11 | 12 | 13 | def hash_aux(msg): 14 | return tagged_hash(b"BIP0340/aux", msg) 15 | 16 | 17 | def hash_challenge(msg): 18 | return tagged_hash(b"BIP0340/challenge", msg) 19 | 20 | 21 | def hash_keyaggcoef(msg): 22 | return tagged_hash(b"KeyAgg coefficient", msg) 23 | 24 | 25 | def hash_keyagglist(msg): 26 | return tagged_hash(b"KeyAgg list", msg) 27 | 28 | 29 | def hash_musignonce(msg): 30 | return tagged_hash(b"MuSig/noncecoef", msg) 31 | 32 | 33 | def hash_nonce(msg): 34 | return tagged_hash(b"BIP0340/nonce", msg) 35 | 36 | 37 | def hash_tapbranch(msg): 38 | return tagged_hash(b"TapBranch", msg) 39 | 40 | 41 | def hash_tapleaf(msg): 42 | return tagged_hash(b"TapLeaf", msg) 43 | 44 | 45 | def hash_tapsighash(msg): 46 | return tagged_hash(b"TapSighash", msg) 47 | 48 | 49 | def hash_taptweak(msg): 50 | return tagged_hash(b"TapTweak", msg) 51 | -------------------------------------------------------------------------------- /buidl/psbt_helper.py: -------------------------------------------------------------------------------- 1 | from collections import defaultdict 2 | 3 | from buidl.hd import get_unhardened_child_path, HDPublicKey 4 | from buidl.psbt import MixedNetwork, NamedHDPublicKey, PSBT 5 | from buidl.tx import Tx, TxIn, TxOut 6 | from buidl.script import RedeemScript, address_to_script_pubkey 7 | 8 | 9 | def _safe_get_child_hdpubkey(xfp_dict, xfp_hex, root_path, cnt): 10 | """ 11 | Given an xfp_dict, inteligently traverse all the xpubs until you find one that can traverse to the given root_path 12 | """ 13 | for base_path, xpub_obj in xfp_dict.get(xfp_hex, {}).items(): 14 | child_path = get_unhardened_child_path( 15 | root_path=root_path, 16 | base_path=base_path, 17 | ) 18 | if child_path: 19 | if base_path.count("/") != xpub_obj.depth: 20 | msg = f"xfp_hex {xfp_hex} for in/output #{cnt} base_path mismatch: {base_path} depth != {xpub_obj.depth} for {xpub_obj}" 21 | raise ValueError(msg) 22 | return xpub_obj.traverse(child_path) 23 | raise ValueError( 24 | f"xfp_hex {xfp_hex} with {root_path} for in/output #{cnt} not supplied in xpub_dict" 25 | ) 26 | 27 | 28 | def create_multisig_psbt( 29 | public_key_records, 30 | input_dicts, 31 | output_dicts, 32 | fee_sats, 33 | script_type="p2sh", 34 | ): 35 | """ 36 | Helper method to create a multisig PSBT whose change can be validated. 37 | 38 | network (testnet/mainnet/signet) will be inferred from xpubs/tpubs. 39 | 40 | public_key_records are a list of entries that loom like this: [xfp_hex, xpub_b58, base_path] 41 | # TODO: turn this into a new object? 
42 | """ 43 | if script_type != "p2sh": 44 | raise NotImplementedError(f"script_type {script_type} not yet implemented") 45 | 46 | # initialize variables 47 | network = None 48 | tx_lookup, pubkey_lookup, redeem_lookup, hd_pubs = {}, {}, {}, {} 49 | 50 | # Use a nested default dict for increased readability 51 | # It's possible (though nonstandard) for one xfp to have multiple public_key_records in a multisig wallet 52 | # https://stackoverflow.com/a/19189356 53 | recursive_defaultdict = lambda: defaultdict(recursive_defaultdict) # noqa: E731 54 | xfp_dict = recursive_defaultdict() 55 | 56 | # This at the child pubkey lookup that each input will traverse off of 57 | for xfp_hex, xpub_b58, base_path in public_key_records: 58 | hd_pubkey_obj = HDPublicKey.parse(xpub_b58) 59 | 60 | # We will use this dict/list structure for each input/ouput in the for-loops below 61 | xfp_dict[xfp_hex][base_path] = hd_pubkey_obj 62 | 63 | named_global_hd_pubkey_obj = NamedHDPublicKey.from_hd_pub( 64 | child_hd_pub=hd_pubkey_obj, 65 | xfp_hex=xfp_hex, 66 | # we're only going to base path level 67 | path=base_path, 68 | ) 69 | hd_pubs[named_global_hd_pubkey_obj.serialize()] = named_global_hd_pubkey_obj 70 | 71 | if network is None: 72 | # Set the initial value 73 | network = hd_pubkey_obj.network 74 | else: 75 | # Confirm it hasn't changed 76 | if network != hd_pubkey_obj.network: 77 | raise MixedNetwork( 78 | f"Mixed networks in public key records: {public_key_records}" 79 | ) 80 | 81 | tx_ins, total_input_sats = [], 0 82 | for cnt, input_dict in enumerate(input_dicts): 83 | # Prev tx stuff 84 | prev_tx_dict = input_dict["prev_tx_dict"] 85 | prev_tx_obj = Tx.parse_hex(prev_tx_dict["hex"], network=network) 86 | tx_lookup[prev_tx_obj.hash()] = prev_tx_obj 87 | 88 | if prev_tx_dict["hash_hex"] != prev_tx_obj.hash().hex(): 89 | raise ValueError( 90 | f"Hash digest mismatch for input #{cnt}: {prev_tx_dict['hash_hex']} != {prev_tx_obj.hash().hex()}" 91 | ) 92 | 93 | if "path_dict" in input_dict: 94 | # Standard BIP67 unordered list of pubkeys (will be sorted lexicographically) 95 | iterator = input_dict["path_dict"].items() 96 | sort_keys = True 97 | elif "path_list" in input_dict: 98 | # Caller supplied ordering of pubkeys (will not be sorted) 99 | iterator = input_dict["path_list"] 100 | sort_keys = False 101 | else: 102 | raise RuntimeError( 103 | f"input_dict has no `path_dict` nor a `path_list`: {input_dict}" 104 | ) 105 | 106 | input_pubkey_hexes = [] 107 | for xfp_hex, root_path in iterator: 108 | # Get the correct xpub/path 109 | child_hd_pubkey = _safe_get_child_hdpubkey( 110 | xfp_dict=xfp_dict, 111 | xfp_hex=xfp_hex, 112 | root_path=root_path, 113 | cnt=cnt, 114 | ) 115 | input_pubkey_hexes.append(child_hd_pubkey.sec().hex()) 116 | 117 | # Enhance the PSBT 118 | named_hd_pubkey_obj = NamedHDPublicKey.from_hd_pub( 119 | child_hd_pub=child_hd_pubkey, 120 | xfp_hex=xfp_hex, 121 | path=root_path, 122 | ) 123 | # pubkey lookups needed for validation 124 | pubkey_lookup[named_hd_pubkey_obj.sec()] = named_hd_pubkey_obj 125 | 126 | utxo = prev_tx_obj.tx_outs[prev_tx_dict["output_idx"]] 127 | 128 | # Grab amount as developer safety check 129 | if prev_tx_dict["output_sats"] != utxo.amount: 130 | raise ValueError( 131 | f"Wrong number of sats for input #{cnt}! 
Expecting {prev_tx_dict['output_sats']} but got {utxo.amount}" 132 | ) 133 | total_input_sats += utxo.amount 134 | 135 | redeem_script = RedeemScript.create_p2sh_multisig( 136 | quorum_m=input_dict["quorum_m"], 137 | pubkey_hexes=input_pubkey_hexes, 138 | sort_keys=sort_keys, 139 | expected_addr=utxo.script_pubkey.address(network=network), 140 | expected_addr_network=network, 141 | ) 142 | 143 | # Confirm address matches previous ouput 144 | if redeem_script.address(network=network) != utxo.script_pubkey.address( 145 | network=network 146 | ): 147 | raise ValueError( 148 | f"Invalid redeem script for input #{cnt}. Expecting {redeem_script.address(network=network)} but got {utxo.script_pubkey.address(network=network)}" 149 | ) 150 | 151 | tx_in = TxIn(prev_tx=prev_tx_obj.hash(), prev_index=prev_tx_dict["output_idx"]) 152 | tx_ins.append(tx_in) 153 | 154 | # For enhancing the PSBT for HWWs: 155 | redeem_lookup[redeem_script.hash160()] = redeem_script 156 | 157 | tx_outs = [] 158 | for cnt, output_dict in enumerate(output_dicts): 159 | tx_out = TxOut( 160 | amount=output_dict["sats"], 161 | script_pubkey=address_to_script_pubkey(output_dict["address"]), 162 | ) 163 | tx_outs.append(tx_out) 164 | 165 | if output_dict.get("path_dict"): 166 | # This output claims to be change, so we must validate it here 167 | output_pubkey_hexes = [] 168 | for xfp_hex, root_path in output_dict["path_dict"].items(): 169 | child_hd_pubkey = _safe_get_child_hdpubkey( 170 | xfp_dict=xfp_dict, 171 | xfp_hex=xfp_hex, 172 | root_path=root_path, 173 | cnt=cnt, 174 | ) 175 | output_pubkey_hexes.append(child_hd_pubkey.sec().hex()) 176 | 177 | # Enhance the PSBT 178 | named_hd_pubkey_obj = NamedHDPublicKey.from_hd_pub( 179 | child_hd_pub=child_hd_pubkey, 180 | xfp_hex=xfp_hex, 181 | path=root_path, 182 | ) 183 | pubkey_lookup[named_hd_pubkey_obj.sec()] = named_hd_pubkey_obj 184 | 185 | redeem_script = RedeemScript.create_p2sh_multisig( 186 | quorum_m=output_dict["quorum_m"], 187 | pubkey_hexes=output_pubkey_hexes, 188 | # We intentionally only allow change addresses to be lexicographically sorted 189 | sort_keys=True, 190 | ) 191 | # Confirm address matches previous ouput 192 | if redeem_script.address(network=network) != output_dict["address"]: 193 | raise ValueError( 194 | f"Invalid redeem script for output #{cnt}. 
Expecting {redeem_script.address(network=network)} but got {output_dict['address']}" 195 | ) 196 | 197 | # For enhancing the PSBT for HWWs: 198 | redeem_lookup[redeem_script.hash160()] = redeem_script 199 | 200 | tx_obj = Tx( 201 | version=1, 202 | tx_ins=tx_ins, 203 | tx_outs=tx_outs, 204 | locktime=0, 205 | network=network, 206 | segwit=False, 207 | ) 208 | 209 | # Safety check to try and prevent footgun 210 | 211 | calculated_fee_sats = total_input_sats - sum([tx_out.amount for tx_out in tx_outs]) 212 | if fee_sats != calculated_fee_sats: 213 | raise ValueError( 214 | f"TX fee of {fee_sats} sats supplied != {calculated_fee_sats} sats calculated" 215 | ) 216 | 217 | return PSBT.create( 218 | tx_obj=tx_obj, 219 | validate=True, 220 | tx_lookup=tx_lookup, 221 | pubkey_lookup=pubkey_lookup, 222 | redeem_lookup=redeem_lookup, 223 | witness_lookup={}, 224 | hd_pubs=hd_pubs, 225 | ) 226 | -------------------------------------------------------------------------------- /buidl/siphash.py: -------------------------------------------------------------------------------- 1 | r""" 2 | 3 | Copyright (c) 2013 Marek Majkowski 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | 23 | 24 | 25 | SipHash-2-4 implementation, following the 'hashlib' API: 26 | 27 | >>> key = b'0123456789ABCDEF' 28 | >>> SipHash_2_4(key, b'a').hexdigest() 29 | b'864c339cb0dc0fac' 30 | >>> SipHash_2_4(key, b'a').digest() 31 | b'\x86L3\x9c\xb0\xdc\x0f\xac' 32 | >>> SipHash_2_4(key, b'a').hash() 33 | 12398370950267227270 34 | >>> SipHash_2_4(key).update(b'a').hash() 35 | 12398370950267227270 36 | 37 | >>> key = b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f' 38 | >>> SipHash_2_4(key, b'').hash() 39 | 8246050544436514353 40 | >>> SipHash_2_4(key, b'').hexdigest() 41 | b'310e0edd47db6f72' 42 | 43 | """ 44 | import struct 45 | import binascii 46 | 47 | 48 | def _doublesipround(v, m): 49 | """ 50 | Internal helper. Xors 'm' to 'v3', runs two rounds of siphash on 51 | vector 'v' and xors 'm' to 'v0'. 
52 | 53 | >>> _doublesipround((1,2,3,4),0) 54 | (9263201270060220426, 2307743542053503000, 5255419393243893904, 10208987565802066018) 55 | >>> _doublesipround((1,2,3,4),0xff) 56 | (11557575153743626750, 2307780510495171325, 7519994316568162407, 5442382437785464174) 57 | >>> _doublesipround((0,0,0,0),0) 58 | (0, 0, 0, 0) 59 | >>> _doublesipround((0,0,0,0),0xff) 60 | (2368684213854535680, 36416423977725, 2305811110491594975, 15626573430810475768) 61 | """ 62 | a, b, c, d = v 63 | d ^= m 64 | 65 | e = (a + b) & 0xFFFFFFFFFFFFFFFF 66 | i = (((b & 0x7FFFFFFFFFFFF) << 13) | (b >> 51)) ^ e 67 | f = c + d 68 | j = ((((d) << 16) | (d >> 48)) ^ f) & 0xFFFFFFFFFFFFFFFF 69 | h = (f + i) & 0xFFFFFFFFFFFFFFFF 70 | 71 | k = ((e << 32) | (e >> 32)) + j 72 | l = (((i & 0x7FFFFFFFFFFF) << 17) | (i >> 47)) ^ h 73 | o = (((j << 21) | (j >> 43)) ^ k) & 0xFFFFFFFFFFFFFFFF 74 | 75 | p = (k + l) & 0xFFFFFFFFFFFFFFFF 76 | q = (((l & 0x7FFFFFFFFFFFF) << 13) | (l >> 51)) ^ p 77 | r = ((h << 32) | (h >> 32)) + o 78 | s = (((o << 16) | (o >> 48)) ^ r) & 0xFFFFFFFFFFFFFFFF 79 | t = (r + q) & 0xFFFFFFFFFFFFFFFF 80 | u = (((p << 32) | (p >> 32)) + s) & 0xFFFFFFFFFFFFFFFF 81 | 82 | return ( 83 | u ^ m, 84 | (((q & 0x7FFFFFFFFFFF) << 17) | (q >> 47)) ^ t, 85 | ((t & 0xFFFFFFFF) << 32) | (t >> 32), 86 | (((s & 0x7FFFFFFFFFF) << 21) | (s >> 43)) ^ u, 87 | ) 88 | 89 | 90 | _zeroes = b"\x00\x00\x00\x00\x00\x00\x00\x00" 91 | _oneQ = struct.Struct(">> SipHash_2_4(b'0123456789ABCDEF', b'a').hash() 98 | 12398370950267227270 99 | >>> SipHash_2_4(b'0123456789ABCDEF', b'').hash() 100 | 3627314469837380007 101 | >>> SipHash_2_4(b'FEDCBA9876543210', b'').hash() 102 | 2007056766899708634 103 | >>> SipHash_2_4(b'FEDCBA9876543210').update(b'').update(b'').hash() 104 | 2007056766899708634 105 | >>> SipHash_2_4(b'FEDCBA9876543210', b'a').hash() 106 | 6581475155582014123 107 | >>> SipHash_2_4(b'FEDCBA9876543210').update(b'a').hash() 108 | 6581475155582014123 109 | >>> SipHash_2_4(b'FEDCBA9876543210').update(b'a').update(b'').hash() 110 | 6581475155582014123 111 | >>> SipHash_2_4(b'FEDCBA9876543210').update(b'').update(b'a').hash() 112 | 6581475155582014123 113 | 114 | >>> a = SipHash_2_4(b'FEDCBA9876543210').update(b'a') 115 | >>> a.hash() 116 | 6581475155582014123 117 | >>> b = a.copy() 118 | >>> a.hash(), b.hash() 119 | (6581475155582014123, 6581475155582014123) 120 | >>> a.update(b'a') and None 121 | >>> a.hash(), b.hash() 122 | (3258273892680892829, 6581475155582014123) 123 | """ 124 | digest_size = 16 125 | block_size = 64 126 | 127 | s = b"" 128 | b = 0 129 | 130 | def __init__(self, secret, s=b""): 131 | # key's encoded as little endian 132 | k0, k1 = _twoQ.unpack(secret) 133 | self.v = ( 134 | 0x736F6D6570736575 ^ k0, 135 | 0x646F72616E646F6D ^ k1, 136 | 0x6C7967656E657261 ^ k0, 137 | 0x7465646279746573 ^ k1, 138 | ) 139 | self.update(s) 140 | 141 | def update(self, s): 142 | s = self.s + s 143 | lim = (len(s) // 8) * 8 144 | v = self.v 145 | off = 0 146 | 147 | for off in range(0, lim, 8): 148 | (m,) = _oneQ.unpack_from(s, off) 149 | 150 | # print 'v0 %016x' % v[0] 151 | # print 'v1 %016x' % v[1] 152 | # print 'v2 %016x' % v[2] 153 | # print 'v3 %016x' % v[3] 154 | # print 'compress %016x' % m 155 | 156 | v = _doublesipround(v, m) 157 | self.v = v 158 | self.b += lim 159 | self.s = s[lim:] 160 | return self 161 | 162 | def hash(self): 163 | l = len(self.s) 164 | assert l < 8 165 | 166 | b = ((self.b + l) & 0xFF) << 56 167 | b |= _oneQ.unpack_from(self.s + _zeroes)[0] 168 | v = self.v 169 | 170 | # print 'v0 %016x' % v[0] 171 | # 
print 'v1 %016x' % v[1] 172 | # print 'v2 %016x' % v[2] 173 | # print 'v3 %016x' % v[3] 174 | # print 'padding %016x' % b 175 | 176 | v = _doublesipround(v, b) 177 | 178 | # print 'v0 %016x' % v0 179 | # print 'v1 %016x' % v1 180 | # print 'v2 %016x' % v2 181 | # print 'v3 %016x' % v3 182 | 183 | v = list(v) 184 | v[2] ^= 0xFF 185 | v = _doublesipround(_doublesipround(v, 0), 0) 186 | return v[0] ^ v[1] ^ v[2] ^ v[3] 187 | 188 | def digest(self): 189 | return _oneQ.pack(self.hash()) 190 | 191 | def hexdigest(self): 192 | return binascii.hexlify(self.digest()) 193 | 194 | def copy(self): 195 | n = SipHash_2_4(_zeroes * 2) 196 | n.v, n.s, n.b = self.v, self.s, self.b 197 | return n 198 | 199 | 200 | siphash24 = SipHash_2_4 201 | SipHash24 = SipHash_2_4 202 | 203 | 204 | if __name__ == "__main__": 205 | # Test vectors as per spec 206 | vectors = [ 207 | c.encode("utf-8") 208 | for c in [ 209 | "310e0edd47db6f72", 210 | "fd67dc93c539f874", 211 | "5a4fa9d909806c0d", 212 | "2d7efbd796666785", 213 | "b7877127e09427cf", 214 | "8da699cd64557618", 215 | "cee3fe586e46c9cb", 216 | "37d1018bf50002ab", 217 | "6224939a79f5f593", 218 | "b0e4a90bdf82009e", 219 | "f3b9dd94c5bb5d7a", 220 | "a7ad6b22462fb3f4", 221 | "fbe50e86bc8f1e75", 222 | "903d84c02756ea14", 223 | "eef27a8e90ca23f7", 224 | "e545be4961ca29a1", 225 | "db9bc2577fcc2a3f", 226 | "9447be2cf5e99a69", 227 | "9cd38d96f0b3c14b", 228 | "bd6179a71dc96dbb", 229 | "98eea21af25cd6be", 230 | "c7673b2eb0cbf2d0", 231 | "883ea3e395675393", 232 | "c8ce5ccd8c030ca8", 233 | "94af49f6c650adb8", 234 | "eab8858ade92e1bc", 235 | "f315bb5bb835d817", 236 | "adcf6b0763612e2f", 237 | "a5c91da7acaa4dde", 238 | "716595876650a2a6", 239 | "28ef495c53a387ad", 240 | "42c341d8fa92d832", 241 | "ce7cf2722f512771", 242 | "e37859f94623f3a7", 243 | "381205bb1ab0e012", 244 | "ae97a10fd434e015", 245 | "b4a31508beff4d31", 246 | "81396229f0907902", 247 | "4d0cf49ee5d4dcca", 248 | "5c73336a76d8bf9a", 249 | "d0a704536ba93e0e", 250 | "925958fcd6420cad", 251 | "a915c29bc8067318", 252 | "952b79f3bc0aa6d4", 253 | "f21df2e41d4535f9", 254 | "87577519048f53a9", 255 | "10a56cf5dfcd9adb", 256 | "eb75095ccd986cd0", 257 | "51a9cb9ecba312e6", 258 | "96afadfc2ce666c7", 259 | "72fe52975a4364ee", 260 | "5a1645b276d592a1", 261 | "b274cb8ebf87870a", 262 | "6f9bb4203de7b381", 263 | "eaecb2a30b22a87f", 264 | "9924a43cc1315724", 265 | "bd838d3aafbf8db7", 266 | "0b1a2a3265d51aea", 267 | "135079a3231ce660", 268 | "932b2846e4d70666", 269 | "e1915f5cb1eca46c", 270 | "f325965ca16d629f", 271 | "575ff28e60381be5", 272 | "724506eb4c328a95", 273 | ] 274 | ] 275 | 276 | key = "".join(chr(i) for i in range(16)).encode("utf-8") 277 | plaintext = "".join(chr(i) for i in range(64)).encode("utf-8") 278 | for i in range(64): 279 | assert SipHash_2_4(key, plaintext[:i]).hexdigest() == vectors[i], ( 280 | "failed on test no %i" % i 281 | ) 282 | 283 | # Internal doctests 284 | # 285 | # To maintain compatibility with both python 2.x and 3.x in tests 286 | # we need to do a trick. Python 2.x doesn't like b'' notation, 287 | # Python 3.x doesn't have 2222L long integers notation. To 288 | # overcome that we'll pipe both results as well as the intended 289 | # doctest output through an `eval` function before comparison. 
To 290 | # do it we need to monkeypatch the OutputChecker: 291 | import doctest 292 | 293 | EVAL_FLAG = doctest.register_optionflag("EVAL") 294 | OrigOutputChecker = doctest.OutputChecker 295 | 296 | def relaxed_eval(s): 297 | if s.strip(): 298 | return eval(s) 299 | else: 300 | return None 301 | 302 | class MyOutputChecker: 303 | def __init__(self): 304 | self.orig = OrigOutputChecker() 305 | 306 | def check_output(self, want, got, optionflags): 307 | if optionflags & EVAL_FLAG: 308 | return relaxed_eval(got) == relaxed_eval(want) 309 | else: 310 | return self.orig.check_output(want, got, optionflags) 311 | 312 | def output_difference(self, example, got, optionflags): 313 | return self.orig.output_difference(example, got, optionflags) 314 | 315 | doctest.OutputChecker = MyOutputChecker 316 | # Monkey patching done. Go for doctests: 317 | 318 | if doctest.testmod(optionflags=EVAL_FLAG)[0] == 0: 319 | print("all tests ok") 320 | -------------------------------------------------------------------------------- /buidl/test/__init__.py: -------------------------------------------------------------------------------- 1 | from os.path import dirname, realpath, sep 2 | from os import getenv 3 | from unittest import TestCase 4 | 5 | from buidl.tx import TxFetcher 6 | 7 | 8 | class OfflineTestCase(TestCase): 9 | cache_file = dirname(realpath(__file__)) + sep + "tx.cache" 10 | 11 | @classmethod 12 | def setUpClass(cls): 13 | # fill with cache so we don't have to be online to run these tests 14 | TxFetcher.load_cache(cls.cache_file) 15 | 16 | def test_socket_guard(self): 17 | if getenv("INCLUDE_NETWORK_TESTS"): 18 | return 19 | 20 | with self.assertRaises(Exception) as cm: 21 | TxFetcher.fetch(tx_id="0" * 32) 22 | 23 | self.assertIn("Unit test requires internet", str(cm.exception)) 24 | -------------------------------------------------------------------------------- /buidl/test/conftest.py: -------------------------------------------------------------------------------- 1 | import socket 2 | 3 | from os import getenv 4 | 5 | # Disable networking during pytest 6 | # https://www.tonylykke.com/posts/2018/07/31/disabling-the-internet-for-pytest/ 7 | 8 | 9 | def guard(*args, **kwargs): 10 | raise Exception( 11 | "Unit test requires internet, perhaps you need to update test/tx.cache?" 
12 | ) 13 | 14 | 15 | if not getenv("INCLUDE_NETWORK_TESTS"): 16 | socket.socket = guard 17 | -------------------------------------------------------------------------------- /buidl/test/test_bech32.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl.bech32 import ( 4 | encode_bech32_checksum, 5 | decode_bech32, 6 | BECH32_ALPHABET, 7 | ) 8 | 9 | 10 | class Bech32Test(TestCase): 11 | def test_bech32(self): 12 | tests = [ 13 | { 14 | "hex_script": "00201863143c14c5166804bd19203356da136c985678cd4d27a1b8c6329604903262", 15 | "mainnet": "bc1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3qccfmv3", 16 | "testnet": "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7", 17 | "signet": "tb1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3q0sl5k7", 18 | "regtest": "bcrt1qrp33g0q5c5txsp9arysrx4k6zdkfs4nce4xj0gdcccefvpysxf3qzf4jry", 19 | }, 20 | { 21 | "hex_script": "00200000000000000000000000000000000000000000000000000000000000000000", 22 | "mainnet": "bc1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqthqst8", 23 | "testnet": "tb1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqulkl3g", 24 | "signet": "tb1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqulkl3g", 25 | "regtest": "bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq3xueyj", 26 | }, 27 | { 28 | "hex_script": "00140000000000000000000000000000000000000000", 29 | "mainnet": "bc1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq9e75rs", 30 | "testnet": "tb1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq0l98cr", 31 | "signet": "tb1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq0l98cr", 32 | "regtest": "bcrt1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqdku202", 33 | }, 34 | { 35 | "hex_script": "0020000000c4a5cad46221b2a187905e5266362b99d5e91c6ce24d165dab93e86433", 36 | "mainnet": "bc1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvses5wp4dt", 37 | "testnet": "tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy", 38 | "signet": "tb1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesrxh6hy", 39 | "regtest": "bcrt1qqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvseswlauz7", 40 | }, 41 | { 42 | "hex_script": "5128751e76e8199196d454941c45d1b3a323f1433bd6751e76e8199196d454941c45d1b3a323f1433bd6", 43 | "mainnet": "bc1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kt5nd6y", 44 | "testnet": "tb1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kxwkgjv", 45 | "signet": "tb1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7kxwkgjv", 46 | "regtest": "bcrt1pw508d6qejxtdg4y5r3zarvary0c5xw7kw508d6qejxtdg4y5r3zarvary0c5xw7k0ylj56", 47 | }, 48 | { 49 | "hex_script": "5120000000c4a5cad46221b2a187905e5266362b99d5e91c6ce24d165dab93e86433", 50 | "mainnet": "bc1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvses7epu4h", 51 | "testnet": "tb1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesf3hn0c", 52 | "signet": "tb1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesf3hn0c", 53 | "regtest": "bcrt1pqqqqp399et2xygdj5xreqhjjvcmzhxw4aywxecjdzew6hylgvsesyga46z", 54 | }, 55 | { 56 | "hex_script": "512079be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798", 57 | "mainnet": "bc1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqzk5jj0", 58 | "testnet": "tb1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vq47zagq", 59 | "signet": "tb1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vq47zagq", 60 | "regtest": "bcrt1p0xlxvlhemja6c4dqv22uapctqupfhlxm9h8z3k2e72q4k9hcz7vqc8gma6", 61 | }, 62 | ] 63 | for test in tests: 64 | # 
Special-case signet here because it will decode as a testnet-inferred 65 | # address. 66 | raw = bytes.fromhex(test["hex_script"]) 67 | want = test["signet"] 68 | version = BECH32_ALPHABET.index(want[3:4]) 69 | result = encode_bech32_checksum(raw, network="signet") 70 | self.assertEqual(result, want) 71 | got_network, got_version, got_raw = decode_bech32(result) 72 | self.assertEqual(got_network, "testnet") 73 | self.assertEqual(got_version, version) 74 | self.assertEqual(got_raw, raw[2:]) 75 | 76 | for network in ("mainnet", "testnet", "regtest"): 77 | ver_index = 3 78 | if network == "regtest": 79 | # Account for the two extra letters ("rt") in the regtest prefix. 80 | ver_index += 2 81 | 82 | want = test[network] 83 | version = BECH32_ALPHABET.index(want[ver_index]) 84 | result = encode_bech32_checksum(raw, network=network) 85 | self.assertEqual(result, want) 86 | got_network, got_version, got_raw = decode_bech32(result) 87 | self.assertEqual(got_network, network) 88 | self.assertEqual(got_version, version) 89 | self.assertEqual(got_raw, raw[2:]) 90 | -------------------------------------------------------------------------------- /buidl/test/test_blinding.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl.blinding import blind_xpub, combine_bip32_paths, secure_secret_path 4 | from buidl.hd import HDPrivateKey, is_valid_bip32_path 5 | 6 | 7 | class BlindingTest(TestCase): 8 | def test_generate_secure_secret_path(self): 9 | for depth in range(1, 8): 10 | res = secure_secret_path(depth=depth) 11 | self.assertTrue(is_valid_bip32_path(res)) 12 | self.assertEqual(depth, res.count("/")) 13 | 14 | def test_generate_secure_secret_path_fail(self): 15 | with self.assertRaises(ValueError): 16 | secure_secret_path(depth="foo") 17 | secure_secret_path(depth=-1) 18 | secure_secret_path(depth=0) 19 | secure_secret_path(depth=257) 20 | 21 | def test_combine_paths(self): 22 | self.assertEqual(combine_bip32_paths("m/1", "m/2/3"), "m/1/2/3") 23 | self.assertEqual(combine_bip32_paths("m/1h", "m/2/3"), "m/1h/2/3") 24 | self.assertEqual( 25 | combine_bip32_paths("m/48h/1h/0h/2h", "m/1/2/3/4/5"), 26 | "m/48h/1h/0h/2h/1/2/3/4/5", 27 | ) 28 | self.assertEqual(combine_bip32_paths("m", "m/1"), "m/1") 29 | self.assertEqual(combine_bip32_paths("m/1", "m"), "m/1") 30 | self.assertEqual(combine_bip32_paths("m", "m"), "m") 31 | 32 | def test_combine_paths_error(self): 33 | with self.assertRaises(ValueError): 34 | combine_bip32_paths("m/foo", "m/1/2/3") 35 | combine_bip32_paths("m/1/2/3", "m/foo") 36 | 37 | def test_blind_m48_vpub(self): 38 | # All test vectors below compared manually to https://iancoleman.io/bip39/ and then converted with https://jlopp.github.io/xpub-converter/ 39 | starting_path = "m/48h/1h/0h/2h" 40 | # Vpub version bytes 41 | starting_vpub = ( 42 | HDPrivateKey.from_mnemonic("oil " * 12) 43 | .traverse(starting_path) 44 | .xpub(bytes.fromhex("02575483")) 45 | ) 46 | self.assertEqual( 47 | starting_vpub, 48 | "Vpub5mvQbnmqKfpPjWfAZEw5Xjdr6UjnjyZEirzrhNMSuKjL8Qfd3nqLBkrBrVXNeMgKCjPXbyLnSCn6qcD8fHQCkNnNLnkpQtY3sh4MHmywvbe", 49 | ) 50 | secret_path = "m/920870093/318569592/821713943/1914815254/1398142787/9" # randomly generated 51 | have = blind_xpub( 52 | starting_xpub=starting_vpub, 53 | starting_path=starting_path, 54 | secret_path=secret_path, 55 | ) 56 | want = { 57 | "blinded_full_path": "m/48h/1h/0h/2h/920870093/318569592/821713943/1914815254/1398142787/9", 58 | "blinded_child_xpub": 
"Vpub5xWzaq6Ya7K9Y2UeFyL8SfHhydnp5FZDskm9mkAfUppkXVJDKrnLxU2Ezd55k8RSzSf4YETJj982NJQGCSzKJxUa6oQmbe1HWTRavCRzzxj", 59 | } 60 | self.assertEqual(have, want) 61 | 62 | def test_blind_m48_xpub(self): 63 | # All test vectors below compared manually to https://iancoleman.io/bip39/ and then converted with https://jlopp.github.io/xpub-converter/ 64 | starting_path = "m/48h/0h/0h/2h" 65 | starting_xpub = ( 66 | HDPrivateKey.from_mnemonic("oil " * 12).traverse(starting_path).xpub() 67 | ) 68 | 69 | self.assertEqual( 70 | starting_xpub, 71 | "xpub6F8WgTkiV8iDPFG1Kv4sNrcBNMMgKK4cjfxjdZWvR3kChfbt3L2dJF7xmCHBMGMmxjyzwgjdFkh9UN3623YpsmqN1KwZGR45Y3ANLQQX87u", 72 | ) 73 | secret_path = "m/920870093/318569592/821713943/1914815254/1398142787/9" # randomly generated 74 | have = blind_xpub( 75 | starting_xpub=starting_xpub, 76 | starting_path=starting_path, 77 | secret_path=secret_path, 78 | ) 79 | want = { 80 | "blinded_full_path": "m/48h/0h/0h/2h/920870093/318569592/821713943/1914815254/1398142787/9", 81 | "blinded_child_xpub": "xpub6RKvkus7fgUnP2trNVo2N1jaQeGPBHCV9m63Jaje8EW2Ry5KjYySb1tbPSi3E7Vh6ZzxRn15hUmBg5KmSaQvKjZmTvXKQnRPXcoJS9PXkiS", 82 | } 83 | self.assertEqual(have, want) 84 | 85 | def test_blind_root_xpub(self): 86 | # All test vectors below compared manually to https://iancoleman.io/bip39/ 87 | 88 | starting_path = "m" 89 | # The .traverse() here does nothing and is optional, just here for consistency/clarity: 90 | root_xpub = ( 91 | HDPrivateKey.from_mnemonic("bacon " * 24).traverse(starting_path).xpub() 92 | ) 93 | 94 | self.assertEqual( 95 | root_xpub, 96 | "xpub661MyMwAqRbcGMzZtBZrKWaDMAQKjd5bMSnHr8qBfCz5zMwZEuajnc8cxjsEUECAZDeDC7s4zbT3Z9KrrM9wJ3MaT6sH9eRYxLY1BNf45BF", 97 | ) 98 | secret_path = "m/1/2/3/4/5/6/7/8/9" 99 | have = blind_xpub( 100 | starting_xpub=root_xpub, starting_path="m", secret_path=secret_path 101 | ) 102 | want = { 103 | "blinded_full_path": secret_path, # the same since we started with the root path 104 | "blinded_child_xpub": "xpub6QbmAk7amacgfQVY3aTAiqhM1okb4Gi3EFzBJQMEja1uU4gZvG9vwiExr2C1ryZqnyczTvvaNhw63UALmXGkFo7FHhajX8SNimgK3zu3tp5", 105 | } 106 | self.assertEqual(have, want) 107 | 108 | starting_path = "m/48h/0h/0h/2h" 109 | child_xpub = ( 110 | HDPrivateKey.from_mnemonic("bacon " * 24).traverse(starting_path).xpub() 111 | ) 112 | self.assertEqual( 113 | child_xpub, 114 | "xpub6EeqK2JLwngrHJEQ4X4iqrySZV9qU3TgwMgf6NStLZa37AfNiHTtTE9ji1F9YQDLArJMLy8sw3Q2samVj5VQQjaaUHr5z2Hz57NWHJCfh31", 115 | ) 116 | secret_path = "m/920870093/318569592/821713943/1914815254/1398142787/9" # randomly generated 117 | have = blind_xpub( 118 | starting_xpub=child_xpub, 119 | starting_path=starting_path, 120 | secret_path=secret_path, 121 | ) 122 | want = { 123 | "blinded_full_path": "m/48h/0h/0h/2h/920870093/318569592/821713943/1914815254/1398142787/9", 124 | "blinded_child_xpub": "xpub6SDyub38LYeUd211VxRZCiVnU4fSr4ZwPssFGMxyyXjZ6EvrA4ZkZhp4f9My2qzqSJFHxkAv8ctaGipx7UM6sCzmso7wxiohHBhcBs7ipWz", 125 | } 126 | self.assertEqual(have, want) 127 | 128 | def test_bad_depth(self): 129 | starting_path = "m/48h/0h/0h/2h" 130 | starting_xpub = ( 131 | HDPrivateKey.from_mnemonic("bacon " * 24).traverse(starting_path).xpub() 132 | ) 133 | with self.assertRaises(ValueError): 134 | blind_xpub( 135 | starting_xpub=starting_xpub, starting_path="m/1", secret_path="m/999" 136 | ) 137 | -------------------------------------------------------------------------------- /buidl/test/test_block.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 
| from io import BytesIO 3 | 4 | from buidl.block import Block 5 | 6 | 7 | class BlockTest(TestCase): 8 | def test_parse(self): 9 | block_raw = bytes.fromhex( 10 | "0100000000000000000000000000000000000000000000000000000000000000000000003ba3edfd7a7b12b27ac72c3e67768f617fc81bc3888a51323a9fb8aa4b1e5e4a29ab5f49ffff001d1dac2b7c0101000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e206272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000" 11 | ) 12 | stream = BytesIO(block_raw) 13 | block = Block.parse(stream) 14 | self.assertEqual(block.merkle_root.hex(), block.tx_hashes[0].hex()) 15 | 16 | def test_parse_header(self): 17 | block_raw = bytes.fromhex( 18 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 19 | ) 20 | stream = BytesIO(block_raw) 21 | block = Block.parse_header(stream) 22 | self.assertEqual(block.version, 0x20000002) 23 | want = bytes.fromhex( 24 | "000000000000000000fd0c220a0a8c3bc5a7b487e8c8de0dfa2373b12894c38e" 25 | ) 26 | self.assertEqual(block.prev_block, want) 27 | want = bytes.fromhex( 28 | "be258bfd38db61f957315c3f9e9c5e15216857398d50402d5089a8e0fc50075b" 29 | ) 30 | self.assertEqual(block.merkle_root, want) 31 | self.assertEqual(block.timestamp, 0x59A7771E) 32 | self.assertEqual(block.bits, bytes.fromhex("e93c0118")) 33 | self.assertEqual(block.nonce, bytes.fromhex("a4ffd71d")) 34 | 35 | def test_serialize(self): 36 | block_raw = bytes.fromhex( 37 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 38 | ) 39 | stream = BytesIO(block_raw) 40 | block = Block.parse_header(stream) 41 | self.assertEqual(block.serialize(), block_raw) 42 | 43 | def test_hash(self): 44 | block_raw = bytes.fromhex( 45 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 46 | ) 47 | stream = BytesIO(block_raw) 48 | block = Block.parse_header(stream) 49 | self.assertEqual( 50 | block.hash(), 51 | bytes.fromhex( 52 | "0000000000000000007e9e4c586439b0cdbe13b1370bdd9435d76a644d047523" 53 | ), 54 | ) 55 | 56 | def test_bip9(self): 57 | block_raw = bytes.fromhex( 58 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 59 | ) 60 | stream = BytesIO(block_raw) 61 | block = Block.parse_header(stream) 62 | self.assertTrue(block.bip9()) 63 | block_raw = bytes.fromhex( 64 | "0400000039fa821848781f027a2e6dfabbf6bda920d9ae61b63400030000000000000000ecae536a304042e3154be0e3e9a8220e5568c3433a9ab49ac4cbb74f8df8e8b0cc2acf569fb9061806652c27" 65 | ) 66 | stream = BytesIO(block_raw) 67 | block = Block.parse_header(stream) 68 | self.assertFalse(block.bip9()) 69 | 70 | def test_bip91(self): 71 | block_raw = bytes.fromhex( 72 | "1200002028856ec5bca29cf76980d368b0a163a0bb81fc192951270100000000000000003288f32a2831833c31a25401c52093eb545d28157e200a64b21b3ae8f21c507401877b5935470118144dbfd1" 73 | ) 74 | stream = BytesIO(block_raw) 75 | block = Block.parse_header(stream) 76 | 
self.assertTrue(block.bip91()) 77 | block_raw = bytes.fromhex( 78 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 79 | ) 80 | stream = BytesIO(block_raw) 81 | block = Block.parse_header(stream) 82 | self.assertFalse(block.bip91()) 83 | 84 | def test_bip141(self): 85 | block_raw = bytes.fromhex( 86 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 87 | ) 88 | stream = BytesIO(block_raw) 89 | block = Block.parse_header(stream) 90 | self.assertTrue(block.bip141()) 91 | block_raw = bytes.fromhex( 92 | "0000002066f09203c1cf5ef1531f24ed21b1915ae9abeb691f0d2e0100000000000000003de0976428ce56125351bae62c5b8b8c79d8297c702ea05d60feabb4ed188b59c36fa759e93c0118b74b2618" 93 | ) 94 | stream = BytesIO(block_raw) 95 | block = Block.parse_header(stream) 96 | self.assertFalse(block.bip141()) 97 | 98 | def test_target(self): 99 | block_raw = bytes.fromhex( 100 | "020000208ec39428b17323fa0ddec8e887b4a7c53b8c0a0a220cfd0000000000000000005b0750fce0a889502d40508d39576821155e9c9e3f5c3157f961db38fd8b25be1e77a759e93c0118a4ffd71d" 101 | ) 102 | stream = BytesIO(block_raw) 103 | block = Block.parse_header(stream) 104 | self.assertEqual( 105 | block.target(), 0x13CE9000000000000000000000000000000000000000000 106 | ) 107 | self.assertEqual(int(block.difficulty()), 888171856257) 108 | 109 | def test_check_pow(self): 110 | block_raw = bytes.fromhex( 111 | "04000000fbedbbf0cfdaf278c094f187f2eb987c86a199da22bbb20400000000000000007b7697b29129648fa08b4bcd13c9d5e60abb973a1efac9c8d573c71c807c56c3d6213557faa80518c3737ec1" 112 | ) 113 | stream = BytesIO(block_raw) 114 | block = Block.parse_header(stream) 115 | self.assertTrue(block.check_pow()) 116 | block_raw = bytes.fromhex( 117 | "04000000fbedbbf0cfdaf278c094f187f2eb987c86a199da22bbb20400000000000000007b7697b29129648fa08b4bcd13c9d5e60abb973a1efac9c8d573c71c807c56c3d6213557faa80518c3737ec0" 118 | ) 119 | stream = BytesIO(block_raw) 120 | block = Block.parse_header(stream) 121 | self.assertFalse(block.check_pow()) 122 | 123 | def test_validate_merkle_root(self): 124 | hashes_hex = [ 125 | "f54cb69e5dc1bd38ee6901e4ec2007a5030e14bdd60afb4d2f3428c88eea17c1", 126 | "c57c2d678da0a7ee8cfa058f1cf49bfcb00ae21eda966640e312b464414731c1", 127 | "b027077c94668a84a5d0e72ac0020bae3838cb7f9ee3fa4e81d1eecf6eda91f3", 128 | "8131a1b8ec3a815b4800b43dff6c6963c75193c4190ec946b93245a9928a233d", 129 | "ae7d63ffcb3ae2bc0681eca0df10dda3ca36dedb9dbf49e33c5fbe33262f0910", 130 | "61a14b1bbdcdda8a22e61036839e8b110913832efd4b086948a6a64fd5b3377d", 131 | "fc7051c8b536ac87344c5497595d5d2ffdaba471c73fae15fe9228547ea71881", 132 | "77386a46e26f69b3cd435aa4faac932027f58d0b7252e62fb6c9c2489887f6df", 133 | "59cbc055ccd26a2c4c4df2770382c7fea135c56d9e75d3f758ac465f74c025b8", 134 | "7c2bf5687f19785a61be9f46e031ba041c7f93e2b7e9212799d84ba052395195", 135 | "08598eebd94c18b0d59ac921e9ba99e2b8ab7d9fccde7d44f2bd4d5e2e726d2e", 136 | "f0bb99ef46b029dd6f714e4b12a7d796258c48fee57324ebdc0bbc4700753ab1", 137 | ] 138 | hashes = [bytes.fromhex(x) for x in hashes_hex] 139 | stream = BytesIO( 140 | bytes.fromhex( 141 | "00000020fcb19f7895db08cadc9573e7915e3919fb76d59868a51d995201000000000000acbcab8bcc1af95d8d563b77d24c3d19b18f1486383d75a5085c4e86c86beed691cfa85916ca061a00000000" 142 | ) 143 | ) 144 | block = Block.parse_header(stream) 145 | block.tx_hashes = hashes 146 | 
self.assertTrue(block.validate_merkle_root()) 147 | -------------------------------------------------------------------------------- /buidl/test/test_bloomfilter.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl import BloomFilter 4 | 5 | 6 | class BloomFilterTest(TestCase): 7 | def test_add(self): 8 | bf = BloomFilter(10, 5, 99) 9 | item = b"Hello World" 10 | bf.add(item) 11 | expected = "0000000a080000000140" 12 | self.assertEqual(bf.filter_bytes().hex(), expected) 13 | item = b"Goodbye!" 14 | bf.add(item) 15 | expected = "4000600a080000010940" 16 | self.assertEqual(bf.filter_bytes().hex(), expected) 17 | 18 | def test_filterload(self): 19 | bf = BloomFilter(10, 5, 99) 20 | item = b"Hello World" 21 | bf.add(item) 22 | item = b"Goodbye!" 23 | bf.add(item) 24 | expected = "0a4000600a080000010940050000006300000001" 25 | self.assertEqual(bf.filterload().payload.hex(), expected) 26 | -------------------------------------------------------------------------------- /buidl/test/test_ecc.py: -------------------------------------------------------------------------------- 1 | from os import urandom 2 | from random import randint 3 | from unittest import TestCase 4 | 5 | from buidl.ecc import G, N, S256Point, PrivateKey, Signature, SchnorrSignature 6 | from buidl.bech32 import decode_bech32 7 | from buidl.hash import hash_challenge 8 | from buidl.helper import big_endian_to_int, int_to_big_endian 9 | 10 | 11 | class S256Test(TestCase): 12 | def test_pubpoint(self): 13 | # write a test that tests the public point for the following 14 | points = ( 15 | # secret, x, y 16 | ( 17 | 7, 18 | "045CBDF0646E5DB4EAA398F365F2EA7A0E3D419B7E0330E39CE92BDDEDCAC4F9BC6AEBCA40BA255960A3178D6D861A54DBA813D0B813FDE7B5A5082628087264DA", 19 | ), 20 | ( 21 | 1485, 22 | "04C982196A7466FBBBB0E27A940B6AF926C1A74D5AD07128C82824A11B5398AFDA7A91F9EAE64438AFB9CE6448A1C133DB2D8FB9254E4546B6F001637D50901F55", 23 | ), 24 | ( 25 | 2**128, 26 | "048F68B9D2F63B5F339239C1AD981F162EE88C5678723EA3351B7B444C9EC4C0DA662A9F2DBA063986DE1D90C2B6BE215DBBEA2CFE95510BFDF23CBF79501FFF82", 27 | ), 28 | ( 29 | 2**240 + 2**31, 30 | "049577FF57C8234558F293DF502CA4F09CBC65A6572C842B39B366F2171794511610B49C67FA9365AD7B90DAB070BE339A1DAF9052373EC30FFAE4F72D5E66D053", 31 | ), 32 | ) 33 | 34 | # iterate over points 35 | sum_secrets = 0 36 | point_objects = [] 37 | for secret, sec in points: 38 | # initialize the secp256k1 point (S256Point) 39 | point = S256Point.parse(bytes.fromhex(sec)) 40 | # check that the secret*G is the same as the point 41 | self.assertEqual(secret * G, point) 42 | sum_secrets += secret 43 | point_objects.append(point) 44 | 45 | self.assertEqual(sum_secrets * G, S256Point.combine(point_objects)) 46 | 47 | def test_sec(self): 48 | coefficient = 999**3 49 | uncompressed = "049d5ca49670cbe4c3bfa84c96a8c87df086c6ea6a24ba6b809c9de234496808d56fa15cc7f3d38cda98dee2419f415b7513dde1301f8643cd9245aea7f3f911f9" 50 | compressed = ( 51 | "039d5ca49670cbe4c3bfa84c96a8c87df086c6ea6a24ba6b809c9de234496808d5" 52 | ) 53 | point = coefficient * G 54 | self.assertEqual(point.sec(compressed=False), bytes.fromhex(uncompressed)) 55 | self.assertEqual(point.sec(compressed=True), bytes.fromhex(compressed)) 56 | coefficient = 123 57 | uncompressed = "04a598a8030da6d86c6bc7f2f5144ea549d28211ea58faa70ebf4c1e665c1fe9b5204b5d6f84822c307e4b4a7140737aec23fc63b65b35f86a10026dbd2d864e6b" 58 | compressed = ( 59 | 
"03a598a8030da6d86c6bc7f2f5144ea549d28211ea58faa70ebf4c1e665c1fe9b5" 60 | ) 61 | point = coefficient * G 62 | self.assertEqual(point.sec(compressed=False), bytes.fromhex(uncompressed)) 63 | self.assertEqual(point.sec(compressed=True), bytes.fromhex(compressed)) 64 | coefficient = 42424242 65 | uncompressed = "04aee2e7d843f7430097859e2bc603abcc3274ff8169c1a469fee0f20614066f8e21ec53f40efac47ac1c5211b2123527e0e9b57ede790c4da1e72c91fb7da54a3" 66 | compressed = ( 67 | "03aee2e7d843f7430097859e2bc603abcc3274ff8169c1a469fee0f20614066f8e" 68 | ) 69 | point = coefficient * G 70 | self.assertEqual(point.sec(compressed=False), bytes.fromhex(uncompressed)) 71 | self.assertEqual(point.sec(compressed=True), bytes.fromhex(compressed)) 72 | 73 | def test_address(self): 74 | tests = ( 75 | ( 76 | 888**3, 77 | "148dY81A9BmdpMhvYEVznrM45kWN32vSCN", 78 | "mnabU9NCcRE5zcNZ2C16CnvKPELrFvisn3", 79 | ), 80 | ( 81 | 321, 82 | "1FNgueDbMYjNQ8HT77sHKxTwdrHMdTGwyN", 83 | "mfx3y63A7TfTtXKkv7Y6QzsPFY6QCBCXiP", 84 | ), 85 | ( 86 | 4242424242, 87 | "1HUYfVCXEmp76uh17bE2gA72Vuqv4wrM1a", 88 | "mgY3bVusRUL6ZB2Ss999CSrGVbdRwVpM8s", 89 | ), 90 | ) 91 | for secret, mainnet_legacy, testnet_legacy in tests: 92 | point = secret * G 93 | self.assertEqual(point.address(network="mainnet"), mainnet_legacy) 94 | self.assertEqual( 95 | point.address(compressed=False, network="testnet"), testnet_legacy 96 | ) 97 | self.assertEqual( 98 | point.address(compressed=False, network="signet"), testnet_legacy 99 | ) 100 | 101 | def test_p2wpkh_address(self): 102 | tests = ( 103 | ( 104 | 888**3, 105 | "bc1qyfvunnpszmjwcqgfk9dsne6j4edq3fglx9y5x7", 106 | "tb1qyfvunnpszmjwcqgfk9dsne6j4edq3fglvrl8ad", 107 | ), 108 | ( 109 | 321, 110 | "bc1qnk4u7vkat6ck9t4unlgvvle8dhsqp40mrssamm", 111 | "tb1qnk4u7vkat6ck9t4unlgvvle8dhsqp40mfktwqg", 112 | ), 113 | ( 114 | 4242424242, 115 | "bc1qkjm6e3c79zy7clsfx86q4pvy46ccc5u9xa6f6e", 116 | "tb1qkjm6e3c79zy7clsfx86q4pvy46ccc5u9vmp6p2", 117 | ), 118 | ) 119 | for secret, mainnet_bech32, testnet_bech32 in tests: 120 | point = secret * G 121 | self.assertEqual(point.p2wpkh_address(network="mainnet"), mainnet_bech32) 122 | self.assertEqual(decode_bech32(mainnet_bech32)[2], point.hash160()) 123 | self.assertEqual(point.p2wpkh_address(network="testnet"), testnet_bech32) 124 | self.assertEqual(point.p2wpkh_address(network="signet"), testnet_bech32) 125 | 126 | def test_p2sh_p2wpkh_address(self): 127 | tests = ( 128 | ( 129 | 888**3, 130 | "32cE3VHX5k1Z4gDCJBXMSLgd1akUzvqNvH", 131 | "2MtAS7EDYhCWuGTqjyK9E4HftDvxek7ELQn", 132 | ), 133 | ( 134 | 321, 135 | "3KPpFmmGNoKi5ikrH4QsMNmNnQtzkdw4Kx", 136 | "2NAx2KWhHzFq4HWPPxC2jyKkdzm7AVsEge4", 137 | ), 138 | ( 139 | 4242424242, 140 | "3M7oCrExZ6ZYjyn2oxXxYnE14m813espco", 141 | "2NCg1GbAzAZ4twmQaV69qAjDGH7LApz5kA4", 142 | ), 143 | ) 144 | for secret, mainnet_p2sh, testnet_p2sh in tests: 145 | point = secret * G 146 | self.assertEqual(point.p2sh_p2wpkh_address(network="mainnet"), mainnet_p2sh) 147 | self.assertEqual(point.p2sh_p2wpkh_address(network="testnet"), testnet_p2sh) 148 | self.assertEqual(point.p2sh_p2wpkh_address(network="signet"), testnet_p2sh) 149 | 150 | def test_verify(self): 151 | tests = ( 152 | ( 153 | 0xEC208BAA0FC1C19F708A9CA96FDEFF3AC3F230BB4A7BA4AEDE4942AD003C0F60, 154 | "3045022100ac8d1c87e51d0d441be8b3dd5b05c8795b48875dffe00b7ffcfac23010d3a3950220068342ceff8935ededd102dd876ffd6ba72d6a427a3edb13d26eb0781cb423c4", 155 | "04887387e452b8eacc4acfde10d9aaf7f6d9a0f975aabb10d006e4da568744d06c61de6d95231cd89026e286df3b6ae4a894a3378e393e93a0f45b666329a0ae34", 156 | ), 157 
| ( 158 | 0x7C076FF316692A3D7EB3C3BB0F8B1488CF72E1AFCD929E29307032997A838A3D, 159 | "3044022000eff69ef2b1bd93a66ed5219add4fb51e11a840f404876325a1e8ffe0529a2c022038df8011e682d839e75159debf909408cb3f12ae472b1d88cf6280cf01c6568b", 160 | "04887387e452b8eacc4acfde10d9aaf7f6d9a0f975aabb10d006e4da568744d06c61de6d95231cd89026e286df3b6ae4a894a3378e393e93a0f45b666329a0ae34", 161 | ), 162 | ( 163 | 0x2270CB0316E68389A3A23DE16023A03B8FC271A21B467B1DC97E0FC0E2CE97F7, 164 | "3045022100ea6d640d5275d091607e1f4ad5cdb214e45f8d17cca1095074894dde347605ba022029062e1ff0d9eee52da1f3621caf92436877d7076720e2b3d9f226bf853e2b75", 165 | "04f47dc2ac0ecaadda5ee2b3ab9bc4e02c3eafb2abcc426643686ad95f6d4e8c44e33fa47d96fc2dace0ef2f583965cf6a0f8faa7a070c0f8ee986d192e2d21835", 166 | ), 167 | ) 168 | for z, der_hex, sec in tests: 169 | point = S256Point.parse(bytes.fromhex(sec)) 170 | der = bytes.fromhex(der_hex) 171 | self.assertTrue(point.verify(z, Signature.parse(der))) 172 | 173 | def test_parse(self): 174 | csec = bytes.fromhex( 175 | "0349fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278a" 176 | ) 177 | point = S256Point.parse(csec) 178 | usec = bytes.fromhex( 179 | "0449fc4e631e3624a545de3f89f5d8684c7b8138bd94bdd531d2e213bf016b278aa56c896489c71dfc65701ce25050f542f336893fb8cd15f4e8e5c124dbf58e47" 180 | ) 181 | self.assertEqual(point.sec(False), usec) 182 | 183 | 184 | class SignatureTest(TestCase): 185 | def test_der(self): 186 | der_hex = "3045022100ed81ff192e75a3fd2304004dcadb746fa5e24c5031ccfcf21320b0277457c98f02207a986d955c6e0cb35d446a89d3f56100f4d7f67801c31967743a9c8e10615bed" 187 | der = bytes.fromhex(der_hex) 188 | sig = Signature.parse(der) 189 | self.assertTrue("Signature" in sig.__repr__()) 190 | computed = sig.der() 191 | self.assertEqual(der, computed) 192 | 193 | # simple test to show repr works (otherwise this would throw an error) 194 | str(sig) 195 | 196 | 197 | class PrivateKeyTest(TestCase): 198 | def test_sign(self): 199 | pk = PrivateKey(randint(0, N)) 200 | z = randint(0, 1 << 256) 201 | sig = pk.sign(z) 202 | self.assertTrue(pk.point.verify(z, sig)) 203 | 204 | def test_sign_message(self): 205 | pk = PrivateKey(randint(0, N)) 206 | message = b"This is a test message" 207 | sig = pk.sign_message(message) 208 | self.assertTrue(pk.point.verify_message(message, sig)) 209 | 210 | def test_sign_schnorr(self): 211 | pk = PrivateKey(randint(1, N)) 212 | msg = int_to_big_endian(randint(1, N), 32) 213 | sig = pk.sign_schnorr(msg, aux=b"\x00" * 32) 214 | self.assertTrue(pk.point.verify_schnorr(msg, sig)) 215 | # merkle root 216 | merkle_root = urandom(32) 217 | tweak = big_endian_to_int(pk.point.tweak(merkle_root)) 218 | external_pubkey = pk.tweaked_key(merkle_root).point 219 | k = randint(1, N) 220 | r = k * G 221 | if r.parity: 222 | k = N - k 223 | r = k * G 224 | message = r.xonly() + external_pubkey.xonly() + msg 225 | challenge = big_endian_to_int(hash_challenge(message)) % N 226 | if pk.point.parity == external_pubkey.parity: 227 | secret = pk.secret 228 | else: 229 | secret = -pk.secret 230 | s = (k + challenge * secret) % N 231 | if external_pubkey.parity: 232 | s = (s - challenge * tweak) % N 233 | else: 234 | s = (s + challenge * tweak) % N 235 | sig = SchnorrSignature.parse(r.xonly() + int_to_big_endian(s, 32)) 236 | self.assertTrue(external_pubkey.verify_schnorr(msg, sig)) 237 | -------------------------------------------------------------------------------- /buidl/test/test_hash.py: -------------------------------------------------------------------------------- 1 | from unittest 
import TestCase 2 | 3 | from buidl.hash import hash_keyaggcoef 4 | 5 | 6 | class HashTest(TestCase): 7 | def test_keyaggcoef(self): 8 | want = "55a02026378a033a97431c5ac6a72eeec43069940a330431216895c11eff3cc7" 9 | self.assertEqual(hash_keyaggcoef(b"").hex(), want) 10 | -------------------------------------------------------------------------------- /buidl/test/test_helper.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from io import BytesIO 4 | 5 | from buidl.helper import ( 6 | bit_field_to_bytes, 7 | bytes_to_bit_field, 8 | bytes_to_str, 9 | decode_base58, 10 | encode_base58_checksum, 11 | encode_varstr, 12 | int_to_little_endian, 13 | little_endian_to_int, 14 | merkle_parent, 15 | merkle_parent_level, 16 | merkle_root, 17 | read_varstr, 18 | str_to_bytes, 19 | ) 20 | 21 | 22 | class HelperTest(TestCase): 23 | def test_bytes(self): 24 | b = b"hello world" 25 | s = "hello world" 26 | self.assertEqual(b, str_to_bytes(s)) 27 | self.assertEqual(s, bytes_to_str(b)) 28 | 29 | def test_little_endian_to_int(self): 30 | h = bytes.fromhex("99c3980000000000") 31 | want = 10011545 32 | self.assertEqual(little_endian_to_int(h), want) 33 | h = bytes.fromhex("a135ef0100000000") 34 | want = 32454049 35 | self.assertEqual(little_endian_to_int(h), want) 36 | 37 | def test_int_to_little_endian(self): 38 | n = 1 39 | want = b"\x01\x00\x00\x00" 40 | self.assertEqual(int_to_little_endian(n, 4), want) 41 | n = 10011545 42 | want = b"\x99\xc3\x98\x00\x00\x00\x00\x00" 43 | self.assertEqual(int_to_little_endian(n, 8), want) 44 | 45 | def test_base58(self): 46 | addr = "mnrVtF8DWjMu839VW3rBfgYaAfKk8983Xf" 47 | h160 = decode_base58(addr).hex() 48 | want = "507b27411ccf7f16f10297de6cef3f291623eddf" 49 | self.assertEqual(h160, want) 50 | got = encode_base58_checksum(b"\x6f" + bytes.fromhex(h160)) 51 | self.assertEqual(got, addr) 52 | addr = "1111111111111111111114oLvT2" 53 | h160 = decode_base58(addr).hex() 54 | want = "0000000000000000000000000000000000000000" 55 | self.assertEqual(h160, want) 56 | got = encode_base58_checksum(b"\x00" + bytes.fromhex(h160)) 57 | self.assertEqual(got, addr) 58 | 59 | def test_encode_base58_checksum(self): 60 | raw = bytes.fromhex("005dedfbf9ea599dd4e3ca6a80b333c472fd0b3f69") 61 | want = "19ZewH8Kk1PDbSNdJ97FP4EiCjTRaZMZQA" 62 | self.assertEqual(encode_base58_checksum(raw), want) 63 | 64 | def test_merkle_parent(self): 65 | tx_hash0 = bytes.fromhex( 66 | "c117ea8ec828342f4dfb0ad6bd140e03a50720ece40169ee38bdc15d9eb64cf5" 67 | ) 68 | tx_hash1 = bytes.fromhex( 69 | "c131474164b412e3406696da1ee20ab0fc9bf41c8f05fa8ceea7a08d672d7cc5" 70 | ) 71 | want = bytes.fromhex( 72 | "8b30c5ba100f6f2e5ad1e2a742e5020491240f8eb514fe97c713c31718ad7ecd" 73 | ) 74 | self.assertEqual(merkle_parent(tx_hash0, tx_hash1), want) 75 | 76 | def test_merkle_parent_level(self): 77 | hex_hashes = [ 78 | "c117ea8ec828342f4dfb0ad6bd140e03a50720ece40169ee38bdc15d9eb64cf5", 79 | "c131474164b412e3406696da1ee20ab0fc9bf41c8f05fa8ceea7a08d672d7cc5", 80 | "f391da6ecfeed1814efae39e7fcb3838ae0b02c02ae7d0a5848a66947c0727b0", 81 | "3d238a92a94532b946c90e19c49351c763696cff3db400485b813aecb8a13181", 82 | "10092f2633be5f3ce349bf9ddbde36caa3dd10dfa0ec8106bce23acbff637dae", 83 | "7d37b3d54fa6a64869084bfd2e831309118b9e833610e6228adacdbd1b4ba161", 84 | "8118a77e542892fe15ae3fc771a4abfd2f5d5d5997544c3487ac36b5c85170fc", 85 | "dff6879848c2c9b62fe652720b8df5272093acfaa45a43cdb3696fe2466a3877", 86 | 
"b825c0745f46ac58f7d3759e6dc535a1fec7820377f24d4c2c6ad2cc55c0cb59", 87 | "95513952a04bd8992721e9b7e2937f1c04ba31e0469fbe615a78197f68f52b7c", 88 | "2e6d722e5e4dbdf2447ddecc9f7dabb8e299bae921c99ad5b0184cd9eb8e5908", 89 | ] 90 | tx_hashes = [bytes.fromhex(x) for x in hex_hashes] 91 | want_hex_hashes = [ 92 | "8b30c5ba100f6f2e5ad1e2a742e5020491240f8eb514fe97c713c31718ad7ecd", 93 | "7f4e6f9e224e20fda0ae4c44114237f97cd35aca38d83081c9bfd41feb907800", 94 | "ade48f2bbb57318cc79f3a8678febaa827599c509dce5940602e54c7733332e7", 95 | "68b3e2ab8182dfd646f13fdf01c335cf32476482d963f5cd94e934e6b3401069", 96 | "43e7274e77fbe8e5a42a8fb58f7decdb04d521f319f332d88e6b06f8e6c09e27", 97 | "1796cd3ca4fef00236e07b723d3ed88e1ac433acaaa21da64c4b33c946cf3d10", 98 | ] 99 | want_tx_hashes = [bytes.fromhex(x) for x in want_hex_hashes] 100 | self.assertEqual(merkle_parent_level(tx_hashes), want_tx_hashes) 101 | 102 | def test_merkle_root(self): 103 | hex_hashes = [ 104 | "c117ea8ec828342f4dfb0ad6bd140e03a50720ece40169ee38bdc15d9eb64cf5", 105 | "c131474164b412e3406696da1ee20ab0fc9bf41c8f05fa8ceea7a08d672d7cc5", 106 | "f391da6ecfeed1814efae39e7fcb3838ae0b02c02ae7d0a5848a66947c0727b0", 107 | "3d238a92a94532b946c90e19c49351c763696cff3db400485b813aecb8a13181", 108 | "10092f2633be5f3ce349bf9ddbde36caa3dd10dfa0ec8106bce23acbff637dae", 109 | "7d37b3d54fa6a64869084bfd2e831309118b9e833610e6228adacdbd1b4ba161", 110 | "8118a77e542892fe15ae3fc771a4abfd2f5d5d5997544c3487ac36b5c85170fc", 111 | "dff6879848c2c9b62fe652720b8df5272093acfaa45a43cdb3696fe2466a3877", 112 | "b825c0745f46ac58f7d3759e6dc535a1fec7820377f24d4c2c6ad2cc55c0cb59", 113 | "95513952a04bd8992721e9b7e2937f1c04ba31e0469fbe615a78197f68f52b7c", 114 | "2e6d722e5e4dbdf2447ddecc9f7dabb8e299bae921c99ad5b0184cd9eb8e5908", 115 | "b13a750047bc0bdceb2473e5fe488c2596d7a7124b4e716fdd29b046ef99bbf0", 116 | ] 117 | tx_hashes = [bytes.fromhex(x) for x in hex_hashes] 118 | want_hex_hash = ( 119 | "acbcab8bcc1af95d8d563b77d24c3d19b18f1486383d75a5085c4e86c86beed6" 120 | ) 121 | want_hash = bytes.fromhex(want_hex_hash) 122 | self.assertEqual(merkle_root(tx_hashes), want_hash) 123 | 124 | def test_bit_field_to_bytes(self): 125 | bit_field = [ 126 | 0, 127 | 0, 128 | 0, 129 | 0, 130 | 0, 131 | 0, 132 | 1, 133 | 0, 134 | 0, 135 | 0, 136 | 0, 137 | 0, 138 | 0, 139 | 0, 140 | 0, 141 | 0, 142 | 0, 143 | 0, 144 | 0, 145 | 0, 146 | 0, 147 | 1, 148 | 1, 149 | 0, 150 | 0, 151 | 1, 152 | 0, 153 | 1, 154 | 0, 155 | 0, 156 | 0, 157 | 0, 158 | 0, 159 | 0, 160 | 0, 161 | 1, 162 | 0, 163 | 0, 164 | 0, 165 | 0, 166 | 0, 167 | 0, 168 | 0, 169 | 0, 170 | 0, 171 | 0, 172 | 0, 173 | 0, 174 | 0, 175 | 0, 176 | 0, 177 | 0, 178 | 0, 179 | 0, 180 | 0, 181 | 0, 182 | 1, 183 | 0, 184 | 0, 185 | 0, 186 | 0, 187 | 0, 188 | 0, 189 | 0, 190 | 1, 191 | 0, 192 | 0, 193 | 1, 194 | 0, 195 | 0, 196 | 0, 197 | 0, 198 | 0, 199 | 0, 200 | 0, 201 | 0, 202 | 0, 203 | 0, 204 | 1, 205 | 0, 206 | ] 207 | want = "4000600a080000010940" 208 | self.assertEqual(bit_field_to_bytes(bit_field).hex(), want) 209 | self.assertEqual(bytes_to_bit_field(bytes.fromhex(want)), bit_field) 210 | 211 | def test_varstr(self): 212 | to_encode = b"hello" 213 | want = b"\x05hello" 214 | self.assertEqual(encode_varstr(to_encode), want) 215 | stream = BytesIO(want) 216 | self.assertEqual(read_varstr(stream), to_encode) 217 | -------------------------------------------------------------------------------- /buidl/test/test_merkleblock.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from 
buidl.helper import little_endian_to_int 4 | from buidl.merkleblock import MerkleBlock, MerkleTree 5 | 6 | from io import BytesIO 7 | 8 | 9 | class MerkleTreeTest(TestCase): 10 | def test_init(self): 11 | tree = MerkleTree(9) 12 | self.assertEqual(len(tree.nodes[0]), 1) 13 | self.assertEqual(len(tree.nodes[1]), 2) 14 | self.assertEqual(len(tree.nodes[2]), 3) 15 | self.assertEqual(len(tree.nodes[3]), 5) 16 | self.assertEqual(len(tree.nodes[4]), 9) 17 | 18 | def test_populate_tree_1(self): 19 | hex_hashes = [ 20 | "9745f7173ef14ee4155722d1cbf13304339fd00d900b759c6f9d58579b5765fb", 21 | "5573c8ede34936c29cdfdfe743f7f5fdfbd4f54ba0705259e62f39917065cb9b", 22 | "82a02ecbb6623b4274dfcab82b336dc017a27136e08521091e443e62582e8f05", 23 | "507ccae5ed9b340363a0e6d765af148be9cb1c8766ccc922f83e4ae681658308", 24 | "a7a4aec28e7162e1e9ef33dfa30f0bc0526e6cf4b11a576f6c5de58593898330", 25 | "bb6267664bd833fd9fc82582853ab144fece26b7a8a5bf328f8a059445b59add", 26 | "ea6d7ac1ee77fbacee58fc717b990c4fcccf1b19af43103c090f601677fd8836", 27 | "457743861de496c429912558a106b810b0507975a49773228aa788df40730d41", 28 | "7688029288efc9e9a0011c960a6ed9e5466581abf3e3a6c26ee317461add619a", 29 | "b1ae7f15836cb2286cdd4e2c37bf9bb7da0a2846d06867a429f654b2e7f383c9", 30 | "9b74f89fa3f93e71ff2c241f32945d877281a6a50a6bf94adac002980aafe5ab", 31 | "b3a92b5b255019bdaf754875633c2de9fec2ab03e6b8ce669d07cb5b18804638", 32 | "b5c0b915312b9bdaedd2b86aa2d0f8feffc73a2d37668fd9010179261e25e263", 33 | "c9d52c5cb1e557b92c84c52e7c4bfbce859408bedffc8a5560fd6e35e10b8800", 34 | "c555bc5fc3bc096df0a0c9532f07640bfb76bfe4fc1ace214b8b228a1297a4c2", 35 | "f9dbfafc3af3400954975da24eb325e326960a25b87fffe23eef3e7ed2fb610e", 36 | ] 37 | tree = MerkleTree(len(hex_hashes)) 38 | hashes = [bytes.fromhex(h) for h in hex_hashes] 39 | tree.populate_tree([1] * 31, hashes) 40 | root = "597c4bafe3832b17cbbabe56f878f4fc2ad0f6a402cee7fa851a9cb205f87ed1" 41 | self.assertEqual(tree.root().hex(), root) 42 | 43 | def test_populate_tree_2(self): 44 | hex_hashes = [ 45 | "42f6f52f17620653dcc909e58bb352e0bd4bd1381e2955d19c00959a22122b2e", 46 | "94c3af34b9667bf787e1c6a0a009201589755d01d02fe2877cc69b929d2418d4", 47 | "959428d7c48113cb9149d0566bde3d46e98cf028053c522b8fa8f735241aa953", 48 | "a9f27b99d5d108dede755710d4a1ffa2c74af70b4ca71726fa57d68454e609a2", 49 | "62af110031e29de1efcad103b3ad4bec7bdcf6cb9c9f4afdd586981795516577", 50 | ] 51 | tree = MerkleTree(len(hex_hashes)) 52 | hashes = [bytes.fromhex(h) for h in hex_hashes] 53 | tree.populate_tree([1] * 11, hashes) 54 | root = "a8e8bd023169b81bc56854137a135b97ef47a6a7237f4c6e037baed16285a5ab" 55 | self.assertEqual(tree.root().hex(), root) 56 | 57 | 58 | class MerkleBlockTest(TestCase): 59 | def test_parse(self): 60 | hex_merkle_block = 
"00000020df3b053dc46f162a9b00c7f0d5124e2676d47bbe7c5d0793a500000000000000ef445fef2ed495c275892206ca533e7411907971013ab83e3b47bd0d692d14d4dc7c835b67d8001ac157e670bf0d00000aba412a0d1480e370173072c9562becffe87aa661c1e4a6dbc305d38ec5dc088a7cf92e6458aca7b32edae818f9c2c98c37e06bf72ae0ce80649a38655ee1e27d34d9421d940b16732f24b94023e9d572a7f9ab8023434a4feb532d2adfc8c2c2158785d1bd04eb99df2e86c54bc13e139862897217400def5d72c280222c4cbaee7261831e1550dbb8fa82853e9fe506fc5fda3f7b919d8fe74b6282f92763cef8e625f977af7c8619c32a369b832bc2d051ecd9c73c51e76370ceabd4f25097c256597fa898d404ed53425de608ac6bfe426f6e2bb457f1c554866eb69dcb8d6bf6f880e9a59b3cd053e6c7060eeacaacf4dac6697dac20e4bd3f38a2ea2543d1ab7953e3430790a9f81e1c67f5b58c825acf46bd02848384eebe9af917274cdfbb1a28a5d58a23a17977def0de10d644258d9c54f886d47d293a411cb6226103b55635" 61 | mb = MerkleBlock.parse(BytesIO(bytes.fromhex(hex_merkle_block))) 62 | version = 0x20000000 63 | self.assertEqual(mb.header.version, version) 64 | merkle_root_hex = ( 65 | "ef445fef2ed495c275892206ca533e7411907971013ab83e3b47bd0d692d14d4" 66 | ) 67 | merkle_root = bytes.fromhex(merkle_root_hex)[::-1] 68 | self.assertEqual(mb.header.merkle_root, merkle_root) 69 | prev_block_hex = ( 70 | "df3b053dc46f162a9b00c7f0d5124e2676d47bbe7c5d0793a500000000000000" 71 | ) 72 | prev_block = bytes.fromhex(prev_block_hex)[::-1] 73 | self.assertEqual(mb.header.prev_block, prev_block) 74 | timestamp = little_endian_to_int(bytes.fromhex("dc7c835b")) 75 | self.assertEqual(mb.header.timestamp, timestamp) 76 | bits = bytes.fromhex("67d8001a") 77 | self.assertEqual(mb.header.bits, bits) 78 | nonce = bytes.fromhex("c157e670") 79 | self.assertEqual(mb.header.nonce, nonce) 80 | total = little_endian_to_int(bytes.fromhex("bf0d0000")) 81 | self.assertEqual(mb.total, total) 82 | hex_hashes = [ 83 | "ba412a0d1480e370173072c9562becffe87aa661c1e4a6dbc305d38ec5dc088a", 84 | "7cf92e6458aca7b32edae818f9c2c98c37e06bf72ae0ce80649a38655ee1e27d", 85 | "34d9421d940b16732f24b94023e9d572a7f9ab8023434a4feb532d2adfc8c2c2", 86 | "158785d1bd04eb99df2e86c54bc13e139862897217400def5d72c280222c4cba", 87 | "ee7261831e1550dbb8fa82853e9fe506fc5fda3f7b919d8fe74b6282f92763ce", 88 | "f8e625f977af7c8619c32a369b832bc2d051ecd9c73c51e76370ceabd4f25097", 89 | "c256597fa898d404ed53425de608ac6bfe426f6e2bb457f1c554866eb69dcb8d", 90 | "6bf6f880e9a59b3cd053e6c7060eeacaacf4dac6697dac20e4bd3f38a2ea2543", 91 | "d1ab7953e3430790a9f81e1c67f5b58c825acf46bd02848384eebe9af917274c", 92 | "dfbb1a28a5d58a23a17977def0de10d644258d9c54f886d47d293a411cb62261", 93 | ] 94 | hashes = [bytes.fromhex(h)[::-1] for h in hex_hashes] 95 | self.assertEqual(mb.hashes, hashes) 96 | flags = bytes.fromhex("b55635") 97 | self.assertEqual(mb.flags, flags) 98 | 99 | def test_is_valid(self): 100 | hex_merkle_block = 
"00000020df3b053dc46f162a9b00c7f0d5124e2676d47bbe7c5d0793a500000000000000ef445fef2ed495c275892206ca533e7411907971013ab83e3b47bd0d692d14d4dc7c835b67d8001ac157e670bf0d00000aba412a0d1480e370173072c9562becffe87aa661c1e4a6dbc305d38ec5dc088a7cf92e6458aca7b32edae818f9c2c98c37e06bf72ae0ce80649a38655ee1e27d34d9421d940b16732f24b94023e9d572a7f9ab8023434a4feb532d2adfc8c2c2158785d1bd04eb99df2e86c54bc13e139862897217400def5d72c280222c4cbaee7261831e1550dbb8fa82853e9fe506fc5fda3f7b919d8fe74b6282f92763cef8e625f977af7c8619c32a369b832bc2d051ecd9c73c51e76370ceabd4f25097c256597fa898d404ed53425de608ac6bfe426f6e2bb457f1c554866eb69dcb8d6bf6f880e9a59b3cd053e6c7060eeacaacf4dac6697dac20e4bd3f38a2ea2543d1ab7953e3430790a9f81e1c67f5b58c825acf46bd02848384eebe9af917274cdfbb1a28a5d58a23a17977def0de10d644258d9c54f886d47d293a411cb6226103b55635" 101 | mb = MerkleBlock.parse(BytesIO(bytes.fromhex(hex_merkle_block))) 102 | self.assertTrue(mb.is_valid()) 103 | want = "6122b61c413a297dd486f8549c8d2544d610def0de7779a1238ad5a5281abbdf" 104 | self.assertEqual(mb.proved_txs()[0].hex(), want) 105 | -------------------------------------------------------------------------------- /buidl/test/test_mnemonic.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl.mnemonic import secure_mnemonic 4 | from buidl.hd import HDPrivateKey 5 | 6 | 7 | class MnemonicTest(TestCase): 8 | def test_secure_mnemonic_bits(self): 9 | tests = ( 10 | # num_bits, num_words 11 | (128, 12), 12 | (160, 15), 13 | (192, 18), 14 | (224, 21), 15 | (256, 24), 16 | ) 17 | 18 | for num_bits, num_words in tests: 19 | mnemonic = secure_mnemonic(num_bits=num_bits) 20 | self.assertEqual(num_words, len(mnemonic.split(" "))) 21 | # This is inherently non-deterministic, so we can't check the specific output 22 | HDPrivateKey.from_mnemonic(mnemonic, network="testnet") 23 | 24 | for invalid_num_bits in (-1, 1, 127, 129, 257, "notanint"): 25 | with self.assertRaises(ValueError): 26 | secure_mnemonic(num_bits=invalid_num_bits) 27 | 28 | def test_secure_mnemonic_extra_entropy(self): 29 | tests = ( 30 | # num_bits, num_words, extra_entropy 31 | (128, 12, 0), 32 | (160, 15, 1), 33 | (192, 18, 2**128), 34 | (224, 21, 2**256), 35 | (256, 24, 2**512), 36 | ) 37 | 38 | for num_bits, num_words, extra_entropy in tests: 39 | mnemonic = secure_mnemonic(num_bits=num_bits, extra_entropy=extra_entropy) 40 | self.assertEqual(num_words, len(mnemonic.split(" "))) 41 | # This is inherently non-deterministic, so we can't check the specific output 42 | HDPrivateKey.from_mnemonic(mnemonic, network="testnet") 43 | 44 | with self.assertRaises(TypeError): 45 | secure_mnemonic(extra_entropy="not an int") 46 | with self.assertRaises(ValueError): 47 | secure_mnemonic(extra_entropy=-1) 48 | -------------------------------------------------------------------------------- /buidl/test/test_network.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | from os import getenv 3 | from unittest import TestCase, skipUnless 4 | 5 | from buidl.block import Block 6 | from buidl.bloomfilter import BloomFilter 7 | from buidl.compactfilter import ( 8 | CFCheckPointMessage, 9 | CFHeadersMessage, 10 | CFilterMessage, 11 | GetCFCheckPointMessage, 12 | GetCFHeadersMessage, 13 | GetCFiltersMessage, 14 | ) 15 | from buidl.helper import decode_base58 16 | from buidl.network import ( 17 | BASIC_FILTER_TYPE, 18 | FILTERED_BLOCK_DATA_TYPE, 19 | GetDataMessage, 20 | GetHeadersMessage, 21 | 
HeadersMessage, 22 | NetworkEnvelope, 23 | SimpleNode, 24 | VersionMessage, 25 | ) 26 | from buidl.script import Script 27 | 28 | 29 | class NetworkEnvelopeTest(TestCase): 30 | def test_parse(self): 31 | msg = bytes.fromhex("f9beb4d976657261636b000000000000000000005df6e0e2") 32 | stream = BytesIO(msg) 33 | envelope = NetworkEnvelope.parse(stream) 34 | self.assertEqual(envelope.command, b"verack") 35 | self.assertEqual(envelope.payload, b"") 36 | msg = bytes.fromhex( 37 | "f9beb4d976657273696f6e0000000000650000005f1a69d2721101000100000000000000bc8f5e5400000000010000000000000000000000000000000000ffffc61b6409208d010000000000000000000000000000000000ffffcb0071c0208d128035cbc97953f80f2f5361746f7368693a302e392e332fcf05050001" 38 | ) 39 | stream = BytesIO(msg) 40 | envelope = NetworkEnvelope.parse(stream) 41 | self.assertEqual(envelope.command, b"version") 42 | self.assertEqual(envelope.payload, msg[24:]) 43 | 44 | def test_serialize(self): 45 | msg = bytes.fromhex("f9beb4d976657261636b000000000000000000005df6e0e2") 46 | stream = BytesIO(msg) 47 | envelope = NetworkEnvelope.parse(stream) 48 | self.assertEqual(envelope.serialize(), msg) 49 | msg = bytes.fromhex( 50 | "f9beb4d976657273696f6e0000000000650000005f1a69d2721101000100000000000000bc8f5e5400000000010000000000000000000000000000000000ffffc61b6409208d010000000000000000000000000000000000ffffcb0071c0208d128035cbc97953f80f2f5361746f7368693a302e392e332fcf05050001" 51 | ) 52 | stream = BytesIO(msg) 53 | envelope = NetworkEnvelope.parse(stream) 54 | self.assertEqual(envelope.serialize(), msg) 55 | 56 | 57 | class VersionMessageTest(TestCase): 58 | def test_serialize(self): 59 | v = VersionMessage(timestamp=0, nonce=b"\x00" * 8) 60 | self.assertEqual( 61 | v.serialize().hex(), 62 | "7f11010000000000000000000000000000000000000000000000000000000000000000000000ffff000000008d20000000000000000000000000000000000000ffff000000008d2000000000000000001b2f70726f6772616d6d696e67626c6f636b636861696e3a302e312f0000000001", 63 | ) 64 | 65 | 66 | class GetHeadersMessageTest(TestCase): 67 | def test_serialize(self): 68 | block_hex = "0000000000000000001237f46acddf58578a37e213d2a6edc4884a2fcad05ba3" 69 | gh = GetHeadersMessage(start_block=bytes.fromhex(block_hex)) 70 | self.assertEqual( 71 | gh.serialize().hex(), 72 | "7f11010001a35bd0ca2f4a88c4eda6d213e2378a5758dfcd6af437120000000000000000000000000000000000000000000000000000000000000000000000000000000000", 73 | ) 74 | 75 | 76 | class HeadersMessageTest(TestCase): 77 | def test_parse(self): 78 | hex_msg = "0200000020df3b053dc46f162a9b00c7f0d5124e2676d47bbe7c5d0793a500000000000000ef445fef2ed495c275892206ca533e7411907971013ab83e3b47bd0d692d14d4dc7c835b67d8001ac157e670000000002030eb2540c41025690160a1014c577061596e32e426b712c7ca00000000000000768b89f07044e6130ead292a3f51951adbd2202df447d98789339937fd006bd44880835b67d8001ade09204600" 79 | stream = BytesIO(bytes.fromhex(hex_msg)) 80 | headers = HeadersMessage.parse(stream) 81 | self.assertEqual(len(headers.headers), 2) 82 | for b in headers.headers: 83 | self.assertEqual(b.__class__, Block) 84 | 85 | 86 | class GetDataMessageTest(TestCase): 87 | def test_serialize(self): 88 | hex_msg = "020300000030eb2540c41025690160a1014c577061596e32e426b712c7ca00000000000000030000001049847939585b0652fba793661c361223446b6fc41089b8be00000000000000" 89 | get_data = GetDataMessage() 90 | block1 = bytes.fromhex( 91 | "00000000000000cac712b726e4326e596170574c01a16001692510c44025eb30" 92 | ) 93 | get_data.add_data(FILTERED_BLOCK_DATA_TYPE, block1) 94 | block2 = bytes.fromhex( 95 | 
"00000000000000beb88910c46f6b442312361c6693a7fb52065b583979844910" 96 | ) 97 | get_data.add_data(FILTERED_BLOCK_DATA_TYPE, block2) 98 | self.assertEqual(get_data.serialize().hex(), hex_msg) 99 | 100 | 101 | @skipUnless( 102 | getenv("INCLUDE_NETWORK_TESTS"), 103 | reason="Requires (unreliable) network connection", 104 | ) 105 | class SimpleNodeTest(TestCase): 106 | def test_handshake(self): 107 | node = SimpleNode("testnet.programmingbitcoin.com", network="testnet") 108 | node.handshake() 109 | 110 | def test_handshake_signet(self): 111 | node = SimpleNode("signet.programmingbitcoin.com", network="signet") 112 | node.handshake() 113 | 114 | def test_get_filtered_txs(self): 115 | bf = BloomFilter(30, 5, 90210) 116 | h160 = decode_base58("mseRGXB89UTFVkWJhTRTzzZ9Ujj4ZPbGK5") 117 | bf.add(h160) 118 | node = SimpleNode("testnet.programmingbitcoin.com", network="testnet") 119 | node.handshake() 120 | node.send(bf.filterload()) 121 | block_hash = bytes.fromhex( 122 | "00000000000377db7fde98411876c53e318a395af7304de298fd47b7c549d125" 123 | ) 124 | txs = node.get_filtered_txs([block_hash]) 125 | self.assertEqual( 126 | txs[0].id(), 127 | "0c024b9d3aa2ae8faae96603b8d40c88df2fc6bf50b3f446295206f70f3cf6ad", 128 | ) 129 | self.assertEqual( 130 | txs[1].id(), 131 | "0886537e27969a12478e0d33707bf6b9fe4fdaec8d5d471b5304453b04135e7e", 132 | ) 133 | self.assertEqual( 134 | txs[2].id(), 135 | "23d4effc88b80fb7dbcc2e6a0b0af9821c6fe3bb4c8dc3b61bcab7c45f0f6888", 136 | ) 137 | 138 | 139 | class CFilterTest(TestCase): 140 | def test_cfilter(self): 141 | stop_hash = bytes.fromhex( 142 | "000000006f27ddfe1dd680044a34548f41bed47eba9e6f0b310da21423bc5f33" 143 | ) 144 | getcfilters = GetCFiltersMessage(stop_hash=stop_hash) 145 | expected = b"\x00\x01\x00\x00\x00" + stop_hash[::-1] 146 | self.assertEqual(getcfilters.serialize(), expected) 147 | expected = ( 148 | b"\x00" + stop_hash[::-1] + b"\x09" + bytes.fromhex("0385acb4f0fe889ef0") 149 | ) 150 | cfilter = CFilterMessage.parse(BytesIO(expected)) 151 | self.assertEqual(cfilter.filter_type, 0) 152 | self.assertEqual(cfilter.block_hash, stop_hash) 153 | self.assertEqual(cfilter.cf.hashes, {1341840, 1483084, 570774}) 154 | included = Script.parse( 155 | BytesIO( 156 | bytes.fromhex( 157 | "22002027a5000c7917f785d8fc6e5a55adfca8717ecb973ebb7743849ff956d896a7ed" 158 | ) 159 | ) 160 | ) 161 | self.assertTrue(included in cfilter) 162 | self.assertFalse(Script() in cfilter) 163 | with self.assertRaises(RuntimeError): 164 | GetCFiltersMessage() 165 | 166 | def test_cfilter_without_network(self): 167 | # Example from Trezor Blog Post (https://blog.trezor.io/bip158-compact-block-filters-9b813b07a878) 168 | block_hash_hex = ( 169 | "000000000000015d6077a411a8f5cc95caf775ccf11c54e27df75ce58d187313" 170 | ) 171 | block_hash = bytes.fromhex(block_hash_hex) 172 | filter_hex = "09027acea61b6cc3fb33f5d52f7d088a6b2f75d234e89ca800" 173 | filter_bytes = bytes.fromhex(filter_hex) 174 | cfilter = CFilterMessage( 175 | filter_type=BASIC_FILTER_TYPE, 176 | block_hash=block_hash, 177 | filter_bytes=filter_bytes, 178 | ) 179 | for raw_script, want in ( 180 | ("1976a9143ebc40e411ed3c76f86711507ab952300890397288ac", True), 181 | ("1976a914c01a7ca16b47be50cbdbc60724f701d52d75156688ac", True), 182 | ("1976a914000000000000000000000000000000000000000088ac", False), # made up 183 | ): 184 | script = Script.parse(BytesIO(bytes.fromhex(raw_script))) 185 | self.assertEqual(script in cfilter, want) 186 | 187 | 188 | class CFHeaderTest(TestCase): 189 | def test_cfheader(self): 190 | stop_hash = 
bytes.fromhex( 191 | "000000006f27ddfe1dd680044a34548f41bed47eba9e6f0b310da21423bc5f33" 192 | ) 193 | getcfheaders = GetCFHeadersMessage(stop_hash=stop_hash) 194 | self.assertEqual( 195 | getcfheaders.serialize(), b"\x00\x00\x00\x00\x00" + stop_hash[::-1] 196 | ) 197 | hash2 = b"\x00" * 32 198 | stream = BytesIO( 199 | bytes.fromhex( 200 | "00335fbc2314a20d310b6f9eba7ed4be418f54344a0480d61dfedd276f000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000" 201 | ) 202 | ) 203 | cfheaders = CFHeadersMessage.parse(stream) 204 | self.assertEqual(cfheaders.filter_type, 0) 205 | self.assertEqual(cfheaders.stop_hash, stop_hash) 206 | self.assertEqual(cfheaders.previous_filter_header, hash2) 207 | self.assertEqual(cfheaders.filter_hashes, [hash2]) 208 | with self.assertRaises(RuntimeError): 209 | GetCFHeadersMessage() 210 | 211 | 212 | class CFCheckPointTest(TestCase): 213 | def test_cfcheckpoint(self): 214 | stop_hash = bytes.fromhex( 215 | "000000006f27ddfe1dd680044a34548f41bed47eba9e6f0b310da21423bc5f33" 216 | ) 217 | getcfcheckpoints = GetCFCheckPointMessage(stop_hash=stop_hash) 218 | self.assertEqual(getcfcheckpoints.serialize(), b"\x00" + stop_hash[::-1]) 219 | hash2 = b"\x00" * 32 220 | stream = BytesIO( 221 | bytes.fromhex( 222 | "00335fbc2314a20d310b6f9eba7ed4be418f54344a0480d61dfedd276f00000000010000000000000000000000000000000000000000000000000000000000000000000000" 223 | ) 224 | ) 225 | cfcheckpoints = CFCheckPointMessage.parse(stream) 226 | self.assertEqual(cfcheckpoints.filter_type, 0) 227 | self.assertEqual(cfcheckpoints.stop_hash, stop_hash) 228 | self.assertEqual(cfcheckpoints.filter_headers, [hash2]) 229 | with self.assertRaises(RuntimeError): 230 | GetCFCheckPointMessage() 231 | -------------------------------------------------------------------------------- /buidl/test/test_op.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | from buidl.op import ( 4 | decode_num, 5 | encode_num, 6 | op_checklocktimeverify, 7 | op_checkmultisig, 8 | op_checksequenceverify, 9 | op_checksig, 10 | op_hash160, 11 | ) 12 | from buidl.script import Script 13 | from buidl.timelock import Locktime, Sequence 14 | from buidl.tx import Tx, TxIn, TxOut 15 | 16 | from buidl.test import OfflineTestCase 17 | 18 | 19 | class OpTest(OfflineTestCase): 20 | def test_op_hash160(self): 21 | stack = [b"hello world"] 22 | self.assertTrue(op_hash160(stack)) 23 | self.assertEqual(stack[0].hex(), "d7d5ee7824ff93f94c3055af9382c86c68b5ca92") 24 | 25 | def test_op_checksig(self): 26 | tests = ( 27 | ( 28 | "010000000148dcc16482f5c835828020498ec1c35f48a578585721b5a77445a4ce93334d18000000006a4730440220636b9f822ea2f85e6375ecd066a49cc74c20ec4f7cf0485bebe6cc68da92d8ce022068ae17620b12d99353287d6224740b585ff89024370a3212b583fb454dce7c160121021f955d36390a38361530fb3724a835f4f504049492224a028fb0ab8c063511a7ffffffff0220960705000000001976a914d23541bd04c58a1265e78be912e63b2557fb439088aca0860100000000001976a91456d95dc3f2414a210efb7188d287bff487df96c688ac00000000", 29 | "30440220636b9f822ea2f85e6375ecd066a49cc74c20ec4f7cf0485bebe6cc68da92d8ce022068ae17620b12d99353287d6224740b585ff89024370a3212b583fb454dce7c1601", 30 | "021f955d36390a38361530fb3724a835f4f504049492224a028fb0ab8c063511a7", 31 | "testnet", 32 | ), 33 | ( 34 | 
"01000000000101e92e1c1d29218348f8ec9463a9fc94670f675a7f82ae100f3e8a5cbd63b4192e0100000017160014d52ad7ca9b3d096a38e752c2018e6fbc40cdf26fffffffff014c400f00000000001976a9146e13971913b9aa89659a9f53d327baa8826f2d7588ac0247304402205e3ae5ac9a0e0a16ae04b0678c5732973ce31051ba9f42193e69843e600d84f2022060a91cbd48899b1bf5d1ffb7532f69ab74bc1701a253a415196b38feb599163b012103935581e52c354cd2f484fe8ed83af7a3097005b2f9c60bff71d35bd795f54b6700000000", 35 | "304402205e3ae5ac9a0e0a16ae04b0678c5732973ce31051ba9f42193e69843e600d84f2022060a91cbd48899b1bf5d1ffb7532f69ab74bc1701a253a415196b38feb599163b01", 36 | "03935581e52c354cd2f484fe8ed83af7a3097005b2f9c60bff71d35bd795f54b67", 37 | "testnet", 38 | ), 39 | ( 40 | "0200000000010140d43a99926d43eb0e619bf0b3d83b4a31f60c176beecfb9d35bf45e54d0f7420100000017160014a4b4ca48de0b3fffc15404a1acdc8dbaae226955ffffffff0100e1f5050000000017a9144a1154d50b03292b3024370901711946cb7cccc387024830450221008604ef8f6d8afa892dee0f31259b6ce02dd70c545cfcfed8148179971876c54a022076d771d6e91bed212783c9b06e0de600fab2d518fad6f15a2b191d7fbd262a3e0121039d25ab79f41f75ceaf882411fd41fa670a4c672c23ffaf0e361a969cde0692e800000000", 41 | "30450221008604ef8f6d8afa892dee0f31259b6ce02dd70c545cfcfed8148179971876c54a022076d771d6e91bed212783c9b06e0de600fab2d518fad6f15a2b191d7fbd262a3e01", 42 | "039d25ab79f41f75ceaf882411fd41fa670a4c672c23ffaf0e361a969cde0692e8", 43 | "mainnet", 44 | ), 45 | ) 46 | for raw_tx, sig_hex, sec_hex, network in tests: 47 | tx_obj = Tx.parse(BytesIO(bytes.fromhex(raw_tx)), network=network) 48 | sec = bytes.fromhex(sec_hex) 49 | sig = bytes.fromhex(sig_hex) 50 | stack = [sig, sec] 51 | self.assertTrue(op_checksig(stack, tx_obj, 0)) 52 | self.assertEqual(decode_num(stack[0]), 1) 53 | 54 | def test_op_checkmultisig(self): 55 | raw_tx = "0100000001868278ed6ddfb6c1ed3ad5f8181eb0c7a385aa0836f01d5e4789e6bd304d87221a000000db00483045022100dc92655fe37036f47756db8102e0d7d5e28b3beb83a8fef4f5dc0559bddfb94e02205a36d4e4e6c7fcd16658c50783e00c341609977aed3ad00937bf4ee942a8993701483045022100da6bee3c93766232079a01639d07fa869598749729ae323eab8eef53577d611b02207bef15429dcadce2121ea07f233115c6f09034c0be68db99980b9a6c5e75402201475221022626e955ea6ea6d98850c994f9107b036b1334f18ca8830bfff1295d21cfdb702103b287eaf122eea69030a0e9feed096bed8045c8b98bec453e1ffac7fbdbd4bb7152aeffffffff04d3b11400000000001976a914904a49878c0adfc3aa05de7afad2cc15f483a56a88ac7f400900000000001976a914418327e3f3dda4cf5b9089325a4b95abdfa0334088ac722c0c00000000001976a914ba35042cfe9fc66fd35ac2224eebdafd1028ad2788acdc4ace020000000017a91474d691da1574e6b3c192ecfb52cc8984ee7b6c568700000000" 56 | tx_obj = Tx.parse(BytesIO(bytes.fromhex(raw_tx))) 57 | sig1 = bytes.fromhex( 58 | "3045022100dc92655fe37036f47756db8102e0d7d5e28b3beb83a8fef4f5dc0559bddfb94e02205a36d4e4e6c7fcd16658c50783e00c341609977aed3ad00937bf4ee942a8993701" 59 | ) 60 | sig2 = bytes.fromhex( 61 | "3045022100da6bee3c93766232079a01639d07fa869598749729ae323eab8eef53577d611b02207bef15429dcadce2121ea07f233115c6f09034c0be68db99980b9a6c5e75402201" 62 | ) 63 | sec1 = bytes.fromhex( 64 | "022626e955ea6ea6d98850c994f9107b036b1334f18ca8830bfff1295d21cfdb70" 65 | ) 66 | sec2 = bytes.fromhex( 67 | "03b287eaf122eea69030a0e9feed096bed8045c8b98bec453e1ffac7fbdbd4bb71" 68 | ) 69 | stack = [b"", sig1, sig2, b"\x02", sec1, sec2, b"\x02"] 70 | self.assertTrue(op_checkmultisig(stack, tx_obj, 0)) 71 | self.assertEqual(decode_num(stack[0]), 1) 72 | 73 | def test_op_cltv(self): 74 | locktime_0 = Locktime(1234) 75 | locktime_1 = Locktime(2345) 76 | sequence = Sequence() 77 | tx_in = TxIn(b"\x00" * 32, 0, 
sequence=sequence) 78 | tx_out = TxOut(1, Script()) 79 | tx_obj = Tx(1, [tx_in], [tx_out], locktime_1) 80 | stack = [] 81 | self.assertFalse(op_checklocktimeverify(stack, tx_obj, 0)) 82 | tx_in.sequence = Sequence(0xFFFFFFFE) 83 | self.assertFalse(op_checklocktimeverify(stack, tx_obj, 0)) 84 | stack = [encode_num(-5)] 85 | self.assertFalse(op_checklocktimeverify(stack, tx_obj, 0)) 86 | stack = [encode_num(locktime_0)] 87 | self.assertTrue(op_checklocktimeverify(stack, tx_obj, 0)) 88 | tx_obj.locktime = Locktime(1582820194) 89 | self.assertFalse(op_checklocktimeverify(stack, tx_obj, 0)) 90 | tx_obj.locktime = Locktime(500) 91 | self.assertFalse(op_checklocktimeverify(stack, tx_obj, 0)) 92 | 93 | def test_op_csv(self): 94 | sequence_0 = Sequence() 95 | sequence_1 = Sequence(2345) 96 | tx_in = TxIn(b"\x00" * 32, 0, sequence=sequence_0) 97 | tx_out = TxOut(1, Script()) 98 | tx_obj = Tx(1, [tx_in], [tx_out]) 99 | stack = [] 100 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 101 | tx_in.sequence = sequence_1 102 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 103 | stack = [encode_num(-5)] 104 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 105 | tx_obj.version = 2 106 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 107 | stack = [encode_num(1234 | (1 << 22))] 108 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 109 | stack = [encode_num(9999)] 110 | self.assertFalse(op_checksequenceverify(stack, tx_obj, 0)) 111 | stack = [encode_num(1234)] 112 | self.assertTrue(op_checksequenceverify(stack, tx_obj, 0)) 113 | -------------------------------------------------------------------------------- /buidl/test/test_pecc.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl.pecc import FieldElement, Point 4 | 5 | 6 | class FieldElementTest(TestCase): 7 | def test_ne(self): 8 | a = FieldElement(2, 31) 9 | b = FieldElement(2, 31) 10 | c = FieldElement(15, 31) 11 | self.assertEqual(a, b) 12 | self.assertTrue(a != c) 13 | self.assertFalse(a != b) 14 | 15 | def test_add(self): 16 | a = FieldElement(2, 31) 17 | b = FieldElement(15, 31) 18 | self.assertEqual(a + b, FieldElement(17, 31)) 19 | a = FieldElement(17, 31) 20 | b = FieldElement(21, 31) 21 | self.assertEqual(a + b, FieldElement(7, 31)) 22 | 23 | def test_sub(self): 24 | a = FieldElement(29, 31) 25 | b = FieldElement(4, 31) 26 | self.assertEqual(a - b, FieldElement(25, 31)) 27 | a = FieldElement(15, 31) 28 | b = FieldElement(30, 31) 29 | self.assertEqual(a - b, FieldElement(16, 31)) 30 | 31 | def test_mul(self): 32 | a = FieldElement(24, 31) 33 | b = FieldElement(19, 31) 34 | self.assertEqual(a * b, FieldElement(22, 31)) 35 | 36 | def test_pow(self): 37 | a = FieldElement(17, 31) 38 | self.assertEqual(a**3, FieldElement(15, 31)) 39 | a = FieldElement(5, 31) 40 | b = FieldElement(18, 31) 41 | self.assertEqual(a**5 * b, FieldElement(16, 31)) 42 | 43 | def test_div(self): 44 | a = FieldElement(3, 31) 45 | b = FieldElement(24, 31) 46 | self.assertEqual(a / b, FieldElement(4, 31)) 47 | a = FieldElement(17, 31) 48 | self.assertEqual(a**-3, FieldElement(29, 31)) 49 | a = FieldElement(4, 31) 50 | b = FieldElement(11, 31) 51 | self.assertEqual(a**-4 * b, FieldElement(13, 31)) 52 | 53 | 54 | class PointTest(TestCase): 55 | def test_ne(self): 56 | a = Point(x=3, y=-7, a=5, b=7) 57 | b = Point(x=18, y=77, a=5, b=7) 58 | self.assertTrue(a != b) 59 | self.assertFalse(a != a) 60 | 61 | def test_on_curve(self): 62 | with 
self.assertRaises(ValueError): 63 | Point(x=-2, y=4, a=5, b=7) 64 | # these should not raise an error 65 | Point(x=3, y=-7, a=5, b=7) 66 | Point(x=18, y=77, a=5, b=7) 67 | 68 | def test_add0(self): 69 | a = Point(x=None, y=None, a=5, b=7) 70 | b = Point(x=2, y=5, a=5, b=7) 71 | c = Point(x=2, y=-5, a=5, b=7) 72 | self.assertEqual(a + b, b) 73 | self.assertEqual(b + a, b) 74 | self.assertEqual(b + c, a) 75 | 76 | def test_add1(self): 77 | a = Point(x=3, y=7, a=5, b=7) 78 | b = Point(x=-1, y=-1, a=5, b=7) 79 | self.assertEqual(a + b, Point(x=2, y=-5, a=5, b=7)) 80 | 81 | def test_add2(self): 82 | a = Point(x=-1, y=1, a=5, b=7) 83 | self.assertEqual(a + a, Point(x=18, y=-77, a=5, b=7)) 84 | 85 | 86 | class ECCTest(TestCase): 87 | def test_on_curve(self): 88 | # tests the following points whether they are on the curve or not 89 | # on curve y^2=x^3-7 over F_223: 90 | # (192,105) (17,56) (200,119) (1,193) (42,99) 91 | # the ones that aren't should raise a ValueError 92 | prime = 223 93 | a = FieldElement(0, prime) 94 | b = FieldElement(7, prime) 95 | 96 | valid_points = ((192, 105), (17, 56), (1, 193)) 97 | invalid_points = ((200, 119), (42, 99)) 98 | 99 | # iterate over valid points 100 | for x_raw, y_raw in valid_points: 101 | # Initialize points this way: 102 | # x = FieldElement(x_raw, prime) 103 | # y = FieldElement(y_raw, prime) 104 | # Point(x, y, a, b) 105 | x = FieldElement(x_raw, prime) 106 | y = FieldElement(y_raw, prime) 107 | # Creating the point should not result in an error 108 | Point(x, y, a, b) 109 | 110 | # iterate over invalid points 111 | for x_raw, y_raw in invalid_points: 112 | # Initialize points this way: 113 | # x = FieldElement(x_raw, prime) 114 | # y = FieldElement(y_raw, prime) 115 | # Point(x, y, a, b) 116 | x = FieldElement(x_raw, prime) 117 | y = FieldElement(y_raw, prime) 118 | # check that creating the point results in a ValueError 119 | # with self.assertRaises(ValueError): 120 | # Point(x, y, a, b) 121 | with self.assertRaises(ValueError): 122 | Point(x, y, a, b) 123 | 124 | def test_add(self): 125 | # tests the following additions on curve y^2=x^3-7 over F_223: 126 | # (192,105) + (17,56) 127 | # (47,71) + (117,141) 128 | # (143,98) + (76,66) 129 | prime = 223 130 | a = FieldElement(0, prime) 131 | b = FieldElement(7, prime) 132 | 133 | additions = ( 134 | # (x1, y1, x2, y2, x3, y3) 135 | (192, 105, 17, 56, 170, 142), 136 | (47, 71, 117, 141, 60, 139), 137 | (143, 98, 76, 66, 47, 71), 138 | ) 139 | # iterate over the additions 140 | for x1_raw, y1_raw, x2_raw, y2_raw, x3_raw, y3_raw in additions: 141 | # Initialize points this way: 142 | # x1 = FieldElement(x1_raw, prime) 143 | # y1 = FieldElement(y1_raw, prime) 144 | # p1 = Point(x1, y1, a, b) 145 | # x2 = FieldElement(x2_raw, prime) 146 | # y2 = FieldElement(y2_raw, prime) 147 | # p2 = Point(x2, y2, a, b) 148 | # x3 = FieldElement(x3_raw, prime) 149 | # y3 = FieldElement(y3_raw, prime) 150 | # p3 = Point(x3, y3, a, b) 151 | x1 = FieldElement(x1_raw, prime) 152 | y1 = FieldElement(y1_raw, prime) 153 | p1 = Point(x1, y1, a, b) 154 | x2 = FieldElement(x2_raw, prime) 155 | y2 = FieldElement(y2_raw, prime) 156 | p2 = Point(x2, y2, a, b) 157 | x3 = FieldElement(x3_raw, prime) 158 | y3 = FieldElement(y3_raw, prime) 159 | p3 = Point(x3, y3, a, b) 160 | # check that p1 + p2 == p3 161 | self.assertEqual(p1 + p2, p3) 162 | 163 | def test_rmul(self): 164 | # tests the following scalar multiplications 165 | # 2*(192,105) 166 | # 2*(143,98) 167 | # 2*(47,71) 168 | # 4*(47,71) 169 | # 8*(47,71) 170 | # 21*(47,71) 171 | 
prime = 223 172 | a = FieldElement(0, prime) 173 | b = FieldElement(7, prime) 174 | 175 | multiplications = ( 176 | # (coefficient, x1, y1, x2, y2) 177 | (2, 192, 105, 49, 71), 178 | (2, 143, 98, 64, 168), 179 | (2, 47, 71, 36, 111), 180 | (4, 47, 71, 194, 51), 181 | (8, 47, 71, 116, 55), 182 | (21, 47, 71, None, None), 183 | ) 184 | 185 | # iterate over the multiplications 186 | for s, x1_raw, y1_raw, x2_raw, y2_raw in multiplications: 187 | # Initialize points this way: 188 | # x1 = FieldElement(x1_raw, prime) 189 | # y1 = FieldElement(y1_raw, prime) 190 | # p1 = Point(x1, y1, a, b) 191 | x1 = FieldElement(x1_raw, prime) 192 | y1 = FieldElement(y1_raw, prime) 193 | p1 = Point(x1, y1, a, b) 194 | # initialize the second point based on whether it's the point at infinity 195 | # x2 = FieldElement(x2_raw, prime) 196 | # y2 = FieldElement(y2_raw, prime) 197 | # p2 = Point(x2, y2, a, b) 198 | if x2_raw is None: 199 | p2 = Point(None, None, a, b) 200 | else: 201 | x2 = FieldElement(x2_raw, prime) 202 | y2 = FieldElement(y2_raw, prime) 203 | p2 = Point(x2, y2, a, b) 204 | # check that the product is equal to the expected point 205 | self.assertEqual(s * p1, p2) 206 | -------------------------------------------------------------------------------- /buidl/test/test_schnorr.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | 3 | from buidl.ecc import S256Point, PrivateKey, SchnorrSignature 4 | 5 | 6 | class SchnorrTest(TestCase): 7 | def test_signing(self): 8 | tests = [ 9 | ( 10 | "0", 11 | "0000000000000000000000000000000000000000000000000000000000000003", 12 | "F9308A019258C31049344F85F89D5229B531C845836F99B08601F113BCE036F9", 13 | "0000000000000000000000000000000000000000000000000000000000000000", 14 | "0000000000000000000000000000000000000000000000000000000000000000", 15 | "E907831F80848D1069A5371B402410364BDF1C5F8307B0084C55F1CE2DCA821525F66A4A85EA8B71E482A74F382D2CE5EBEEE8FDB2172F477DF4900D310536C0", 16 | "", 17 | ), 18 | ( 19 | "1", 20 | "B7E151628AED2A6ABF7158809CF4F3C762E7160F38B4DA56A784D9045190CFEF", 21 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 22 | "0000000000000000000000000000000000000000000000000000000000000001", 23 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 24 | "6896BD60EEAE296DB48A229FF71DFE071BDE413E6D43F917DC8DCF8C78DE33418906D11AC976ABCCB20B091292BFF4EA897EFCB639EA871CFA95F6DE339E4B0A", 25 | "", 26 | ), 27 | ( 28 | "2", 29 | "C90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B14E5C9", 30 | "DD308AFEC5777E13121FA72B9CC1B7CC0139715309B086C960E18FD969774EB8", 31 | "C87AA53824B4D7AE2EB035A2B5BBBCCC080E76CDC6D1692C4B0B62D798E6D906", 32 | "7E2D58D8B3BCDF1ABADEC7829054F90DDA9805AAB56C77333024B9D0A508B75C", 33 | "5831AAEED7B44BB74E5EAB94BA9D4294C49BCF2A60728D8B4C200F50DD313C1BAB745879A5AD954A72C45A91C3A51D3C7ADEA98D82F8481E0E1E03674A6F3FB7", 34 | "", 35 | ), 36 | ( 37 | "3", 38 | "0B432B2677937381AEF05BB02A66ECD012773062CF3FA2549E44F58ED2401710", 39 | "25D1DFF95105F5253C4022F628A996AD3A0D95FBF21D468A1B33F8C160D8F517", 40 | "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", 41 | "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", 42 | "7EB0509757E246F19449885651611CB965ECC1A187DD51B64FDA1EDC9637D5EC97582B9CB13DB3933705B32BA982AF5AF25FD78881EBB32771FC5922EFC66EA3", 43 | "test fails if msg is reduced modulo p or n", 44 | ), 45 | ] 46 | for ( 47 | index, 48 | secret, 49 | xonly_pk, 50 | aux_rand, 51 | message, 52 | signature, 
53 | comment, 54 | ) in tests: 55 | private_key = PrivateKey(secret=int(secret, 16)) 56 | public_key = S256Point.parse(bytes.fromhex(xonly_pk)) 57 | aux = bytes.fromhex(aux_rand) 58 | msg = bytes.fromhex(message) 59 | want_sig = SchnorrSignature.parse(bytes.fromhex(signature)) 60 | self.assertTrue(public_key.verify_schnorr(msg, want_sig)) 61 | sig = private_key.sign_schnorr(msg, aux) 62 | self.assertEqual(sig, want_sig) 63 | 64 | def test_verify(self): 65 | tests = [ 66 | ( 67 | "D69C3509BB99E412E68B0FE8544E72837DFA30746D8BE2AA65975F29D22DC7B9", 68 | "4DF3C3F68FCC83B27E9D42C90431A72499F17875C81A599B566C9889B9696703", 69 | "00000000000000000000003B78CE563F89A0ED9414F5AA28AD0D96D6795F9C6376AFB1548AF603B3EB45C9F8207DEE1060CB71C04E80F593060B07D28308D7F4", 70 | ), 71 | ( 72 | "d0fa46cb883e940ac3dc5421f05b03859972639f51ed2eccbf3dc5a62e2e1b15", 73 | "11864b0142c248fdb090d08893745e0b36a78f988a8334d2056814ad5f541596", 74 | "23b1d4ff27b16af4b0fcb9672df671701a1a7f5a6bb7352b051f461edbc614aa6068b3e5313a174f90f3d95dc4e06f69bebd9cf5a3098fde034b01e69e8e7889", 75 | ), 76 | ] 77 | for xonly_pk, message, signature in tests: 78 | public_key = S256Point.parse(bytes.fromhex(xonly_pk)) 79 | msg = bytes.fromhex(message) 80 | sig = SchnorrSignature.parse(bytes.fromhex(signature)) 81 | self.assertTrue(public_key.verify_schnorr(msg, sig)) 82 | 83 | def test_errors(self): 84 | tests = [ 85 | ( 86 | "EEFDEA4CDB677750A420FEE807EACF21EB9898AE79B9768766E4FAA04A2D4A34", 87 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 88 | "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B", 89 | ValueError, 90 | "public key not on the curve", 91 | ), 92 | ( 93 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 94 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 95 | "FFF97BD5755EEEA420453A14355235D382F6472F8568A18B2F057A14602975563CC27944640AC607CD107AE10923D9EF7A73C643E166BE5EBEAFA34B1AC553E2", 96 | AssertionError, 97 | "has_even_y(R) is false", 98 | ), 99 | ( 100 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 101 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 102 | "1FA62E331EDBC21C394792D2AB1100A7B432B013DF3F6FF4F99FCB33E0E1515F28890B3EDB6E7189B630448B515CE4F8622A954CFE545735AAEA5134FCCDB2BD", 103 | AssertionError, 104 | "negated message", 105 | ), 106 | ( 107 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 108 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 109 | "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769961764B3AA9B2FFCB6EF947B6887A226E8D7C93E00C5ED0C1834FF0D0C2E6DA6", 110 | AssertionError, 111 | "negated s value", 112 | ), 113 | ( 114 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 115 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 116 | "0000000000000000000000000000000000000000000000000000000000000000123DDA8328AF9C23A94C1FEECFD123BA4FB73476F0D594DCB65C6425BD186051", 117 | AssertionError, 118 | "sG - eP is infinite. 
Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 0", 119 | ), 120 | ( 121 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 122 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 123 | "00000000000000000000000000000000000000000000000000000000000000017615FBAF5AE28864013C099742DEADB4DBA87F11AC6754F93780D5A1837CF197", 124 | AssertionError, 125 | "sG - eP is infinite. Test fails in single verification if has_even_y(inf) is defined as true and x(inf) as 1", 126 | ), 127 | ( 128 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 129 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 130 | "4A298DACAE57395A15D0795DDBFD1DCB564DA82B0F269BC70A74F8220429BA1D69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B", 131 | ValueError, 132 | "sig[0:32] is not an X coordinate on the curve", 133 | ), 134 | ( 135 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 136 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 137 | "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC2F69E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B", 138 | ValueError, 139 | "sig[0:32] is equal to field size", 140 | ), 141 | ( 142 | "DFF1D77F2A671C5F36183726DB2341BE58FEAE1DA2DECED843240F7B502BA659", 143 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 144 | "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E177769FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", 145 | ValueError, 146 | "sig[32:64] is equal to curve order", 147 | ), 148 | ( 149 | "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEFFFFFC30", 150 | "243F6A8885A308D313198A2E03707344A4093822299F31D0082EFA98EC4E6C89", 151 | "6CFF5C3BA86C69EA4B7376F31A9BCB4F74C1976089B2D9963DA2E5543E17776969E89B4C5564D00349106B8497785DD7D1D713A8AE82B32FA79D5F7FC407D39B", 152 | ValueError, 153 | "public key is not a valid X coordinate because it exceeds the field size", 154 | ), 155 | ] 156 | for xonly_pk, message, signature, error, comment in tests: 157 | with self.assertRaises(error): 158 | print(comment) 159 | public_key = S256Point.parse(bytes.fromhex(xonly_pk)) 160 | msg = bytes.fromhex(message) 161 | sig = SchnorrSignature.parse(bytes.fromhex(signature)) 162 | assert public_key.verify_schnorr(msg, sig) 163 | -------------------------------------------------------------------------------- /buidl/test/test_timelock.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | from time import time 3 | from unittest import TestCase 4 | 5 | from buidl.timelock import Locktime, Sequence, MAX_SEQUENCE 6 | 7 | 8 | class LocktimeTest(TestCase): 9 | def test_locktime(self): 10 | locktime_0 = Locktime() 11 | self.assertEqual(locktime_0, 0) 12 | self.assertEqual(locktime_0.block_height(), 0) 13 | self.assertIsNone(locktime_0.mtp()) 14 | self.assertEqual(Locktime.parse(BytesIO(locktime_0.serialize())), 0) 15 | current_time = int(time()) 16 | locktime_1 = Locktime(current_time) 17 | self.assertIsNone(locktime_1.block_height()) 18 | self.assertEqual(locktime_1.mtp(), current_time) 19 | self.assertEqual(Locktime.parse(BytesIO(locktime_1.serialize())), locktime_1) 20 | locktime_2 = Locktime(current_time - 1000000) 21 | self.assertTrue(locktime_2 < locktime_1) 22 | with self.assertRaises(ValueError): 23 | locktime_2 < locktime_0 24 | with self.assertRaises(ValueError): 25 | Locktime(-1) 26 | with 
self.assertRaises(ValueError): 27 | Locktime(1 << 32) 28 | 29 | 30 | class SequenceTest(TestCase): 31 | def test_sequence(self): 32 | sequence_0 = Sequence() 33 | self.assertEqual(sequence_0, MAX_SEQUENCE) 34 | self.assertTrue(sequence_0.is_max()) 35 | self.assertFalse(sequence_0.is_relative()) 36 | self.assertIsNone(sequence_0.relative_blocks()) 37 | self.assertIsNone(sequence_0.relative_time()) 38 | self.assertEqual(Sequence.parse(BytesIO(sequence_0.serialize())), sequence_0) 39 | time_amount = 512 * 1000 40 | sequence_1 = Sequence.from_relative_time(time_amount) 41 | self.assertFalse(sequence_1.is_comparable(sequence_0)) 42 | self.assertIsNone(sequence_1.relative_blocks()) 43 | self.assertEqual(sequence_1.relative_time(), time_amount) 44 | self.assertEqual(Sequence.parse(BytesIO(sequence_1.serialize())), sequence_1) 45 | blocks_amount = 144 46 | sequence_2 = Sequence.from_relative_blocks(blocks_amount) 47 | self.assertIsNone(sequence_2.relative_time()) 48 | self.assertEqual(sequence_2.relative_blocks(), blocks_amount) 49 | self.assertFalse(sequence_1.is_comparable(sequence_2)) 50 | sequence_3 = Sequence.from_relative_time(512 * 100) 51 | self.assertTrue(sequence_3 < sequence_1) 52 | with self.assertRaises(ValueError): 53 | sequence_2 < sequence_0 54 | with self.assertRaises(ValueError): 55 | sequence_2 < sequence_1 56 | with self.assertRaises(ValueError): 57 | Sequence(-1) 58 | with self.assertRaises(ValueError): 59 | Sequence(1 << 32) 60 | -------------------------------------------------------------------------------- /buidl/timelock.py: -------------------------------------------------------------------------------- 1 | from buidl.helper import ( 2 | int_to_little_endian, 3 | little_endian_to_int, 4 | ) 5 | 6 | MAX_LOCKTIME = (1 << 32) - 1 7 | MAX_SEQUENCE = (1 << 32) - 1 8 | BLOCK_LIMIT = 500000000 9 | SEQUENCE_DISABLE_RELATIVE_FLAG = 1 << 31 10 | SEQUENCE_RELATIVE_TIME_FLAG = 1 << 22 11 | SEQUENCE_MASK = (1 << 16) - 1 12 | 13 | 14 | class Locktime(int): 15 | def __new__(cls, n=0): 16 | if n < 0 or n > MAX_LOCKTIME: 17 | raise ValueError(f"Locktime must be between 0 and 2^32 - 1: {n}") 18 | return super().__new__(cls, n) 19 | 20 | @classmethod 21 | def parse(cls, s): 22 | return cls(little_endian_to_int(s.read(4))) 23 | 24 | def serialize(self): 25 | return int_to_little_endian(self, 4) 26 | 27 | def block_height(self): 28 | if self < BLOCK_LIMIT: 29 | return self 30 | else: 31 | return None 32 | 33 | def mtp(self): 34 | if self >= BLOCK_LIMIT: 35 | return self 36 | else: 37 | return None 38 | 39 | def is_comparable(self, other): 40 | return (self < BLOCK_LIMIT and other < BLOCK_LIMIT) or ( 41 | self >= BLOCK_LIMIT and other >= BLOCK_LIMIT 42 | ) 43 | 44 | def __lt__(self, other): 45 | if type(other) is int: 46 | return super().__lt__(other) 47 | if self.is_comparable(other): 48 | return super().__lt__(other) 49 | else: 50 | raise ValueError( 51 | "locktimes where one is a block height and the other a unix time cannot be compared" 52 | ) 53 | 54 | 55 | class Sequence(int): 56 | def __new__(cls, n=MAX_SEQUENCE): 57 | if n < 0 or n > MAX_SEQUENCE: 58 | raise ValueError(f"Sequence must be between 0 and 2^32 - 1: {n}") 59 | return super().__new__(cls, n) 60 | 61 | @classmethod 62 | def parse(cls, s): 63 | return cls(little_endian_to_int(s.read(4))) 64 | 65 | @classmethod 66 | def from_relative_time(cls, num_seconds): 67 | return cls(SEQUENCE_RELATIVE_TIME_FLAG | (num_seconds // 512)) 68 | 69 | @classmethod 70 | def from_relative_blocks(cls, num_blocks): 71 | return cls(num_blocks) 72 | 
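    # Worked example of the encoding above (illustrative only, derived from the constants
    # and classmethods in this file): Sequence.from_relative_time(512 * 1000) ORs
    # SEQUENCE_RELATIVE_TIME_FLAG (bit 22) with 512000 // 512 = 1000, so the low 16 bits
    # hold 1000 and relative_time() below recovers 1000 << 9 = 512,000 seconds, while
    # Sequence.from_relative_blocks(144) leaves bit 22 clear and relative_blocks()
    # returns 144 & SEQUENCE_MASK = 144.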
73 | def serialize(self): 74 | return int_to_little_endian(self, 4) 75 | 76 | def is_rbf_able(self): 77 | return self < MAX_SEQUENCE 78 | 79 | def is_max(self): 80 | return self == MAX_SEQUENCE 81 | 82 | def is_relative(self): 83 | return self & SEQUENCE_DISABLE_RELATIVE_FLAG == 0 84 | 85 | def is_relative_time(self): 86 | return self.is_relative() and self & SEQUENCE_RELATIVE_TIME_FLAG 87 | 88 | def is_relative_block(self): 89 | return self.is_relative() and not self.is_relative_time() 90 | 91 | def relative_blocks(self): 92 | """Returns the number of blocks that need to age""" 93 | if self.is_relative_block(): 94 | return self & SEQUENCE_MASK 95 | else: 96 | return None 97 | 98 | def relative_time(self): 99 | """Returns the number of seconds that need to age""" 100 | if self.is_relative_time(): 101 | return (self & SEQUENCE_MASK) << 9 102 | else: 103 | return None 104 | 105 | def is_comparable(self, other): 106 | return (self.is_relative_block() and other.is_relative_block()) or ( 107 | self.is_relative_time() and other.is_relative_time() 108 | ) 109 | 110 | def __lt__(self, other): 111 | if type(other) is int: 112 | return super().__lt__(other) 113 | if self.is_comparable(other): 114 | return self & SEQUENCE_MASK < other & SEQUENCE_MASK 115 | else: 116 | raise ValueError( 117 | "sequences where one is a relative block height and the other a relative unix time cannot be compared" 118 | ) 119 | -------------------------------------------------------------------------------- /buidl/witness.py: -------------------------------------------------------------------------------- 1 | from io import BytesIO 2 | 3 | from buidl.helper import ( 4 | encode_varint, 5 | encode_varstr, 6 | read_varint, 7 | read_varstr, 8 | ) 9 | from buidl.script import Script 10 | from buidl.taproot import ControlBlock, TapLeaf 11 | 12 | 13 | class Witness: 14 | def __init__(self, items=None): 15 | self.items = items or [] 16 | 17 | def __repr__(self): 18 | result = "" 19 | for item in self.items: 20 | if item == b"": 21 | result += " " 22 | else: 23 | result += f"{item.hex()} " 24 | return result 25 | 26 | def __getitem__(self, key): 27 | return self.items[key] 28 | 29 | def __len__(self): 30 | return len(self.items) 31 | 32 | def clone(self): 33 | return self.__class__(self.items[:]) 34 | 35 | def serialize(self): 36 | result = encode_varint(len(self)) 37 | for item in self.items: 38 | result += encode_varstr(item) 39 | return result 40 | 41 | def has_annex(self): 42 | return len(self.items) and self.items[-1][0] == 0x50 43 | 44 | def control_block(self): 45 | if self.has_annex(): 46 | return ControlBlock.parse(self.items[-2]) 47 | else: 48 | return ControlBlock.parse(self.items[-1]) 49 | 50 | def tap_script(self): 51 | if self.has_annex(): 52 | raw_tap_script = self.items[-3] 53 | else: 54 | raw_tap_script = self.items[-2] 55 | return Script.parse(BytesIO(encode_varstr(raw_tap_script))) 56 | 57 | def tap_leaf(self): 58 | leaf_version = self.control_block().tapleaf_version 59 | return TapLeaf(self.tap_script(), leaf_version) 60 | 61 | @classmethod 62 | def parse(cls, s): 63 | num_items = read_varint(s) 64 | items = [] 65 | for _ in range(num_items): 66 | items.append(read_varstr(s)) 67 | return cls(items) 68 | -------------------------------------------------------------------------------- /clean.sh: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env bash 2 | 3 | set -o xtrace 4 | pip3 uninstall buidl -y 5 | rm -rf .venv3/ 6 | rm -rf dist/ 7 | rm -rf build/ 8 | rm -rf buidl.egg-info/ 9 | rm buidl/_libsec.c 10 | rm buidl/_libsec.cpython-*-darwin.so 11 | rm buidl/_libsec.o 12 | rm buidl/_libsec.so 13 | 14 | find . | grep -E "(__pycache__|\.pyc|\.pyo$)" | xargs rm -rf 15 | -------------------------------------------------------------------------------- /docs/multiwallet.md: -------------------------------------------------------------------------------- 1 | # Multiwallet 2 | 3 | Multiwallet is a Command Line Interface (CLI) multisig wallet tool built with the `buidl` python library. 4 | Most people looking at the readme for this repository are programmers trying to use `buidl` in their applications. 5 | This page has installation instructions for using the multiwallet CLI tool. 6 | 7 | ## Telegram Community Chat Group 8 | Ask Qs here: 9 | 10 | 11 | ## Install 12 | 13 | #### Online Computer 14 | 15 | Easy, but not as secure. 16 | Great for testing. 17 | 18 | ``` 19 | $ pip3 install buidl --upgrade 20 | ``` 21 | 22 | You can test that your installation worked by running the following: 23 | ```bash 24 | $ multiwallet.py 25 | Welcome to multiwallet... 26 | ``` 27 | 28 | #### Offline (Airgap) Computer 29 | `buidl` has no dependencies, so this is relatively easy. 30 | 31 | Download this repo with `git` (while online): 32 | ``` 33 | $ git clone https://github.com/buidl-bitcoin/buidl-python.git 34 | ``` 35 | (you can also download a [.zip file from github](https://github.com/buidl-bitcoin/buidl-python/archive/main.zip) and then decompress it) 36 | 37 | Disconnect your computer from the internet, or copy this folder onto your offline computer. 38 | Go to the `buidl-python` directory: 39 | ```bash 40 | $ cd buidl-python 41 | ``` 42 | 43 | Start multiwallet without having to install anything: 44 | ```bash 45 | $ python3 multiwallet.py 46 | Welcome to multiwallet... 47 | ``` 48 | 49 | If you get a `permission denied` error, you may need to run: 50 | ```bash 51 | $ sudo python3 multiwallet.py 52 | ``` 53 | 54 | On Tails, you need to set up an [Administration Password](https://tails.boum.org/doc/first_steps/welcome_screen/administration_password/) in order to `sudo`. 55 | 56 | ## Product Roadmap 57 | 58 | * Show change addresses (not just receiving addresses) 59 | * Save outputs to a file? 60 | * Display QR codes? 61 | -------------------------------------------------------------------------------- /requirements-libsec.txt: -------------------------------------------------------------------------------- 1 | cffi 2 | -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | black==22.6.0 2 | flake8==5.0.4 3 | pexpect==4.8.0 4 | pytest==6.2.5 5 | -------------------------------------------------------------------------------- /run_tests.sh: -------------------------------------------------------------------------------- 1 | black --diff --check . && pytest -v && flake8 . 
&& printf "\nSUCCESS!\n" || printf "\n-----------------\nFAIL\n-----------------\n" 2 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore=E125,E203,E226,E501,W503 3 | exclude=pbkdf2.py,siphash.py,__init__.py,*/lib/*,*.venv3/*,tests_wycheproof_generate.py 4 | max-line-length=127 5 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | 4 | with open("README.md", "r") as fh: 5 | long_description = fh.read() 6 | 7 | setup( 8 | name="buidl", 9 | version="0.2.37", 10 | author="Example Author", 11 | author_email="author@example.com", 12 | description="An easy-to-use and fully featured bitcoin library written in pure python (no dependencies).", 13 | long_description=long_description, 14 | long_description_content_type="text/markdown", 15 | url="https://github.com/buidl-bitcoin/buidl-python", 16 | packages=find_packages(), 17 | include_package_data=True, # https://stackoverflow.com/a/56689053 18 | scripts=["multiwallet.py", "singlesweep.py"], 19 | classifiers=[ 20 | "Programming Language :: Python :: 3", 21 | "License :: OSI Approved :: MIT License", 22 | "Operating System :: OS Independent", 23 | ], 24 | python_requires=">=3.6", 25 | ) 26 | -------------------------------------------------------------------------------- /singlesweep.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # coding: utf-8 3 | 4 | import sys 5 | from cmd import Cmd 6 | from getpass import getpass 7 | from platform import platform 8 | from pkg_resources import DistributionNotFound, get_distribution 9 | 10 | import buidl # noqa: F401 (used below with pkg_resources for versioning) 11 | from buidl.ecc import PrivateKey 12 | from buidl.libsec_status import is_libsec_enabled 13 | from buidl.psbt import PSBT 14 | 15 | 16 | ##################################################################### 17 | # CLI UX 18 | ##################################################################### 19 | 20 | 21 | # https://stackoverflow.com/questions/287871/how-to-print-colored-text-in-python 22 | RESET_TERMINAL_COLOR = "\033[0m" 23 | 24 | 25 | def blue_fg(string): 26 | return f"\033[34m{string}{RESET_TERMINAL_COLOR}" 27 | 28 | 29 | def yellow_fg(string): 30 | return f"\033[93m{string}{RESET_TERMINAL_COLOR}" 31 | 32 | 33 | def green_fg(string): 34 | return f"\033[32m{string}{RESET_TERMINAL_COLOR}" 35 | 36 | 37 | def red_fg(string): 38 | return f"\033[31m{string}{RESET_TERMINAL_COLOR}" 39 | 40 | 41 | def print_blue(string): 42 | print(blue_fg(string)) 43 | 44 | 45 | def print_yellow(string): 46 | print(yellow_fg(string)) 47 | 48 | 49 | def print_green(string): 50 | print(green_fg(string)) 51 | 52 | 53 | def print_red(string): 54 | print(red_fg(string)) 55 | 56 | 57 | def _get_buidl_version(): 58 | try: 59 | return get_distribution("buidl").version 60 | except DistributionNotFound: 61 | return "Unknown" 62 | 63 | 64 | def _get_bool(prompt, default=True): 65 | if default is True: 66 | yn = "[Y/n]" 67 | else: 68 | yn = "[y/N]" 69 | 70 | while True: 71 | response_str = input(blue_fg(f"{prompt} {yn}: ")).strip().lower() 72 | if response_str == "": 73 | return default 74 | if response_str in ("n", "no"): 75 | return False 76 | if response_str in ("y", "yes"): 77 | return 
True 78 | print_red("Please choose either y or n") 79 | 80 | 81 | def _get_wif(): 82 | prompt = blue_fg("Enter WIF (Wallet Import Format) to use for signing: ") 83 | while True: 84 | wif = getpass(prompt=prompt).strip() 85 | try: 86 | return PrivateKey.parse(wif) 87 | except Exception as e: 88 | print_red(f"Could not parse WIF: {e}") 89 | 90 | 91 | def _get_psbt_obj(network): 92 | psbt_prompt = blue_fg( 93 | "Paste partially signed bitcoin transaction (PSBT) in base64 form: " 94 | ) 95 | while True: 96 | psbt_b64 = input(psbt_prompt).strip() 97 | 98 | if not psbt_b64: 99 | continue 100 | 101 | try: 102 | psbt_obj = PSBT.parse_base64(psbt_b64, network=network) 103 | except Exception as e: 104 | print_red(f"Could not parse PSBT: {e}") 105 | continue 106 | 107 | # redundant but explicit 108 | if psbt_obj.validate() is not True: 109 | print_red("PSBT does not validate") 110 | continue 111 | 112 | return psbt_obj 113 | 114 | 115 | ##################################################################### 116 | # Command Line App Code Starts Here 117 | ##################################################################### 118 | 119 | 120 | class MyPrompt(Cmd): 121 | intro = ( 122 | "Welcome to singlesweep, a stateless single sig sweeper that works with WIF and PSBTs.\n" 123 | "Single sig is DANGEROUS, this is an emergency recovery tool with NO WARRANTY OF ANY KIND.\n" 124 | "It is often used for collecting funds from old paper wallets.\n" 125 | "Type help or ? to list commands.\n" 126 | ) 127 | prompt = "(₿) " # the bitcoin symbol :) 128 | 129 | def __init__(self): 130 | super().__init__() 131 | 132 | def do_sweep(self, arg): 133 | """Sign a single-sig PSBT sweep transaction (1 output) using 1 WIF.""" 134 | 135 | # We ask for this upfront so we can infer the network from it (PSBT doesn't have network info) 136 | # Users SHOULD only run this code on an airgap machine 137 | privkey_obj = _get_wif() 138 | network = privkey_obj.network 139 | 140 | # TODO: create a new helper method in pecc.py/cecc.py? 
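        # For example (values taken from test_singlesweep.py in this repo), the compressed
        # testnet WIF used in the tests parses to a key whose p2pkh address is
        # "mxgA6BsDLcv4vooLx4j6MfHQRihbrdwV5P", and that is the address the pasted PSBT's
        # inputs are expected to spend from.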
141 | expected_utxo_addr = privkey_obj.point.address( 142 | compressed=privkey_obj.compressed, network=network 143 | ) 144 | print_yellow( 145 | f"Will attempt to spend from {network.upper()} {expected_utxo_addr}" 146 | ) 147 | 148 | psbt_obj = _get_psbt_obj(network=network) 149 | tx_obj = psbt_obj.tx_obj 150 | 151 | try: 152 | psbt_described = psbt_obj.describe_p2pkh_sweep(privkey_obj=privkey_obj) 153 | except Exception as e: 154 | print_red(f"ABORTING WITHOUT SIGNING, could not describe PSBT:\n{e}") 155 | return 156 | 157 | # Gather TX info and validate 158 | print_yellow(psbt_described["tx_summary_text"]) 159 | 160 | if _get_bool(prompt="In Depth Transaction View?", default=True): 161 | to_print = [] 162 | to_print.append("DETAILED VIEW") 163 | to_print.append(f"Fee: {psbt_described['tx_fee_sats']:,} (unverified)") 164 | to_print.append( 165 | f"Total Input Sats Consumed: {psbt_described['total_input_sats']:,} (unverified)" 166 | ) 167 | to_print.append( 168 | f"Total Output Sats Created: {psbt_described['output_spend_sats']:,}" 169 | ) 170 | to_print.append(f"Lock Time: {psbt_described['locktime']:,}") 171 | to_print.append( 172 | f"RBF: {'Enabled' if psbt_described['is_rbf_able'] else 'DISABLED'}" 173 | ) 174 | to_print.append( 175 | f"Size: {psbt_described['tx_size_bytes']} bytes (will increase after signing)" 176 | ) 177 | to_print.append("-" * 80) 178 | to_print.append(f"{len(psbt_described['inputs_desc'])} Input(s):") 179 | for cnt, input_desc in enumerate(psbt_described["inputs_desc"]): 180 | to_print.append(f" Input #{cnt}") 181 | for k, v in input_desc.items(): 182 | if k == "sats": 183 | # Comma separate ints 184 | val = f"{v:,} (unverified)" 185 | else: 186 | val = v 187 | to_print.append(f" {k}: {val}") 188 | to_print.append("-" * 80) 189 | to_print.append(f"{len(psbt_described['outputs_desc'])} Output(s):") 190 | for cnt, output_desc in enumerate(psbt_described["outputs_desc"]): 191 | to_print.append(f" Output #{cnt}") 192 | for k, v in output_desc.items(): 193 | if k == "sats": 194 | # Comma separate ints 195 | val = f"{v:,}" 196 | else: 197 | val = v 198 | to_print.append(f" {k}: {val}") 199 | print_yellow("\n".join(to_print)) 200 | 201 | if not _get_bool(prompt="Sign this transaction?", default=True): 202 | print_yellow(f"Transaction {tx_obj.id()} NOT signed") 203 | return 204 | 205 | # Sign the TX 206 | # TODO: would prefer to use psbt_obj.sign_with_private_keys(), but that requires NamedPublicKeys that we don't have (no paths in PSBT) 207 | for cnt, _ in enumerate(tx_obj.tx_ins): 208 | was_signed = tx_obj.sign_p2pkh(input_index=cnt, private_key=privkey_obj) 209 | if was_signed is not True: 210 | print_red("PSBT was NOT signed") 211 | 212 | print_yellow(f"SIGNED TX {tx_obj.hash().hex()} has the following hex:\n") 213 | print_green(tx_obj.serialize().hex()) 214 | print_yellow("\nYou can be broadcast this hex via:") 215 | print_yellow(" - Your bitcoin core node") 216 | print_yellow( 217 | ' - "pushtx" block explorers (Blockstream, BlockCypher, Blockchain.com, etc), mining pools, Electrum SPV network, etc' 218 | ) 219 | print_yellow( 220 | ' - Electrum signing of a previously unsigned transaction: "Combine" > "Merge Signatures From"\n' 221 | ) 222 | 223 | def do_version_info(self, arg): 224 | """Print program settings for debug purposes""" 225 | 226 | to_print = [ 227 | f"buidl Version: {_get_buidl_version()}", 228 | f"Python Version: {sys.version_info}", 229 | f"Platform: {platform()}", 230 | f"libsecp256k1 Configured: {is_libsec_enabled()}", 231 | ] 232 | 
print_yellow("\n".join(to_print)) 233 | 234 | def do_exit(self, arg): 235 | """Exit Program""" 236 | print_yellow("\nNo data saved") 237 | return True 238 | 239 | 240 | if __name__ == "__main__": 241 | try: 242 | MyPrompt().cmdloop() 243 | except KeyboardInterrupt: 244 | print_yellow("\nNo data saved") 245 | -------------------------------------------------------------------------------- /test_singlesweep.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import pexpect 4 | 5 | from os import getenv 6 | 7 | from buidl import PrivateKey 8 | 9 | 10 | @unittest.skipIf( 11 | getenv("SKIP_SLOW_TESTS"), 12 | reason="This test takes a while", 13 | ) 14 | class SinglesweepTest(unittest.TestCase): 15 | def expect(self, text): 16 | """ 17 | Expect a string of bytes one at a time (not waiting on a newline) 18 | """ 19 | buffer = "" 20 | while True: 21 | try: 22 | # This will error out at the end of the buffer 23 | latest_char = self.child.read(1) 24 | except Exception as e: 25 | raise Exception( 26 | f"Failed to find text `{text}` in buffer `{buffer}`.\nError: `{e}`" 27 | ) 28 | 29 | try: 30 | latest_char = latest_char.decode() 31 | if latest_char not in ("\n", "\r"): 32 | buffer += latest_char 33 | except UnicodeDecodeError: 34 | # Handle non-unicode char edge-case (bitcoin symbol) 35 | buffer += str(latest_char) 36 | 37 | if text in buffer: 38 | return True 39 | 40 | # this line should never be reached, the script would timeout first 41 | assert f"`{text}` not in buffer: {buffer}" 42 | 43 | def setUp(self): 44 | self.child = pexpect.spawn("python3 singlesweep.py", timeout=2) 45 | self.expect( 46 | "Welcome to singlesweep, a stateless single sig sweeper that works with WIF and PSBTs." 47 | ) 48 | 49 | def test_version_info(self): 50 | self.child.sendline("version_info") 51 | self.expect("buidl Version: ") 52 | self.expect("Python Version: ") 53 | self.expect("Platform: ") 54 | self.expect("libsecp256k1 Configured: ") 55 | 56 | def test_send_compressed(self): 57 | # This isn't strictly neccesary, just shows how this was generated 58 | privkey_obj = PrivateKey( 59 | secret=314159265358979323846, network="testnet", compressed=True 60 | ) 61 | self.assertEqual( 62 | privkey_obj.point.address(compressed=True, network="testnet"), 63 | "mxgA6BsDLcv4vooLx4j6MfHQRihbrdwV5P", 64 | ) 65 | self.assertEqual( 66 | privkey_obj.wif(compressed=True), 67 | "cMahea7zqjxrtgAbB7LSGbcQUr1uX1ojuh9HqjiLNWDVQudB7k4E", 68 | ) 69 | 70 | self.child.sendline("sweep") 71 | self.expect("Enter WIF (Wallet Import Format) to use for signing:") 72 | 73 | self.child.sendline("cMahea7zqjxrtgAbB7LSGbcQUr1uX1ojuh9HqjiLNWDVQudB7k4E") 74 | self.expect( 75 | "Will attempt to spend from TESTNET mxgA6BsDLcv4vooLx4j6MfHQRihbrdwV5P" 76 | ) 77 | self.expect("Paste partially signed bitcoin transaction (PSBT) in base64 form") 78 | 79 | psbt_to_sign = "cHNidP8BAFUCAAAAAVRZh97dheVJzHkcaznyZCtSunoNJgnNGBRKGYw5nBSQAQAAAAD9////ASCFAQAAAAAAGXapFJ+aer1gDAyqA5g6d8jD344GLLL6iKxZiR4AAAEA4gIAAAAAAQEy/IizvbchxG0F6yLb/g0qEa9HidaAzlDzGCUMNgwZTQAAAAAA/v///wKEOaMAAAAAABepFOpylCCZWu7JekQ9p98RPeFn3kRoh6CGAQAAAAAAGXapFLw3unG6dBaKPCHEL+A8dYsff8aRiKwCRzBEAiBanqsb6aKeGstvedoheUCnr7buvdOHz58/J803NfsOkAIgOIpcQ+OGZEzFo7E3FBvUHagLZJLik8vf9KqnfVwfn9MBIQO3M5Kw2cHk3i3s1FpZK69B/oUOubZhv6e/GU7n6RVAeViJHgAAAA==" 80 | self.child.sendline(psbt_to_sign) 81 | self.expect( 82 | "PSBT sends 99,616 sats to mv4rnyY3Su5gjcDNzbMLKBQkBicCtHUtFB with an UNVERIFIED fee of 384 sats (0.38% of spend)" 83 | ) 84 | self.expect("In 
Depth Transaction View? [Y/n]") 85 | 86 | self.child.sendline("Y") 87 | self.expect("DETAILED VIEW") 88 | self.expect("Sign this transaction?") 89 | 90 | self.child.sendline("Y") 91 | self.expect( 92 | "SIGNED TX 0dfc0c3b8e0e87b6321a75fca542c22f792b2d6f519720e0a974976c646b7d5e" 93 | ) 94 | self.expect( 95 | "0200000001545987dedd85e549cc791c6b39f2642b52ba7a0d2609cd18144a198c399c1490010000006a473044022076ce7079425632ca3d355d33c9f8d5152bdfef87e7ef4a8be3792f3cbc4c7f4702201da9fd053b42c4ea2b57717d8b4995caf6f3d47cf7572f54122f158c208c4d3c012102f64b30341c33fb908144acb898781e1cf011bae3e44489864a6c621ded2a29aafdffffff0120850100000000001976a9149f9a7abd600c0caa03983a77c8c3df8e062cb2fa88ac59891e00" 96 | ) 97 | 98 | def test_send_uncompressed(self): 99 | # This isn't strictly neccesary, just shows how this was generated 100 | privkey_obj = PrivateKey( 101 | secret=314159265358979323846, network="testnet", compressed=False 102 | ) 103 | self.assertEqual( 104 | privkey_obj.point.address(compressed=False, network="testnet"), 105 | "mzJtwV9LL6B3Nvm1uc1Z5NK3mqqaZyn9w1", 106 | ) 107 | self.assertEqual( 108 | privkey_obj.wif(compressed=False), 109 | "91avARGdfge8E4tZfYLoxeJ5sGBdNJQH4nM1C3RmiaMW6WFGmDS", 110 | ) 111 | 112 | self.child.sendline("sweep") 113 | self.expect("Enter WIF (Wallet Import Format) to use for signing:") 114 | 115 | self.child.sendline("91avARGdfge8E4tZfYLoxeJ5sGBdNJQH4nM1C3RmiaMW6WFGmDS") 116 | self.expect( 117 | "Will attempt to spend from TESTNET mzJtwV9LL6B3Nvm1uc1Z5NK3mqqaZyn9w1" 118 | ) 119 | self.expect("Paste partially signed bitcoin transaction (PSBT) in base64 form") 120 | psbt_to_sign = "cHNidP8BAFUCAAAAAaASEHE91UJrrmU5FMjXIVUV5HF91EGzcaktfooEUPBFAAAAAAD9////AWPhFwAAAAAAGXapFJ+aer1gDAyqA5g6d8jD344GLLL6iKxniR4AAAEA4gIAAAAAAQH96ccGxhmgYfsrP9xnIUl2WJxnE+Jz2iAH08QAkPiSiQAAAAAA/v///wLj4hcAAAAAABl2qRTOIpkaGEnqK3MB5zwY6WWk/ZKiRoisAWO7XQAAAAAXqRT2kti1/KAVtU90LS4zl1LNGa3NtocCRzBEAiAnsPi908ar1ROFyTWV4TlqlKHijNRbOuolJILCG2G6ywIgCPZLYkWebvcTOztJj3I+D6CX/y9DCZRRhrD9QJtdR80BIQPLHa2FJlrG7KzxKA6ZVJfJ2P3xGp/88a65XIkCNK6Xk1iJHgAAAA==" 121 | self.child.sendline(psbt_to_sign) 122 | self.expect( 123 | "PSBT sends 1,565,027 sats to mv4rnyY3Su5gjcDNzbMLKBQkBicCtHUtFB with an UNVERIFIED fee of 384 sats (0.02% of spend)" 124 | ) 125 | self.expect("In Depth Transaction View?") 126 | 127 | self.child.sendline("Y") 128 | self.expect("DETAILED VIEW") 129 | self.expect("Sign this transaction?") 130 | 131 | self.child.sendline("Y") 132 | self.expect( 133 | "SIGNED TX f3271bbac2b66d83379de855a79cead9d0e5210b857bee5c22462635033861c4" 134 | ) 135 | self.expect( 136 | "0200000001a01210713dd5426bae653914c8d7215515e4717dd441b371a92d7e8a0450f045000000008b4830450221008123f3ce37457a8c61709d873bddf3fc93e46f684749956571d59acbd00087c002202346068137f144df11d92fa185640c848dd89c22552803b502e34be56e9da6de014104f64b30341c33fb908144acb898781e1cf011bae3e44489864a6c621ded2a29aaee264a64a924c505d1e66bc7308b2d87806813ad203725d7a9548c9d79017d36fdffffff0163e11700000000001976a9149f9a7abd600c0caa03983a77c8c3df8e062cb2fa88ac67891e00" 137 | ) 138 | 139 | def test_fail(self): 140 | # This has to take some seconds to fail 141 | mw = pexpect.spawn("python3 singlesweep.py", timeout=1) 142 | with self.assertRaises(pexpect.exceptions.TIMEOUT): 143 | mw.expect("this text should not match") 144 | -------------------------------------------------------------------------------- /update_pypi.sh: -------------------------------------------------------------------------------- 1 | #! 
/usr/bin/env bash 2 | 3 | # Verbose printing 4 | set -o xtrace 5 | 6 | # Exit virtualenv if we're in one 7 | deactivate 8 | 9 | # Abort if anything errors 10 | set -e; 11 | 12 | # Remove old files 13 | rm -rf .venv3/ 14 | rm -rf dist/ 15 | rm -rf build/ 16 | rm -rf buidl.egg-info/ 17 | find . | grep -E "(__pycache__|\.pyc|\.pyo$)" | xargs rm -rf 18 | 19 | # Tests 20 | if [ -f requirements-test.txt ]; then python3 -m pip install -r requirements-test.txt; fi 21 | black --check . 22 | flake8 . 23 | pytest -v buidl/test/ 24 | pytest -v test_*.py 25 | 26 | # Safety 27 | git push 28 | 29 | # Virtualenv 30 | python3 --version 31 | # Install virtualenv (if not installed) 32 | # python3 -m pip uninstall virtualenv -y 33 | python3 -m pip install virtualenv 34 | # Create virtualenv and install our software inside it 35 | python3 -m virtualenv .venv3 36 | source .venv3/bin/activate 37 | # python3 -m pip uninstall pyinstaller -y 38 | if [ -f requirements.txt ]; then python3 -m pip install -r requirements.txt; fi 39 | python3 setup.py install 40 | python3 -m pip freeze 41 | # Package 42 | python3 setup.py sdist bdist_wheel 43 | # Upload to PyPI 44 | python3 -m pip install --upgrade twine 45 | python3 -m twine upload dist/* 46 | 47 | # Cleanup 48 | rm -rfv dist/ 49 | rm -rfv buidl.egg-info/ 50 | rm -rfv build/ 51 | find . | grep -E "(__pycache__|\.pyc|\.pyo$)" | xargs rm -rf 52 | 53 | # Hacky timer 54 | # https://askubuntu.com/questions/1028924/how-do-i-use-seconds-inside-a-bash-script 55 | hrs=$(( SECONDS/3600 )) 56 | mins=$(( (SECONDS-hrs*3600)/60)) 57 | secs=$(( SECONDS-hrs*3600-mins*60 )) 58 | printf 'Time spent: %02d:%02d:%02d\n' $hrs $mins $secs 59 | --------------------------------------------------------------------------------