├── CHANGELOG.md
├── torba
│   ├── ui
│   │   └── __init__.py
│   ├── client
│   │   ├── __init__.py
│   │   ├── words
│   │   │   └── __init__.py
│   │   ├── errors.py
│   │   ├── constants.py
│   │   ├── basemanager.py
│   │   ├── cli.py
│   │   ├── bcd_data_stream.py
│   │   ├── util.py
│   │   ├── wallet.py
│   │   ├── coinselection.py
│   │   ├── mnemonic.py
│   │   ├── baseheader.py
│   │   ├── hash.py
│   │   └── basenetwork.py
│   ├── server
│   │   ├── __init__.py
│   │   ├── cli.py
│   │   ├── enum.py
│   │   ├── text.py
│   │   ├── hash.py
│   │   ├── storage.py
│   │   ├── server.py
│   │   └── script.py
│   ├── workbench
│   │   ├── __init__.py
│   │   ├── Makefile
│   │   ├── output_dock.ui
│   │   ├── _output_dock.py
│   │   ├── blockchain_dock.ui
│   │   └── _blockchain_dock.py
│   ├── coin
│   │   ├── __init__.py
│   │   ├── bitcoincash.py
│   │   └── bitcoinsegwit.py
│   ├── orchstr8
│   │   ├── __init__.py
│   │   ├── cli.py
│   │   └── service.py
│   ├── __init__.py
│   ├── rpc
│   │   ├── __init__.py
│   │   ├── util.py
│   │   └── framing.py
│   ├── tasks.py
│   ├── stream.py
│   └── testcase.py
├── tests
│   └── client_tests
│       ├── __init__.py
│       ├── unit
│       │   ├── __init__.py
│       │   ├── test_mnemonic.py
│       │   ├── test_bcd_data_stream.py
│       │   ├── test_hash.py
│       │   ├── test_utils.py
│       │   ├── key_fixtures.py
│       │   ├── test_wallet.py
│       │   ├── test_headers.py
│       │   ├── test_bip32.py
│       │   ├── test_coinselection.py
│       │   ├── test_ledger.py
│       │   └── test_script.py
│       └── integration
│           ├── __init__.py
│           ├── test_blockchain_reorganization.py
│           ├── test_reconnect.py
│           ├── test_sync.py
│           └── test_transactions.py
├── torba.png
├── MANIFEST.in
├── .gitignore
├── README.md
├── tox.ini
├── .travis.yml
├── setup.cfg
├── LICENSE
└── setup.py
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/torba/ui/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/torba/client/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/client_tests/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/torba/client/words/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/client_tests/integration/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/torba/server/__init__.py:
--------------------------------------------------------------------------------
1 | from .server import Server
2 |
--------------------------------------------------------------------------------
/torba/workbench/__init__.py:
--------------------------------------------------------------------------------
1 | from .application import main
2 |
--------------------------------------------------------------------------------
/torba.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lbryio/torba/HEAD/torba.png
--------------------------------------------------------------------------------
/torba/client/errors.py:
--------------------------------------------------------------------------------
1 | class InsufficientFundsError(Exception):
2 | pass
3 |
--------------------------------------------------------------------------------
/torba/coin/__init__.py:
--------------------------------------------------------------------------------
1 | __path__: str = __import__('pkgutil').extend_path(__path__, __name__)
2 |
--------------------------------------------------------------------------------
/torba/orchstr8/__init__.py:
--------------------------------------------------------------------------------
1 | from .node import Conductor
2 | from .service import ConductorService
3 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include README.md
2 | include CHANGELOG.md
3 | include LICENSE
4 | recursive-include torba *.txt *.py
5 |
--------------------------------------------------------------------------------
/torba/__init__.py:
--------------------------------------------------------------------------------
1 | __path__: str = __import__('pkgutil').extend_path(__path__, __name__)
2 | __version__ = '0.5.7'
3 |
--------------------------------------------------------------------------------
/torba/client/constants.py:
--------------------------------------------------------------------------------
1 | NULL_HASH32 = b'\x00'*32
2 |
3 | CENT = 1000000
4 | COIN = 100*CENT
5 |
6 | TIMEOUT = 30.0
7 |
--------------------------------------------------------------------------------
/torba/workbench/Makefile:
--------------------------------------------------------------------------------
1 | all: _blockchain_dock.py _output_dock.py
2 | _blockchain_dock.py: blockchain_dock.ui
3 | pyside2-uic -d blockchain_dock.ui -o _blockchain_dock.py
4 | _output_dock.py: output_dock.ui
5 | pyside2-uic -d output_dock.ui -o _output_dock.py
6 |
--------------------------------------------------------------------------------
/torba/rpc/__init__.py:
--------------------------------------------------------------------------------
1 | from .framing import *
2 | from .jsonrpc import *
3 | from .socks import *
4 | from .session import *
5 | from .util import *
6 |
7 | __all__ = (framing.__all__ +
8 | jsonrpc.__all__ +
9 | socks.__all__ +
10 | session.__all__ +
11 | util.__all__)
12 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # packaging
2 | torba.egg-info/
3 | dist/
4 |
5 | # PyCharm
6 | .idea/
7 |
8 | # testing
9 | .tox/
10 | tests/client_tests/unit/bitcoin_headers
11 | torba/bin
12 |
13 | # cache and logs
14 | __pycache__/
15 | .mypy_cache/
16 | _trial_temp/
17 | _trial_temp-*/
18 |
19 | # OS X DS_Store
20 | *.DS_Store
21 |
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Torba [![Build Status](https://travis-ci.org/lbryio/torba.svg?branch=master)](https://travis-ci.org/lbryio/torba) [![Test Coverage](https://codecov.io/gh/lbryio/torba/branch/master/graph/badge.svg)](https://codecov.io/gh/lbryio/torba)
2 |
3 | A new wallet library to help bitcoin-based projects build fast, correct and scalable cryptocurrency wallets in Python.
4 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | #envlist = unit,integration-{torba.coin.bitcoincash,torba.coin.bitcoinsegwit}
3 | envlist = py37-unit,py37-integration-torba.coin.bitcoinsegwit
4 |
5 | [travis:env]
6 | TESTTYPE =
7 | unit: unit
8 | integration: integration
9 |
10 | [testenv]
11 | deps = coverage
12 | changedir = {toxinidir}/tests
13 | setenv =
14 | integration: TORBA_LEDGER={envname}
15 | commands =
16 | unit: coverage run -p --source={envsitepackagesdir}/torba -m unittest discover -t . client_tests.unit
17 | integration: orchstr8 download
18 | integration: coverage run -p --source={envsitepackagesdir}/torba -m unittest discover -t . client_tests.integration
19 |
--------------------------------------------------------------------------------
/torba/tasks.py:
--------------------------------------------------------------------------------
1 | from asyncio import Event, get_event_loop
2 |
3 |
4 | class TaskGroup:
5 |
6 | def __init__(self, loop=None):
7 | self._loop = loop or get_event_loop()
8 | self._tasks = set()
9 | self.done = Event()
10 |
11 | def add(self, coro):
12 | task = self._loop.create_task(coro)
13 | self._tasks.add(task)
14 | self.done.clear()
15 | task.add_done_callback(self._remove)
16 | return task
17 |
18 | def _remove(self, task):
19 | self._tasks.remove(task)
20 | len(self._tasks) < 1 and self.done.set()
21 |
22 | def cancel(self):
23 | for task in self._tasks:
24 | task.cancel()
25 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_mnemonic.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from binascii import hexlify
3 |
4 | from torba.client.mnemonic import Mnemonic
5 |
6 |
7 | class TestMnemonic(unittest.TestCase):
8 |
9 | def test_mnemonic_to_seed(self):
10 | seed = Mnemonic.mnemonic_to_seed(mnemonic=u'foobar', passphrase=u'torba')
11 | self.assertEqual(
12 | hexlify(seed),
13 | b'475a419db4e991cab14f08bde2d357e52b3e7241f72c6d8a2f92782367feeee9f403dc6a37c26a3f02ab9'
14 | b'dec7f5063161eb139cea00da64cd77fba2f07c49ddc'
15 | )
16 |
17 | def test_make_seed_decode_encode(self):
18 | iters = 10
19 | m = Mnemonic('en')
20 | for _ in range(iters):
21 | seed = m.make_seed()
22 | i = m.mnemonic_decode(seed)
23 | self.assertEqual(m.mnemonic_encode(i), seed)
24 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: xenial
2 | sudo: true
3 | language: python
4 | python: "3.7"
5 |
6 | jobs:
7 | include:
8 |
9 | - stage: code quality
10 | name: "pylint & mypy"
11 | install:
12 | - pip install pylint mypy
13 | - pip install -e .
14 | script:
15 | - pylint --rcfile=setup.cfg torba
16 | - mypy --ignore-missing-imports torba
17 | after_success: skip
18 |
19 | - &tests
20 | stage: tests
21 | env: TESTTYPE=unit
22 | install:
23 | - pip install tox-travis
24 | script: tox
25 | - <<: *tests
26 | env: TESTTYPE=integration
27 |
28 | after_success:
29 | - pip install coverage
30 | - coverage combine tests/
31 | - bash <(curl -s https://codecov.io/bash)
32 |
33 | cache:
34 | directories:
35 | - $HOME/.cache/pip
36 | - $TRAVIS_BUILD_DIR/.tox
37 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_bcd_data_stream.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from torba.client.bcd_data_stream import BCDataStream
4 |
5 |
6 | class TestBCDataStream(unittest.TestCase):
7 |
8 | def test_write_read(self):
9 | s = BCDataStream()
10 | s.write_string(b'a'*252)
11 | s.write_string(b'b'*254)
12 | s.write_string(b'c'*(0xFFFF + 1))
13 | # s.write_string(b'd'*(0xFFFFFFFF + 1))
14 | s.write_boolean(True)
15 | s.write_boolean(False)
16 | s.reset()
17 |
18 | self.assertEqual(s.read_string(), b'a'*252)
19 | self.assertEqual(s.read_string(), b'b'*254)
20 | self.assertEqual(s.read_string(), b'c'*(0xFFFF + 1))
21 | # self.assertEqual(s.read_string(), b'd'*(0xFFFFFFFF + 1))
22 | self.assertEqual(s.read_boolean(), True)
23 | self.assertEqual(s.read_boolean(), False)
24 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [coverage:run]
2 | branch = True
3 |
4 | [coverage:paths]
5 | source =
6 | torba
7 | .tox/*/lib/python*/site-packages/torba
8 |
9 | [mypy-cryptography.*,coincurve.*,pbkdf2]
10 | ignore_missing_imports = True
11 |
12 | [pylint]
13 | ignore=words,server,workbench,rpc
14 | max-args=10
15 | max-line-length=110
16 | good-names=T,t,n,i,j,k,x,y,s,f,d,h,c,e,op,db,tx,io,cachedproperty,log,id
17 | valid-metaclass-classmethod-first-arg=mcs
18 | disable=
19 | fixme,
20 | broad-except,
21 | no-else-return,
22 | cyclic-import,
23 | missing-docstring,
24 | duplicate-code,
25 | expression-not-assigned,
26 | inconsistent-return-statements,
27 | too-few-public-methods,
28 | too-many-locals,
29 | too-many-branches,
30 | too-many-arguments,
31 | too-many-statements,
32 | too-many-public-methods,
33 | too-many-instance-attributes,
34 | protected-access,
35 | unused-argument
36 |
--------------------------------------------------------------------------------
/torba/workbench/output_dock.ui:
--------------------------------------------------------------------------------
[Qt Designer UI definition for OutputDock: a 700x397 QDockWidget titled "Output" containing a single read-only QTextEdit in a horizontal layout; see the generated torba/workbench/_output_dock.py below.]
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 LBRY Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/torba/server/cli.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import traceback
3 | import argparse
4 | import importlib
5 | from torba.server.env import Env
6 | from torba.server.server import Server
7 |
8 |
9 | def get_argument_parser():
10 | parser = argparse.ArgumentParser(
11 | prog="torba-server"
12 | )
13 | parser.add_argument("spvserver", type=str, help="Python class path to SPV server implementation.")
14 | return parser
15 |
16 |
17 | def get_coin_class(spvserver):
18 | spvserver_path, coin_class_name = spvserver.rsplit('.', 1)
19 | spvserver_module = importlib.import_module(spvserver_path)
20 | return getattr(spvserver_module, coin_class_name)
21 |
22 |
23 | def main():
24 | parser = get_argument_parser()
25 | args = parser.parse_args()
26 | coin_class = get_coin_class(args.spvserver)
27 | logging.basicConfig(level=logging.INFO)
28 | logging.info('torba.server starting')
29 | try:
30 | server = Server(Env(coin_class))
31 | server.run()
32 | except Exception:
33 | traceback.print_exc()
34 | logging.critical('torba.server terminated abnormally')
35 | else:
36 | logging.info('torba.server terminated normally')
37 |
38 |
39 | if __name__ == "__main__":
40 | main()
41 |
--------------------------------------------------------------------------------
/tests/client_tests/integration/test_blockchain_reorganization.py:
--------------------------------------------------------------------------------
1 | import logging
2 | from torba.testcase import IntegrationTestCase
3 |
4 |
5 | class BlockchainReorganizationTests(IntegrationTestCase):
6 |
7 | VERBOSITY = logging.WARN
8 |
9 | async def assertBlockHash(self, height):
10 | self.assertEqual(
11 | self.ledger.headers.hash(height).decode(),
12 | await self.blockchain.get_block_hash(height)
13 | )
14 |
15 | async def test_reorg(self):
16 | # invalidate current block, move forward 2
17 | self.assertEqual(self.ledger.headers.height, 200)
18 | await self.assertBlockHash(200)
19 | await self.blockchain.invalidate_block(self.ledger.headers.hash(200).decode())
20 | await self.blockchain.generate(2)
21 | await self.ledger.on_header.where(lambda e: e.height == 201)
22 | self.assertEqual(self.ledger.headers.height, 201)
23 | await self.assertBlockHash(200)
24 | await self.assertBlockHash(201)
25 |
26 | # invalidate current block, move forward 3
27 | await self.blockchain.invalidate_block(self.ledger.headers.hash(200).decode())
28 | await self.blockchain.generate(3)
29 | await self.ledger.on_header.where(lambda e: e.height == 202)
30 | self.assertEqual(self.ledger.headers.height, 202)
31 | await self.assertBlockHash(200)
32 | await self.assertBlockHash(201)
33 | await self.assertBlockHash(202)
34 |
--------------------------------------------------------------------------------
/torba/workbench/_output_dock.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Form implementation generated from reading ui file 'output_dock.ui',
4 | # licensing of 'output_dock.ui' applies.
5 | #
6 | # Created: Sat Oct 27 16:41:03 2018
7 | # by: pyside2-uic running on PySide2 5.11.2
8 | #
9 | # WARNING! All changes made in this file will be lost!
10 |
11 | from PySide2 import QtCore, QtGui, QtWidgets
12 |
13 | class Ui_OutputDock(object):
14 | def setupUi(self, OutputDock):
15 | OutputDock.setObjectName("OutputDock")
16 | OutputDock.resize(700, 397)
17 | OutputDock.setFloating(False)
18 | OutputDock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures)
19 | self.dockWidgetContents = QtWidgets.QWidget()
20 | self.dockWidgetContents.setObjectName("dockWidgetContents")
21 | self.horizontalLayout = QtWidgets.QHBoxLayout(self.dockWidgetContents)
22 | self.horizontalLayout.setObjectName("horizontalLayout")
23 | self.textEdit = QtWidgets.QTextEdit(self.dockWidgetContents)
24 | self.textEdit.setReadOnly(True)
25 | self.textEdit.setObjectName("textEdit")
26 | self.horizontalLayout.addWidget(self.textEdit)
27 | OutputDock.setWidget(self.dockWidgetContents)
28 |
29 | self.retranslateUi(OutputDock)
30 | QtCore.QMetaObject.connectSlotsByName(OutputDock)
31 |
32 | def retranslateUi(self, OutputDock):
33 | OutputDock.setWindowTitle(QtWidgets.QApplication.translate("OutputDock", "Output", None, -1))
34 |
35 |
--------------------------------------------------------------------------------
/torba/coin/bitcoincash.py:
--------------------------------------------------------------------------------
1 | __node_daemon__ = 'bitcoind'
2 | __node_cli__ = 'bitcoin-cli'
3 | __node_bin__ = 'bitcoin-abc-0.17.2/bin'
4 | __node_url__ = (
5 | 'https://download.bitcoinabc.org/0.17.2/linux/bitcoin-abc-0.17.2-x86_64-linux-gnu.tar.gz'
6 | )
7 | __spvserver__ = 'torba.server.coins.BitcoinCashRegtest'
8 |
9 | from binascii import unhexlify
10 | from torba.client.baseledger import BaseLedger
11 | from torba.client.basetransaction import BaseTransaction
12 | from .bitcoinsegwit import MainHeaders, UnverifiedHeaders
13 |
14 |
15 | class Transaction(BaseTransaction):
16 |
17 | def signature_hash_type(self, hash_type):
18 | return hash_type | 0x40
19 |
20 |
21 | class MainNetLedger(BaseLedger):
22 | name = 'BitcoinCash'
23 | symbol = 'BCH'
24 | network_name = 'mainnet'
25 |
26 | headers_class = MainHeaders
27 | transaction_class = Transaction
28 |
29 | pubkey_address_prefix = bytes((0,))
30 | script_address_prefix = bytes((5,))
31 | extended_public_key_prefix = unhexlify('0488b21e')
32 | extended_private_key_prefix = unhexlify('0488ade4')
33 |
34 | default_fee_per_byte = 50
35 |
36 |
37 | class RegTestLedger(MainNetLedger):
38 | headers_class = UnverifiedHeaders
39 | network_name = 'regtest'
40 |
41 | pubkey_address_prefix = bytes((111,))
42 | script_address_prefix = bytes((196,))
43 | extended_public_key_prefix = unhexlify('043587cf')
44 | extended_private_key_prefix = unhexlify('04358394')
45 |
46 | max_target = 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
47 | genesis_hash = '0f9188f13cb7b2c71f2a335e3a4fc328bf5beb436012afca590b1a11466e2206'
48 | genesis_bits = 0x207fffff
49 | target_timespan = 1
50 |
--------------------------------------------------------------------------------
/torba/server/enum.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016, Neil Booth
2 | #
3 | # All rights reserved.
4 | #
5 | # See the file "LICENCE" for information about the copyright
6 | # and warranty status of this software.
7 |
8 | """An enum-like type with reverse lookup.
9 |
10 | Source: Python Cookbook, http://code.activestate.com/recipes/67107/
11 | """
12 |
13 |
14 | class EnumError(Exception):
15 | pass
16 |
17 |
18 | class Enumeration:
19 |
20 | def __init__(self, name, enumList):
21 | self.__doc__ = name
22 |
23 | lookup = {}
24 | reverseLookup = {}
25 | i = 0
26 | uniqueNames = set()
27 | uniqueValues = set()
28 | for x in enumList:
29 | if isinstance(x, tuple):
30 | x, i = x
31 | if not isinstance(x, str):
32 | raise EnumError("enum name {} not a string".format(x))
33 | if not isinstance(i, int):
34 | raise EnumError("enum value {} not an integer".format(i))
35 | if x in uniqueNames:
36 | raise EnumError("enum name {} not unique".format(x))
37 | if i in uniqueValues:
38 | raise EnumError("enum value {} not unique".format(x))
39 | uniqueNames.add(x)
40 | uniqueValues.add(i)
41 | lookup[x] = i
42 | reverseLookup[i] = x
43 | i = i + 1
44 | self.lookup = lookup
45 | self.reverseLookup = reverseLookup
46 |
47 | def __getattr__(self, attr):
48 | result = self.lookup.get(attr)
49 | if result is None:
50 | raise AttributeError('enumeration has no member {}'.format(attr))
51 | return result
52 |
53 | def whatis(self, value):
54 | return self.reverseLookup[value]
55 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_hash.py:
--------------------------------------------------------------------------------
1 | from unittest import TestCase, mock
2 | from torba.client.hash import aes_decrypt, aes_encrypt, better_aes_decrypt, better_aes_encrypt
3 |
4 |
5 | class TestAESEncryptDecrypt(TestCase):
6 | message = 'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
7 | expected = 'ZmZmZmZmZmZmZmZmZmZmZjlrKptoKD+MFwDxcg3XtCD9qz8UWhEhq/TVJT5+Mtp2a8sE' \
8 | 'CaO6WQj7fYsWGu2Hvbc0qYqxdN0HeTsiO+cZRo3eJISgr3F+rXFYi5oSBlD2'
9 | password = 'bubblegum'
10 |
11 | @mock.patch('os.urandom', side_effect=lambda i: b'd'*i)
12 | def test_encrypt_iv_f(self, _):
13 | self.assertEqual(
14 | aes_encrypt(self.password, self.message),
15 | 'ZGRkZGRkZGRkZGRkZGRkZKBP/4pR+47hLHbHyvDJm9aRKDuoBdTG8SrFvHqfagK6Co1VrHUOd'
16 | 'oF+6PGSxru3+VR63ybkXLNM75s/qVw+dnKVAkI8OfoVnJvGRSc49e38'
17 | )
18 |
19 | @mock.patch('os.urandom', side_effect=lambda i: b'f'*i)
20 | def test_encrypt_iv_d(self, _):
21 | self.assertEqual(
22 | aes_encrypt(self.password, self.message),
23 | 'ZmZmZmZmZmZmZmZmZmZmZjlrKptoKD+MFwDxcg3XtCD9qz8UWhEhq/TVJT5+Mtp2a8sE'
24 | 'CaO6WQj7fYsWGu2Hvbc0qYqxdN0HeTsiO+cZRo3eJISgr3F+rXFYi5oSBlD2'
25 | )
26 | self.assertEqual(
27 | aes_decrypt(self.password, self.expected),
28 | (self.message, b'f' * 16)
29 | )
30 |
31 | def test_encrypt_decrypt(self):
32 | self.assertEqual(
33 | aes_decrypt('bubblegum', aes_encrypt('bubblegum', self.message))[0],
34 | self.message
35 | )
36 |
37 | def test_better_encrypt_decrypt(self):
38 | self.assertEqual(
39 | b'valuable value',
40 | better_aes_decrypt(
41 | 'super secret',
42 | better_aes_encrypt('super secret', b'valuable value')))
43 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | from setuptools import setup, find_packages
4 |
5 | import torba
6 |
7 | BASE = os.path.dirname(__file__)
8 | with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh:
9 | long_description = fh.read()
10 |
11 | REQUIRES = [
12 | 'aiohttp==3.5.4',
13 | 'cffi==1.12.1', # TODO: 1.12.2 fails on travis in wine
14 | 'coincurve==11.0.0',
15 | 'pbkdf2==1.3',
16 | 'cryptography==2.5',
17 | 'attrs==18.2.0',
18 | 'pylru==1.1.0'
19 | ]
20 | if sys.platform.startswith('linux'):
21 | REQUIRES.append('plyvel==1.0.5')
22 |
23 |
24 | setup(
25 | name='torba',
26 | version=torba.__version__,
27 | url='https://github.com/lbryio/torba',
28 | license='MIT',
29 | author='LBRY Inc.',
30 | author_email='hello@lbry.io',
31 | description='Wallet client/server framework for bitcoin based currencies.',
32 | long_description=long_description,
33 | long_description_content_type="text/markdown",
34 | keywords='wallet,crypto,currency,money,bitcoin,electrum,electrumx',
35 | classifiers=[
36 | 'Framework :: AsyncIO',
37 | 'Intended Audience :: Developers',
38 | 'Intended Audience :: System Administrators',
39 | 'License :: OSI Approved :: MIT License',
40 | 'Programming Language :: Python :: 3',
41 | 'Operating System :: OS Independent',
42 | 'Topic :: Internet',
43 | 'Topic :: Software Development :: Testing',
44 | 'Topic :: Software Development :: Libraries :: Python Modules',
45 | 'Topic :: System :: Benchmark',
46 | 'Topic :: System :: Distributed Computing',
47 | 'Topic :: Utilities',
48 | ],
49 | packages=find_packages(exclude=('tests',)),
50 | python_requires='>=3.6',
51 | install_requires=REQUIRES,
52 | extras_require={
53 | 'gui': (
54 | 'pyside2',
55 | )
56 | },
57 | entry_points={
58 | 'console_scripts': [
59 | 'torba-client=torba.client.cli:main',
60 | 'torba-server=torba.server.cli:main',
61 | 'orchstr8=torba.orchstr8.cli:main',
62 | ],
63 | 'gui_scripts': [
64 | 'torba=torba.ui:main [gui]',
65 | 'torba-workbench=torba.workbench:main [gui]',
66 | ]
67 | }
68 | )
69 |
--------------------------------------------------------------------------------
/torba/client/basemanager.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | from typing import Type, MutableSequence, MutableMapping
4 |
5 | from torba.client.baseledger import BaseLedger, LedgerRegistry
6 | from torba.client.wallet import Wallet, WalletStorage
7 |
8 | log = logging.getLogger(__name__)
9 |
10 |
11 | class BaseWalletManager:
12 |
13 | def __init__(self, wallets: MutableSequence[Wallet] = None,
14 | ledgers: MutableMapping[Type[BaseLedger], BaseLedger] = None) -> None:
15 | self.wallets = wallets or []
16 | self.ledgers = ledgers or {}
17 | self.running = False
18 |
19 | @classmethod
20 | def from_config(cls, config: dict) -> 'BaseWalletManager':
21 | manager = cls()
22 | for ledger_id, ledger_config in config.get('ledgers', {}).items():
23 | manager.get_or_create_ledger(ledger_id, ledger_config)
24 | for wallet_path in config.get('wallets', []):
25 | wallet_storage = WalletStorage(wallet_path)
26 | wallet = Wallet.from_storage(wallet_storage, manager)
27 | manager.wallets.append(wallet)
28 | return manager
29 |
30 | def get_or_create_ledger(self, ledger_id, ledger_config=None):
31 | ledger_class = LedgerRegistry.get_ledger_class(ledger_id)
32 | ledger = self.ledgers.get(ledger_class)
33 | if ledger is None:
34 | ledger = ledger_class(ledger_config or {})
35 | self.ledgers[ledger_class] = ledger
36 | return ledger
37 |
38 | def import_wallet(self, path):
39 | storage = WalletStorage(path)
40 | wallet = Wallet.from_storage(storage, self)
41 | self.wallets.append(wallet)
42 | return wallet
43 |
44 | async def get_detailed_accounts(self, **kwargs):
45 | ledgers = {}
46 | for i, account in enumerate(self.accounts):
47 | details = await account.get_details(**kwargs)
48 | details['is_default'] = i == 0
49 | ledger_id = account.ledger.get_id()
50 | ledgers.setdefault(ledger_id, [])
51 | ledgers[ledger_id].append(details)
52 | return ledgers
53 |
54 | @property
55 | def default_wallet(self):
56 | for wallet in self.wallets:
57 | return wallet
58 |
59 | @property
60 | def default_account(self):
61 | for wallet in self.wallets:
62 | return wallet.default_account
63 |
64 | @property
65 | def accounts(self):
66 | for wallet in self.wallets:
67 | for account in wallet.accounts:
68 | yield account
69 |
70 | async def start(self):
71 | self.running = True
72 | await asyncio.gather(*(
73 | l.start() for l in self.ledgers.values()
74 | ))
75 |
76 | async def stop(self):
77 | await asyncio.gather(*(
78 | l.stop() for l in self.ledgers.values()
79 | ))
80 | self.running = False
81 |
--------------------------------------------------------------------------------
/torba/workbench/blockchain_dock.ui:
--------------------------------------------------------------------------------
[Qt Designer UI definition for BlockchainDock: a 416x167 QDockWidget titled "Blockchain" laid out as a form with a "generate" button and a 1-9999 "block(s)" spinbox, a "transfer" button with an amount spinbox (max 9999.99, default 10.0) and a "recipient address" field, and an "invalidate" button with a "block hash" field; see the generated torba/workbench/_blockchain_dock.py below.]
--------------------------------------------------------------------------------
/torba/client/cli.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import argparse
3 | import asyncio
4 | import aiohttp
5 |
6 | from torba.orchstr8.node import Conductor, get_ledger_from_environment, get_blockchain_node_from_ledger
7 | from torba.orchstr8.service import ConductorService
8 |
9 |
10 | def get_argument_parser():
11 | parser = argparse.ArgumentParser(
12 | prog="torba"
13 | )
14 | subparsers = parser.add_subparsers(dest='command', help='sub-command help')
15 |
16 | subparsers.add_parser("gui", help="Start Qt GUI.")
17 |
18 | subparsers.add_parser("download", help="Download blockchain node binary.")
19 |
20 | start = subparsers.add_parser("start", help="Start orchstr8 service.")
21 | start.add_argument("--blockchain", help="Start blockchain node.", action="store_true")
22 | start.add_argument("--spv", help="Start SPV server.", action="store_true")
23 | start.add_argument("--wallet", help="Start wallet daemon.", action="store_true")
24 |
25 | generate = subparsers.add_parser("generate", help="Call generate method on running orchstr8 instance.")
26 | generate.add_argument("blocks", type=int, help="Number of blocks to generate")
27 |
28 | subparsers.add_parser("transfer", help="Call transfer method on running orchstr8 instance.")
29 | return parser
30 |
31 |
32 | async def run_remote_command(command, **kwargs):
33 | async with aiohttp.ClientSession() as session:
34 | async with session.post('http://localhost:7954/'+command, data=kwargs) as resp:
35 | print(resp.status)
36 | print(await resp.text())
37 |
38 |
39 | def main():
40 | parser = get_argument_parser()
41 | args = parser.parse_args()
42 | command = getattr(args, 'command', 'help')
43 |
44 | if command == 'gui':
45 | from torba.workbench import main as start_app # pylint: disable=E0611,E0401
46 | return start_app()
47 |
48 | loop = asyncio.get_event_loop()
49 | ledger = get_ledger_from_environment()
50 |
51 | if command == 'download':
52 | logging.getLogger('blockchain').setLevel(logging.INFO)
53 | get_blockchain_node_from_ledger(ledger).ensure()
54 |
55 | elif command == 'generate':
56 | loop.run_until_complete(run_remote_command(
57 | 'generate', blocks=args.blocks
58 | ))
59 |
60 | elif command == 'start':
61 |
62 | conductor = Conductor()
63 | if getattr(args, 'blockchain', False):
64 | loop.run_until_complete(conductor.start_blockchain())
65 | if getattr(args, 'spv', False):
66 | loop.run_until_complete(conductor.start_spv())
67 | if getattr(args, 'wallet', False):
68 | loop.run_until_complete(conductor.start_wallet())
69 |
70 | service = ConductorService(conductor, loop)
71 | loop.run_until_complete(service.start())
72 |
73 | try:
74 | print('========== Orchstr8 API Service Started ========')
75 | loop.run_forever()
76 | except KeyboardInterrupt:
77 | pass
78 | finally:
79 | loop.run_until_complete(service.stop())
80 | loop.run_until_complete(conductor.stop())
81 |
82 | loop.close()
83 |
84 | else:
85 | parser.print_help()
86 |
87 |
88 | if __name__ == "__main__":
89 | main()
90 |
--------------------------------------------------------------------------------
/torba/coin/bitcoinsegwit.py:
--------------------------------------------------------------------------------
1 | __node_daemon__ = 'bitcoind'
2 | __node_cli__ = 'bitcoin-cli'
3 | __node_bin__ = 'bitcoin-0.16.3/bin'
4 | __node_url__ = (
5 | 'https://bitcoin.org/bin/bitcoin-core-0.16.3/bitcoin-0.16.3-x86_64-linux-gnu.tar.gz'
6 | )
7 | __spvserver__ = 'torba.server.coins.BitcoinSegwitRegtest'
8 |
9 | import struct
10 | from typing import Optional
11 | from binascii import hexlify, unhexlify
12 | from torba.client.baseledger import BaseLedger
13 | from torba.client.baseheader import BaseHeaders, ArithUint256
14 |
15 |
16 | class MainHeaders(BaseHeaders):
17 | header_size = 80
18 | chunk_size = 2016
19 | max_target = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
20 | genesis_hash: Optional[bytes] = b'000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
21 | target_timespan = 14 * 24 * 60 * 60
22 |
23 | @staticmethod
24 | def serialize(header: dict) -> bytes:
25 | return b''.join([
26 | struct.pack('<I', header['version']),
27 | unhexlify(header['prev_block_hash'])[::-1],
28 | unhexlify(header['merkle_root'])[::-1],
29 | struct.pack('<III', header['timestamp'], header['bits'], header['nonce'])
30 | ])
31 |
32 | @staticmethod
33 | def deserialize(height, header):
34 | version, = struct.unpack('<I', header[:4])
35 | timestamp, bits, nonce = struct.unpack('<III', header[68:80])
36 | return {
37 | 'block_height': height,
38 | 'version': version,
39 | 'prev_block_hash': hexlify(header[4:36][::-1]),
40 | 'merkle_root': hexlify(header[36:68][::-1]),
41 | 'timestamp': timestamp,
42 | 'bits': bits,
43 | 'nonce': nonce,
44 | }
45 |
46 | def get_next_chunk_target(self, chunk: int) -> ArithUint256:
47 | if chunk == -1:
48 | return ArithUint256(self.max_target)
49 | previous = self[chunk * 2016]
50 | current = self[chunk * 2016 + 2015]
51 | actual_timespan = current['timestamp'] - previous['timestamp']
52 | actual_timespan = max(actual_timespan, int(self.target_timespan / 4))
53 | actual_timespan = min(actual_timespan, self.target_timespan * 4)
54 | target = ArithUint256.from_compact(current['bits'])
55 | new_target = min(ArithUint256(self.max_target), (target * actual_timespan) / self.target_timespan)
56 | return new_target
57 |
58 |
59 | class MainNetLedger(BaseLedger):
60 | name = 'BitcoinSegwit'
61 | symbol = 'BTC'
62 | network_name = 'mainnet'
63 | headers_class = MainHeaders
64 |
65 | pubkey_address_prefix = bytes((0,))
66 | script_address_prefix = bytes((5,))
67 | extended_public_key_prefix = unhexlify('0488b21e')
68 | extended_private_key_prefix = unhexlify('0488ade4')
69 |
70 | default_fee_per_byte = 50
71 |
72 |
73 | class UnverifiedHeaders(MainHeaders):
74 | max_target = 0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff
75 | genesis_hash = None
76 | validate_difficulty = False
77 |
78 |
79 | class RegTestLedger(MainNetLedger):
80 | network_name = 'regtest'
81 | headers_class = UnverifiedHeaders
82 |
83 | pubkey_address_prefix = bytes((111,))
84 | script_address_prefix = bytes((196,))
85 | extended_public_key_prefix = unhexlify('043587cf')
86 | extended_private_key_prefix = unhexlify('04358394')
87 |
--------------------------------------------------------------------------------
/torba/orchstr8/cli.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import argparse
3 | import asyncio
4 | import aiohttp
5 |
6 | from torba.orchstr8.node import Conductor, get_ledger_from_environment, get_blockchain_node_from_ledger
7 | from torba.orchstr8.service import ConductorService
8 |
9 |
10 | def get_argument_parser():
11 | parser = argparse.ArgumentParser(
12 | prog="torba"
13 | )
14 | subparsers = parser.add_subparsers(dest='command', help='sub-command help')
15 |
16 | subparsers.add_parser("gui", help="Start Qt GUI.")
17 |
18 | subparsers.add_parser("download", help="Download blockchain node binary.")
19 |
20 | start = subparsers.add_parser("start", help="Start orchstr8 service.")
21 | start.add_argument("--blockchain", help="Hostname to start blockchain node.")
22 | start.add_argument("--spv", help="Hostname to start SPV server.")
23 | start.add_argument("--wallet", help="Hostname to start wallet daemon.")
24 |
25 | generate = subparsers.add_parser("generate", help="Call generate method on running orchstr8 instance.")
26 | generate.add_argument("blocks", type=int, help="Number of blocks to generate")
27 |
28 | subparsers.add_parser("transfer", help="Call transfer method on running orchstr8 instance.")
29 | return parser
30 |
31 |
32 | async def run_remote_command(command, **kwargs):
33 | async with aiohttp.ClientSession() as session:
34 | async with session.post('http://localhost:7954/'+command, data=kwargs) as resp:
35 | print(resp.status)
36 | print(await resp.text())
37 |
38 |
39 | def main():
40 | parser = get_argument_parser()
41 | args = parser.parse_args()
42 | command = getattr(args, 'command', 'help')
43 |
44 | if command == 'gui':
45 | from torba.workbench import main as start_app # pylint: disable=E0611,E0401
46 | return start_app()
47 |
48 | loop = asyncio.get_event_loop()
49 | asyncio.set_event_loop(loop)
50 | ledger = get_ledger_from_environment()
51 |
52 | if command == 'download':
53 | logging.getLogger('blockchain').setLevel(logging.INFO)
54 | get_blockchain_node_from_ledger(ledger).ensure()
55 |
56 | elif command == 'generate':
57 | loop.run_until_complete(run_remote_command(
58 | 'generate', blocks=args.blocks
59 | ))
60 |
61 | elif command == 'start':
62 |
63 | conductor = Conductor()
64 | if getattr(args, 'blockchain', False):
65 | conductor.blockchain_node.hostname = args.blockchain
66 | loop.run_until_complete(conductor.start_blockchain())
67 | if getattr(args, 'spv', False):
68 | conductor.spv_node.hostname = args.spv
69 | loop.run_until_complete(conductor.start_spv())
70 | if getattr(args, 'wallet', False):
71 | conductor.wallet_node.hostname = args.wallet
72 | loop.run_until_complete(conductor.start_wallet())
73 |
74 | service = ConductorService(conductor, loop)
75 | loop.run_until_complete(service.start())
76 |
77 | try:
78 | print('========== Orchstr8 API Service Started ========')
79 | loop.run_forever()
80 | except KeyboardInterrupt:
81 | pass
82 | finally:
83 | loop.run_until_complete(service.stop())
84 | loop.run_until_complete(conductor.stop())
85 |
86 | loop.close()
87 |
88 | else:
89 | parser.print_help()
90 |
91 |
92 | if __name__ == "__main__":
93 | main()
94 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_utils.py:
--------------------------------------------------------------------------------
1 | import unittest
2 |
3 | from torba.client.util import ArithUint256
4 | from torba.client.util import coins_to_satoshis as c2s, satoshis_to_coins as s2c
5 |
6 |
7 | class TestCoinValueParsing(unittest.TestCase):
8 |
9 | def test_good_output(self):
10 | self.assertEqual(s2c(1), "0.00000001")
11 | self.assertEqual(s2c(10**7), "0.1")
12 | self.assertEqual(s2c(2*10**8), "2.0")
13 | self.assertEqual(s2c(2*10**17), "2000000000.0")
14 |
15 | def test_good_input(self):
16 | self.assertEqual(c2s("0.00000001"), 1)
17 | self.assertEqual(c2s("0.1"), 10**7)
18 | self.assertEqual(c2s("1.0"), 10**8)
19 | self.assertEqual(c2s("2.00000000"), 2*10**8)
20 | self.assertEqual(c2s("2000000000.0"), 2*10**17)
21 |
22 | def test_bad_input(self):
23 | with self.assertRaises(ValueError):
24 | c2s("1")
25 | with self.assertRaises(ValueError):
26 | c2s("-1.0")
27 | with self.assertRaises(ValueError):
28 | c2s("10000000000.0")
29 | with self.assertRaises(ValueError):
30 | c2s("1.000000000")
31 | with self.assertRaises(ValueError):
32 | c2s("-0")
33 | with self.assertRaises(ValueError):
34 | c2s("1")
35 | with self.assertRaises(ValueError):
36 | c2s(".1")
37 | with self.assertRaises(ValueError):
38 | c2s("1e-7")
39 |
40 |
41 | class TestArithUint256(unittest.TestCase):
42 |
43 | def test_arithunit256(self):
44 | # https://github.com/bitcoin/bitcoin/blob/master/src/test/arith_uint256_tests.cpp
45 |
46 | from_compact = ArithUint256.from_compact
47 | eq = self.assertEqual
48 |
49 | eq(from_compact(0).value, 0)
50 | eq(from_compact(0x00123456).value, 0)
51 | eq(from_compact(0x01003456).value, 0)
52 | eq(from_compact(0x02000056).value, 0)
53 | eq(from_compact(0x03000000).value, 0)
54 | eq(from_compact(0x04000000).value, 0)
55 | eq(from_compact(0x00923456).value, 0)
56 | eq(from_compact(0x01803456).value, 0)
57 | eq(from_compact(0x02800056).value, 0)
58 | eq(from_compact(0x03800000).value, 0)
59 | eq(from_compact(0x04800000).value, 0)
60 |
61 | # Make sure that we don't generate compacts with the 0x00800000 bit set
62 | uint = ArithUint256(0x80)
63 | eq(uint.compact, 0x02008000)
64 |
65 | uint = from_compact(0x01123456)
66 | eq(uint.value, 0x12)
67 | eq(uint.compact, 0x01120000)
68 |
69 | uint = from_compact(0x01fedcba)
70 | eq(uint.value, 0x7e)
71 | eq(uint.negative, 0x01fe0000)
72 |
73 | uint = from_compact(0x02123456)
74 | eq(uint.value, 0x1234)
75 | eq(uint.compact, 0x02123400)
76 |
77 | uint = from_compact(0x03123456)
78 | eq(uint.value, 0x123456)
79 | eq(uint.compact, 0x03123456)
80 |
81 | uint = from_compact(0x04123456)
82 | eq(uint.value, 0x12345600)
83 | eq(uint.compact, 0x04123456)
84 |
85 | uint = from_compact(0x04923456)
86 | eq(uint.value, 0x12345600)
87 | eq(uint.negative, 0x04923456)
88 |
89 | uint = from_compact(0x05009234)
90 | eq(uint.value, 0x92340000)
91 | eq(uint.compact, 0x05009234)
92 |
93 | uint = from_compact(0x20123456)
94 | eq(uint.value, 0x1234560000000000000000000000000000000000000000000000000000000000)
95 | eq(uint.compact, 0x20123456)
96 |
--------------------------------------------------------------------------------
/torba/rpc/util.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2018, Neil Booth
2 | #
3 | # All rights reserved.
4 | #
5 | # The MIT License (MIT)
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining
8 | # a copy of this software and associated documentation files (the
9 | # "Software"), to deal in the Software without restriction, including
10 | # without limitation the rights to use, copy, modify, merge, publish,
11 | # distribute, sublicense, and/or sell copies of the Software, and to
12 | # permit persons to whom the Software is furnished to do so, subject to
13 | # the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be
16 | # included in all copies or substantial portions of the Software.
17 | #
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
22 | # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
24 | # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 |
26 | __all__ = ()
27 |
28 |
29 | import asyncio
30 | from collections import namedtuple
31 | import inspect
32 |
33 | # other_params: None means cannot be called with keyword arguments only
34 | # any means any name is good
35 | SignatureInfo = namedtuple('SignatureInfo', 'min_args max_args '
36 | 'required_names other_names')
37 |
38 |
39 | def signature_info(func):
40 | params = inspect.signature(func).parameters
41 | min_args = max_args = 0
42 | required_names = []
43 | other_names = []
44 | no_names = False
45 | for p in params.values():
46 | if p.kind == p.POSITIONAL_OR_KEYWORD:
47 | max_args += 1
48 | if p.default is p.empty:
49 | min_args += 1
50 | required_names.append(p.name)
51 | else:
52 | other_names.append(p.name)
53 | elif p.kind == p.KEYWORD_ONLY:
54 | other_names.append(p.name)
55 | elif p.kind == p.VAR_POSITIONAL:
56 | max_args = None
57 | elif p.kind == p.VAR_KEYWORD:
58 | other_names = any
59 | elif p.kind == p.POSITIONAL_ONLY:
60 | max_args += 1
61 | if p.default is p.empty:
62 | min_args += 1
63 | no_names = True
64 |
65 | if no_names:
66 | other_names = None
67 |
68 | return SignatureInfo(min_args, max_args, required_names, other_names)
69 |
70 |
71 | class Concurrency(object):
72 |
73 | def __init__(self, max_concurrent):
74 | self._require_non_negative(max_concurrent)
75 | self._max_concurrent = max_concurrent
76 | self.semaphore = asyncio.Semaphore(max_concurrent)
77 |
78 | def _require_non_negative(self, value):
79 | if not isinstance(value, int) or value < 0:
80 | raise RuntimeError('concurrency must be a natural number')
81 |
82 | @property
83 | def max_concurrent(self):
84 | return self._max_concurrent
85 |
86 | async def set_max_concurrent(self, value):
87 | self._require_non_negative(value)
88 | diff = value - self._max_concurrent
89 | self._max_concurrent = value
90 | if diff >= 0:
91 | for _ in range(diff):
92 | self.semaphore.release()
93 | else:
94 | for _ in range(-diff):
95 | await self.semaphore.acquire()
96 |
--------------------------------------------------------------------------------
/torba/client/bcd_data_stream.py:
--------------------------------------------------------------------------------
1 | import struct
2 | from io import BytesIO
3 |
4 |
5 | class BCDataStream:
6 |
7 | def __init__(self, data=None):
8 | self.data = BytesIO(data)
9 |
10 | def reset(self):
11 | self.data.seek(0)
12 |
13 | def get_bytes(self):
14 | return self.data.getvalue()
15 |
16 | def read(self, size):
17 | return self.data.read(size)
18 |
19 | def write(self, data):
20 | self.data.write(data)
21 |
22 | def write_many(self, many):
23 | self.data.writelines(many)
24 |
25 | def read_string(self):
26 | return self.read(self.read_compact_size())
27 |
28 | def write_string(self, s):
29 | self.write_compact_size(len(s))
30 | self.write(s)
31 |
32 | def read_compact_size(self):
33 | size = self.read_uint8()
34 | if size < 253:
35 | return size
36 | if size == 253:
37 | return self.read_uint16()
38 | if size == 254:
39 | return self.read_uint32()
40 | if size == 255:
41 | return self.read_uint64()
42 |
43 | def write_compact_size(self, size):
44 | if size < 253:
45 | self.write_uint8(size)
46 | elif size <= 0xFFFF:
47 | self.write_uint8(253)
48 | self.write_uint16(size)
49 | elif size <= 0xFFFFFFFF:
50 | self.write_uint8(254)
51 | self.write_uint32(size)
52 | else:
53 | self.write_uint8(255)
54 | self.write_uint64(size)
55 |
56 | def read_boolean(self):
57 | return self.read_uint8() != 0
58 |
59 | def write_boolean(self, val):
60 | return self.write_uint8(1 if val else 0)
61 |
62 | int8 = struct.Struct('b')
63 | uint8 = struct.Struct('B')
64 | int16 = struct.Struct('<h')
65 | uint16 = struct.Struct('<H')
66 | int32 = struct.Struct('<i')
67 | uint32 = struct.Struct('<I')
68 | int64 = struct.Struct('<q')
69 | uint64 = struct.Struct('<Q')
--------------------------------------------------------------------------------
/torba/server/text.py:
--------------------------------------------------------------------------------
1 | import time
2 |
3 | from torba.server import util
4 |
5 |
6 | def sessions_lines(data):
7 | """A generator returning lines for a list of sessions.
8 |
9 | data is the return value of rpc_sessions()."""
10 | fmt = ('{:<6} {:<5} {:>17} {:>5} {:>5} {:>5} '
11 | '{:>7} {:>7} {:>7} {:>7} {:>7} {:>9} {:>21}')
12 | yield fmt.format('ID', 'Flags', 'Client', 'Proto',
13 | 'Reqs', 'Txs', 'Subs',
14 | 'Recv', 'Recv KB', 'Sent', 'Sent KB', 'Time', 'Peer')
15 | for (id_, flags, peer, client, proto, reqs, txs_sent, subs,
16 | recv_count, recv_size, send_count, send_size, time) in data:
17 | yield fmt.format(id_, flags, client, proto,
18 | '{:,d}'.format(reqs),
19 | '{:,d}'.format(txs_sent),
20 | '{:,d}'.format(subs),
21 | '{:,d}'.format(recv_count),
22 | '{:,d}'.format(recv_size // 1024),
23 | '{:,d}'.format(send_count),
24 | '{:,d}'.format(send_size // 1024),
25 | util.formatted_time(time, sep=''), peer)
26 |
27 |
28 | def groups_lines(data):
29 | """A generator returning lines for a list of groups.
30 |
31 | data is the return value of rpc_groups()."""
32 |
33 | fmt = ('{:<6} {:>9} {:>9} {:>6} {:>6} {:>8}'
34 | '{:>7} {:>9} {:>7} {:>9}')
35 | yield fmt.format('ID', 'Sessions', 'Bwidth KB', 'Reqs', 'Txs', 'Subs',
36 | 'Recv', 'Recv KB', 'Sent', 'Sent KB')
37 | for (id_, session_count, bandwidth, reqs, txs_sent, subs,
38 | recv_count, recv_size, send_count, send_size) in data:
39 | yield fmt.format(id_,
40 | '{:,d}'.format(session_count),
41 | '{:,d}'.format(bandwidth // 1024),
42 | '{:,d}'.format(reqs),
43 | '{:,d}'.format(txs_sent),
44 | '{:,d}'.format(subs),
45 | '{:,d}'.format(recv_count),
46 | '{:,d}'.format(recv_size // 1024),
47 | '{:,d}'.format(send_count),
48 | '{:,d}'.format(send_size // 1024))
49 |
50 |
51 | def peers_lines(data):
52 | """A generator returning lines for a list of peers.
53 |
54 | data is the return value of rpc_peers()."""
55 | def time_fmt(t):
56 | if not t:
57 | return 'Never'
58 | return util.formatted_time(now - t)
59 |
60 | now = time.time()
61 | fmt = ('{:<30} {:<6} {:>5} {:>5} {:<17} {:>4} '
62 | '{:>4} {:>8} {:>11} {:>11} {:>5} {:>20} {:<15}')
63 | yield fmt.format('Host', 'Status', 'TCP', 'SSL', 'Server', 'Min',
64 | 'Max', 'Pruning', 'Last Good', 'Last Try',
65 | 'Tries', 'Source', 'IP Address')
66 | for item in data:
67 | features = item['features']
68 | hostname = item['host']
69 | host = features['hosts'][hostname]
70 | yield fmt.format(hostname[:30],
71 | item['status'],
72 | host.get('tcp_port') or '',
73 | host.get('ssl_port') or '',
74 | features['server_version'] or 'unknown',
75 | features['protocol_min'],
76 | features['protocol_max'],
77 | features['pruning'] or '',
78 | time_fmt(item['last_good']),
79 | time_fmt(item['last_try']),
80 | item['try_count'],
81 | item['source'][:20],
82 | item['ip_addr'] or '')
83 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/key_fixtures.py:
--------------------------------------------------------------------------------
1 | expected_ids = [
2 | b'948adae2a128c0bd1fa238117fd0d9690961f26e',
3 | b'cd9f4f2adde7de0a53ab6d326bb6a62b489876dd',
4 | b'c479e02a74a809ffecff60255d1c14f4081a197a',
5 | b'4bab2fb2c424f31f170b15ec53c4a596db9d6710',
6 | b'689cb7c621f57b7c398e7e04ed9a5098ab8389e9',
7 | b'75116d6a689a0f9b56fe7cfec9cbbd0e16814288',
8 | b'2439f0993fb298497dd7f317b9737c356f664a86',
9 | b'32f1cb4799008cf5496bb8cafdaf59d5dabec6af',
10 | b'fa29aa536353904e9cc813b0cf18efcc09e5ad13',
11 | b'37df34002f34d7875428a2977df19be3f4f40a31',
12 | b'8c8a72b5d2747a3e7e05ed85110188769d5656c3',
13 | b'e5c8ef10c5bdaa79c9a237a096f50df4dcac27f0',
14 | b'4d5270dc100fba85974665c20cd0f95d4822e8d1',
15 | b'e76b07da0cdd59915475cd310599544b9744fa34',
16 | b'6f009bccf8be99707161abb279d8ccf8fd953721',
17 | b'f32f08b722cc8607c3f7f192b4d5f13a74c85785',
18 | b'46f4430a5c91b9b799e9be6b47ac7a749d8d9f30',
19 | b'ebbf9850abe0aae2d09e7e3ebd6b51f01282f39b',
20 | b'5f6655438f8ddc6b2f6ea8197c8babaffc9f5c09',
21 | b'e194e70ee8711b0ed765608121e4cceb551cdf28'
22 | ]
23 | expected_privkeys = [
24 | b'95557ee9a2bb7665e67e45246658b5c839f7dcd99b6ebc800eeebccd28bf134a',
25 | b'689b6921f65647a8e4fc1497924730c92ad4ad183f10fac2bdee65cc8fb6dcf9',
26 | b'977ee018b448c530327b7e927cc3645ca4cb152c5dd98e1bd917c52fd46fc80a',
27 | b'3c7fb05b0ab4da8b292e895f574f8213cadfe81b84ded7423eab61c5f884c8ae',
28 | b'b21fc7be1e69182827538683a48ac9d95684faf6c1c6deabb6e513d8c76afcc9',
29 | b'a5021734dbbf1d090b15509ba00f2c04a3d5afc19939b4594ca0850d4190b923',
30 | b'07dfe0aa94c1b948dc935be1f8179f3050353b46f3a3134e77c70e66208be72d',
31 | b'c331b2fb82cd91120b0703ee312042a854a51a8d945aa9e70fb14d68b0366fe1',
32 | b'3aa59ec4d8f1e7ce2775854b5e82433535b6e3503f9a8e7c4e60aac066d44718',
33 | b'ccc8b4ca73b266b4a0c89a9d33c4ec7532b434c9294c26832355e5e2bee2e005',
34 | b'280c074d8982e56d70c404072252c309694a6e5c05457a6abbe8fc225c2dfd52',
35 | b'546cee26da713a3a64b2066d5e3a52b7c1d927396d1ba8a3d9f6e3e973398856',
36 | b'7fbc4615d5e819eee22db440c5bcc4ff25bb046841c41a192003a6d9abfbafbf',
37 | b'5b63f13011cab965feea3a41fac2d7a877aa710ab20e2a9a1708474e3c05c050',
38 | b'394b36f528947557d317fd40a4adde5514c8745a5f64185421fa2c0c4a158938',
39 | b'8f101c8f5290ae6c0dd76d210b7effacd7f12db18f3befab711f533bde084c76',
40 | b'6637a656f897a66080fbe60027d32c3f4ebc0e3b5f96123a33f932a091b039c2',
41 | b'2815aa6667c042a3a4565fb789890cd33e380d047ed712759d097d479df71051',
42 | b'120e761c6382b07a9548650a20b3b9dd74b906093260fa6f92f790ba71f79e8d',
43 | b'823c8a613ea539f730a968518993195174bf973ed75c734b6898022867165d7b'
44 | ]
45 | expected_hardened_privkeys = [
46 | b'abdba45b0459e7804beb68edb899e58a5c2636bf67d096711904001406afbd4c',
47 | b'c9e804d4b8fdd99ef6ab2b0ca627a57f4283c28e11e9152ad9d3f863404d940e',
48 | b'4cf87d68ae99711261f8cb8e1bde83b8703ff5d689ef70ce23106d1e6e8ed4bd',
49 | b'dbf8d578c77f9bf62bb2ad40975e253af1e1d44d53abf84a22d2be29b9488f7f',
50 | b'633bb840505521ffe39cb89a04fb8bff3298d6b64a5d8f170aca1e456d6f89b9',
51 | b'92e80a38791bd8ba2105b9867fd58ac2cc4fb9853e18141b7fee1884bc5aae69',
52 | b'd3663339af1386d05dd90ee20f627661ae87ddb1db0c2dc73fd8a4485930d0e7',
53 | b'09a448303452d241b8a25670b36cc758975b97e88f62b6f25cd9084535e3c13a',
54 | b'ee22eb77df05ff53e9c2ba797c1f2ebf97ec4cf5a99528adec94972674aeabed',
55 | b'935facccb6120659c5b7c606a457c797e5a10ce4a728346e1a3a963251169651',
56 | b'8ac9b4a48da1def375640ca03bc6711040dfd4eea7106d42bb4c2de83d7f595e',
57 | b'51ecd3f7565c2b86d5782dbde2175ab26a7b896022564063fafe153588610be9',
58 | b'04918252f6b6f51cd75957289b56a324b45cc085df80839137d740f9ada6c062',
59 | b'2efbd0c839af971e3769c26938d776990ebf097989df4861535a7547a2701483',
60 | b'85c6e31e6b27bd188291a910f4a7faba7fceb3e09df72884b10907ecc1491cd0',
61 | b'05e245131885bebda993a31bb14ac98b794062a50af639ad22010aed1e533a54',
62 | b'ddca42cf7db93f3a3f0723d5fee4c21bf60b7afac35d5c30eb34bd91b35cc609',
63 | b'324a5c16030e0c3947e4dcd2b5057fd3a4d5bed96b23e3b476b2af0ab76369c9',
64 | b'da63c41cdb398cdcd93e832f3e198528afbb4065821b026c143cec910d8362f0'
65 | ]
66 |
--------------------------------------------------------------------------------
/torba/workbench/_blockchain_dock.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # Form implementation generated from reading ui file 'blockchain_dock.ui',
4 | # licensing of 'blockchain_dock.ui' applies.
5 | #
6 | # Created: Sun Jan 13 02:56:21 2019
7 | # by: pyside2-uic running on PySide2 5.12.0
8 | #
9 | # WARNING! All changes made in this file will be lost!
10 |
11 | from PySide2 import QtCore, QtGui, QtWidgets
12 |
13 | class Ui_BlockchainDock(object):
14 | def setupUi(self, BlockchainDock):
15 | BlockchainDock.setObjectName("BlockchainDock")
16 | BlockchainDock.resize(416, 167)
17 | BlockchainDock.setFloating(False)
18 | BlockchainDock.setFeatures(QtWidgets.QDockWidget.AllDockWidgetFeatures)
19 | self.dockWidgetContents = QtWidgets.QWidget()
20 | self.dockWidgetContents.setObjectName("dockWidgetContents")
21 | self.formLayout = QtWidgets.QFormLayout(self.dockWidgetContents)
22 | self.formLayout.setObjectName("formLayout")
23 | self.generate = QtWidgets.QPushButton(self.dockWidgetContents)
24 | self.generate.setObjectName("generate")
25 | self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.generate)
26 | self.blocks = QtWidgets.QSpinBox(self.dockWidgetContents)
27 | self.blocks.setMinimum(1)
28 | self.blocks.setMaximum(9999)
29 | self.blocks.setProperty("value", 1)
30 | self.blocks.setObjectName("blocks")
31 | self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.blocks)
32 | self.transfer = QtWidgets.QPushButton(self.dockWidgetContents)
33 | self.transfer.setObjectName("transfer")
34 | self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.transfer)
35 | self.horizontalLayout = QtWidgets.QHBoxLayout()
36 | self.horizontalLayout.setObjectName("horizontalLayout")
37 | self.amount = QtWidgets.QDoubleSpinBox(self.dockWidgetContents)
38 | self.amount.setSuffix("")
39 | self.amount.setMaximum(9999.99)
40 | self.amount.setProperty("value", 10.0)
41 | self.amount.setObjectName("amount")
42 | self.horizontalLayout.addWidget(self.amount)
43 | self.to_label = QtWidgets.QLabel(self.dockWidgetContents)
44 | self.to_label.setObjectName("to_label")
45 | self.horizontalLayout.addWidget(self.to_label)
46 | self.address = QtWidgets.QLineEdit(self.dockWidgetContents)
47 | self.address.setObjectName("address")
48 | self.horizontalLayout.addWidget(self.address)
49 | self.formLayout.setLayout(1, QtWidgets.QFormLayout.FieldRole, self.horizontalLayout)
50 | self.invalidate = QtWidgets.QPushButton(self.dockWidgetContents)
51 | self.invalidate.setObjectName("invalidate")
52 | self.formLayout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.invalidate)
53 | self.block_hash = QtWidgets.QLineEdit(self.dockWidgetContents)
54 | self.block_hash.setObjectName("block_hash")
55 | self.formLayout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.block_hash)
56 | BlockchainDock.setWidget(self.dockWidgetContents)
57 |
58 | self.retranslateUi(BlockchainDock)
59 | QtCore.QMetaObject.connectSlotsByName(BlockchainDock)
60 |
61 | def retranslateUi(self, BlockchainDock):
62 | BlockchainDock.setWindowTitle(QtWidgets.QApplication.translate("BlockchainDock", "Blockchain", None, -1))
63 | self.generate.setText(QtWidgets.QApplication.translate("BlockchainDock", "generate", None, -1))
64 | self.blocks.setSuffix(QtWidgets.QApplication.translate("BlockchainDock", " block(s)", None, -1))
65 | self.transfer.setText(QtWidgets.QApplication.translate("BlockchainDock", "transfer", None, -1))
66 | self.to_label.setText(QtWidgets.QApplication.translate("BlockchainDock", "to", None, -1))
67 | self.address.setPlaceholderText(QtWidgets.QApplication.translate("BlockchainDock", "recipient address", None, -1))
68 | self.invalidate.setText(QtWidgets.QApplication.translate("BlockchainDock", "invalidate", None, -1))
69 | self.block_hash.setPlaceholderText(QtWidgets.QApplication.translate("BlockchainDock", "block hash", None, -1))
70 |
71 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_wallet.py:
--------------------------------------------------------------------------------
1 | import tempfile
2 | from binascii import hexlify
3 |
4 | from torba.testcase import AsyncioTestCase
5 |
6 | from torba.coin.bitcoinsegwit import MainNetLedger as BTCLedger
7 | from torba.coin.bitcoincash import MainNetLedger as BCHLedger
8 | from torba.client.basemanager import BaseWalletManager
9 | from torba.client.wallet import Wallet, WalletStorage
10 |
11 |
12 | class TestWalletCreation(AsyncioTestCase):
13 |
14 | async def asyncSetUp(self):
15 | self.manager = BaseWalletManager()
16 | config = {'data_path': '/tmp/wallet'}
17 | self.btc_ledger = self.manager.get_or_create_ledger(BTCLedger.get_id(), config)
18 | self.bch_ledger = self.manager.get_or_create_ledger(BCHLedger.get_id(), config)
19 |
20 | def test_create_wallet_and_accounts(self):
21 | wallet = Wallet()
22 | self.assertEqual(wallet.name, 'Wallet')
23 | self.assertEqual(wallet.accounts, [])
24 |
25 | account1 = wallet.generate_account(self.btc_ledger)
26 | wallet.generate_account(self.btc_ledger)
27 | wallet.generate_account(self.bch_ledger)
28 | self.assertEqual(wallet.default_account, account1)
29 | self.assertEqual(len(wallet.accounts), 3)
30 |
31 | def test_load_and_save_wallet(self):
32 | wallet_dict = {
33 | 'version': 1,
34 | 'name': 'Main Wallet',
35 | 'accounts': [
36 | {
37 | 'name': 'An Account',
38 | 'ledger': 'btc_mainnet',
39 | 'modified_on': 123.456,
40 | 'seed':
41 | "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
42 | "h absent",
43 | 'encrypted': False,
44 | 'private_key':
45 | 'xprv9s21ZrQH143K3TsAz5efNV8K93g3Ms3FXcjaWB9fVUsMwAoE3Z'
46 | 'T4vYymkp5BxKKfnpz8J6sHDFriX1SnpvjNkzcks8XBnxjGLS83BTyfpna',
47 | 'public_key':
48 | 'xpub661MyMwAqRbcFwwe67Bfjd53h5WXmKm6tqfBJZZH3pQLoy8Nb6'
49 | 'mKUMJFc7UbpVNzmwFPN2evn3YHnig1pkKVYcvCV8owTd2yAcEkJfCX53g',
50 | 'address_generator': {
51 | 'name': 'deterministic-chain',
52 | 'receiving': {'gap': 17, 'maximum_uses_per_address': 3},
53 | 'change': {'gap': 10, 'maximum_uses_per_address': 3}
54 | }
55 | }
56 | ]
57 | }
58 |
59 | storage = WalletStorage(default=wallet_dict)
60 | wallet = Wallet.from_storage(storage, self.manager)
61 | self.assertEqual(wallet.name, 'Main Wallet')
62 | self.assertEqual(
63 | hexlify(wallet.hash), b'9f462b8dd802eb8c913e54f09a09827ebc14abbc13f33baa90d8aec5ae920fc7'
64 | )
65 | self.assertEqual(len(wallet.accounts), 1)
66 | account = wallet.default_account
67 | self.assertIsInstance(account, BTCLedger.account_class)
68 | self.maxDiff = None
69 | self.assertDictEqual(wallet_dict, wallet.to_dict())
70 |
71 | encrypted = wallet.pack('password')
72 | decrypted = Wallet.unpack('password', encrypted)
73 | self.assertEqual(decrypted['accounts'][0]['name'], 'An Account')
74 |
75 | def test_read_write(self):
76 | manager = BaseWalletManager()
77 | config = {'data_path': '/tmp/wallet'}
78 | ledger = manager.get_or_create_ledger(BTCLedger.get_id(), config)
79 |
80 | with tempfile.NamedTemporaryFile(suffix='.json') as wallet_file:
81 | wallet_file.write(b'{"version": 1}')
82 | wallet_file.seek(0)
83 |
84 | # create and write wallet to a file
85 | wallet = manager.import_wallet(wallet_file.name)
86 | account = wallet.generate_account(ledger)
87 | wallet.save()
88 |
89 | # read wallet from file
90 | wallet_storage = WalletStorage(wallet_file.name)
91 | wallet = Wallet.from_storage(wallet_storage, manager)
92 |
93 | self.assertEqual(account.public_key.address, wallet.default_account.public_key.address)
94 |
--------------------------------------------------------------------------------
/tests/client_tests/integration/test_sync.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | from torba.testcase import IntegrationTestCase, WalletNode
4 | from torba.client.constants import CENT
5 |
6 |
7 | class SyncTests(IntegrationTestCase):
8 |
9 | VERBOSITY = logging.WARN
10 |
11 | def __init__(self, *args, **kwargs):
12 | super().__init__(*args, **kwargs)
13 | self.api_port = 5280
14 | self.started_nodes = []
15 |
16 | async def asyncTearDown(self):
17 | for node in self.started_nodes:
18 | try:
19 | await node.stop(cleanup=True)
20 | except Exception as e:
21 | print(e)
22 | await super().asyncTearDown()
23 |
24 | async def make_wallet_node(self, seed=None):
25 | self.api_port += 1
26 | wallet_node = WalletNode(
27 | self.wallet_node.manager_class,
28 | self.wallet_node.ledger_class,
29 | port=self.api_port
30 | )
31 | await wallet_node.start(self.conductor.spv_node, seed)
32 | self.started_nodes.append(wallet_node)
33 | return wallet_node
34 |
35 | async def test_nodes_with_same_account_stay_in_sync(self):
36 | # destination node/account for receiving TXs
37 | node0 = await self.make_wallet_node()
38 | account0 = node0.account
39 | # main node/account creating TXs
40 | node1 = self.wallet_node
41 | account1 = self.wallet_node.account
42 | # mirror node/account, expected to reflect everything in main node as it happens
43 | node2 = await self.make_wallet_node(account1.seed)
44 | account2 = node2.account
45 |
46 | self.assertNotEqual(account0.id, account1.id)
47 | self.assertEqual(account1.id, account2.id)
48 | await self.assertBalance(account0, '0.0')
49 | await self.assertBalance(account1, '0.0')
50 | await self.assertBalance(account2, '0.0')
51 | self.assertEqual(await account0.get_address_count(chain=0), 20)
52 | self.assertEqual(await account1.get_address_count(chain=0), 20)
53 | self.assertEqual(await account2.get_address_count(chain=0), 20)
54 | self.assertEqual(await account1.get_address_count(chain=1), 6)
55 | self.assertEqual(await account2.get_address_count(chain=1), 6)
56 |
57 |         # check that main node and mirror node generate 5 addresses to fill the gap
58 | fifth_address = (await account1.receiving.get_addresses())[4]
59 | await self.blockchain.send_to_address(fifth_address, 1.00)
60 | await asyncio.wait([
61 | account1.ledger.on_address.first,
62 | account2.ledger.on_address.first
63 | ])
64 | self.assertEqual(await account1.get_address_count(chain=0), 25)
65 | self.assertEqual(await account2.get_address_count(chain=0), 25)
66 | await self.assertBalance(account1, '1.0')
67 | await self.assertBalance(account2, '1.0')
68 |
69 | await self.blockchain.generate(1)
70 |
71 |         # pay 0.01 from main node to receiving node; this should increase the change addresses
72 | address0 = (await account0.receiving.get_addresses())[0]
73 | hash0 = self.ledger.address_to_hash160(address0)
74 | tx = await account1.ledger.transaction_class.create(
75 | [],
76 | [self.ledger.transaction_class.output_class.pay_pubkey_hash(CENT, hash0)],
77 | [account1], account1
78 | )
79 | await self.broadcast(tx)
80 | await asyncio.wait([
81 | account0.ledger.wait(tx),
82 | account1.ledger.wait(tx),
83 | account2.ledger.wait(tx),
84 | ])
85 | self.assertEqual(await account0.get_address_count(chain=0), 21)
86 | self.assertGreater(await account1.get_address_count(chain=1), 6)
87 | self.assertGreater(await account2.get_address_count(chain=1), 6)
88 | await self.assertBalance(account0, '0.01')
89 | await self.assertBalance(account1, '0.989876')
90 | await self.assertBalance(account2, '0.989876')
91 |
92 | await self.blockchain.generate(1)
93 |
94 | # create a new mirror node and see if it syncs to same balance from scratch
95 | node3 = await self.make_wallet_node(account1.seed)
96 | account3 = node3.account
97 | await self.assertBalance(account3, '0.989876')
98 |
--------------------------------------------------------------------------------
/torba/client/util.py:
--------------------------------------------------------------------------------
1 | import re
2 | from binascii import unhexlify, hexlify
3 | from typing import TypeVar, Sequence, Optional
4 | from torba.client.constants import COIN
5 |
6 |
7 | def coins_to_satoshis(coins):
8 | if not isinstance(coins, str):
9 |         raise ValueError(f"{coins} must be a string")
10 | result = re.search(r'^(\d{1,10})\.(\d{1,8})$', coins)
11 | if result is not None:
12 | whole, fractional = result.groups()
13 | return int(whole+fractional.ljust(8, "0"))
14 |     raise ValueError(f"'{coins}' is not a valid coin decimal")
15 |
16 |
17 | def satoshis_to_coins(satoshis):
18 | coins = '{:.8f}'.format(satoshis / COIN).rstrip('0')
19 | if coins.endswith('.'):
20 | return coins+'0'
21 | else:
22 | return coins
23 |
24 |
25 | T = TypeVar('T')
26 |
27 |
28 | class ReadOnlyList(Sequence[T]):
29 |
30 | def __init__(self, lst):
31 | self.lst = lst
32 |
33 | def __getitem__(self, key):
34 | return self.lst[key]
35 |
36 | def __len__(self) -> int:
37 | return len(self.lst)
38 |
39 |
40 | def subclass_tuple(name, base):
41 | return type(name, (base,), {'__slots__': ()})
42 |
43 |
44 | class cachedproperty:
45 |
46 | def __init__(self, f):
47 | self.f = f
48 |
49 | def __get__(self, obj, objtype):
50 | obj = obj or objtype
51 | value = self.f(obj)
52 | setattr(obj, self.f.__name__, value)
53 | return value
54 |
55 |
56 | def bytes_to_int(be_bytes):
57 | """ Interprets a big-endian sequence of bytes as an integer. """
58 | return int(hexlify(be_bytes), 16)
59 |
60 |
61 | def int_to_bytes(value):
62 | """ Converts an integer to a big-endian sequence of bytes. """
63 | length = (value.bit_length() + 7) // 8
64 | s = '%x' % value
65 | return unhexlify(('0' * (len(s) % 2) + s).zfill(length * 2))
66 |
67 |
68 | class ArithUint256:
69 | # https://github.com/bitcoin/bitcoin/blob/master/src/arith_uint256.cpp
70 |
71 | __slots__ = '_value', '_compact'
72 |
73 | def __init__(self, value: int) -> None:
74 | self._value = value
75 | self._compact: Optional[int] = None
76 |
77 | @classmethod
78 | def from_compact(cls, compact) -> 'ArithUint256':
79 | size = compact >> 24
80 | word = compact & 0x007fffff
81 | if size <= 3:
82 | return cls(word >> 8 * (3 - size))
83 | else:
84 | return cls(word << 8 * (size - 3))
85 |
86 | @property
87 | def value(self) -> int:
88 | return self._value
89 |
90 | @property
91 | def compact(self) -> int:
92 | if self._compact is None:
93 | self._compact = self._calculate_compact()
94 | return self._compact
95 |
96 | @property
97 | def negative(self) -> int:
98 | return self._calculate_compact(negative=True)
99 |
100 | @property
101 | def bits(self) -> int:
102 | """ Returns the position of the highest bit set plus one. """
103 | bits = bin(self._value)[2:]
104 | for i, d in enumerate(bits):
105 | if d:
106 | return (len(bits) - i) + 1
107 | return 0
108 |
109 | @property
110 | def low64(self) -> int:
111 | return self._value & 0xffffffffffffffff
112 |
113 | def _calculate_compact(self, negative=False) -> int:
114 | size = (self.bits + 7) // 8
115 | if size <= 3:
116 | compact = self.low64 << 8 * (3 - size)
117 | else:
118 | compact = ArithUint256(self._value >> 8 * (size - 3)).low64
119 | # The 0x00800000 bit denotes the sign.
120 | # Thus, if it is already set, divide the mantissa by 256 and increase the exponent.
121 | if compact & 0x00800000:
122 | compact >>= 8
123 | size += 1
124 | assert (compact & ~0x007fffff) == 0
125 | assert size < 256
126 | compact |= size << 24
127 | if negative and compact & 0x007fffff:
128 | compact |= 0x00800000
129 | return compact
130 |
131 | def __mul__(self, x):
132 | # Take the mod because we are limited to an unsigned 256 bit number
133 | return ArithUint256((self._value * x) % 2 ** 256)
134 |
135 | def __truediv__(self, x):
136 | return ArithUint256(int(self._value / x))
137 |
138 | def __gt__(self, other):
139 | return self._value > other
140 |
141 | def __lt__(self, other):
142 | return self._value < other
143 |
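A quick worked example of the helpers above; this is an illustrative sketch, not part of the module, and assumes COIN is 10**8 as defined in torba.client.constants. The compact round trip uses Bitcoin's genesis difficulty bits (0x1d00ffff):

    from torba.client.util import coins_to_satoshis, satoshis_to_coins, ArithUint256

    # string <-> satoshi conversions
    assert coins_to_satoshis('1.5') == 150_000_000
    assert satoshis_to_coins(150_000_000) == '1.5'

    # compact (nBits) encoding round trip
    target = ArithUint256.from_compact(0x1d00ffff)
    assert target.compact == 0x1d00ffff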
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_headers.py:
--------------------------------------------------------------------------------
1 | import os
2 | from urllib.request import Request, urlopen
3 |
4 | from torba.testcase import AsyncioTestCase
5 |
6 | from torba.coin.bitcoinsegwit import MainHeaders
7 |
8 |
9 | def block_bytes(blocks):
10 | return blocks * MainHeaders.header_size
11 |
12 |
13 | class BitcoinHeadersTestCase(AsyncioTestCase):
14 |
15 | # Download headers instead of storing them in git.
16 | HEADER_URL = 'http://headers.electrum.org/blockchain_headers'
17 | HEADER_FILE = 'bitcoin_headers'
18 | HEADER_BYTES = block_bytes(32260) # 2.6MB
19 | RETARGET_BLOCK = 32256 # difficulty: 1 -> 1.18
20 |
21 | def setUp(self):
22 | self.maxDiff = None
23 | self.header_file_name = os.path.join(os.path.dirname(__file__), self.HEADER_FILE)
24 | if not os.path.exists(self.header_file_name):
25 | req = Request(self.HEADER_URL)
26 | req.add_header('Range', 'bytes=0-{}'.format(self.HEADER_BYTES-1))
27 | with urlopen(req) as response, open(self.header_file_name, 'wb') as header_file:
28 | header_file.write(response.read())
29 | if os.path.getsize(self.header_file_name) != self.HEADER_BYTES:
30 | os.remove(self.header_file_name)
31 | raise Exception(
32 | "Downloaded headers for testing are not the correct number of bytes. "
33 | "They were deleted. Try running the tests again."
34 | )
35 |
36 | def get_bytes(self, upto: int = -1, after: int = 0) -> bytes:
37 | with open(self.header_file_name, 'rb') as headers:
38 | headers.seek(after, os.SEEK_SET)
39 | return headers.read(upto)
40 |
41 | async def get_headers(self, upto: int = -1):
42 | h = MainHeaders(':memory:')
43 | h.io.write(self.get_bytes(upto))
44 | return h
45 |
46 |
47 | class BasicHeadersTests(BitcoinHeadersTestCase):
48 |
49 | async def test_serialization(self):
50 | h = await self.get_headers()
51 | self.assertEqual(h[0], {
52 | 'bits': 486604799,
53 | 'block_height': 0,
54 | 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
55 | 'nonce': 2083236893,
56 | 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
57 | 'timestamp': 1231006505,
58 | 'version': 1
59 | })
60 | self.assertEqual(h[self.RETARGET_BLOCK-1], {
61 | 'bits': 486604799,
62 | 'block_height': 32255,
63 | 'merkle_root': b'89b4f223789e40b5b475af6483bb05bceda54059e17d2053334b358f6bb310ac',
64 | 'nonce': 312762301,
65 | 'prev_block_hash': b'000000006baebaa74cecde6c6787c26ee0a616a3c333261bff36653babdac149',
66 | 'timestamp': 1262152739,
67 | 'version': 1
68 | })
69 | self.assertEqual(h[self.RETARGET_BLOCK], {
70 | 'bits': 486594666,
71 | 'block_height': 32256,
72 | 'merkle_root': b'64b5e5f5a262f47af443a0120609206a3305877693edfe03e994f20a024ab627',
73 | 'nonce': 121087187,
74 | 'prev_block_hash': b'00000000984f962134a7291e3693075ae03e521f0ee33378ec30a334d860034b',
75 | 'timestamp': 1262153464,
76 | 'version': 1
77 | })
78 | self.assertEqual(h[self.RETARGET_BLOCK+1], {
79 | 'bits': 486594666,
80 | 'block_height': 32257,
81 | 'merkle_root': b'4d1488981f08b3037878193297dbac701a2054e0f803d4424fe6a4d763d62334',
82 | 'nonce': 274675219,
83 | 'prev_block_hash': b'000000004f2886a170adb7204cb0c7a824217dd24d11a74423d564c4e0904967',
84 | 'timestamp': 1262154352,
85 | 'version': 1
86 | })
87 | self.assertEqual(
88 | h.serialize(h[0]),
89 | h.get_raw_header(0)
90 | )
91 | self.assertEqual(
92 | h.serialize(h[self.RETARGET_BLOCK]),
93 | h.get_raw_header(self.RETARGET_BLOCK)
94 | )
95 |
96 | async def test_connect_from_genesis_to_3000_past_first_chunk_at_2016(self):
97 | headers = MainHeaders(':memory:')
98 | self.assertEqual(headers.height, -1)
99 | await headers.connect(0, self.get_bytes(block_bytes(3001)))
100 | self.assertEqual(headers.height, 3000)
101 |
102 | async def test_connect_9_blocks_passing_a_retarget_at_32256(self):
103 | retarget = block_bytes(self.RETARGET_BLOCK-5)
104 | headers = await self.get_headers(upto=retarget)
105 | remainder = self.get_bytes(after=retarget)
106 | self.assertEqual(headers.height, 32250)
107 | await headers.connect(len(headers), remainder)
108 | self.assertEqual(headers.height, 32259)
109 |
--------------------------------------------------------------------------------
/torba/client/wallet.py:
--------------------------------------------------------------------------------
1 | import os
2 | import stat
3 | import json
4 | import zlib
5 | import typing
6 | from typing import Sequence, MutableSequence
7 | from hashlib import sha256
8 | from operator import attrgetter
9 | from torba.client.hash import better_aes_encrypt, better_aes_decrypt
10 |
11 | if typing.TYPE_CHECKING:
12 | from torba.client import basemanager, baseaccount, baseledger
13 |
14 |
15 | class Wallet:
16 | """ The primary role of Wallet is to encapsulate a collection
17 | of accounts (seed/private keys) and the spending rules / settings
18 | for the coins attached to those accounts. Wallets are represented
19 | by physical files on the filesystem.
20 | """
21 |
22 | def __init__(self, name: str = 'Wallet', accounts: MutableSequence['baseaccount.BaseAccount'] = None,
23 | storage: 'WalletStorage' = None) -> None:
24 | self.name = name
25 | self.accounts = accounts or []
26 | self.storage = storage or WalletStorage()
27 |
28 | def add_account(self, account):
29 | self.accounts.append(account)
30 |
31 | def generate_account(self, ledger: 'baseledger.BaseLedger') -> 'baseaccount.BaseAccount':
32 | return ledger.account_class.generate(ledger, self)
33 |
34 | @classmethod
35 | def from_storage(cls, storage: 'WalletStorage', manager: 'basemanager.BaseWalletManager') -> 'Wallet':
36 | json_dict = storage.read()
37 | wallet = cls(
38 | name=json_dict.get('name', 'Wallet'),
39 | storage=storage
40 | )
41 | account_dicts: Sequence[dict] = json_dict.get('accounts', [])
42 | for account_dict in account_dicts:
43 | ledger = manager.get_or_create_ledger(account_dict['ledger'])
44 | ledger.account_class.from_dict(ledger, wallet, account_dict)
45 | return wallet
46 |
47 | def to_dict(self):
48 | return {
49 | 'version': WalletStorage.LATEST_VERSION,
50 | 'name': self.name,
51 | 'accounts': [a.to_dict() for a in self.accounts]
52 | }
53 |
54 | def save(self):
55 | self.storage.write(self.to_dict())
56 |
57 | @property
58 | def default_account(self):
59 | for account in self.accounts:
60 | return account
61 |
62 | @property
63 | def hash(self) -> bytes:
64 | h = sha256()
65 | for account in sorted(self.accounts, key=attrgetter('id')):
66 | h.update(account.hash)
67 | return h.digest()
68 |
69 | def pack(self, password):
70 | new_data = json.dumps(self.to_dict())
71 | new_data_compressed = zlib.compress(new_data.encode())
72 | return better_aes_encrypt(password, new_data_compressed)
73 |
74 | @classmethod
75 | def unpack(cls, password, encrypted):
76 | decrypted = better_aes_decrypt(password, encrypted)
77 | decompressed = zlib.decompress(decrypted)
78 | return json.loads(decompressed)
79 |
80 |
81 | class WalletStorage:
82 |
83 | LATEST_VERSION = 1
84 |
85 | def __init__(self, path=None, default=None):
86 | self.path = path
87 | self._default = default or {
88 | 'version': self.LATEST_VERSION,
89 | 'name': 'My Wallet',
90 | 'accounts': []
91 | }
92 |
93 | def read(self):
94 | if self.path and os.path.exists(self.path):
95 | with open(self.path, 'r') as f:
96 | json_data = f.read()
97 | json_dict = json.loads(json_data)
98 | if json_dict.get('version') == self.LATEST_VERSION and \
99 | set(json_dict) == set(self._default):
100 | return json_dict
101 | else:
102 | return self.upgrade(json_dict)
103 | else:
104 | return self._default.copy()
105 |
106 | def upgrade(self, json_dict):
107 | json_dict = json_dict.copy()
108 | version = json_dict.pop('version', -1)
109 | if version == -1:
110 | pass
111 | upgraded = self._default.copy()
112 | upgraded.update(json_dict)
113 |         return upgraded
114 |
115 | def write(self, json_dict):
116 |
117 | json_data = json.dumps(json_dict, indent=4, sort_keys=True)
118 | if self.path is None:
119 | return json_data
120 |
121 | temp_path = "%s.tmp.%s" % (self.path, os.getpid())
122 | with open(temp_path, "w") as f:
123 | f.write(json_data)
124 | f.flush()
125 | os.fsync(f.fileno())
126 |
127 | if os.path.exists(self.path):
128 | mode = os.stat(self.path).st_mode
129 | else:
130 | mode = stat.S_IREAD | stat.S_IWRITE
131 | try:
132 | os.rename(temp_path, self.path)
133 | except Exception: # pylint: disable=broad-except
134 | os.remove(self.path)
135 | os.rename(temp_path, self.path)
136 | os.chmod(self.path, mode)
137 |
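As an illustrative sketch of the pack/unpack cycle above (not part of the module; the wallet name and password are made up), an empty wallet can be encrypted to a blob and its JSON recovered from it:

    from torba.client.wallet import Wallet

    wallet = Wallet(name='Demo Wallet')
    blob = wallet.pack('hunter2')               # zlib-compressed JSON, AES encrypted
    recovered = Wallet.unpack('hunter2', blob)  # plain dict, not a Wallet instance
    assert recovered['name'] == 'Demo Wallet'
    assert recovered['accounts'] == []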
--------------------------------------------------------------------------------
/torba/server/hash.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016-2017, Neil Booth
2 | #
3 | # All rights reserved.
4 | #
5 | # The MIT License (MIT)
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining
8 | # a copy of this software and associated documentation files (the
9 | # "Software"), to deal in the Software without restriction, including
10 | # without limitation the rights to use, copy, modify, merge, publish,
11 | # distribute, sublicense, and/or sell copies of the Software, and to
12 | # permit persons to whom the Software is furnished to do so, subject to
13 | # the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be
16 | # included in all copies or substantial portions of the Software.
17 | #
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
22 | # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
24 | # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 |
26 | """Cryptographic hash functions and related classes."""
27 |
28 |
29 | import hashlib
30 | import hmac
31 |
32 | from torba.server.util import bytes_to_int, int_to_bytes, hex_to_bytes
33 |
34 | _sha256 = hashlib.sha256
35 | _sha512 = hashlib.sha512
36 | _new_hash = hashlib.new
37 | _new_hmac = hmac.new
38 | HASHX_LEN = 11
39 |
40 |
41 | def sha256(x):
42 | """Simple wrapper of hashlib sha256."""
43 | return _sha256(x).digest()
44 |
45 |
46 | def ripemd160(x):
47 | """Simple wrapper of hashlib ripemd160."""
48 | h = _new_hash('ripemd160')
49 | h.update(x)
50 | return h.digest()
51 |
52 |
53 | def double_sha256(x):
54 | """SHA-256 of SHA-256, as used extensively in bitcoin."""
55 | return sha256(sha256(x))
56 |
57 |
58 | def hmac_sha512(key, msg):
59 | """Use SHA-512 to provide an HMAC."""
60 | return _new_hmac(key, msg, _sha512).digest()
61 |
62 |
63 | def hash160(x):
64 | """RIPEMD-160 of SHA-256.
65 |
66 | Used to make bitcoin addresses from pubkeys."""
67 | return ripemd160(sha256(x))
68 |
69 |
70 | def hash_to_hex_str(x):
71 | """Convert a big-endian binary hash to displayed hex string.
72 |
73 | Display form of a binary hash is reversed and converted to hex.
74 | """
75 | return bytes(reversed(x)).hex()
76 |
77 |
78 | def hex_str_to_hash(x):
79 | """Convert a displayed hex string to a binary hash."""
80 | return bytes(reversed(hex_to_bytes(x)))
81 |
82 |
83 | class Base58Error(Exception):
84 | """Exception used for Base58 errors."""
85 |
86 |
87 | class Base58:
88 | """Class providing base 58 functionality."""
89 |
90 | chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
91 | assert len(chars) == 58
92 | cmap = {c: n for n, c in enumerate(chars)}
93 |
94 | @staticmethod
95 | def char_value(c):
96 | val = Base58.cmap.get(c)
97 | if val is None:
98 | raise Base58Error('invalid base 58 character "{}"'.format(c))
99 | return val
100 |
101 | @staticmethod
102 | def decode(txt):
103 | """Decodes txt into a big-endian bytearray."""
104 | if not isinstance(txt, str):
105 | raise TypeError('a string is required')
106 |
107 | if not txt:
108 | raise Base58Error('string cannot be empty')
109 |
110 | value = 0
111 | for c in txt:
112 | value = value * 58 + Base58.char_value(c)
113 |
114 | result = int_to_bytes(value)
115 |
116 | # Prepend leading zero bytes if necessary
117 | count = 0
118 | for c in txt:
119 | if c != '1':
120 | break
121 | count += 1
122 | if count:
123 | result = bytes(count) + result
124 |
125 | return result
126 |
127 | @staticmethod
128 | def encode(be_bytes):
129 | """Converts a big-endian bytearray into a base58 string."""
130 | value = bytes_to_int(be_bytes)
131 |
132 | txt = ''
133 | while value:
134 | value, mod = divmod(value, 58)
135 | txt += Base58.chars[mod]
136 |
137 | for byte in be_bytes:
138 | if byte != 0:
139 | break
140 | txt += '1'
141 |
142 | return txt[::-1]
143 |
144 | @staticmethod
145 | def decode_check(txt, *, hash_fn=double_sha256):
146 |         """Decodes a Base58Check-encoded string to a payload. The version
147 |         byte(s) remain prefixed to the returned payload."""
148 | be_bytes = Base58.decode(txt)
149 | result, check = be_bytes[:-4], be_bytes[-4:]
150 | if check != hash_fn(result)[:4]:
151 | raise Base58Error('invalid base 58 checksum for {}'.format(txt))
152 | return result
153 |
154 | @staticmethod
155 | def encode_check(payload, *, hash_fn=double_sha256):
156 | """Encodes a payload bytearray (which includes the version byte(s))
157 | into a Base58Check string."""
158 | be_bytes = payload + hash_fn(payload)[:4]
159 | return Base58.encode(be_bytes)
160 |
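An illustrative Base58Check round trip using the helpers above (a sketch with a made-up payload, not part of the module); double_sha256 supplies the four checksum bytes that encode_check appends and decode_check verifies:

    from torba.server.hash import Base58, double_sha256

    payload = b'\x00' + double_sha256(b'example')[:20]   # version byte + 20-byte body
    encoded = Base58.encode_check(payload)
    assert Base58.decode_check(encoded) == payload
    assert Base58.decode(Base58.encode(payload)) == payload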
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_bip32.py:
--------------------------------------------------------------------------------
1 | from binascii import unhexlify, hexlify
2 |
3 | from torba.testcase import AsyncioTestCase
4 |
5 | from client_tests.unit.key_fixtures import expected_ids, expected_privkeys, expected_hardened_privkeys
6 | from torba.client.bip32 import PubKey, PrivateKey, from_extended_key_string
7 | from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class
8 |
9 |
10 | class BIP32Tests(AsyncioTestCase):
11 |
12 | def test_pubkey_validation(self):
13 | with self.assertRaisesRegex(TypeError, 'chain code must be raw bytes'):
14 | PubKey(None, None, 1, None, None, None)
15 | with self.assertRaisesRegex(ValueError, 'invalid chain code'):
16 | PubKey(None, None, b'abcd', None, None, None)
17 | with self.assertRaisesRegex(ValueError, 'invalid child number'):
18 | PubKey(None, None, b'abcd'*8, -1, None, None)
19 | with self.assertRaisesRegex(ValueError, 'invalid depth'):
20 | PubKey(None, None, b'abcd'*8, 0, 256, None)
21 | with self.assertRaisesRegex(TypeError, 'pubkey must be raw bytes'):
22 | PubKey(None, None, b'abcd'*8, 0, 255, None)
23 | with self.assertRaisesRegex(ValueError, 'pubkey must be 33 bytes'):
24 | PubKey(None, b'abcd', b'abcd'*8, 0, 255, None)
25 | with self.assertRaisesRegex(ValueError, 'invalid pubkey prefix byte'):
26 | PubKey(
27 | None,
28 | unhexlify('33d1a3dc8155673bc1e2214fa493ccc82d57961b66054af9b6b653ac28eeef3ffe'),
29 | b'abcd'*8, 0, 255, None
30 | )
31 | pubkey = PubKey( # success
32 | None,
33 | unhexlify('03d1a3dc8155673bc1e2214fa493ccc82d57961b66054af9b6b653ac28eeef3ffe'),
34 | b'abcd'*8, 0, 1, None
35 | )
36 | with self.assertRaisesRegex(ValueError, 'invalid BIP32 public key child number'):
37 | pubkey.child(-1)
38 | for i in range(20):
39 | new_key = pubkey.child(i)
40 | self.assertIsInstance(new_key, PubKey)
41 | self.assertEqual(hexlify(new_key.identifier()), expected_ids[i])
42 |
43 | async def test_private_key_validation(self):
44 | with self.assertRaisesRegex(TypeError, 'private key must be raw bytes'):
45 | PrivateKey(None, None, b'abcd'*8, 0, 255)
46 | with self.assertRaisesRegex(ValueError, 'private key must be 32 bytes'):
47 | PrivateKey(None, b'abcd', b'abcd'*8, 0, 255)
48 | private_key = PrivateKey(
49 | ledger_class({
50 | 'db': ledger_class.database_class(':memory:'),
51 | 'headers': ledger_class.headers_class(':memory:'),
52 | }),
53 | unhexlify('2423f3dc6087d9683f73a684935abc0ccd8bc26370588f56653128c6a6f0bf7c'),
54 | b'abcd'*8, 0, 1
55 | )
56 | ec_point = private_key.ec_point()
57 | self.assertEqual(
58 | ec_point[0], 30487144161998778625547553412379759661411261804838752332906558028921886299019
59 | )
60 | self.assertEqual(
61 | ec_point[1], 86198965946979720220333266272536217633917099472454294641561154971209433250106
62 | )
63 |         self.assertEqual(private_key.address(), '1GVM5dEhThbiyCZ9gqBZBv6p9whga7MTXo')
64 | with self.assertRaisesRegex(ValueError, 'invalid BIP32 private key child number'):
65 | private_key.child(-1)
66 | self.assertIsInstance(private_key.child(PrivateKey.HARDENED), PrivateKey)
67 |
68 | async def test_private_key_derivation(self):
69 | private_key = PrivateKey(
70 | ledger_class({
71 | 'db': ledger_class.database_class(':memory:'),
72 | 'headers': ledger_class.headers_class(':memory:'),
73 | }),
74 | unhexlify('2423f3dc6087d9683f73a684935abc0ccd8bc26370588f56653128c6a6f0bf7c'),
75 | b'abcd'*8, 0, 1
76 | )
77 | for i in range(20):
78 | new_privkey = private_key.child(i)
79 | self.assertIsInstance(new_privkey, PrivateKey)
80 | self.assertEqual(hexlify(new_privkey.private_key_bytes), expected_privkeys[i])
81 | for i in range(PrivateKey.HARDENED + 1, private_key.HARDENED + 20):
82 | new_privkey = private_key.child(i)
83 | self.assertIsInstance(new_privkey, PrivateKey)
84 | self.assertEqual(hexlify(new_privkey.private_key_bytes), expected_hardened_privkeys[i - 1 - PrivateKey.HARDENED])
85 |
86 | async def test_from_extended_keys(self):
87 | ledger = ledger_class({
88 | 'db': ledger_class.database_class(':memory:'),
89 | 'headers': ledger_class.headers_class(':memory:'),
90 | })
91 | self.assertIsInstance(
92 | from_extended_key_string(
93 | ledger,
94 | 'xprv9s21ZrQH143K2dyhK7SevfRG72bYDRNv25yKPWWm6dqApNxm1Zb1m5gGcBWYfbsPjTr2v5joit8Af2Zp5P'
95 | '6yz3jMbycrLrRMpeAJxR8qDg8',
96 | ), PrivateKey
97 | )
98 | self.assertIsInstance(
99 | from_extended_key_string(
100 | ledger,
101 | 'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
102 | 'iW44g14WF52fYC5J483wqQ5ZP',
103 | ), PubKey
104 | )
105 |
--------------------------------------------------------------------------------
/torba/stream.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 |
4 | class BroadcastSubscription:
5 |
6 | def __init__(self, controller, on_data, on_error, on_done):
7 | self._controller = controller
8 | self._previous = self._next = None
9 | self._on_data = on_data
10 | self._on_error = on_error
11 | self._on_done = on_done
12 | self.is_paused = False
13 | self.is_canceled = False
14 | self.is_closed = False
15 |
16 | def pause(self):
17 | self.is_paused = True
18 |
19 | def resume(self):
20 | self.is_paused = False
21 |
22 | def cancel(self):
23 | self._controller._cancel(self)
24 | self.is_canceled = True
25 |
26 | @property
27 | def can_fire(self):
28 | return not any((self.is_paused, self.is_canceled, self.is_closed))
29 |
30 | def _add(self, data):
31 | if self.can_fire and self._on_data is not None:
32 | return self._on_data(data)
33 |
34 | def _add_error(self, exception):
35 | if self.can_fire and self._on_error is not None:
36 | return self._on_error(exception)
37 |
38 | def _close(self):
39 | try:
40 | if self.can_fire and self._on_done is not None:
41 | return self._on_done()
42 | finally:
43 | self.is_closed = True
44 |
45 |
46 | class StreamController:
47 |
48 | def __init__(self):
49 | self.stream = Stream(self)
50 | self._first_subscription = None
51 | self._last_subscription = None
52 |
53 | @property
54 | def has_listener(self):
55 | return self._first_subscription is not None
56 |
57 | @property
58 | def _iterate_subscriptions(self):
59 | next_sub = self._first_subscription
60 | while next_sub is not None:
61 | subscription = next_sub
62 | next_sub = next_sub._next
63 | yield subscription
64 |
65 | def _notify_and_ensure_future(self, notify):
66 | tasks = []
67 | for subscription in self._iterate_subscriptions:
68 | maybe_coroutine = notify(subscription)
69 | if asyncio.iscoroutine(maybe_coroutine):
70 | tasks.append(maybe_coroutine)
71 | if tasks:
72 | return asyncio.ensure_future(asyncio.wait(tasks))
73 | else:
74 | f = asyncio.get_event_loop().create_future()
75 | f.set_result(None)
76 | return f
77 |
78 | def add(self, event):
79 | return self._notify_and_ensure_future(
80 | lambda subscription: subscription._add(event)
81 | )
82 |
83 | def add_error(self, exception):
84 | return self._notify_and_ensure_future(
85 | lambda subscription: subscription._add_error(exception)
86 | )
87 |
88 | def close(self):
89 | for subscription in self._iterate_subscriptions:
90 | subscription._close()
91 |
92 | def _cancel(self, subscription):
93 | previous = subscription._previous
94 | next_sub = subscription._next
95 | if previous is None:
96 | self._first_subscription = next_sub
97 | else:
98 | previous._next = next_sub
99 | if next_sub is None:
100 | self._last_subscription = previous
101 | else:
102 | next_sub._previous = previous
103 | subscription._next = subscription._previous = subscription
104 |
105 | def _listen(self, on_data, on_error, on_done):
106 | subscription = BroadcastSubscription(self, on_data, on_error, on_done)
107 | old_last = self._last_subscription
108 | self._last_subscription = subscription
109 | subscription._previous = old_last
110 | subscription._next = None
111 | if old_last is None:
112 | self._first_subscription = subscription
113 | else:
114 | old_last._next = subscription
115 | return subscription
116 |
117 |
118 | class Stream:
119 |
120 | def __init__(self, controller):
121 | self._controller = controller
122 |
123 | def listen(self, on_data, on_error=None, on_done=None):
124 | return self._controller._listen(on_data, on_error, on_done)
125 |
126 | def where(self, condition) -> asyncio.Future:
127 | future = asyncio.get_event_loop().create_future()
128 |
129 | def where_test(value):
130 | if condition(value):
131 | self._cancel_and_callback(subscription, future, value)
132 |
133 | subscription = self.listen(
134 | where_test,
135 | lambda exception: self._cancel_and_error(subscription, future, exception)
136 | )
137 |
138 | return future
139 |
140 | @property
141 | def first(self):
142 | future = asyncio.get_event_loop().create_future()
143 | subscription = self.listen(
144 | lambda value: self._cancel_and_callback(subscription, future, value),
145 | lambda exception: self._cancel_and_error(subscription, future, exception)
146 | )
147 | return future
148 |
149 | @staticmethod
150 | def _cancel_and_callback(subscription: BroadcastSubscription, future: asyncio.Future, value):
151 | subscription.cancel()
152 | future.set_result(value)
153 |
154 | @staticmethod
155 | def _cancel_and_error(subscription: BroadcastSubscription, future: asyncio.Future, exception):
156 | subscription.cancel()
157 | future.set_exception(exception)
158 |
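A minimal sketch of how the controller/stream pair above is typically wired together (illustrative only; the demo coroutine and event values are made up): one subscriber reacts to every event, while where() resolves a future on the first event matching a condition.

    import asyncio
    from torba.stream import StreamController

    async def demo():
        controller = StreamController()
        controller.stream.listen(lambda value: print('saw', value))
        over_two = controller.stream.where(lambda value: value > 2)
        for i in range(5):
            await controller.add(i)
        print('first value over 2:', await over_two)

    asyncio.get_event_loop().run_until_complete(demo())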
--------------------------------------------------------------------------------
/torba/server/storage.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016-2017, the ElectrumX authors
2 | #
3 | # All rights reserved.
4 | #
5 | # See the file "LICENCE" for information about the copyright
6 | # and warranty status of this software.
7 |
8 | """Backend database abstraction."""
9 |
10 | import os
11 | from functools import partial
12 |
13 | from torba.server import util
14 |
15 |
16 | def db_class(name):
17 | """Returns a DB engine class."""
18 | for db_class in util.subclasses(Storage):
19 | if db_class.__name__.lower() == name.lower():
20 | db_class.import_module()
21 | return db_class
22 | raise RuntimeError('unrecognised DB engine "{}"'.format(name))
23 |
24 |
25 | class Storage:
26 | """Abstract base class of the DB backend abstraction."""
27 |
28 | def __init__(self, name, for_sync):
29 | self.is_new = not os.path.exists(name)
30 | self.for_sync = for_sync or self.is_new
31 | self.open(name, create=self.is_new)
32 |
33 | @classmethod
34 | def import_module(cls):
35 | """Import the DB engine module."""
36 | raise NotImplementedError
37 |
38 | def open(self, name, create):
39 | """Open an existing database or create a new one."""
40 | raise NotImplementedError
41 |
42 | def close(self):
43 | """Close an existing database."""
44 | raise NotImplementedError
45 |
46 | def get(self, key):
47 | raise NotImplementedError
48 |
49 | def put(self, key, value):
50 | raise NotImplementedError
51 |
52 | def write_batch(self):
53 | """Return a context manager that provides `put` and `delete`.
54 |
55 | Changes should only be committed when the context manager
56 | closes without an exception.
57 | """
58 | raise NotImplementedError
59 |
60 | def iterator(self, prefix=b'', reverse=False):
61 | """Return an iterator that yields (key, value) pairs from the
62 | database sorted by key.
63 |
64 | If `prefix` is set, only keys starting with `prefix` will be
65 | included. If `reverse` is True the items are returned in
66 | reverse order.
67 | """
68 | raise NotImplementedError
69 |
70 |
71 | class LevelDB(Storage):
72 | """LevelDB database engine."""
73 |
74 | @classmethod
75 | def import_module(cls):
76 | import plyvel
77 | cls.module = plyvel
78 |
79 | def open(self, name, create):
80 | mof = 512 if self.for_sync else 128
81 | # Use snappy compression (the default)
82 | self.db = self.module.DB(name, create_if_missing=create,
83 | max_open_files=mof)
84 | self.close = self.db.close
85 | self.get = self.db.get
86 | self.put = self.db.put
87 | self.iterator = self.db.iterator
88 | self.write_batch = partial(self.db.write_batch, transaction=True,
89 | sync=True)
90 |
91 |
92 | class RocksDB(Storage):
93 | """RocksDB database engine."""
94 |
95 | @classmethod
96 | def import_module(cls):
97 | import rocksdb
98 | cls.module = rocksdb
99 |
100 | def open(self, name, create):
101 | mof = 512 if self.for_sync else 128
102 | # Use snappy compression (the default)
103 | options = self.module.Options(create_if_missing=create,
104 | use_fsync=True,
105 | target_file_size_base=33554432,
106 | max_open_files=mof)
107 | self.db = self.module.DB(name, options)
108 | self.get = self.db.get
109 | self.put = self.db.put
110 |
111 | def close(self):
112 | # PyRocksDB doesn't provide a close method; hopefully this is enough
113 | self.db = self.get = self.put = None
114 | import gc
115 | gc.collect()
116 |
117 | def write_batch(self):
118 | return RocksDBWriteBatch(self.db)
119 |
120 | def iterator(self, prefix=b'', reverse=False):
121 | return RocksDBIterator(self.db, prefix, reverse)
122 |
123 |
124 | class RocksDBWriteBatch:
125 | """A write batch for RocksDB."""
126 |
127 | def __init__(self, db):
128 | self.batch = RocksDB.module.WriteBatch()
129 | self.db = db
130 |
131 | def __enter__(self):
132 | return self.batch
133 |
134 | def __exit__(self, exc_type, exc_val, exc_tb):
135 | if not exc_val:
136 | self.db.write(self.batch)
137 |
138 |
139 | class RocksDBIterator:
140 | """An iterator for RocksDB."""
141 |
142 | def __init__(self, db, prefix, reverse):
143 | self.prefix = prefix
144 | if reverse:
145 | self.iterator = reversed(db.iteritems())
146 | nxt_prefix = util.increment_byte_string(prefix)
147 | if nxt_prefix:
148 | self.iterator.seek(nxt_prefix)
149 | try:
150 | next(self.iterator)
151 | except StopIteration:
152 | self.iterator.seek(nxt_prefix)
153 | else:
154 | self.iterator.seek_to_last()
155 | else:
156 | self.iterator = db.iteritems()
157 | self.iterator.seek(prefix)
158 |
159 | def __iter__(self):
160 | return self
161 |
162 | def __next__(self):
163 | k, v = next(self.iterator)
164 | if not k.startswith(self.prefix):
165 | raise StopIteration
166 | return k, v
167 |
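A brief sketch of looking up and using a backend through the abstraction above (illustrative; assumes the plyvel package is installed and that creating a throwaway 'example_db' directory is acceptable):

    from torba.server.storage import db_class

    LevelDB = db_class('leveldb')            # engine lookup is case-insensitive
    db = LevelDB('example_db', for_sync=False)
    with db.write_batch() as batch:          # changes commit when the context exits cleanly
        batch.put(b'key', b'value')
    assert db.get(b'key') == b'value'
    db.close()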
--------------------------------------------------------------------------------
/torba/server/server.py:
--------------------------------------------------------------------------------
1 | import signal
2 | import logging
3 | import asyncio
4 | from concurrent.futures.thread import ThreadPoolExecutor
5 |
6 | import torba
7 | from torba.server.mempool import MemPool, MemPoolAPI
8 | from torba.server.session import SessionManager
9 |
10 |
11 | class Notifications:
12 | # hashX notifications come from two sources: new blocks and
13 | # mempool refreshes.
14 | #
15 | # A user with a pending transaction is notified after the block it
16 | # gets in is processed. Block processing can take an extended
17 | # time, and the prefetcher might poll the daemon after the mempool
18 | # code in any case. In such cases the transaction will not be in
19 | # the mempool after the mempool refresh. We want to avoid
20 | # notifying clients twice - for the mempool refresh and when the
21 | # block is done. This object handles that logic by deferring
22 | # notifications appropriately.
23 |
24 | def __init__(self):
25 | self._touched_mp = {}
26 | self._touched_bp = {}
27 | self._highest_block = -1
28 |
29 | async def _maybe_notify(self):
30 | tmp, tbp = self._touched_mp, self._touched_bp
31 | common = set(tmp).intersection(tbp)
32 | if common:
33 | height = max(common)
34 | elif tmp and max(tmp) == self._highest_block:
35 | height = self._highest_block
36 | else:
37 | # Either we are processing a block and waiting for it to
38 | # come in, or we have not yet had a mempool update for the
39 | # new block height
40 | return
41 | touched = tmp.pop(height)
42 | for old in [h for h in tmp if h <= height]:
43 | del tmp[old]
44 | for old in [h for h in tbp if h <= height]:
45 | touched.update(tbp.pop(old))
46 | await self.notify(height, touched)
47 |
48 | async def notify(self, height, touched):
49 | pass
50 |
51 | async def start(self, height, notify_func):
52 | self._highest_block = height
53 | self.notify = notify_func
54 | await self.notify(height, set())
55 |
56 | async def on_mempool(self, touched, height):
57 | self._touched_mp[height] = touched
58 | await self._maybe_notify()
59 |
60 | async def on_block(self, touched, height):
61 | self._touched_bp[height] = touched
62 | self._highest_block = height
63 | await self._maybe_notify()
64 |
65 |
66 | class Server:
67 |
68 | def __init__(self, env):
69 | self.env = env
70 | self.log = logging.getLogger(__name__).getChild(self.__class__.__name__)
71 | self.shutdown_event = asyncio.Event()
72 | self.cancellable_tasks = []
73 |
74 | self.notifications = notifications = Notifications()
75 | self.daemon = daemon = env.coin.DAEMON(env.coin, env.daemon_url)
76 | self.db = db = env.coin.DB(env)
77 | self.bp = bp = env.coin.BLOCK_PROCESSOR(env, db, daemon, notifications)
78 |
79 | # Set notifications up to implement the MemPoolAPI
80 | notifications.height = daemon.height
81 | notifications.cached_height = daemon.cached_height
82 | notifications.mempool_hashes = daemon.mempool_hashes
83 | notifications.raw_transactions = daemon.getrawtransactions
84 | notifications.lookup_utxos = db.lookup_utxos
85 | MemPoolAPI.register(Notifications)
86 | self.mempool = mempool = MemPool(env.coin, notifications)
87 |
88 | self.session_mgr = SessionManager(
89 | env, db, bp, daemon, mempool, self.shutdown_event
90 | )
91 |
92 | async def start(self):
93 | env = self.env
94 | min_str, max_str = env.coin.SESSIONCLS.protocol_min_max_strings()
95 | self.log.info(f'software version: {torba.__version__}')
96 | self.log.info(f'supported protocol versions: {min_str}-{max_str}')
97 | self.log.info(f'event loop policy: {env.loop_policy}')
98 | self.log.info(f'reorg limit is {env.reorg_limit:,d} blocks')
99 |
100 | await self.daemon.height()
101 |
102 | def _start_cancellable(run, *args):
103 | _flag = asyncio.Event()
104 | self.cancellable_tasks.append(asyncio.ensure_future(run(*args, _flag)))
105 | return _flag.wait()
106 |
107 | await _start_cancellable(self.bp.fetch_and_process_blocks)
108 | await self.db.populate_header_merkle_cache()
109 | await _start_cancellable(self.mempool.keep_synchronized)
110 | await _start_cancellable(self.session_mgr.serve, self.notifications)
111 |
112 | async def stop(self):
113 | for task in reversed(self.cancellable_tasks):
114 | task.cancel()
115 | await asyncio.wait(self.cancellable_tasks)
116 | self.shutdown_event.set()
117 |
118 | def run(self):
119 | loop = asyncio.get_event_loop()
120 | executor = ThreadPoolExecutor(1)
121 | loop.set_default_executor(executor)
122 |
123 | def __exit():
124 | raise SystemExit()
125 | try:
126 | loop.add_signal_handler(signal.SIGINT, __exit)
127 | loop.add_signal_handler(signal.SIGTERM, __exit)
128 | loop.run_until_complete(self.start())
129 | loop.run_until_complete(self.shutdown_event.wait())
130 | except (SystemExit, KeyboardInterrupt):
131 | pass
132 | finally:
133 | loop.run_until_complete(self.stop())
134 | executor.shutdown(True)
135 |
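The comment at the top of Notifications describes the deferral rule; the following is a small illustrative sketch (not part of the module; heights and hashX values are made up) showing a mempool update being held back until the matching block is processed:

    import asyncio
    from torba.server.server import Notifications

    async def demo():
        received = []

        async def on_notify(height, touched):
            received.append((height, touched))

        notes = Notifications()
        await notes.start(100, on_notify)
        await notes.on_mempool({'hashX-a'}, 101)   # deferred: block 101 not processed yet
        await notes.on_block({'hashX-b'}, 101)     # both sides at 101, notification fires
        print(received)  # [(100, set()), (101, {'hashX-a', 'hashX-b'})]

    asyncio.get_event_loop().run_until_complete(demo())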
--------------------------------------------------------------------------------
/torba/client/coinselection.py:
--------------------------------------------------------------------------------
1 | from random import Random
2 | from typing import List
3 |
4 | from torba.client import basetransaction
5 |
6 | MAXIMUM_TRIES = 100000
7 |
8 | STRATEGIES = []
9 |
10 | def strategy(method):
11 | STRATEGIES.append(method.__name__)
12 | return method
13 |
14 |
15 | class CoinSelector:
16 |
17 | def __init__(self, txos: List[basetransaction.BaseOutputEffectiveAmountEstimator],
18 | target: int, cost_of_change: int, seed: str = None) -> None:
19 | self.txos = txos
20 | self.target = target
21 | self.cost_of_change = cost_of_change
22 | self.exact_match = False
23 | self.tries = 0
24 | self.available = sum(c.effective_amount for c in self.txos)
25 | self.random = Random(seed)
26 | if seed is not None:
27 | self.random.seed(seed, version=1)
28 |
29 | def select(self, strategy_name: str = None) -> List[basetransaction.BaseOutputEffectiveAmountEstimator]:
30 | if not self.txos:
31 | return []
32 | if self.target > self.available:
33 | return []
34 | if strategy_name is not None:
35 | return getattr(self, strategy_name)()
36 | return (
37 | self.branch_and_bound() or
38 | self.closest_match() or
39 | self.random_draw()
40 | )
41 |
42 | @strategy
43 | def prefer_confirmed(self) -> List[basetransaction.BaseOutputEffectiveAmountEstimator]:
44 | self.txos = [t for t in self.txos if t.txo.tx_ref and t.txo.tx_ref.height > 0] or self.txos
45 | self.available = sum(c.effective_amount for c in self.txos)
46 | return (
47 | self.branch_and_bound() or
48 | self.closest_match() or
49 | self.random_draw()
50 | )
51 |
52 | @strategy
53 | def branch_and_bound(self) -> List[basetransaction.BaseOutputEffectiveAmountEstimator]:
54 | # see bitcoin implementation for more info:
55 | # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/coinselection.cpp
56 |
57 | self.txos.sort(reverse=True)
58 |
59 | current_value = 0
60 | current_available_value = self.available
61 | current_selection: List[bool] = []
62 | best_waste = self.cost_of_change
63 | best_selection: List[bool] = []
64 |
65 | while self.tries < MAXIMUM_TRIES:
66 | self.tries += 1
67 |
68 | backtrack = False
69 | if current_value + current_available_value < self.target or \
70 | current_value > self.target + self.cost_of_change:
71 | backtrack = True
72 | elif current_value >= self.target:
73 | new_waste = current_value - self.target
74 | if new_waste <= best_waste:
75 | best_waste = new_waste
76 | best_selection = current_selection[:]
77 | backtrack = True
78 |
79 | if backtrack:
80 | while current_selection and not current_selection[-1]:
81 | current_selection.pop()
82 | current_available_value += self.txos[len(current_selection)].effective_amount
83 |
84 | if not current_selection:
85 | break
86 |
87 | current_selection[-1] = False
88 | utxo = self.txos[len(current_selection) - 1]
89 | current_value -= utxo.effective_amount
90 |
91 | else:
92 | utxo = self.txos[len(current_selection)]
93 | current_available_value -= utxo.effective_amount
94 | previous_utxo = self.txos[len(current_selection) - 1] if current_selection else None
95 | if current_selection and not current_selection[-1] and previous_utxo and \
96 | utxo.effective_amount == previous_utxo.effective_amount and \
97 | utxo.fee == previous_utxo.fee:
98 | current_selection.append(False)
99 | else:
100 | current_selection.append(True)
101 | current_value += utxo.effective_amount
102 |
103 | if best_selection:
104 | self.exact_match = True
105 | return [
106 | self.txos[i] for i, include in enumerate(best_selection) if include
107 | ]
108 |
109 | return []
110 |
111 | @strategy
112 | def closest_match(self) -> List[basetransaction.BaseOutputEffectiveAmountEstimator]:
113 |         """ Pick the single UTXO that is larger than the target but has the smallest change. """
114 | target = self.target + self.cost_of_change
115 | smallest_change = None
116 | best_match = None
117 | for txo in self.txos:
118 | if txo.effective_amount >= target:
119 | change = txo.effective_amount - target
120 | if smallest_change is None or change < smallest_change:
121 | smallest_change, best_match = change, txo
122 | return [best_match] if best_match else []
123 |
124 | @strategy
125 | def random_draw(self) -> List[basetransaction.BaseOutputEffectiveAmountEstimator]:
126 | """ Accumulate UTXOs at random until there is enough to cover the target. """
127 | target = self.target + self.cost_of_change
128 | self.random.shuffle(self.txos, self.random.random)
129 | selection = []
130 | amount = 0
131 | for coin in self.txos:
132 | selection.append(coin)
133 | amount += coin.effective_amount
134 | if amount >= target:
135 | return selection
136 | return []
137 |
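A small illustrative sketch of driving CoinSelector directly (not part of the module; the stand-in class below only fakes the effective_amount and fee attributes of the real BaseOutputEffectiveAmountEstimator, and closest_match is used because it does not require the estimators to be sortable):

    from torba.client.coinselection import CoinSelector

    class FakeTxo:
        def __init__(self, amount):
            self.effective_amount = amount
            self.fee = 0

    coins = [FakeTxo(5), FakeTxo(10), FakeTxo(20)]
    selector = CoinSelector(coins, target=12, cost_of_change=1, seed='demo')
    picked = selector.select('closest_match')
    print([txo.effective_amount for txo in picked])   # [20]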
--------------------------------------------------------------------------------
/torba/client/mnemonic.py:
--------------------------------------------------------------------------------
1 | # Copyright (C) 2014 Thomas Voegtlin
2 | # Copyright (C) 2018 LBRY Inc.
3 |
4 | import hmac
5 | import math
6 | import hashlib
7 | import importlib
8 | import unicodedata
9 | import string
10 | from binascii import hexlify
11 | from secrets import randbelow
12 |
13 | import pbkdf2
14 |
15 | from torba.client.hash import hmac_sha512
16 | from torba.client.words import english
17 |
18 | # The hash of the mnemonic seed must begin with this
19 | SEED_PREFIX = b'01' # Standard wallet
20 | SEED_PREFIX_2FA = b'101' # Two-factor authentication
21 | SEED_PREFIX_SW = b'100' # Segwit wallet
22 |
23 | # http://www.asahi-net.or.jp/~ax2s-kmtn/ref/unicode/e_asia.html
24 | CJK_INTERVALS = [
25 | (0x4E00, 0x9FFF, 'CJK Unified Ideographs'),
26 | (0x3400, 0x4DBF, 'CJK Unified Ideographs Extension A'),
27 | (0x20000, 0x2A6DF, 'CJK Unified Ideographs Extension B'),
28 | (0x2A700, 0x2B73F, 'CJK Unified Ideographs Extension C'),
29 | (0x2B740, 0x2B81F, 'CJK Unified Ideographs Extension D'),
30 | (0xF900, 0xFAFF, 'CJK Compatibility Ideographs'),
31 | (0x2F800, 0x2FA1D, 'CJK Compatibility Ideographs Supplement'),
32 | (0x3190, 0x319F, 'Kanbun'),
33 | (0x2E80, 0x2EFF, 'CJK Radicals Supplement'),
34 | (0x2F00, 0x2FDF, 'CJK Radicals'),
35 | (0x31C0, 0x31EF, 'CJK Strokes'),
36 | (0x2FF0, 0x2FFF, 'Ideographic Description Characters'),
37 | (0xE0100, 0xE01EF, 'Variation Selectors Supplement'),
38 | (0x3100, 0x312F, 'Bopomofo'),
39 | (0x31A0, 0x31BF, 'Bopomofo Extended'),
40 | (0xFF00, 0xFFEF, 'Halfwidth and Fullwidth Forms'),
41 | (0x3040, 0x309F, 'Hiragana'),
42 | (0x30A0, 0x30FF, 'Katakana'),
43 | (0x31F0, 0x31FF, 'Katakana Phonetic Extensions'),
44 | (0x1B000, 0x1B0FF, 'Kana Supplement'),
45 | (0xAC00, 0xD7AF, 'Hangul Syllables'),
46 | (0x1100, 0x11FF, 'Hangul Jamo'),
47 | (0xA960, 0xA97F, 'Hangul Jamo Extended A'),
48 | (0xD7B0, 0xD7FF, 'Hangul Jamo Extended B'),
49 | (0x3130, 0x318F, 'Hangul Compatibility Jamo'),
50 | (0xA4D0, 0xA4FF, 'Lisu'),
51 | (0x16F00, 0x16F9F, 'Miao'),
52 | (0xA000, 0xA48F, 'Yi Syllables'),
53 | (0xA490, 0xA4CF, 'Yi Radicals'),
54 | ]
55 |
56 |
57 | def is_cjk(c):
58 | n = ord(c)
59 | for start, end, _ in CJK_INTERVALS:
60 | if start <= n <= end:
61 | return True
62 | return False
63 |
64 |
65 | def normalize_text(seed):
66 | seed = unicodedata.normalize('NFKD', seed)
67 | seed = seed.lower()
68 | # remove accents
69 | seed = u''.join([c for c in seed if not unicodedata.combining(c)])
70 | # normalize whitespaces
71 | seed = u' '.join(seed.split())
72 | # remove whitespaces between CJK
73 | seed = u''.join([
74 | seed[i] for i in range(len(seed))
75 | if not (seed[i] in string.whitespace and is_cjk(seed[i-1]) and is_cjk(seed[i+1]))
76 | ])
77 | return seed
78 |
79 |
80 | def load_words(language_name):
81 | if language_name == 'english':
82 | return english.words
83 |     language_module = importlib.import_module('torba.client.words.' + language_name)
84 | return list(map(
85 | lambda s: unicodedata.normalize('NFKD', s),
86 | language_module.words
87 | ))
88 |
89 |
90 | LANGUAGE_NAMES = {
91 | 'en': 'english',
92 | 'es': 'spanish',
93 | 'ja': 'japanese',
94 | 'pt': 'portuguese',
95 | 'zh': 'chinese_simplified'
96 | }
97 |
98 |
99 | class Mnemonic:
100 | # Seed derivation no longer follows BIP39
101 |     # Mnemonic phrase uses a hash-based checksum, instead of a wordlist-dependent checksum
102 |
103 | def __init__(self, lang='en'):
104 | language_name = LANGUAGE_NAMES.get(lang, 'english')
105 | self.words = load_words(language_name)
106 |
107 | @staticmethod
108 | def mnemonic_to_seed(mnemonic, passphrase=u''):
109 | pbkdf2_rounds = 2048
110 | mnemonic = normalize_text(mnemonic)
111 | passphrase = normalize_text(passphrase)
112 | return pbkdf2.PBKDF2(
113 | mnemonic, passphrase, iterations=pbkdf2_rounds, macmodule=hmac, digestmodule=hashlib.sha512
114 | ).read(64)
115 |
116 | def mnemonic_encode(self, i):
117 | n = len(self.words)
118 | words = []
119 | while i:
120 | x = i%n
121 | i = i//n
122 | words.append(self.words[x])
123 | return ' '.join(words)
124 |
125 | def mnemonic_decode(self, seed):
126 | n = len(self.words)
127 | words = seed.split()
128 | i = 0
129 | while words:
130 | word = words.pop()
131 | k = self.words.index(word)
132 | i = i*n + k
133 | return i
134 |
135 | def make_seed(self, prefix=SEED_PREFIX, num_bits=132):
136 | # increase num_bits in order to obtain a uniform distribution for the last word
137 | bpw = math.log(len(self.words), 2)
138 | # rounding
139 | n = int(math.ceil(num_bits/bpw) * bpw)
140 | entropy = 1
141 | while 0 < entropy < pow(2, n - bpw):
142 | # try again if seed would not contain enough words
143 | entropy = randbelow(pow(2, n))
144 | nonce = 0
145 | while True:
146 | nonce += 1
147 | i = entropy + nonce
148 | seed = self.mnemonic_encode(i)
149 | if i != self.mnemonic_decode(seed):
150 | raise Exception('Cannot extract same entropy from mnemonic!')
151 | if is_new_seed(seed, prefix):
152 | break
153 | return seed
154 |
155 |
156 | def is_new_seed(seed, prefix):
157 | seed = normalize_text(seed)
158 | seed_hash = hexlify(hmac_sha512(b"Seed version", seed.encode('utf8')))
159 | return seed_hash.startswith(prefix)
160 |
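A short illustrative sketch of the two-step flow above (not part of the module; output differs on every run and the pbkdf2 package must be installed): make_seed() produces a prefixed phrase, and mnemonic_to_seed() stretches it into the 64-byte root seed via PBKDF2.

    from torba.client.mnemonic import Mnemonic

    mnemonic = Mnemonic('en')
    phrase = mnemonic.make_seed()                  # e.g. a dozen English words
    root_seed = Mnemonic.mnemonic_to_seed(phrase)  # 64 bytes, typically fed to BIP32 derivation
    print(phrase)
    print(len(root_seed))  # 64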
--------------------------------------------------------------------------------
/torba/orchstr8/service.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | from aiohttp.web import Application, WebSocketResponse, json_response
4 | from aiohttp.http_websocket import WSMsgType, WSCloseCode
5 |
6 | from torba.client.util import satoshis_to_coins
7 | from .node import Conductor, set_logging
8 |
9 |
10 | PORT = 7954
11 |
12 |
13 | class WebSocketLogHandler(logging.Handler):
14 |
15 | def __init__(self, send_message):
16 | super().__init__()
17 | self.send_message = send_message
18 |
19 | def emit(self, record):
20 | try:
21 | self.send_message({
22 | 'type': 'log',
23 | 'name': record.name,
24 | 'message': self.format(record)
25 | })
26 | except Exception:
27 | self.handleError(record)
28 |
29 |
30 | class ConductorService:
31 |
32 | def __init__(self, stack: Conductor, loop: asyncio.AbstractEventLoop) -> None:
33 | self.stack = stack
34 | self.loop = loop
35 | self.app = Application()
36 | self.app.router.add_post('/start', self.start_stack)
37 | self.app.router.add_post('/generate', self.generate)
38 | self.app.router.add_post('/transfer', self.transfer)
39 | self.app.router.add_post('/balance', self.balance)
40 | self.app.router.add_get('/log', self.log)
41 | self.app['websockets'] = set()
42 | self.app.on_shutdown.append(self.on_shutdown)
43 | self.handler = self.app.make_handler()
44 | self.server = None
45 |
46 | async def start(self):
47 | self.server = await self.loop.create_server(
48 | self.handler, '0.0.0.0', PORT
49 | )
50 | print('serving on', self.server.sockets[0].getsockname())
51 |
52 | async def stop(self):
53 | await self.stack.stop()
54 | self.server.close()
55 | await self.server.wait_closed()
56 | await self.app.shutdown()
57 | await self.handler.shutdown(60.0)
58 | await self.app.cleanup()
59 |
60 | async def start_stack(self, _):
61 | set_logging(
62 | self.stack.ledger_module, logging.DEBUG, WebSocketLogHandler(self.send_message)
63 | )
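    | # each service is started only if it is not already running, so repeated /start calls are safe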
64 | self.stack.blockchain_started or await self.stack.start_blockchain()
65 | self.send_message({'type': 'service', 'name': 'blockchain', 'port': self.stack.blockchain_node.port})
66 | self.stack.spv_started or await self.stack.start_spv()
67 | self.send_message({'type': 'service', 'name': 'spv', 'port': self.stack.spv_node.port})
68 | self.stack.wallet_started or await self.stack.start_wallet()
69 | self.send_message({'type': 'service', 'name': 'wallet', 'port': self.stack.wallet_node.port})
70 | self.stack.wallet_node.ledger.on_header.listen(self.on_status)
71 | self.stack.wallet_node.ledger.on_transaction.listen(self.on_status)
72 | return json_response({'started': True})
73 |
74 | async def generate(self, request):
75 | data = await request.post()
76 | blocks = data.get('blocks', 1)
77 | await self.stack.blockchain_node.generate(int(blocks))
78 | return json_response({'blocks': blocks})
79 |
80 | async def transfer(self, request):
81 | data = await request.post()
82 | address = data.get('address')
83 | if not address and self.stack.wallet_started:
84 | address = await self.stack.wallet_node.account.receiving.get_or_create_usable_address()
85 | if not address:
86 | raise ValueError("No address was provided.")
87 | amount = data.get('amount', 1)
88 | txid = await self.stack.blockchain_node.send_to_address(address, amount)
89 | if self.stack.wallet_started:
90 | await self.stack.wallet_node.ledger.on_transaction.where(
91 | lambda e: e.tx.id == txid and e.address == address
92 | )
93 | return json_response({
94 | 'address': address,
95 | 'amount': amount,
96 | 'txid': txid
97 | })
98 |
99 | async def balance(self, _):
100 | return json_response({
101 | 'balance': await self.stack.blockchain_node.get_balance()
102 | })
103 |
104 | async def log(self, request):
105 | web_socket = WebSocketResponse()
106 | await web_socket.prepare(request)
107 | self.app['websockets'].add(web_socket)
108 | try:
109 | async for msg in web_socket:
110 | if msg.type == WSMsgType.TEXT:
111 | if msg.data == 'close':
112 | await web_socket.close()
113 | elif msg.type == WSMsgType.ERROR:
114 | print('web socket connection closed with exception %s' %
115 | web_socket.exception())
116 | finally:
117 | self.app['websockets'].remove(web_socket)
118 | return web_socket
119 |
120 | @staticmethod
121 | async def on_shutdown(app):
122 | for web_socket in app['websockets']:
123 | await web_socket.close(code=WSCloseCode.GOING_AWAY, message='Server shutdown')
124 |
125 | async def on_status(self, _):
126 | if not self.app['websockets']:
127 | return
128 | self.send_message({
129 | 'type': 'status',
130 | 'height': self.stack.wallet_node.ledger.headers.height,
131 | 'balance': satoshis_to_coins(await self.stack.wallet_node.account.get_balance()),
132 | 'miner': await self.stack.blockchain_node.get_balance()
133 | })
134 |
135 | def send_message(self, msg):
136 | for web_socket in self.app['websockets']:
137 | self.loop.create_task(web_socket.send_json(msg))
138 |
--------------------------------------------------------------------------------
/tests/client_tests/integration/test_transactions.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import asyncio
3 | from itertools import chain
4 | from torba.testcase import IntegrationTestCase
5 | from torba.client.util import satoshis_to_coins, coins_to_satoshis
6 |
7 |
8 | class BasicTransactionTests(IntegrationTestCase):
9 |
10 | VERBOSITY = logging.WARN
11 |
12 | async def test_variety_of_transactions_and_longish_history(self):
13 | await self.blockchain.generate(300)
14 | await self.assertBalance(self.account, '0.0')
15 | addresses = await self.account.receiving.get_addresses()
16 |
17 | # send 10 coins to each of the first 10 receiving addresses, then 10 more transactions worth
18 | # 10 coins each to the 10th receiving address, for a total of 20 UTXOs on the entire account
19 | sends = list(chain(
20 | (self.blockchain.send_to_address(address, 10) for address in addresses[:10]),
21 | (self.blockchain.send_to_address(addresses[9], 10) for _ in range(10))
22 | ))
23 | # use batching to reduce issues with send_to_address on cli
24 | for batch in range(0, len(sends), 10):
25 | txids = await asyncio.gather(*sends[batch:batch+10])
26 | await asyncio.wait([self.on_transaction_id(txid) for txid in txids])
27 | await self.assertBalance(self.account, '200.0')
28 | self.assertEqual(20, await self.account.get_utxo_count())
29 |
30 | # the address gap should have increased by 10 to cover the first 10 addresses we've used up
31 | addresses = await self.account.receiving.get_addresses()
32 | self.assertEqual(30, len(addresses))
33 |
34 | # there used to be a sync bug which failed to save TXIs between
35 | # daemon restarts; clearing the cache replicates that behavior
36 | self.ledger._tx_cache.clear()
37 |
38 | # spend from each of the first 10 addresses to the subsequent 10 addresses
39 | txs = []
40 | for address in addresses[10:20]:
41 | txs.append(await self.ledger.transaction_class.create(
42 | [],
43 | [self.ledger.transaction_class.output_class.pay_pubkey_hash(
44 | coins_to_satoshis('1.0'), self.ledger.address_to_hash160(address)
45 | )],
46 | [self.account], self.account
47 | ))
48 | await asyncio.wait([self.broadcast(tx) for tx in txs])
49 | await asyncio.wait([self.ledger.wait(tx) for tx in txs])
50 |
51 | # verify that a previous bug which failed to save TXIs doesn't come back
52 | # this check must happen before generating a new block
53 | self.assertTrue(all([
54 | tx.inputs[0].txo_ref.txo is not None
55 | for tx in await self.ledger.db.get_transactions(txid__in=[tx.id for tx in txs])
56 | ]))
57 |
58 | await self.blockchain.generate(1)
59 | await asyncio.wait([self.ledger.wait(tx) for tx in txs])
60 | await self.assertBalance(self.account, '199.99876')
61 |
62 | # 10 of the UTXOs have each been split into a 1 coin UTXO and a ~9 coin change UTXO
63 | self.assertEqual(30, await self.account.get_utxo_count())
64 |
65 | # spend all 30 UTXOs into a single 199 coin UTXO plus change
66 | tx = await self.ledger.transaction_class.create(
67 | [],
68 | [self.ledger.transaction_class.output_class.pay_pubkey_hash(
69 | coins_to_satoshis('199.0'), self.ledger.address_to_hash160(addresses[-1])
70 | )],
71 | [self.account], self.account
72 | )
73 | await self.broadcast(tx)
74 | await self.ledger.wait(tx)
75 | await self.blockchain.generate(1)
76 | await self.ledger.wait(tx)
77 |
78 | self.assertEqual(2, await self.account.get_utxo_count()) # 199 + change
79 | await self.assertBalance(self.account, '199.99649')
80 |
81 | async def test_sending_and_receiving(self):
82 | account1, account2 = self.account, self.wallet.generate_account(self.ledger)
83 | await self.ledger.subscribe_account(account2)
84 |
85 | await self.assertBalance(account1, '0.0')
86 | await self.assertBalance(account2, '0.0')
87 |
88 | addresses = await self.account.receiving.get_addresses()
89 | txids = await asyncio.gather(*(
90 | self.blockchain.send_to_address(address, 1.1) for address in addresses[:5]
91 | ))
92 | await asyncio.wait([self.on_transaction_id(txid) for txid in txids]) # mempool
93 | await self.blockchain.generate(1)
94 | await asyncio.wait([self.on_transaction_id(txid) for txid in txids]) # confirmed
95 | await self.assertBalance(account1, '5.5')
96 | await self.assertBalance(account2, '0.0')
97 |
98 | address2 = await account2.receiving.get_or_create_usable_address()
99 | tx = await self.ledger.transaction_class.create(
100 | [],
101 | [self.ledger.transaction_class.output_class.pay_pubkey_hash(
102 | coins_to_satoshis('2.0'), self.ledger.address_to_hash160(address2)
103 | )],
104 | [account1], account1
105 | )
106 | await self.broadcast(tx)
107 | await self.ledger.wait(tx) # mempool
108 | await self.blockchain.generate(1)
109 | await self.ledger.wait(tx) # confirmed
110 |
111 | await self.assertBalance(account1, '3.499802')
112 | await self.assertBalance(account2, '2.0')
113 |
114 | utxos = await self.account.get_utxos()
115 | tx = await self.ledger.transaction_class.create(
116 | [self.ledger.transaction_class.input_class.spend(utxos[0])],
117 | [],
118 | [account1], account1
119 | )
120 | await self.broadcast(tx)
121 | await self.ledger.wait(tx) # mempool
122 | await self.blockchain.generate(1)
123 | await self.ledger.wait(tx) # confirmed
124 |
125 | tx = (await account1.get_transactions())[1]
126 | self.assertEqual(satoshis_to_coins(tx.inputs[0].amount), '1.1')
127 | self.assertEqual(satoshis_to_coins(tx.inputs[1].amount), '1.1')
128 | self.assertEqual(satoshis_to_coins(tx.outputs[0].amount), '2.0')
129 | self.assertEqual(tx.outputs[0].get_address(self.ledger), address2)
130 | self.assertEqual(tx.outputs[0].is_change, False)
131 | self.assertEqual(tx.outputs[1].is_change, True)
132 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_coinselection.py:
--------------------------------------------------------------------------------
1 | from types import GeneratorType
2 |
3 | from torba.testcase import AsyncioTestCase
4 |
5 | from torba.coin.bitcoinsegwit import MainNetLedger as ledger_class
6 | from torba.client.coinselection import CoinSelector, MAXIMUM_TRIES
7 | from torba.client.constants import CENT
8 |
9 | from client_tests.unit.test_transaction import get_output as utxo
10 |
11 |
12 | NULL_HASH = b'\x00'*32
13 |
14 |
15 | def search(*args, **kwargs):
16 | selection = CoinSelector(*args, **kwargs).branch_and_bound()
17 | return [o.txo.amount for o in selection] if selection else selection
18 |
19 |
20 | class BaseSelectionTestCase(AsyncioTestCase):
21 |
22 | async def asyncSetUp(self):
23 | self.ledger = ledger_class({
24 | 'db': ledger_class.database_class(':memory:'),
25 | 'headers': ledger_class.headers_class(':memory:'),
26 | })
27 | await self.ledger.db.open()
28 |
29 | async def asyncTearDown(self):
30 | await self.ledger.db.close()
31 |
32 | def estimates(self, *args):
33 | txos = args[0] if isinstance(args[0], (GeneratorType, list)) else args
34 | return [txo.get_estimator(self.ledger) for txo in txos]
35 |
36 |
37 | class TestCoinSelectionTests(BaseSelectionTestCase):
38 |
39 | def test_empty_coins(self):
40 | self.assertEqual(CoinSelector([], 0, 0).select(), [])
41 |
42 | def test_skip_binary_search_if_total_not_enough(self):
43 | fee = utxo(CENT).get_estimator(self.ledger).fee
44 | big_pool = self.estimates(utxo(CENT+fee) for _ in range(100))
45 | selector = CoinSelector(big_pool, 101 * CENT, 0)
46 | self.assertEqual(selector.select(), [])
47 | self.assertEqual(selector.tries, 0) # Never tried.
48 | # check happy path
49 | selector = CoinSelector(big_pool, 100 * CENT, 0)
50 | self.assertEqual(len(selector.select()), 100)
51 | self.assertEqual(selector.tries, 201)
52 |
53 | def test_exact_match(self):
54 | fee = utxo(CENT).get_estimator(self.ledger).fee
55 | utxo_pool = self.estimates(
56 | utxo(CENT + fee),
57 | utxo(CENT),
58 | utxo(CENT - fee)
59 | )
60 | selector = CoinSelector(utxo_pool, CENT, 0)
61 | match = selector.select()
62 | self.assertEqual([CENT + fee], [c.txo.amount for c in match])
63 | self.assertTrue(selector.exact_match)
64 |
65 | def test_random_draw(self):
66 | utxo_pool = self.estimates(
67 | utxo(2 * CENT),
68 | utxo(3 * CENT),
69 | utxo(4 * CENT)
70 | )
71 | selector = CoinSelector(utxo_pool, CENT, 0, '\x00')
72 | match = selector.select()
73 | self.assertEqual([2 * CENT], [c.txo.amount for c in match])
74 | self.assertFalse(selector.exact_match)
75 |
76 | def test_pick(self):
77 | utxo_pool = self.estimates(
78 | utxo(1*CENT),
79 | utxo(1*CENT),
80 | utxo(3*CENT),
81 | utxo(5*CENT),
82 | utxo(10*CENT),
83 | )
84 | selector = CoinSelector(utxo_pool, 3*CENT, 0)
85 | match = selector.select()
86 | self.assertEqual([5*CENT], [c.txo.amount for c in match])
87 |
88 | def test_prefer_confirmed_strategy(self):
89 | utxo_pool = self.estimates(
90 | utxo(11*CENT, height=5),
91 | utxo(11*CENT, height=0),
92 | utxo(11*CENT, height=-2),
93 | utxo(11*CENT, height=5),
94 | )
95 | selector = CoinSelector(utxo_pool, 20*CENT, 0)
96 | match = selector.select("prefer_confirmed")
97 | self.assertEqual([5, 5], [c.txo.tx_ref.height for c in match])
98 |
99 |
100 | class TestOfficialBitcoinCoinSelectionTests(BaseSelectionTestCase):
101 |
102 | # Bitcoin implementation:
103 | # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/coinselection.cpp
104 | #
105 | # Bitcoin implementation tests:
106 | # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/test/coinselector_tests.cpp
107 | #
108 | # Branch and Bound coin selection white paper:
109 | # https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf
110 |
111 | def make_hard_case(self, utxos):
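    | # mirrors Bitcoin Core's exhaustion case: pairs of near-identical coin values force branch-and-bound to keep searching until it hits its iteration limit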
112 | target = 0
113 | utxo_pool = []
114 | for i in range(utxos):
115 | amount = 1 << (utxos+i)
116 | target += amount
117 | utxo_pool.append(utxo(amount))
118 | utxo_pool.append(utxo(amount + (1 << (utxos-1-i))))
119 | return self.estimates(utxo_pool), target
120 |
121 | def test_branch_and_bound_coin_selection(self):
122 | self.ledger.fee_per_byte = 0
123 |
124 | utxo_pool = self.estimates(
125 | utxo(1 * CENT),
126 | utxo(2 * CENT),
127 | utxo(3 * CENT),
128 | utxo(4 * CENT)
129 | )
130 |
131 | # Select 1 Cent
132 | self.assertEqual([1 * CENT], search(utxo_pool, 1 * CENT, 0.5 * CENT))
133 |
134 | # Select 2 Cent
135 | self.assertEqual([2 * CENT], search(utxo_pool, 2 * CENT, 0.5 * CENT))
136 |
137 | # Select 5 Cent
138 | self.assertEqual([3 * CENT, 2 * CENT], search(utxo_pool, 5 * CENT, 0.5 * CENT))
139 |
140 | # Select 11 Cent, not possible
141 | self.assertEqual([], search(utxo_pool, 11 * CENT, 0.5 * CENT))
142 |
143 | # Select 10 Cent
144 | utxo_pool += self.estimates(utxo(5 * CENT))
145 | self.assertEqual(
146 | [4 * CENT, 3 * CENT, 2 * CENT, 1 * CENT],
147 | search(utxo_pool, 10 * CENT, 0.5 * CENT)
148 | )
149 |
150 | # Negative effective value
151 | # Select 10 Cent but have 1 Cent not be possible because too small
152 | # TODO: bitcoin has [5, 3, 2]
153 | self.assertEqual(
154 | [4 * CENT, 3 * CENT, 2 * CENT, 1 * CENT],
155 | search(utxo_pool, 10 * CENT, 5000)
156 | )
157 |
158 | # Select 0.25 Cent, not possible
159 | self.assertEqual(search(utxo_pool, 0.25 * CENT, 0.5 * CENT), [])
160 |
161 | # Iteration exhaustion test
162 | utxo_pool, target = self.make_hard_case(17)
163 | selector = CoinSelector(utxo_pool, target, 0)
164 | self.assertEqual(selector.branch_and_bound(), [])
165 | self.assertEqual(selector.tries, MAXIMUM_TRIES) # Should exhaust
166 | utxo_pool, target = self.make_hard_case(14)
167 | self.assertIsNotNone(search(utxo_pool, target, 0)) # Should not exhaust
168 |
169 | # Test same value early bailout optimization
170 | utxo_pool = self.estimates([
171 | utxo(7 * CENT),
172 | utxo(7 * CENT),
173 | utxo(7 * CENT),
174 | utxo(7 * CENT),
175 | utxo(2 * CENT)
176 | ] + [utxo(5 * CENT)]*50000)
177 | self.assertEqual(
178 | [7 * CENT, 7 * CENT, 7 * CENT, 7 * CENT, 2 * CENT],
179 | search(utxo_pool, 30 * CENT, 5000)
180 | )
181 |
182 | # Select 1 Cent from a pool where every UTXO is greater than 5 Cent
183 | utxo_pool = self.estimates(utxo(i * CENT) for i in range(5, 21))
184 | for _ in range(100):
185 | self.assertEqual(search(utxo_pool, 1 * CENT, 2 * CENT), [])
186 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_ledger.py:
--------------------------------------------------------------------------------
1 | import os
2 | from binascii import hexlify
3 |
4 | from torba.coin.bitcoinsegwit import MainNetLedger
5 | from torba.client.wallet import Wallet
6 |
7 | from client_tests.unit.test_transaction import get_transaction, get_output
8 | from client_tests.unit.test_headers import BitcoinHeadersTestCase, block_bytes
9 |
10 |
11 | class MockNetwork:
12 |
13 | def __init__(self, history, transaction):
14 | self.history = history
15 | self.transaction = transaction
16 | self.address = None
17 | self.get_history_called = []
18 | self.get_transaction_called = []
19 | self.is_connected = False
20 |
21 | async def get_history(self, address):
22 | self.get_history_called.append(address)
23 | self.address = address
24 | return self.history
25 |
26 | async def get_merkle(self, txid, height):
27 | return {'merkle': ['abcd01'], 'pos': 1}
28 |
29 | async def get_transaction(self, tx_hash):
30 | self.get_transaction_called.append(tx_hash)
31 | return self.transaction[tx_hash]
32 |
33 |
34 | class LedgerTestCase(BitcoinHeadersTestCase):
35 |
36 | async def asyncSetUp(self):
37 | self.ledger = MainNetLedger({
38 | 'db': MainNetLedger.database_class(':memory:'),
39 | 'headers': MainNetLedger.headers_class(':memory:')
40 | })
41 | await self.ledger.db.open()
42 |
43 | async def asyncTearDown(self):
44 | await self.ledger.db.close()
45 |
46 | def make_header(self, **kwargs):
47 | header = {
48 | 'bits': 486604799,
49 | 'block_height': 0,
50 | 'merkle_root': b'4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b',
51 | 'nonce': 2083236893,
52 | 'prev_block_hash': b'0000000000000000000000000000000000000000000000000000000000000000',
53 | 'timestamp': 1231006505,
54 | 'version': 1
55 | }
56 | header.update(kwargs)
57 | header['merkle_root'] = header['merkle_root'].ljust(64, b'a')
58 | header['prev_block_hash'] = header['prev_block_hash'].ljust(64, b'0')
59 | return self.ledger.headers.serialize(header)
60 |
61 | def add_header(self, **kwargs):
62 | serialized = self.make_header(**kwargs)
63 | self.ledger.headers.io.seek(0, os.SEEK_END)
64 | self.ledger.headers.io.write(serialized)
65 | self.ledger.headers._size = None
66 |
67 |
68 | class TestSynchronization(LedgerTestCase):
69 |
70 | async def test_update_history(self):
71 | account = self.ledger.account_class.generate(self.ledger, Wallet(), "torba")
72 | address = await account.receiving.get_or_create_usable_address()
73 | address_details = await self.ledger.db.get_address(address=address)
74 | self.assertEqual(address_details['history'], None)
75 |
76 | self.add_header(block_height=0, merkle_root=b'abcd04')
77 | self.add_header(block_height=1, merkle_root=b'abcd04')
78 | self.add_header(block_height=2, merkle_root=b'abcd04')
79 | self.add_header(block_height=3, merkle_root=b'abcd04')
80 | self.ledger.network = MockNetwork([
81 | {'tx_hash': 'abcd01', 'height': 0},
82 | {'tx_hash': 'abcd02', 'height': 1},
83 | {'tx_hash': 'abcd03', 'height': 2},
84 | ], {
85 | 'abcd01': hexlify(get_transaction(get_output(1)).raw),
86 | 'abcd02': hexlify(get_transaction(get_output(2)).raw),
87 | 'abcd03': hexlify(get_transaction(get_output(3)).raw),
88 | })
89 | await self.ledger.update_history(address, '')
90 | self.assertEqual(self.ledger.network.get_history_called, [address])
91 | self.assertEqual(self.ledger.network.get_transaction_called, ['abcd01', 'abcd02', 'abcd03'])
92 |
93 | address_details = await self.ledger.db.get_address(address=address)
94 | self.assertEqual(
95 | address_details['history'],
96 | '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
97 | 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
98 | 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
99 | )
100 |
101 | self.ledger.network.get_history_called = []
102 | self.ledger.network.get_transaction_called = []
103 | await self.ledger.update_history(address, '')
104 | self.assertEqual(self.ledger.network.get_history_called, [address])
105 | self.assertEqual(self.ledger.network.get_transaction_called, [])
106 |
107 | self.ledger.network.history.append({'tx_hash': 'abcd04', 'height': 3})
108 | self.ledger.network.transaction['abcd04'] = hexlify(get_transaction(get_output(4)).raw)
109 | self.ledger.network.get_history_called = []
110 | self.ledger.network.get_transaction_called = []
111 | await self.ledger.update_history(address, '')
112 | self.assertEqual(self.ledger.network.get_history_called, [address])
113 | self.assertEqual(self.ledger.network.get_transaction_called, ['abcd04'])
114 | address_details = await self.ledger.db.get_address(address=address)
115 | self.assertEqual(
116 | address_details['history'],
117 | '252bda9b22cc902ca2aa2de3548ee8baf06b8501ff7bfb3b0b7d980dbd1bf792:0:'
118 | 'ab9c0654dd484ac20437030f2034e25dcb29fc507e84b91138f80adc3af738f9:1:'
119 | 'a2ae3d1db3c727e7d696122cab39ee20a7f81856dab7019056dd539f38c548a0:2:'
120 | '047cf1d53ef68f0fd586d46f90c09ff8e57a4180f67e7f4b8dd0135c3741e828:3:'
121 | )
122 |
123 |
124 | class MocHeaderNetwork:
125 | def __init__(self, responses):
126 | self.responses = responses
127 |
128 | async def get_headers(self, height, blocks):
129 | return self.responses[height]
130 |
131 |
132 | class BlockchainReorganizationTests(LedgerTestCase):
133 |
134 | async def test_1_block_reorganization(self):
135 | self.ledger.network = MocHeaderNetwork({
136 | 20: {'height': 20, 'count': 5, 'hex': hexlify(
137 | self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
138 | )},
139 | 25: {'height': 25, 'count': 0, 'hex': b''}
140 | })
141 | headers = self.ledger.headers
142 | await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
143 | self.add_header(block_height=len(headers))
144 | self.assertEqual(headers.height, 20)
145 | await self.ledger.receive_header([{
146 | 'height': 21, 'hex': hexlify(self.make_header(block_height=21))
147 | }])
148 |
149 | async def test_3_block_reorganization(self):
150 | self.ledger.network = MocHeaderNetwork({
151 | 20: {'height': 20, 'count': 5, 'hex': hexlify(
152 | self.get_bytes(after=block_bytes(20), upto=block_bytes(5))
153 | )},
154 | 21: {'height': 21, 'count': 1, 'hex': hexlify(self.make_header(block_height=21))},
155 | 22: {'height': 22, 'count': 1, 'hex': hexlify(self.make_header(block_height=22))},
156 | 25: {'height': 25, 'count': 0, 'hex': b''}
157 | })
158 | headers = self.ledger.headers
159 | await headers.connect(0, self.get_bytes(upto=block_bytes(20)))
160 | self.add_header(block_height=len(headers))
161 | self.add_header(block_height=len(headers))
162 | self.add_header(block_height=len(headers))
163 | self.assertEqual(headers.height, 22)
164 | await self.ledger.receive_header(({
165 | 'height': 23, 'hex': hexlify(self.make_header(block_height=23))
166 | },))
167 |
--------------------------------------------------------------------------------
/torba/client/baseheader.py:
--------------------------------------------------------------------------------
1 | import os
2 | import asyncio
3 | import logging
4 | from io import BytesIO
5 | from typing import Optional, Iterator, Tuple
6 | from binascii import hexlify
7 |
8 | from torba.client.util import ArithUint256
9 | from torba.client.hash import double_sha256
10 |
11 | log = logging.getLogger(__name__)
12 |
13 |
14 | class InvalidHeader(Exception):
15 |
16 | def __init__(self, height, message):
17 | super().__init__(message)
18 | self.message = message
19 | self.height = height
20 |
21 |
22 | class BaseHeaders:
23 |
24 | header_size: int
25 | chunk_size: int
26 |
27 | max_target: int
28 | genesis_hash: Optional[bytes]
29 | target_timespan: int
30 |
31 | validate_difficulty: bool = True
32 |
33 | def __init__(self, path) -> None:
34 | if path == ':memory:':
35 | self.io = BytesIO()
36 | self.path = path
37 | self._size: Optional[int] = None
38 | self._header_connect_lock = asyncio.Lock()
39 |
40 | async def open(self):
41 | if self.path != ':memory:':
42 | if not os.path.exists(self.path):
43 | self.io = open(self.path, 'w+b')
44 | else:
45 | self.io = open(self.path, 'r+b')
46 |
47 | async def close(self):
48 | self.io.close()
49 |
50 | @staticmethod
51 | def serialize(header: dict) -> bytes:
52 | raise NotImplementedError
53 |
54 | @staticmethod
55 | def deserialize(height, header):
56 | raise NotImplementedError
57 |
58 | def get_next_chunk_target(self, chunk: int) -> ArithUint256:
59 | return ArithUint256(self.max_target)
60 |
61 | @staticmethod
62 | def get_next_block_target(chunk_target: ArithUint256, previous: Optional[dict],
63 | current: Optional[dict]) -> ArithUint256:
64 | return chunk_target
65 |
66 | def __len__(self) -> int:
67 | if self._size is None:
68 | self._size = self.io.seek(0, os.SEEK_END) // self.header_size
69 | return self._size
70 |
71 | def __bool__(self):
72 | return True
73 |
74 | def __getitem__(self, height) -> dict:
75 | assert not isinstance(height, slice), \
76 | "Slicing of header chain has not been implemented yet."
77 | return self.deserialize(height, self.get_raw_header(height))
78 |
79 | def get_raw_header(self, height) -> bytes:
80 | self.io.seek(height * self.header_size, os.SEEK_SET)
81 | return self.io.read(self.header_size)
82 |
83 | @property
84 | def height(self) -> int:
85 | return len(self)-1
86 |
87 | def hash(self, height=None) -> bytes:
88 | return self.hash_header(
89 | self.get_raw_header(height if height is not None else self.height)
90 | )
91 |
92 | @staticmethod
93 | def hash_header(header: bytes) -> bytes:
94 | if header is None:
95 | return b'0' * 64
96 | return hexlify(double_sha256(header)[::-1])
97 |
98 | async def connect(self, start: int, headers: bytes) -> int:
99 | added = 0
100 | bail = False
101 | loop = asyncio.get_running_loop()
102 | async with self._header_connect_lock:
103 | for height, chunk in self._iterate_chunks(start, headers):
104 | try:
105 | # validate_chunk() is CPU bound and reads previous chunks from file system
106 | await loop.run_in_executor(None, self.validate_chunk, height, chunk)
107 | except InvalidHeader as e:
108 | bail = True
109 | chunk = chunk[:(height-e.height)*self.header_size]
110 | written = 0
111 | if chunk:
112 | self.io.seek(height * self.header_size, os.SEEK_SET)
113 | written = self.io.write(chunk) // self.header_size
114 | self.io.truncate()
115 | # .seek()/.write()/.truncate() might also .flush() when needed
116 | # the goal here is mainly to ensure we're definitely flush()'ing
117 | await loop.run_in_executor(None, self.io.flush)
118 | self._size = None
119 | added += written
120 | if bail:
121 | break
122 | return added
123 |
124 | def validate_chunk(self, height, chunk):
125 | previous_hash, previous_header, previous_previous_header = None, None, None
126 | if height > 0:
127 | previous_header = self[height-1]
128 | previous_hash = self.hash(height-1)
129 | if height > 1:
130 | previous_previous_header = self[height-2]
131 | chunk_target = self.get_next_chunk_target(height // 2016 - 1)
132 | for current_hash, current_header in self._iterate_headers(height, chunk):
133 | block_target = self.get_next_block_target(chunk_target, previous_previous_header, previous_header)
134 | self.validate_header(height, current_hash, current_header, previous_hash, block_target)
135 | previous_previous_header = previous_header
136 | previous_header = current_header
137 | previous_hash = current_hash
138 |
139 | def validate_header(self, height: int, current_hash: bytes,
140 | header: dict, previous_hash: bytes, target: ArithUint256):
141 |
142 | if previous_hash is None:
143 | if self.genesis_hash is not None and self.genesis_hash != current_hash:
144 | raise InvalidHeader(
145 | height, "genesis header doesn't match: {} vs expected {}".format(
146 | current_hash.decode(), self.genesis_hash.decode())
147 | )
148 | return
149 |
150 | if header['prev_block_hash'] != previous_hash:
151 | raise InvalidHeader(
152 | height, "previous hash mismatch: {} vs expected {}".format(
153 | header['prev_block_hash'].decode(), previous_hash.decode())
154 | )
155 |
156 | if self.validate_difficulty:
157 |
158 | if header['bits'] != target.compact:
159 | raise InvalidHeader(
160 | height, "bits mismatch: {} vs expected {}".format(
161 | header['bits'], target.compact)
162 | )
163 |
164 | proof_of_work = self.get_proof_of_work(current_hash)
165 | if proof_of_work > target:
166 | raise InvalidHeader(
167 | height, "insufficient proof of work: {} vs target {}".format(
168 | proof_of_work.value, target.value)
169 | )
170 |
171 | @staticmethod
172 | def get_proof_of_work(header_hash: bytes) -> ArithUint256:
173 | return ArithUint256(int(b'0x' + header_hash, 16))
174 |
175 | def _iterate_chunks(self, height: int, headers: bytes) -> Iterator[Tuple[int, bytes]]:
176 | assert len(headers) % self.header_size == 0
177 | start = 0
178 | end = (self.chunk_size - height % self.chunk_size) * self.header_size
179 | while start < end:
180 | yield height + (start // self.header_size), headers[start:end]
181 | start = end
182 | end = min(len(headers), end + self.chunk_size * self.header_size)
183 |
184 | def _iterate_headers(self, height: int, headers: bytes) -> Iterator[Tuple[bytes, dict]]:
185 | assert len(headers) % self.header_size == 0
186 | for idx in range(len(headers) // self.header_size):
187 | start, end = idx * self.header_size, (idx + 1) * self.header_size
188 | header = headers[start:end]
189 | yield self.hash_header(header), self.deserialize(height+idx, header)
190 |
--------------------------------------------------------------------------------
/torba/client/hash.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016-2017, Neil Booth
2 | # Copyright (c) 2018, LBRY Inc.
3 | #
4 | # All rights reserved.
5 | #
6 | # See the file "LICENCE" for information about the copyright
7 | # and warranty status of this software.
8 |
9 | """ Cryptography hash functions and related classes. """
10 |
11 | import os
12 | import base64
13 | import hashlib
14 | import hmac
15 | import typing
16 | from binascii import hexlify, unhexlify
17 | from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
18 | from cryptography.hazmat.primitives.ciphers import Cipher, modes
19 | from cryptography.hazmat.primitives.ciphers.algorithms import AES
20 | from cryptography.hazmat.primitives.padding import PKCS7
21 | from cryptography.hazmat.backends import default_backend
22 |
23 | from torba.client.util import bytes_to_int, int_to_bytes
24 | from torba.client.constants import NULL_HASH32
25 |
26 |
27 | class TXRef:
28 |
29 | __slots__ = '_id', '_hash'
30 |
31 | def __init__(self):
32 | self._id = None
33 | self._hash = None
34 |
35 | @property
36 | def id(self):
37 | return self._id
38 |
39 | @property
40 | def hash(self):
41 | return self._hash
42 |
43 | @property
44 | def height(self):
45 | return -1
46 |
47 | @property
48 | def is_null(self):
49 | return self.hash == NULL_HASH32
50 |
51 |
52 | class TXRefImmutable(TXRef):
53 |
54 | __slots__ = ('_height',)
55 |
56 | def __init__(self):
57 | super().__init__()
58 | self._height = -1
59 |
60 | @classmethod
61 | def from_hash(cls, tx_hash: bytes, height: int) -> 'TXRefImmutable':
62 | ref = cls()
63 | ref._hash = tx_hash
64 | ref._id = hexlify(tx_hash[::-1]).decode()
65 | ref._height = height
66 | return ref
67 |
68 | @classmethod
69 | def from_id(cls, tx_id: str, height: int) -> 'TXRefImmutable':
70 | ref = cls()
71 | ref._id = tx_id
72 | ref._hash = unhexlify(tx_id)[::-1]
73 | ref._height = height
74 | return ref
75 |
76 | @property
77 | def height(self):
78 | return self._height
79 |
80 |
81 | def sha256(x):
82 | """ Simple wrapper of hashlib sha256. """
83 | return hashlib.sha256(x).digest()
84 |
85 |
86 | def sha512(x):
87 | """ Simple wrapper of hashlib sha512. """
88 | return hashlib.sha512(x).digest()
89 |
90 |
91 | def ripemd160(x):
92 | """ Simple wrapper of hashlib ripemd160. """
93 | h = hashlib.new('ripemd160')
94 | h.update(x)
95 | return h.digest()
96 |
97 |
98 | def double_sha256(x):
99 | """ SHA-256 of SHA-256, as used extensively in bitcoin. """
100 | return sha256(sha256(x))
101 |
102 |
103 | def hmac_sha512(key, msg):
104 | """ Use SHA-512 to provide an HMAC. """
105 | return hmac.new(key, msg, hashlib.sha512).digest()
106 |
107 |
108 | def hash160(x):
109 | """ RIPEMD-160 of SHA-256.
110 | Used to make bitcoin addresses from pubkeys. """
111 | return ripemd160(sha256(x))
112 |
113 |
114 | def hash_to_hex_str(x):
115 | """ Convert a big-endian binary hash to displayed hex string.
116 | Display form of a binary hash is reversed and converted to hex. """
117 | return hexlify(x[::-1])
118 |
119 |
120 | def hex_str_to_hash(x):
121 | """ Convert a displayed hex string to a binary hash. """
122 | return unhexlify(x)[::-1]
123 |
124 |
125 | def aes_encrypt(secret: str, value: str, init_vector: bytes = None) -> str:
126 | if init_vector is not None:
127 | assert len(init_vector) == 16
128 | else:
129 | init_vector = os.urandom(16)
130 | key = double_sha256(secret.encode())
131 | encryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).encryptor()
132 | padder = PKCS7(AES.block_size).padder()
133 | padded_data = padder.update(value.encode()) + padder.finalize()
134 | encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
135 | return base64.b64encode(init_vector + encrypted_data).decode()
136 |
137 |
138 | def aes_decrypt(secret: str, value: str) -> typing.Tuple[str, bytes]:
139 | data = base64.b64decode(value.encode())
140 | key = double_sha256(secret.encode())
141 | init_vector, data = data[:16], data[16:]
142 | decryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).decryptor()
143 | unpadder = PKCS7(AES.block_size).unpadder()
144 | result = unpadder.update(decryptor.update(data)) + unpadder.finalize()
145 | return result.decode(), init_vector
146 |
147 |
148 | def better_aes_encrypt(secret: str, value: bytes) -> bytes:
149 | init_vector = os.urandom(16)
150 | key = scrypt(secret.encode(), salt=init_vector)
151 | encryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).encryptor()
152 | padder = PKCS7(AES.block_size).padder()
153 | padded_data = padder.update(value) + padder.finalize()
154 | encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
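    | # the 's:8192:16:1:' prefix records the scrypt parameters (n=8192, r=16, p=1) so better_aes_decrypt can re-derive the same key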
155 | return base64.b64encode(b's:8192:16:1:' + init_vector + encrypted_data)
156 |
157 |
158 | def better_aes_decrypt(secret: str, value: bytes) -> bytes:
159 | data = base64.b64decode(value)
160 | _, scryp_n, scrypt_r, scrypt_p, data = data.split(b':', maxsplit=4)
161 | init_vector, data = data[:16], data[16:]
162 | key = scrypt(secret.encode(), init_vector, int(scryp_n), int(scrypt_r), int(scrypt_p))
163 | decryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).decryptor()
164 | unpadder = PKCS7(AES.block_size).unpadder()
165 | return unpadder.update(decryptor.update(data)) + unpadder.finalize()
166 |
167 |
168 | def scrypt(passphrase, salt, scrypt_n=1<<13, scrypt_r=16, scrypt_p=1):
169 | kdf = Scrypt(salt, length=32, n=scrypt_n, r=scrypt_r, p=scrypt_p, backend=default_backend())
170 | return kdf.derive(passphrase)
171 |
172 |
173 | class Base58Error(Exception):
174 | """ Exception used for Base58 errors. """
175 |
176 |
177 | class Base58:
178 | """ Class providing base 58 functionality. """
179 |
180 | chars = u'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
181 | assert len(chars) == 58
182 | char_map = {c: n for n, c in enumerate(chars)}
183 |
184 | @classmethod
185 | def char_value(cls, c):
186 | val = cls.char_map.get(c)
187 | if val is None:
188 | raise Base58Error('invalid base 58 character "{}"'.format(c))
189 | return val
190 |
191 | @classmethod
192 | def decode(cls, txt):
193 | """ Decodes txt into a big-endian bytearray. """
194 | if isinstance(txt, memoryview):
195 | txt = str(txt)
196 |
197 | if isinstance(txt, bytes):
198 | txt = txt.decode()
199 |
200 | if not isinstance(txt, str):
201 | raise TypeError('a string is required')
202 |
203 | if not txt:
204 | raise Base58Error('string cannot be empty')
205 |
206 | value = 0
207 | for c in txt:
208 | value = value * 58 + cls.char_value(c)
209 |
210 | result = int_to_bytes(value)
211 |
212 | # Prepend leading zero bytes if necessary
213 | count = 0
214 | for c in txt:
215 | if c != u'1':
216 | break
217 | count += 1
218 | if count:
219 | result = bytes((0,)) * count + result
220 |
221 | return result
222 |
223 | @classmethod
224 | def encode(cls, be_bytes):
225 | """Converts a big-endian bytearray into a base58 string."""
226 | value = bytes_to_int(be_bytes)
227 |
228 | txt = u''
229 | while value:
230 | value, mod = divmod(value, 58)
231 | txt += cls.chars[mod]
232 |
233 | for byte in be_bytes:
234 | if byte != 0:
235 | break
236 | txt += u'1'
237 |
238 | return txt[::-1]
239 |
240 | @classmethod
241 | def decode_check(cls, txt, hash_fn=double_sha256):
242 | """ Decodes a Base58Check-encoded string to a payload. The version prefixes it. """
243 | be_bytes = cls.decode(txt)
244 | result, check = be_bytes[:-4], be_bytes[-4:]
245 | if check != hash_fn(result)[:4]:
246 | raise Base58Error('invalid base 58 checksum for {}'.format(txt))
247 | return result
248 |
249 | @classmethod
250 | def encode_check(cls, payload, hash_fn=double_sha256):
251 | """ Encodes a payload bytearray (which includes the version byte(s))
252 | into a Base58Check string."""
253 | be_bytes = payload + hash_fn(payload)[:4]
254 | return cls.encode(be_bytes)
255 |
--------------------------------------------------------------------------------
/torba/rpc/framing.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2018, Neil Booth
2 | #
3 | # All rights reserved.
4 | #
5 | # The MIT License (MIT)
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining
8 | # a copy of this software and associated documentation files (the
9 | # "Software"), to deal in the Software without restriction, including
10 | # without limitation the rights to use, copy, modify, merge, publish,
11 | # distribute, sublicense, and/or sell copies of the Software, and to
12 | # permit persons to whom the Software is furnished to do so, subject to
13 | # the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be
16 | # included in all copies or substantial portions of the Software.
17 | #
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
22 | # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
24 | # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 |
26 | """RPC message framing in a byte stream."""
27 |
28 | __all__ = ('FramerBase', 'NewlineFramer', 'BinaryFramer', 'BitcoinFramer',
29 | 'OversizedPayloadError', 'BadChecksumError', 'BadMagicError')
30 |
31 | from hashlib import sha256 as _sha256
32 | from struct import Struct
33 | from asyncio import Queue
34 |
35 |
36 | class FramerBase(object):
37 | """Abstract base class for a framer.
38 |
39 | A framer breaks an incoming byte stream into protocol messages,
40 | buffering if necessary. It also frames outgoing messages into
41 | a byte stream.
42 | """
43 |
44 | def frame(self, message):
45 | """Return the framed message."""
46 | raise NotImplementedError
47 |
48 | def received_bytes(self, data):
49 | """Pass incoming network bytes."""
50 | raise NotImplementedError
51 |
52 | async def receive_message(self):
53 | """Wait for a complete unframed message to arrive, and return it."""
54 | raise NotImplementedError
55 |
56 |
57 | class NewlineFramer(FramerBase):
58 | """A framer for a protocol where messages are separated by newlines."""
59 |
60 | # The default max_size value is motivated by JSONRPC, where a
61 | # normal request will be 250 bytes or less, and a reasonable
62 | # batch may contain 4000 requests.
63 | def __init__(self, max_size=250 * 4000):
64 | """max_size - an anti-DoS measure. If, after processing an incoming
65 | message, buffered data would exceed max_size bytes, that
66 | buffered data is dropped entirely and the framer waits for a
67 | newline character to re-synchronize the stream.
68 | """
69 | self.max_size = max_size
70 | self.queue = Queue()
71 | self.received_bytes = self.queue.put_nowait
72 | self.synchronizing = False
73 | self.residual = b''
74 |
75 | def frame(self, message):
76 | return message + b'\n'
77 |
78 | async def receive_message(self):
79 | parts = []
80 | buffer_size = 0
81 | while True:
82 | part = self.residual
83 | self.residual = b''
84 | if not part:
85 | part = await self.queue.get()
86 |
87 | npos = part.find(b'\n')
88 | if npos == -1:
89 | parts.append(part)
90 | buffer_size += len(part)
91 | # Ignore over-sized messages; re-synchronize
92 | if buffer_size <= self.max_size:
93 | continue
94 | self.synchronizing = True
95 | raise MemoryError(f'dropping message over {self.max_size:,d} '
96 | f'bytes and re-synchronizing')
97 |
98 | tail, self.residual = part[:npos], part[npos + 1:]
99 | if self.synchronizing:
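    | # the tail of an over-sized message is discarded; recurse to wait for the next complete message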
100 | self.synchronizing = False
101 | return await self.receive_message()
102 | else:
103 | parts.append(tail)
104 | return b''.join(parts)
105 |
106 |
107 | class ByteQueue(object):
108 | """A producer-comsumer queue. Incoming network data is put as it
109 | arrives, and the consumer calls an async method waiting for data of
110 | a specific length."""
111 |
112 | def __init__(self):
113 | self.queue = Queue()
114 | self.parts = []
115 | self.parts_len = 0
116 | self.put_nowait = self.queue.put_nowait
117 |
118 | async def receive(self, size):
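    | # buffer queued chunks until at least 'size' bytes are available; return exactly 'size' and keep the remainder for the next call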
119 | while self.parts_len < size:
120 | part = await self.queue.get()
121 | self.parts.append(part)
122 | self.parts_len += len(part)
123 | self.parts_len -= size
124 | whole = b''.join(self.parts)
125 | self.parts = [whole[size:]]
126 | return whole[:size]
127 |
128 |
129 | class BinaryFramer(object):
130 | """A framer for binary messaging protocols."""
131 |
132 | def __init__(self):
133 | self.byte_queue = ByteQueue()
134 | self.message_queue = Queue()
135 | self.received_bytes = self.byte_queue.put_nowait
136 |
137 | def frame(self, message):
138 | command, payload = message
139 | return b''.join((
140 | self._build_header(command, payload),
141 | payload
142 | ))
143 |
144 | async def receive_message(self):
145 | command, payload_len, checksum = await self._receive_header()
146 | payload = await self.byte_queue.receive(payload_len)
147 | payload_checksum = self._checksum(payload)
148 | if payload_checksum != checksum:
149 | raise BadChecksumError(payload_checksum, checksum)
150 | return command, payload
151 |
152 | def _checksum(self, payload):
153 | raise NotImplementedError
154 |
155 | def _build_header(self, command, payload):
156 | raise NotImplementedError
157 |
158 | async def _receive_header(self):
159 | raise NotImplementedError
160 |
161 |
162 | # Helpers
163 | struct_le_I = Struct('<I')
236 | if payload_len > 1024 * 1024:
237 | if command != b'block' or payload_len > self._max_block_size:
238 | raise OversizedPayloadError(command, payload_len)
239 | return command, payload_len, checksum
240 |
--------------------------------------------------------------------------------
/torba/testcase.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import logging
3 | import functools
4 | import asyncio
5 | from asyncio.runners import _cancel_all_tasks # type: ignore
6 | import unittest
7 | from unittest.case import _Outcome
8 | from typing import Optional
9 | from torba.orchstr8 import Conductor
10 | from torba.orchstr8.node import BlockchainNode, WalletNode
11 | from torba.client.baseledger import BaseLedger
12 | from torba.client.baseaccount import BaseAccount
13 | from torba.client.basemanager import BaseWalletManager
14 | from torba.client.wallet import Wallet
15 | from torba.client.util import satoshis_to_coins
16 |
17 |
18 | class ColorHandler(logging.StreamHandler):
19 |
20 | level_color = {
21 | logging.DEBUG: "black",
22 | logging.INFO: "light_gray",
23 | logging.WARNING: "yellow",
24 | logging.ERROR: "red"
25 | }
26 |
27 | color_code = dict(
28 | black=30,
29 | red=31,
30 | green=32,
31 | yellow=33,
32 | blue=34,
33 | magenta=35,
34 | cyan=36,
35 | white=37,
36 | light_gray='0;37',
37 | dark_gray='1;30'
38 | )
39 |
40 | def emit(self, record):
41 | try:
42 | msg = self.format(record)
43 | color_name = self.level_color.get(record.levelno, "black")
44 | color_code = self.color_code[color_name]
45 | stream = self.stream
46 | stream.write('\x1b[%sm%s\x1b[0m' % (color_code, msg))
47 | stream.write(self.terminator)
48 | self.flush()
49 | except Exception:
50 | self.handleError(record)
51 |
52 |
53 | HANDLER = ColorHandler(sys.stdout)
54 | HANDLER.setFormatter(
55 | logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
56 | )
57 | logging.getLogger().addHandler(HANDLER)
58 |
59 |
60 | class AsyncioTestCase(unittest.TestCase):
61 | # Implementation inspired by discussion:
62 | # https://bugs.python.org/issue32972
63 |
64 | maxDiff = None
65 |
66 | async def asyncSetUp(self): # pylint: disable=C0103
67 | pass
68 |
69 | async def asyncTearDown(self): # pylint: disable=C0103
70 | pass
71 |
72 | def run(self, result=None): # pylint: disable=R0915
73 | orig_result = result
74 | if result is None:
75 | result = self.defaultTestResult()
76 | startTestRun = getattr(result, 'startTestRun', None) # pylint: disable=C0103
77 | if startTestRun is not None:
78 | startTestRun()
79 |
80 | result.startTest(self)
81 |
82 | testMethod = getattr(self, self._testMethodName) # pylint: disable=C0103
83 | if (getattr(self.__class__, "__unittest_skip__", False) or
84 | getattr(testMethod, "__unittest_skip__", False)):
85 | # If the class or method was skipped.
86 | try:
87 | skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
88 | or getattr(testMethod, '__unittest_skip_why__', ''))
89 | self._addSkip(result, self, skip_why)
90 | finally:
91 | result.stopTest(self)
92 | return
93 | expecting_failure_method = getattr(testMethod,
94 | "__unittest_expecting_failure__", False)
95 | expecting_failure_class = getattr(self,
96 | "__unittest_expecting_failure__", False)
97 | expecting_failure = expecting_failure_class or expecting_failure_method
98 | outcome = _Outcome(result)
99 |
100 | self.loop = asyncio.new_event_loop() # pylint: disable=W0201
101 | asyncio.set_event_loop(self.loop)
102 | self.loop.set_debug(True)
103 |
104 | try:
105 | self._outcome = outcome
106 |
107 | with outcome.testPartExecutor(self):
108 | self.setUp()
109 | self.loop.run_until_complete(self.asyncSetUp())
110 | if outcome.success:
111 | outcome.expecting_failure = expecting_failure
112 | with outcome.testPartExecutor(self, isTest=True):
113 | maybe_coroutine = testMethod()
114 | if asyncio.iscoroutine(maybe_coroutine):
115 | self.loop.run_until_complete(maybe_coroutine)
116 | outcome.expecting_failure = False
117 | with outcome.testPartExecutor(self):
118 | self.loop.run_until_complete(self.asyncTearDown())
119 | self.tearDown()
120 |
121 | self.doAsyncCleanups()
122 |
123 | try:
124 | _cancel_all_tasks(self.loop)
125 | self.loop.run_until_complete(self.loop.shutdown_asyncgens())
126 | finally:
127 | asyncio.set_event_loop(None)
128 | self.loop.close()
129 |
130 | for test, reason in outcome.skipped:
131 | self._addSkip(result, test, reason)
132 | self._feedErrorsToResult(result, outcome.errors)
133 | if outcome.success:
134 | if expecting_failure:
135 | if outcome.expectedFailure:
136 | self._addExpectedFailure(result, outcome.expectedFailure)
137 | else:
138 | self._addUnexpectedSuccess(result)
139 | else:
140 | result.addSuccess(self)
141 | return result
142 | finally:
143 | result.stopTest(self)
144 | if orig_result is None:
145 | stopTestRun = getattr(result, 'stopTestRun', None) # pylint: disable=C0103
146 | if stopTestRun is not None:
147 | stopTestRun() # pylint: disable=E1102
148 |
149 | # explicitly break reference cycles:
150 | # outcome.errors -> frame -> outcome -> outcome.errors
151 | # outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
152 | outcome.errors.clear()
153 | outcome.expectedFailure = None
154 |
155 | # clear the outcome, no more needed
156 | self._outcome = None
157 |
158 | def doAsyncCleanups(self): # pylint: disable=C0103
159 | outcome = self._outcome or _Outcome()
160 | while self._cleanups:
161 | function, args, kwargs = self._cleanups.pop()
162 | with outcome.testPartExecutor(self):
163 | maybe_coroutine = function(*args, **kwargs)
164 | if asyncio.iscoroutine(maybe_coroutine):
165 | self.loop.run_until_complete(maybe_coroutine)
166 |
167 |
168 | class AdvanceTimeTestCase(AsyncioTestCase):
169 |
170 | async def asyncSetUp(self):
171 | self._time = 0 # pylint: disable=W0201
172 | self.loop.time = functools.wraps(self.loop.time)(lambda: self._time)
173 | await super().asyncSetUp()
174 |
175 | async def advance(self, seconds):
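    | # drain the loop's ready callbacks, advance the mocked clock, then drain again so timers due at the new time get to run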
176 | while self.loop._ready:
177 | await asyncio.sleep(0)
178 | self._time += seconds
179 | await asyncio.sleep(0)
180 | while self.loop._ready:
181 | await asyncio.sleep(0)
182 |
183 |
184 | class IntegrationTestCase(AsyncioTestCase):
185 |
186 | LEDGER = None
187 | MANAGER = None
188 | VERBOSITY = logging.WARN
189 |
190 | def __init__(self, *args, **kwargs):
191 | super().__init__(*args, **kwargs)
192 | self.conductor: Optional[Conductor] = None
193 | self.blockchain: Optional[BlockchainNode] = None
194 | self.wallet_node: Optional[WalletNode] = None
195 | self.manager: Optional[BaseWalletManager] = None
196 | self.ledger: Optional[BaseLedger] = None
197 | self.wallet: Optional[Wallet] = None
198 | self.account: Optional[BaseAccount] = None
199 |
200 | async def asyncSetUp(self):
201 | self.conductor = Conductor(
202 | ledger_module=self.LEDGER, manager_module=self.MANAGER, verbosity=self.VERBOSITY
203 | )
204 | await self.conductor.start_blockchain()
205 | self.addCleanup(self.conductor.stop_blockchain)
206 | await self.conductor.start_spv()
207 | self.addCleanup(self.conductor.stop_spv)
208 | await self.conductor.start_wallet()
209 | self.addCleanup(self.conductor.stop_wallet)
210 | self.blockchain = self.conductor.blockchain_node
211 | self.wallet_node = self.conductor.wallet_node
212 | self.manager = self.wallet_node.manager
213 | self.ledger = self.wallet_node.ledger
214 | self.wallet = self.wallet_node.wallet
215 | self.account = self.wallet_node.wallet.default_account
216 |
217 | async def assertBalance(self, account, expected_balance: str): # pylint: disable=C0103
218 | balance = await account.get_balance()
219 | self.assertEqual(satoshis_to_coins(balance), expected_balance)
220 |
221 | def broadcast(self, tx):
222 | return self.ledger.broadcast(tx)
223 |
224 | async def on_header(self, height):
225 | if self.ledger.headers.height < height:
226 | await self.ledger.on_header.where(
227 | lambda e: e.height == height
228 | )
229 | return True
230 |
231 | def on_transaction_id(self, txid, ledger=None):
232 | return (ledger or self.ledger).on_transaction.where(
233 | lambda e: e.tx.id == txid
234 | )
235 |
236 | def on_transaction_address(self, tx, address):
237 | return self.ledger.on_transaction.where(
238 | lambda e: e.tx.id == tx.id and e.address == address
239 | )
240 |
--------------------------------------------------------------------------------
/torba/client/basenetwork.py:
--------------------------------------------------------------------------------
1 | import logging
2 | import asyncio
3 | from asyncio import CancelledError
4 | from time import time
5 | from typing import List
6 |
7 | from torba.rpc import RPCSession as BaseClientSession, Connector, RPCError
8 |
9 | from torba import __version__
10 | from torba.stream import StreamController
11 |
12 | log = logging.getLogger(__name__)
13 |
14 |
15 | class ClientSession(BaseClientSession):
16 |
17 | def __init__(self, *args, network, server, **kwargs):
18 | self.network = network
19 | self.server = server
20 | super().__init__(*args, **kwargs)
21 | self._on_disconnect_controller = StreamController()
22 | self.on_disconnected = self._on_disconnect_controller.stream
23 | self.bw_limit = self.framer.max_size = self.max_errors = 1 << 32
24 | self.max_seconds_idle = 60
25 | self.ping_task = None
26 |
27 | async def send_request(self, method, args=()):
28 | try:
29 | return await super().send_request(method, args)
30 | except RPCError as e:
31 | log.warning("Wallet server returned an error. Code: %s Message: %s", *e.args)
32 | raise e
33 |
34 | async def ping_forever(self):
35 | # TODO: change to 'ping' on newer protocol (above 1.2)
36 | while not self.is_closing():
37 | if (time() - self.last_send) > self.max_seconds_idle:
38 | await self.send_request('server.banner')
39 | await asyncio.sleep(self.max_seconds_idle//3)
40 |
41 | async def create_connection(self, timeout=6):
42 | connector = Connector(lambda: self, *self.server)
43 | await asyncio.wait_for(connector.create_connection(), timeout=timeout)
44 | self.ping_task = asyncio.create_task(self.ping_forever())
45 |
46 | async def handle_request(self, request):
47 | controller = self.network.subscription_controllers[request.method]
48 | controller.add(request.args)
49 |
50 | def connection_lost(self, exc):
51 | super().connection_lost(exc)
52 | self._on_disconnect_controller.add(True)
53 | if self.ping_task:
54 | self.ping_task.cancel()
55 |
56 |
57 | class BaseNetwork:
58 |
59 | def __init__(self, ledger):
60 | self.config = ledger.config
61 | self.client: ClientSession = None
62 | self.session_pool: SessionPool = None
63 | self.running = False
64 |
65 | self._on_connected_controller = StreamController()
66 | self.on_connected = self._on_connected_controller.stream
67 |
68 | self._on_header_controller = StreamController()
69 | self.on_header = self._on_header_controller.stream
70 |
71 | self._on_status_controller = StreamController()
72 | self.on_status = self._on_status_controller.stream
73 |
74 | self.subscription_controllers = {
75 | 'blockchain.headers.subscribe': self._on_header_controller,
76 | 'blockchain.address.subscribe': self._on_status_controller,
77 | }
78 |
79 | async def start(self):
80 | self.running = True
81 | connect_timeout = self.config.get('connect_timeout', 6)
82 | self.session_pool = SessionPool(network=self, timeout=connect_timeout)
83 | self.session_pool.start(self.config['default_servers'])
84 | while True:
85 | try:
86 | self.client = await self.pick_fastest_session()
87 | if self.is_connected:
88 | await self.ensure_server_version()
89 | log.info("Successfully connected to SPV wallet server: %s:%d", *self.client.server)
90 | self._on_connected_controller.add(True)
91 | await self.client.on_disconnected.first
92 | except CancelledError:
93 | self.running = False
94 | except asyncio.TimeoutError:
95 | log.warning("Timed out while trying to find a server!")
96 | except Exception: # pylint: disable=broad-except
97 | log.exception("Exception while trying to find a server!")
98 | if not self.running:
99 | return
100 | elif self.client:
101 | await self.client.close()
102 | self.client.connection.cancel_pending_requests()
103 |
104 | async def stop(self):
105 | self.running = False
106 | if self.session_pool:
107 | self.session_pool.stop()
108 | if self.is_connected:
109 | disconnected = self.client.on_disconnected.first
110 | await self.client.close()
111 | await disconnected
112 |
113 | @property
114 | def is_connected(self):
115 | return self.client is not None and not self.client.is_closing()
116 |
117 | def rpc(self, list_or_method, args):
118 | if self.is_connected:
119 | return self.client.send_request(list_or_method, args)
120 | else:
121 | raise ConnectionError("Attempting to send rpc request when connection is not available.")
122 |
123 | async def pick_fastest_session(self):
124 | sessions = await self.session_pool.get_online_sessions()
125 | done, pending = await asyncio.wait([
126 | self.probe_session(session)
127 | for session in sessions if not session.is_closing()
128 | ], return_when='FIRST_COMPLETED')
129 | for task in pending:
130 | task.cancel()
131 | for session in done:
132 | return await session
133 |
134 | async def probe_session(self, session: ClientSession):
135 | await session.send_request('server.banner')
136 | return session
137 |
138 | def ensure_server_version(self, required='1.2'):
139 | return self.rpc('server.version', [__version__, required])
140 |
141 | def broadcast(self, raw_transaction):
142 | return self.rpc('blockchain.transaction.broadcast', [raw_transaction])
143 |
144 | def get_history(self, address):
145 | return self.rpc('blockchain.address.get_history', [address])
146 |
147 | def get_transaction(self, tx_hash):
148 | return self.rpc('blockchain.transaction.get', [tx_hash])
149 |
150 | def get_transaction_height(self, tx_hash):
151 | return self.rpc('blockchain.transaction.get_height', [tx_hash])
152 |
153 | def get_merkle(self, tx_hash, height):
154 | return self.rpc('blockchain.transaction.get_merkle', [tx_hash, height])
155 |
156 | def get_headers(self, height, count=10000):
157 | return self.rpc('blockchain.block.headers', [height, count])
158 |
159 | def subscribe_headers(self):
160 | return self.rpc('blockchain.headers.subscribe', [True])
161 |
162 | def subscribe_address(self, address):
163 | return self.rpc('blockchain.address.subscribe', [address])
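   |
   |     # Illustrative sketch only (not part of this module): assuming a ledger whose
   |     # config provides 'default_servers', a caller might drive the network roughly
   |     # like this:
   |     #
   |     #   network = BaseNetwork(ledger)
   |     #   asyncio.create_task(network.start())      # start() loops until stop()
   |     #   await network.on_connected.first
   |     #   await network.subscribe_headers()
   |     #   history = await network.get_history(address)   # `address` is a placeholder
   |     #   await network.stop()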
164 |
165 |
166 | class SessionPool:
167 |
168 | def __init__(self, network: BaseNetwork, timeout: float):
169 | self.network = network
170 | self.sessions: List[ClientSession] = []
171 | self._dead_servers: List[ClientSession] = []
172 | self.maintain_connections_task = None
173 | self.timeout = timeout
174 | # triggered when the master server is out, to speed up reconnect
175 | self._lost_master = asyncio.Event()
176 |
177 | @property
178 | def online(self):
179 | for session in self.sessions:
180 | if not session.is_closing():
181 | return True
182 | return False
183 |
184 | def start(self, default_servers):
185 | self.sessions = [
186 | ClientSession(network=self.network, server=server)
187 | for server in default_servers
188 | ]
189 | self.maintain_connections_task = asyncio.create_task(self.ensure_connections())
190 |
191 | def stop(self):
192 | if self.maintain_connections_task:
193 | self.maintain_connections_task.cancel()
194 | for session in self.sessions:
195 | if not session.is_closing():
196 | session.abort()
197 | self.sessions, self._dead_servers, self.maintain_connections_task = [], [], None
198 |
199 | async def ensure_connections(self):
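   |         # Maintenance loop: try to (re)connect every session, then sleep for up to
   |         # three seconds or until _lost_master is set; once every session has been
   |         # moved to _dead_servers, recycle them all and start over.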
200 | while True:
201 | await asyncio.gather(*[
202 | self.ensure_connection(session)
203 | for session in self.sessions
204 | ], return_exceptions=True)
205 |             await asyncio.wait([asyncio.sleep(3), self._lost_master.wait()], return_when=asyncio.FIRST_COMPLETED)
206 | self._lost_master.clear()
207 | if not self.sessions:
208 | self.sessions.extend(self._dead_servers)
209 | self._dead_servers = []
210 |
211 | async def ensure_connection(self, session):
212 | if not session.is_closing():
213 | return
214 | try:
215 | return await session.create_connection(self.timeout)
216 | except asyncio.TimeoutError:
217 | log.warning("Timeout connecting to %s:%d", *session.server)
218 | except asyncio.CancelledError: # pylint: disable=try-except-raise
219 | raise
220 | except Exception as err: # pylint: disable=broad-except
221 | if 'Connect call failed' in str(err):
222 | log.warning("Could not connect to %s:%d", *session.server)
223 | else:
224 | log.exception("Connecting to %s:%d raised an exception:", *session.server)
225 | self._dead_servers.append(session)
226 | self.sessions.remove(session)
227 |
228 | async def get_online_sessions(self):
229 | self._lost_master.set()
230 | while not self.online:
231 | await asyncio.sleep(0.1)
232 | return self.sessions
233 |
--------------------------------------------------------------------------------
/torba/server/script.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2016-2017, Neil Booth
2 | #
3 | # All rights reserved.
4 | #
5 | # The MIT License (MIT)
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining
8 | # a copy of this software and associated documentation files (the
9 | # "Software"), to deal in the Software without restriction, including
10 | # without limitation the rights to use, copy, modify, merge, publish,
11 | # distribute, sublicense, and/or sell copies of the Software, and to
12 | # permit persons to whom the Software is furnished to do so, subject to
13 | # the following conditions:
14 | #
15 | # The above copyright notice and this permission notice shall be
16 | # included in all copies or substantial portions of the Software.
17 | #
18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
19 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
21 | # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
22 | # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
23 | # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
24 | # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 | # See the file "LICENSE" for the copyright and warranty status of this software.
26 |
27 | """Script-related classes and functions."""
28 |
29 |
30 | import struct
31 | from collections import namedtuple
32 |
33 | from torba.server.enum import Enumeration
34 | from torba.server.hash import hash160
35 | from torba.server.util import unpack_le_uint16_from, unpack_le_uint32_from, \
36 | pack_le_uint16, pack_le_uint32
37 |
38 |
39 | class ScriptError(Exception):
40 | """Exception used for script errors."""
41 |
42 |
43 | OpCodes = Enumeration("Opcodes", [
44 | ("OP_0", 0), ("OP_PUSHDATA1", 76),
45 | "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE",
46 | "OP_RESERVED",
47 | "OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7", "OP_8",
48 | "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
49 | "OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF",
50 | "OP_ELSE", "OP_ENDIF", "OP_VERIFY", "OP_RETURN",
51 | "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP",
52 | "OP_2OVER", "OP_2ROT", "OP_2SWAP", "OP_IFDUP", "OP_DEPTH", "OP_DROP",
53 | "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
54 | "OP_SWAP", "OP_TUCK",
55 | "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE",
56 | "OP_INVERT", "OP_AND", "OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY",
57 | "OP_RESERVED1", "OP_RESERVED2",
58 | "OP_1ADD", "OP_1SUB", "OP_2MUL", "OP_2DIV", "OP_NEGATE", "OP_ABS",
59 | "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV", "OP_MOD",
60 | "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR", "OP_NUMEQUAL",
61 | "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN", "OP_GREATERTHAN",
62 | "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
63 | "OP_WITHIN",
64 | "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160", "OP_HASH256",
65 | "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
66 | "OP_CHECKMULTISIGVERIFY",
67 | "OP_NOP1",
68 | "OP_CHECKLOCKTIMEVERIFY", "OP_CHECKSEQUENCEVERIFY"
69 | ])
70 |
71 |
72 | # Paranoia to make it hard to create bad scripts
73 | assert OpCodes.OP_DUP == 0x76
74 | assert OpCodes.OP_HASH160 == 0xa9
75 | assert OpCodes.OP_EQUAL == 0x87
76 | assert OpCodes.OP_EQUALVERIFY == 0x88
77 | assert OpCodes.OP_CHECKSIG == 0xac
78 | assert OpCodes.OP_CHECKMULTISIG == 0xae
79 |
80 |
81 | def _match_ops(ops, pattern):
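   |     # A pattern entry of -1 matches any data push; e.g. a P2SH output parses to
   |     #   [OP_HASH160, (0x14, <20-byte script hash>), OP_EQUAL]
   |     # which matches ScriptPubKey.TO_P2SH_OPS == [OP_HASH160, -1, OP_EQUAL] below.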
82 | if len(ops) != len(pattern):
83 | return False
84 | for op, pop in zip(ops, pattern):
85 | if pop != op:
86 | # -1 means 'data push', whose op is an (op, data) tuple
87 | if pop == -1 and isinstance(op, tuple):
88 | continue
89 | return False
90 |
91 | return True
92 |
93 |
94 | class ScriptPubKey:
95 | """A class for handling a tx output script that gives conditions
96 | necessary for spending.
97 | """
98 |
99 | TO_ADDRESS_OPS = [OpCodes.OP_DUP, OpCodes.OP_HASH160, -1,
100 | OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]
101 | TO_P2SH_OPS = [OpCodes.OP_HASH160, -1, OpCodes.OP_EQUAL]
102 | TO_PUBKEY_OPS = [-1, OpCodes.OP_CHECKSIG]
103 |
104 | PayToHandlers = namedtuple('PayToHandlers', 'address script_hash pubkey '
105 | 'unspendable strange')
106 |
107 | @classmethod
108 | def pay_to(cls, handlers, script):
109 | """Parse a script, invoke the appropriate handler and
110 | return the result.
111 |
112 | One of the following handlers is invoked:
113 | handlers.address(hash160)
114 | handlers.script_hash(hash160)
115 | handlers.pubkey(pubkey)
116 | handlers.unspendable()
117 | handlers.strange(script)
118 | """
119 | try:
120 | ops = Script.get_ops(script)
121 | except ScriptError:
122 | return handlers.unspendable()
123 |
124 | match = _match_ops
125 |
126 | if match(ops, cls.TO_ADDRESS_OPS):
127 | return handlers.address(ops[2][-1])
128 | if match(ops, cls.TO_P2SH_OPS):
129 | return handlers.script_hash(ops[1][-1])
130 | if match(ops, cls.TO_PUBKEY_OPS):
131 | return handlers.pubkey(ops[0][-1])
132 | if ops and ops[0] == OpCodes.OP_RETURN:
133 | return handlers.unspendable()
134 | return handlers.strange(script)
135 |
136 | @classmethod
137 | def P2SH_script(cls, hash160):
138 | return (bytes([OpCodes.OP_HASH160])
139 | + Script.push_data(hash160)
140 | + bytes([OpCodes.OP_EQUAL]))
141 |
142 | @classmethod
143 | def P2PKH_script(cls, hash160):
144 | return (bytes([OpCodes.OP_DUP, OpCodes.OP_HASH160])
145 | + Script.push_data(hash160)
146 | + bytes([OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]))
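   |         # For a 20-byte hash160 this is the standard 25-byte P2PKH script:
   |         #   76 a9 14 <hash160> 88 ac
   |         #   (OP_DUP OP_HASH160 PUSH(20) <hash160> OP_EQUALVERIFY OP_CHECKSIG)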
147 |
148 | @classmethod
149 | def validate_pubkey(cls, pubkey, req_compressed=False):
150 | if isinstance(pubkey, (bytes, bytearray)):
151 | if len(pubkey) == 33 and pubkey[0] in (2, 3):
152 | return # Compressed
153 | if len(pubkey) == 65 and pubkey[0] == 4:
154 | if not req_compressed:
155 | return
156 |             raise ScriptError('uncompressed pubkeys are invalid')
157 |         raise ScriptError('invalid pubkey {}'.format(pubkey))
158 |
159 | @classmethod
160 | def pubkey_script(cls, pubkey):
161 | cls.validate_pubkey(pubkey)
162 | return Script.push_data(pubkey) + bytes([OpCodes.OP_CHECKSIG])
163 |
164 | @classmethod
165 | def multisig_script(cls, m, pubkeys):
166 | """Returns the script for a pay-to-multisig transaction."""
167 | n = len(pubkeys)
168 | if not 1 <= m <= n <= 15:
169 | raise ScriptError('{:d} of {:d} multisig script not possible'
170 | .format(m, n))
171 | for pubkey in pubkeys:
172 | cls.validate_pubkey(pubkey, req_compressed=True)
173 | # See https://bitcoin.org/en/developer-guide
174 | # 2 of 3 is: OP_2 pubkey1 pubkey2 pubkey3 OP_3 OP_CHECKMULTISIG
175 |         return (bytes([OpCodes.OP_1 + m - 1])
176 |                 + b''.join(Script.push_data(pubkey) for pubkey in pubkeys)
177 |                 + bytes([OpCodes.OP_1 + n - 1, OpCodes.OP_CHECKMULTISIG]))
178 |
179 |
180 | class Script:
181 |
182 | @classmethod
183 | def get_ops(cls, script):
184 | ops = []
185 |
186 | # The unpacks or script[n] below throw on truncated scripts
187 | try:
188 | n = 0
189 | while n < len(script):
190 | op = script[n]
191 | n += 1
192 |
193 | if op <= OpCodes.OP_PUSHDATA4:
194 | # Raw bytes follow
195 | if op < OpCodes.OP_PUSHDATA1:
196 | dlen = op
197 | elif op == OpCodes.OP_PUSHDATA1:
198 | dlen = script[n]
199 | n += 1
200 | elif op == OpCodes.OP_PUSHDATA2:
201 | dlen, = unpack_le_uint16_from(script[n: n + 2])
202 | n += 2
203 | else:
204 | dlen, = unpack_le_uint32_from(script[n: n + 4])
205 | n += 4
206 | if n + dlen > len(script):
207 | raise IndexError
208 | op = (op, script[n:n + dlen])
209 | n += dlen
210 |
211 | ops.append(op)
212 | except Exception:
213 | # Truncated script; e.g. tx_hash
214 | # ebc9fa1196a59e192352d76c0f6e73167046b9d37b8302b6bb6968dfd279b767
215 | raise ScriptError('truncated script')
216 |
217 | return ops
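   |         # Example shape: a P2PKH output script decodes to
   |         #   [OP_DUP, OP_HASH160, (0x14, <20-byte hash>), OP_EQUALVERIFY, OP_CHECKSIG]
   |         # i.e. data pushes come back as (push_opcode, data) tuples.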
218 |
219 | @classmethod
220 | def push_data(cls, data):
221 | """Returns the opcodes to push the data on the stack."""
222 | assert isinstance(data, (bytes, bytearray))
223 |
224 | n = len(data)
225 | if n < OpCodes.OP_PUSHDATA1:
226 | return bytes([n]) + data
227 | if n < 256:
228 | return bytes([OpCodes.OP_PUSHDATA1, n]) + data
229 | if n < 65536:
230 | return bytes([OpCodes.OP_PUSHDATA2]) + pack_le_uint16(n) + data
231 | return bytes([OpCodes.OP_PUSHDATA4]) + pack_le_uint32(n) + data
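   |         # For example (lengths below/above the PUSHDATA thresholds):
   |         #   push_data(b'x' * 5)     -> b'\x05' + data                   (bare length byte)
   |         #   push_data(b'x' * 300)   -> b'\x4d\x2c\x01' + data           (OP_PUSHDATA2 + LE uint16)
   |         #   push_data(b'x' * 70000) -> b'\x4e\x70\x11\x01\x00' + data   (OP_PUSHDATA4 + LE uint32)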
232 |
233 | @classmethod
234 | def opcode_name(cls, opcode):
235 | if OpCodes.OP_0 < opcode < OpCodes.OP_PUSHDATA1:
236 | return 'OP_{:d}'.format(opcode)
237 | try:
238 | return OpCodes.whatis(opcode)
239 | except KeyError:
240 | return 'OP_UNKNOWN:{:d}'.format(opcode)
241 |
242 | @classmethod
243 | def dump(cls, script):
244 |         # get_ops() returns one flat list; data pushes are (opcode, data) tuples
245 |         for op in cls.get_ops(script):
246 |             opcode, data = op if isinstance(op, tuple) else (op, None)
247 |             name = cls.opcode_name(opcode)
248 |             if data is None:
249 |                 print(name)
250 |             else:
251 |                 print('{} {} ({:d} bytes)'.format(name, data.hex(), len(data)))
252 |
--------------------------------------------------------------------------------
/tests/client_tests/unit/test_script.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from binascii import hexlify, unhexlify
3 |
4 | from torba.client.bcd_data_stream import BCDataStream
5 | from torba.client.basescript import Template, ParseError, tokenize, push_data
6 | from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, PUSH_MANY, OP_HASH160, OP_EQUAL
7 | from torba.client.basescript import BaseInputScript, BaseOutputScript
8 |
9 |
10 | def parse(opcodes, source):
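   |     # Test helper: build a raw script from `source` (bytes become data pushes,
   |     # ints become literal opcodes), then parse it against a Template built from
   |     # `opcodes` and return the extracted values.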
11 | template = Template('test', opcodes)
12 | s = BCDataStream()
13 | for t in source:
14 | if isinstance(t, bytes):
15 | s.write_many(push_data(t))
16 | elif isinstance(t, int):
17 | s.write_uint8(t)
18 | else:
19 | raise ValueError()
20 | s.reset()
21 | return template.parse(tokenize(s))
22 |
23 |
24 | class TestScriptTemplates(unittest.TestCase):
25 |
26 | def test_push_data(self):
27 | self.assertEqual(parse(
28 | (PUSH_SINGLE('script_hash'),),
29 | (b'abcdef',)
30 | ), {
31 | 'script_hash': b'abcdef'
32 | }
33 | )
34 | self.assertEqual(parse(
35 | (PUSH_SINGLE('first'), PUSH_INTEGER('rating')),
36 | (b'Satoshi', (1000).to_bytes(2, 'little'))
37 | ), {
38 | 'first': b'Satoshi',
39 | 'rating': 1000,
40 | }
41 | )
42 | self.assertEqual(parse(
43 | (OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL),
44 | (OP_HASH160, b'abcdef', OP_EQUAL)
45 | ), {
46 | 'script_hash': b'abcdef'
47 | }
48 | )
49 |
50 | def test_push_data_many(self):
51 | self.assertEqual(parse(
52 | (PUSH_MANY('names'),),
53 | (b'amit',)
54 | ), {
55 | 'names': [b'amit']
56 | }
57 | )
58 | self.assertEqual(parse(
59 | (PUSH_MANY('names'),),
60 | (b'jeremy', b'amit', b'victor')
61 | ), {
62 | 'names': [b'jeremy', b'amit', b'victor']
63 | }
64 | )
65 | self.assertEqual(parse(
66 | (OP_HASH160, PUSH_MANY('names'), OP_EQUAL),
67 | (OP_HASH160, b'grin', b'jack', OP_EQUAL)
68 | ), {
69 | 'names': [b'grin', b'jack']
70 | }
71 | )
72 |
73 | def test_push_data_mixed(self):
74 | self.assertEqual(parse(
75 | (PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')),
76 | (b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH')
77 | ), {
78 | 'CEO': b'jeremy',
79 | 'CTO': b'grin',
80 | 'Devs': [b'lex', b'amit', b'victor', b'jack'],
81 | 'State': b'NH'
82 | }
83 | )
84 |
85 | def test_push_data_many_separated(self):
86 | self.assertEqual(parse(
87 | (PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')),
88 | (b'jeremy', b'grin', OP_HASH160, b'lex', b'jack')
89 | ), {
90 | 'Chiefs': [b'jeremy', b'grin'],
91 | 'Devs': [b'lex', b'jack']
92 | }
93 | )
94 |
95 | def test_push_data_many_not_separated(self):
96 | with self.assertRaisesRegex(ParseError, 'consecutive PUSH_MANY'):
97 | parse((PUSH_MANY('Chiefs'), PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack'))
98 |
99 |
100 | class TestRedeemPubKeyHash(unittest.TestCase):
101 |
102 | def redeem_pubkey_hash(self, sig, pubkey):
103 |         # this checks that the factory function correctly sets up the script
104 | src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey))
105 | self.assertEqual(src1.template.name, 'pubkey_hash')
106 | self.assertEqual(hexlify(src1.values['signature']), sig)
107 | self.assertEqual(hexlify(src1.values['pubkey']), pubkey)
108 | # now we test that it will round trip
109 | src2 = BaseInputScript(src1.source)
110 | self.assertEqual(src2.template.name, 'pubkey_hash')
111 | self.assertEqual(hexlify(src2.values['signature']), sig)
112 | self.assertEqual(hexlify(src2.values['pubkey']), pubkey)
113 | return hexlify(src1.source)
114 |
115 | def test_redeem_pubkey_hash_1(self):
116 | self.assertEqual(
117 | self.redeem_pubkey_hash(
118 | b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e'
119 | b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301',
120 | b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
121 | ),
122 | b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d'
123 | b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3'
124 | b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
125 | )
126 |
127 |
128 | class TestRedeemScriptHash(unittest.TestCase):
129 |
130 | def redeem_script_hash(self, sigs, pubkeys):
131 |         # this checks that the factory function correctly sets up the script
132 | src1 = BaseInputScript.redeem_script_hash(
133 | [unhexlify(sig) for sig in sigs],
134 | [unhexlify(pubkey) for pubkey in pubkeys]
135 | )
136 | subscript1 = src1.values['script']
137 | self.assertEqual(src1.template.name, 'script_hash')
138 | self.assertEqual([hexlify(v) for v in src1.values['signatures']], sigs)
139 | self.assertEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys)
140 | self.assertEqual(subscript1.values['signatures_count'], len(sigs))
141 | self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys))
142 | # now we test that it will round trip
143 | src2 = BaseInputScript(src1.source)
144 | subscript2 = src2.values['script']
145 | self.assertEqual(src2.template.name, 'script_hash')
146 | self.assertEqual([hexlify(v) for v in src2.values['signatures']], sigs)
147 | self.assertEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys)
148 | self.assertEqual(subscript2.values['signatures_count'], len(sigs))
149 | self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys))
150 | return hexlify(src1.source)
151 |
152 | def test_redeem_script_hash_1(self):
153 | self.assertEqual(
154 | self.redeem_script_hash([
155 | b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575'
156 | b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401',
157 | b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68'
158 | b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01',
159 | b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd'
160 | b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01'
161 | ], [
162 | b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4',
163 | b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692',
164 | b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c',
165 | b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb',
166 | b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89'
167 | ]),
168 | b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e'
169 | b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd'
170 | b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba'
171 | b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a'
172 | b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3'
173 | b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103'
174 | b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2'
175 | b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89'
176 | b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171'
177 | b'ad0abeaa8955ae'
178 | )
179 |
180 |
181 | class TestPayPubKeyHash(unittest.TestCase):
182 |
183 | def pay_pubkey_hash(self, pubkey_hash):
184 |         # this checks that the factory function correctly sets up the script
185 | src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash))
186 | self.assertEqual(src1.template.name, 'pay_pubkey_hash')
187 | self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash)
188 | # now we test that it will round trip
189 | src2 = BaseOutputScript(src1.source)
190 | self.assertEqual(src2.template.name, 'pay_pubkey_hash')
191 | self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash)
192 | return hexlify(src1.source)
193 |
194 | def test_pay_pubkey_hash_1(self):
195 | self.assertEqual(
196 | self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'),
197 | b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac'
198 | )
199 |
200 |
201 | class TestPayScriptHash(unittest.TestCase):
202 |
203 | def pay_script_hash(self, script_hash):
204 |         # this checks that the factory function correctly sets up the script
205 | src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash))
206 | self.assertEqual(src1.template.name, 'pay_script_hash')
207 | self.assertEqual(hexlify(src1.values['script_hash']), script_hash)
208 | # now we test that it will round trip
209 | src2 = BaseOutputScript(src1.source)
210 | self.assertEqual(src2.template.name, 'pay_script_hash')
211 | self.assertEqual(hexlify(src2.values['script_hash']), script_hash)
212 | return hexlify(src1.source)
213 |
214 |     def test_pay_script_hash_1(self):
215 | self.assertEqual(
216 | self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'),
217 | b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87'
218 | )
219 |
--------------------------------------------------------------------------------