├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── Vagrantfile ├── docs ├── Makefile ├── cli.rst ├── conf.py ├── core.rst ├── dev-requirements.txt ├── examples.rst ├── index.rst ├── install.rst ├── issue_template.md ├── low_level.rst ├── steem.rst └── tools.rst ├── hive ├── __init__.py ├── account.py ├── amount.py ├── block.py ├── blockchain.py ├── blog.py ├── cli.py ├── commit.py ├── converter.py ├── dex.py ├── hive.py ├── hived.py ├── instance.py ├── post.py ├── profile.py ├── transactionbuilder.py ├── utils.py ├── wallet.py └── witness.py ├── hivebase ├── __init__.py ├── account.py ├── aes.py ├── base58.py ├── bip38.py ├── chains.py ├── dictionary.py ├── exceptions.py ├── http_client.py ├── memo.py ├── operationids.py ├── operations.py ├── storage.py ├── transactions.py ├── types.py └── utils.py ├── scripts ├── Dockerfile ├── doc_rst_convert.py ├── hived_gen.py └── nuke_legacy.sh ├── setup.cfg ├── setup.py └── tests ├── __init__.py ├── block_data ├── account_create.json ├── account_update.json ├── account_witness_proxy.json ├── account_witness_vote.json ├── block.json ├── cancel_transfer_from_savings.json ├── change_recovery_account.json ├── comment.json ├── comment_options.json ├── convert.json ├── custom.json ├── custom_json.json ├── delete_comment.json ├── feed_publish.json ├── limit_order_cancel.json ├── limit_order_create.json ├── pow.json ├── pow2.json ├── recover_account.json ├── request_account_recovery.json ├── set_withdraw_vesting_route.json ├── transaction.json ├── transfer.json ├── transfer_from_savings.json ├── transfer_to_savings.json ├── transfer_to_vesting.json ├── vote.json ├── withdraw_vesting.json ├── witness_set_properties.json └── witness_update.json ├── steem ├── test_account.py ├── test_amount.py ├── test_broadcast.py ├── test_memo.py ├── test_post.py ├── test_steemd.py ├── test_transactions.py └── test_utils.py ├── steembase ├── test_base58.py ├── test_base_account.py ├── test_bip38.py ├── test_bip38_pylibscrypt.py └── test_bip38_scrypt.py └── test_import.py /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.toptal.com/developers/gitignore/api/python 3 | # Edit at https://www.toptal.com/developers/gitignore?templates=python 4 | 5 | ### Python ### 6 | # Byte-compiled / optimized / DLL files 7 | __pycache__/ 8 | *.py[cod] 9 | *$py.class 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | build/ 17 | develop-eggs/ 18 | dist/ 19 | downloads/ 20 | eggs/ 21 | .eggs/ 22 | lib/ 23 | lib64/ 24 | parts/ 25 | sdist/ 26 | var/ 27 | wheels/ 28 | share/python-wheels/ 29 | *.egg-info/ 30 | .installed.cfg 31 | *.egg 32 | MANIFEST 33 | 34 | # PyInstaller 35 | # Usually these files are written by a python script from a template 36 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
37 | *.manifest 38 | *.spec 39 | 40 | # Installer logs 41 | pip-log.txt 42 | pip-delete-this-directory.txt 43 | 44 | # Unit test / coverage reports 45 | htmlcov/ 46 | .tox/ 47 | .nox/ 48 | .coverage 49 | .coverage.* 50 | .cache 51 | nosetests.xml 52 | coverage.xml 53 | *.cover 54 | *.py,cover 55 | .hypothesis/ 56 | .pytest_cache/ 57 | cover/ 58 | 59 | # Translations 60 | *.mo 61 | *.pot 62 | 63 | # Django stuff: 64 | *.log 65 | local_settings.py 66 | db.sqlite3 67 | db.sqlite3-journal 68 | 69 | # Flask stuff: 70 | instance/ 71 | .webassets-cache 72 | 73 | # Scrapy stuff: 74 | .scrapy 75 | 76 | # Sphinx documentation 77 | docs/_build/ 78 | 79 | # PyBuilder 80 | .pybuilder/ 81 | target/ 82 | 83 | # Jupyter Notebook 84 | .ipynb_checkpoints 85 | 86 | # IPython 87 | profile_default/ 88 | ipython_config.py 89 | 90 | # pyenv 91 | # For a library or package, you might want to ignore these files since the code is 92 | # intended to run in multiple environments; otherwise, check them in: 93 | # .python-version 94 | 95 | # pipenv 96 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 97 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 98 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 99 | # install all needed dependencies. 100 | #Pipfile.lock 101 | 102 | # poetry 103 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 104 | # This is especially recommended for binary packages to ensure reproducibility, and is more 105 | # commonly ignored for libraries. 106 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 107 | #poetry.lock 108 | 109 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 110 | __pypackages__/ 111 | 112 | # Celery stuff 113 | celerybeat-schedule 114 | celerybeat.pid 115 | 116 | # SageMath parsed files 117 | *.sage.py 118 | 119 | # Environments 120 | .env 121 | .venv 122 | env/ 123 | venv/ 124 | ENV/ 125 | env.bak/ 126 | venv.bak/ 127 | 128 | # Spyder project settings 129 | .spyderproject 130 | .spyproject 131 | 132 | # Rope project settings 133 | .ropeproject 134 | 135 | # mkdocs documentation 136 | /site 137 | 138 | # mypy 139 | .mypy_cache/ 140 | .dmypy.json 141 | dmypy.json 142 | 143 | # Pyre type checker 144 | .pyre/ 145 | 146 | # pytype static type analyzer 147 | .pytype/ 148 | 149 | # Cython debug symbols 150 | cython_debug/ 151 | 152 | # PyCharm 153 | # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can 154 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 155 | # and can be added to the global gitignore or merged into this file. For a more nuclear 156 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 157 | #.idea/ 158 | 159 | # End of https://www.toptal.com/developers/gitignore/api/python 160 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Portions copyright (c) 2020 pharesim 4 | Portions copyright (c) 2017 Steemit Inc., and contributors. 5 | Portions copyright (c) 2017 ChainSquad GmbH, and contributors. 
6 | Portions copyright (c) 2016 Fabian Schuh 7 | Portions copyright (c) 2015 Fabian Schuh 8 | 9 | Permission is hereby granted, free of charge, to any person obtaining a copy 10 | of this software and associated documentation files (the "Software"), to deal 11 | in the Software without restriction, including without limitation the rights 12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the Software is 14 | furnished to do so, subject to the following conditions: 15 | 16 | The above copyright notice and this permission notice shall be included in 17 | all copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 25 | THE SOFTWARE. 26 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | PROJECT := $(shell basename $(shell pwd)) 2 | PYTHON_FILES := hive hivebase tests setup.py 3 | 4 | .PHONY: clean test fmt install 5 | 6 | clean: 7 | rm -rf build/ dist/ *.egg-info .eggs/ .tox/ \ 8 | __pycache__/ .cache/ .coverage htmlcov src 9 | 10 | test: clean 11 | python setup.py test 12 | 13 | fmt: 14 | yapf --recursive --in-place --style pep8 $(PYTHON_FILES) 15 | pycodestyle $(PYTHON_FILES) 16 | 17 | install: 18 | python setup.py install 19 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # A Python Hive Library 2 | 3 | `hive-python` is a Hive library for Python. It comes with a 4 | BIP38 encrypted wallet and a practical CLI utility called `hivepy`. 5 | 6 | This library currently works on Python 2.7, 3.5 and 3.6. Python 3.3 and 3.4 support forthcoming. 7 | 8 | # Installation 9 | 10 | With pip: 11 | 12 | ``` 13 | pip3 install hivepy # pip install hivepy for 2.7 14 | ``` 15 | 16 | From Source: 17 | 18 | ``` 19 | git clone https://github.com/pharesim/hive-python.git 20 | cd hive-python 21 | python3 setup.py install # python setup.py install for 2.7 22 | ``` 23 | 24 | ## Homebrew Build Prereqs 25 | 26 | If you're on a mac, you may need to do the following first: 27 | 28 | ``` 29 | brew install openssl 30 | export CFLAGS="-I$(brew --prefix openssl)/include $CFLAGS" 31 | export LDFLAGS="-L$(brew --prefix openssl)/lib $LDFLAGS" 32 | ``` 33 | 34 | # CLI tools bundled 35 | 36 | The library comes with a few console scripts. 37 | 38 | * `hivepy`: 39 | * rudimentary blockchain CLI (needs some TLC and more TLAs) 40 | * `hivetail`: 41 | * useful for e.g. `hivetail -f -j | jq --unbuffered --sort-keys .` 42 | 43 | # Documentation 44 | 45 | Documentation is available at **http://hive.readthedocs.io** 46 | 47 | # Tests 48 | 49 | Some tests are included. They can be run via: 50 | 51 | * `python setup.py test` 52 | 53 | # TODO 54 | 55 | * more unit tests 56 | * 100% documentation coverage, consistent documentation 57 | * migrate to click CLI library 58 | 59 | # Notice 60 | 61 | This library is *under development*. Beware. 
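Before relying on it in anything important, a quick read-only sanity check can confirm that your install works (the account name and block number below are just placeholders, and the calls go to the library's default public API node):

```
from hive import Hive

h = Hive()

# read-only calls, no wallet or keys required
print(h.get_account('pharesim')['hbd_balance'])
print(h.get_block(8888888)['timestamp'])
```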
62 | -------------------------------------------------------------------------------- /Vagrantfile: -------------------------------------------------------------------------------- 1 | # -*- mode: ruby -*- 2 | # vi: set ft=ruby : 3 | 4 | Vagrant.configure("2") do |config| 5 | config.vm.box = "ubuntu/xenial64" 6 | 7 | # config.vm.provider "virtualbox" do |vb| 8 | # # Display the VirtualBox GUI when booting the machine 9 | # vb.gui = true 10 | # 11 | # # Customize the amount of memory on the VM: 12 | # vb.memory = "1024" 13 | # end 14 | config.vm.provision "shell", inline: <<-SHELL 15 | apt-get update 16 | apt-get upgrade -y 17 | apt-get install -y libssl-dev 18 | apt-get install -y python3-pip 19 | cd /vagrant && make test 20 | hivepy info 21 | SHELL 22 | end 23 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = hive-python 8 | SOURCEDIR = . 9 | BUILDDIR = _build 10 | _README = ../README.rst 11 | 12 | # Put it first so that "make" without argument is like "make help". 13 | help: 14 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 15 | 16 | .PHONY: help Makefile 17 | 18 | # Catch-all target: route all unknown targets to Sphinx using the new 19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 20 | %: Makefile 21 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 22 | -------------------------------------------------------------------------------- /docs/cli.rst: -------------------------------------------------------------------------------- 1 | hivepy CLI 2 | ~~~~~~~~~~~ 3 | `hivepy` is a convenient CLI utility that enables you to manage your wallet, transfer funds, check 4 | balances and more. 5 | 6 | Using the Wallet 7 | ---------------- 8 | `hivepy` lets you leverage your BIP38 encrypted wallet to perform various actions on your accounts. 9 | 10 | The first time you use `hivepy`, you will be prompted to enter a password. This password will be used to encrypt 11 | the `hivepy` wallet, which contains your private keys. 12 | 13 | You can change the password via `changewalletpassphrase` command. 14 | 15 | :: 16 | 17 | hivepy changewalletpassphrase 18 | 19 | 20 | From this point on, every time an action requires your private keys, you will be prompted to enter 21 | this password (from CLI as well as while using `hive` library). 22 | 23 | To bypass password entry, you can set an environmnet variable ``UNLOCK``. 
24 | 25 | :: 26 | 27 | UNLOCK=mysecretpassword hivepy transfer 100 HIVE 28 | 29 | Common Commands 30 | --------------- 31 | First, you may like to import your Hive account: 32 | 33 | :: 34 | 35 | hivepy importaccount 36 | 37 | 38 | You can also import individual private keys: 39 | 40 | :: 41 | 42 | hivepy addkey 43 | 44 | Listing accounts: 45 | 46 | :: 47 | 48 | hivepy listaccounts 49 | 50 | Show balances: 51 | 52 | :: 53 | 54 | hivepy balance account_name1 account_name2 55 | 56 | Sending funds: 57 | 58 | :: 59 | 60 | hivepy transfer --account 100 HIVE memo 61 | 62 | Upvoting a post: 63 | 64 | :: 65 | 66 | hivepy upvote --account https://hive.blog/funny/@mynameisbrian/the-content-stand-a-comic 67 | 68 | 69 | Setting Defaults 70 | ---------------- 71 | For a more convenient use of ``hivepy`` as well as the ``hive`` library, you can set some defaults. 72 | This is especially useful if you have a single Hive account. 73 | 74 | :: 75 | 76 | hivepy set default_account furion 77 | hivepy set default_vote_weight 100 78 | 79 | hivepy config 80 | +---------------------+--------+ 81 | | Key | Value | 82 | +---------------------+--------+ 83 | | default_account | furion | 84 | | default_vote_weight | 100 | 85 | +---------------------+--------+ 86 | 87 | If you've set up your `default_account`, you can now send funds by omitting this field: 88 | 89 | :: 90 | 91 | hivepy transfer 100 HIVE memo 92 | 93 | 94 | Help 95 | ---- 96 | You can see all available commands with ``hivepy -h`` 97 | 98 | :: 99 | 100 | ~ % hivepy -h 101 | usage: hivepy [-h] [--node NODE] [--no-broadcast] [--no-wallet] [--unsigned] 102 | [--expires EXPIRES] [--verbose VERBOSE] [--version] 103 | {set,config,info,changewalletpassphrase,addkey,delkey,getkey,listkeys,listaccounts,upvote,downvote,transfer,powerup,powerdown,powerdownroute,convert,balance,interest,permissions,allow,disallow,newaccount,importaccount,updatememokey,approvewitness,disapprovewitness,sign,broadcast,orderbook,buy,sell,cancel,reblog,follow,unfollow,setprofile,delprofile,witnessupdate,witnesscreate} 104 | ... 
105 | 106 | Command line tool to interact with the Hive network 107 | 108 | positional arguments: 109 | {set,config,info,changewalletpassphrase,addkey,delkey,getkey,listkeys,listaccounts,upvote,downvote,transfer,powerup,powerdown,powerdownroute,convert,balance,interest,permissions,allow,disallow,newaccount,importaccount,updatememokey,approvewitness,disapprovewitness,sign,broadcast,orderbook,buy,sell,cancel,reblog,follow,unfollow,setprofile,delprofile,witnessupdate,witnesscreate} 110 | sub-command help 111 | set Set configuration 112 | config Show local configuration 113 | info Show basic Hive blockchain info 114 | changewalletpassphrase 115 | Change wallet password 116 | addkey Add a new key to the wallet 117 | delkey Delete keys from the wallet 118 | getkey Dump the privatekey of a pubkey from the wallet 119 | listkeys List available keys in your wallet 120 | listaccounts List available accounts in your wallet 121 | upvote Upvote a post 122 | downvote Downvote a post 123 | transfer Transfer HIVE 124 | powerup Power up (vest HIVE as HIVE POWER) 125 | powerdown Power down (start withdrawing HIVE from HIVE POWER) 126 | powerdownroute Setup a powerdown route 127 | convert Convert HBD to HIVE (takes a week to settle) 128 | balance Show the balance of one more more accounts 129 | interest Get information about interest payment 130 | permissions Show permissions of an account 131 | allow Allow an account/key to interact with your account 132 | disallow Remove allowance an account/key to interact with your 133 | account 134 | newaccount Create a new account 135 | importaccount Import an account using a passphrase 136 | updatememokey Update an account's memo key 137 | approvewitness Approve a witnesses 138 | disapprovewitness Disapprove a witnesses 139 | sign Sign a provided transaction with available and 140 | required keys 141 | broadcast broadcast a signed transaction 142 | orderbook Obtain orderbook of the internal market 143 | buy Buy HIVE or HBD from the internal market 144 | sell Sell HIVE or HBD from the internal market 145 | cancel Cancel order in the internal market 146 | reblog Reblog an existing post 147 | follow Follow another account 148 | unfollow unfollow another account 149 | setprofile Set a variable in an account's profile 150 | delprofile Set a variable in an account's profile 151 | witnessupdate Change witness properties 152 | witnesscreate Create a witness 153 | 154 | optional arguments: 155 | -h, --help show this help message and exit 156 | --node NODE URL for public Hive API (default: 157 | "https://api.hive.blog") 158 | --no-broadcast, -d Do not broadcast anything 159 | --no-wallet, -p Do not load the wallet 160 | --unsigned, -x Do not try to sign the transaction 161 | --expires EXPIRES, -e EXPIRES 162 | Expiration time in seconds (defaults to 30) 163 | --verbose VERBOSE, -v VERBOSE 164 | Verbosity 165 | --version show program's version number and exit 166 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # hive-python documentation build configuration file, created by 5 | # sphinx-quickstart on Mon Mar 6 14:00:07 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 
12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 19 | # 20 | # import os 21 | # import sys 22 | # sys.path.insert(0, os.path.abspath('.')) 23 | 24 | # -- General configuration ------------------------------------------------ 25 | 26 | # If your documentation needs a minimal Sphinx version, state it here. 27 | # 28 | # needs_sphinx = '1.0' 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be 31 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 32 | # ones. 33 | import sphinx_rtd_theme 34 | 35 | extensions = [ 36 | 'sphinx.ext.autodoc', 37 | 'sphinx.ext.doctest', 38 | 'sphinx.ext.imgmath', 39 | 'sphinx.ext.ifconfig', 40 | 'sphinx.ext.viewcode', 41 | 'sphinx.ext.githubpages', 42 | 'sphinx.ext.napoleon', 43 | # 'sphinxcontrib.programoutput', 44 | # 'sphinxcontrib.restbuilder', 45 | ] 46 | 47 | # Add any paths that contain templates here, relative to this directory. 48 | templates_path = ['_templates'] 49 | 50 | # The suffix(es) of source filenames. 51 | # You can specify multiple suffix as a list of string: 52 | # 53 | source_suffix = ['.rst', '.md'] 54 | # source_suffix = '.rst' 55 | 56 | # The master toctree document. 57 | master_doc = 'index' 58 | 59 | # General information about the project. 60 | project = 'hive-python' 61 | copyright = '2020, pharesim' 62 | author = 'pharesim@protonmail.com' 63 | 64 | # The version info for the project you're documenting, acts as replacement for 65 | # |version| and |release|, also used in various other places throughout the 66 | # built documents. 67 | # 68 | # The short X.Y version. 69 | version = '0.10' 70 | # The full version, including alpha/beta/rc tags. 71 | release = '0.10' 72 | 73 | # The language for content autogenerated by Sphinx. Refer to documentation 74 | # for a list of supported languages. 75 | # 76 | # This is also used if you do content translation via gettext catalogs. 77 | # Usually you set "language" from the command line for these cases. 78 | language = None 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | # This patterns also effect to html_static_path and html_extra_path 83 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] 84 | 85 | # The name of the Pygments (syntax highlighting) style to use. 86 | pygments_style = 'sphinx' 87 | 88 | # If true, `todo` and `todoList` produce output, else they produce nothing. 89 | todo_include_todos = False 90 | 91 | # -- Options for HTML output ---------------------------------------------- 92 | 93 | # The theme to use for HTML and HTML Help pages. See the documentation for 94 | # a list of builtin themes. 95 | # 96 | html_theme = 'sphinx_rtd_theme' 97 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 98 | 99 | # Theme options are theme-specific and customize the look and feel of a theme 100 | # further. For a list of options available for each theme, see the 101 | # documentation. 102 | # 103 | # html_theme_options = {} 104 | 105 | # Add any paths that contain custom static files (such as style sheets) here, 106 | # relative to this directory. 
They are copied after the builtin static files, 107 | # so a file named "default.css" will overwrite the builtin "default.css". 108 | html_static_path = ['_static'] 109 | 110 | # -- Options for HTMLHelp output ------------------------------------------ 111 | 112 | # Output file base name for HTML help builder. 113 | htmlhelp_basename = 'hive-pythondoc' 114 | 115 | # -- Options for LaTeX output --------------------------------------------- 116 | 117 | latex_elements = { 118 | # The paper size ('letterpaper' or 'a4paper'). 119 | # 120 | # 'papersize': 'letterpaper', 121 | 122 | # The font size ('10pt', '11pt' or '12pt'). 123 | # 124 | # 'pointsize': '10pt', 125 | 126 | # Additional stuff for the LaTeX preamble. 127 | # 128 | # 'preamble': '', 129 | 130 | # Latex figure (float) alignment 131 | # 132 | # 'figure_align': 'htbp', 133 | } 134 | 135 | # Grouping the document tree into LaTeX files. List of tuples 136 | # (source start file, target name, title, 137 | # author, documentclass [howto, manual, or own class]). 138 | latex_documents = [ 139 | (master_doc, 'hive-python.tex', 'hive-python Documentation', 140 | 'pharesim@protonmail.com', 'manual'), 141 | ] 142 | 143 | # -- Options for manual page output --------------------------------------- 144 | 145 | # One entry per manual page. List of tuples 146 | # (source start file, name, description, authors, manual section). 147 | man_pages = [(master_doc, 'hive-python', 'hive-python Documentation', 148 | [author], 1)] 149 | 150 | # -- Options for Texinfo output ------------------------------------------- 151 | 152 | # Grouping the document tree into Texinfo files. List of tuples 153 | # (source start file, target name, title, author, 154 | # dir menu entry, description, category) 155 | texinfo_documents = [ 156 | (master_doc, 'hive-python', 'hive-python Documentation', author, 157 | 'hive-python', 'One line description of project.', 'Miscellaneous'), 158 | ] 159 | -------------------------------------------------------------------------------- /docs/core.rst: -------------------------------------------------------------------------------- 1 | Transactions and Accounts 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | 4 | Commit 5 | ====== 6 | 7 | The Commit class contains helper methods for `posting, voting, transferring funds, updating witnesses` and more. 8 | You don't have to use this class directly, all of its methods are accessible trough main ``Hive`` class. 9 | 10 | .. code-block:: python 11 | 12 | # accessing commit methods trough Hive 13 | h = Hive() 14 | h.commit.transfer(...) 15 | 16 | # is same as 17 | c = Commit(hive=Hive()) 18 | c.transfer(..) 19 | 20 | .. autoclass:: hive.hive.Commit 21 | :members: 22 | 23 | -------- 24 | 25 | 26 | TransactionBuilder 27 | ================== 28 | 29 | .. autoclass:: hive.transactionbuilder.TransactionBuilder 30 | :members: 31 | 32 | -------- 33 | 34 | Wallet 35 | ====== 36 | 37 | Wallet is a low-level utility. 38 | It could be used to create 3rd party cli and GUI wallets on top of ``hive-python``'s infrastructure. 39 | 40 | .. 
automodule:: hive.wallet 41 | :members: 42 | -------------------------------------------------------------------------------- /docs/dev-requirements.txt: -------------------------------------------------------------------------------- 1 | pytest 2 | pytest-runner 3 | pytest-pylint 4 | pep8 5 | yapf 6 | sphinx 7 | recommonmark 8 | sphinxcontrib-restbuilder 9 | sphinxcontrib-programoutput 10 | pytest-console-scripts 11 | sphinxcontrib-napoleon 12 | sphinx_rtd_theme 13 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | Examples 2 | ~~~~~~~~ 3 | 4 | Syncing Blockchain to a Flat File 5 | ================================= 6 | 7 | Here is a relatively simple script built on top of ``hive-python`` that will let you sync 8 | Hive blockchain into a simple file. 9 | You can run this script as many times as you like, and it will continue from the last block it synced. 10 | 11 | :: 12 | 13 | import json 14 | import os 15 | 16 | from hive.blockchain import Blockchain 17 | 18 | 19 | def get_last_line(filename): 20 | if os.path.isfile(filename): 21 | if os.stat(filename).st_size == 0: 22 | fp = open(filename) 23 | return fp.read() 24 | with open(filename, 'rb') as f: 25 | f.seek(-2, 2) 26 | while f.read(1) != b"\n": 27 | f.seek(-2, 1) 28 | return f.readline() 29 | 30 | 31 | def get_previous_block_num(block): 32 | if not block: 33 | return -1 34 | 35 | if type(block) == bytes: 36 | block = block.decode('utf-8') 37 | 38 | if type(block) == str: 39 | block = json.loads(block) 40 | 41 | return int(block['previous'][:8], base=16) 42 | 43 | 44 | def run(filename): 45 | b = Blockchain() 46 | # automatically resume from where we left off 47 | # previous + last + 1 48 | start_block = get_previous_block_num(get_last_line(filename)) + 2 49 | with open(filename, 'a+') as file: 50 | for block in b.stream_from(start_block=start_block, full_blocks=True): 51 | file.write(json.dumps(block, sort_keys=True) + '\n') 52 | 53 | 54 | if __name__ == '__main__': 55 | output_file = '/home/user/Downloads/hive.blockchain.json' 56 | try: 57 | run(output_file) 58 | except KeyboardInterrupt: 59 | pass 60 | 61 | 62 | To see how many blocks we currently have, we can simply perform a line count. 63 | 64 | :: 65 | 66 | 67 | wc -l hive.blockchain.json 68 | 69 | 70 | We can also inspect an arbitrary block, and pretty-print it. 71 | *Replace 10000 with desired block_number + 1.* 72 | 73 | :: 74 | 75 | sed '10000q;d' hive.blockchain.json | python -m json.tool 76 | 77 | 78 | 79 | Witness Killswitch 80 | ================== 81 | 82 | Occasionally things go wrong: software crashes, servers go down... 83 | One of the main roles for Hive witnesses is to reliably mint blocks. 84 | This script acts as a kill-switch to protect the network from missed blocks and 85 | prevents embarrassment when things go totally wrong. 
86 | 87 | :: 88 | 89 | import time 90 | from hive import Hive 91 | 92 | hive = Hive() 93 | 94 | # variables 95 | disable_after = 10 # disable witness after 10 blocks are missed 96 | witness_name = 'pharesim' 97 | witness_url = "http://pharesim.me" 98 | witness_props = { 99 | "account_creation_fee": "0.500 HIVE", 100 | "maximum_block_size": 65536, 101 | "hbd_interest_rate": 15, 102 | } 103 | 104 | 105 | def total_missed(): 106 | return hive.get_witness_by_account(witness_name)['total_missed'] 107 | 108 | 109 | if __name__ == '__main__': 110 | treshold = total_missed() + disable_after 111 | while True: 112 | if total_missed() > treshold: 113 | tx = hive.commit.witness_update( 114 | signing_key=None, 115 | url=witness_url, 116 | props=witness_props, 117 | account=witness_name) 118 | 119 | print("Witness %s Disabled!" % witness_name) 120 | quit(0) 121 | 122 | time.sleep(60) 123 | 124 | Batching Operations 125 | =================== 126 | 127 | Most of the time each transaction contains only one operation (for example, an upvote, a transfer or a new post). 128 | We can however cram multiple operations in a single transaction, to achieve better efficiency and size reduction. 129 | 130 | This script will also teach us how to create and sign transactions ourselves. 131 | 132 | :: 133 | 134 | from hive.transactionbuilder import TransactionBuilder 135 | from hivebase import operations 136 | 137 | # lets create 3 transfers, to 3 different people 138 | transfers = [ 139 | { 140 | 'from': 'richguy', 141 | 'to': 'recipient1', 142 | 'amount': '0.001 HIVE', 143 | 'memo': 'Test Transfer 1' 144 | }, 145 | { 146 | 'from': 'richguy', 147 | 'to': 'recipient2', 148 | 'amount': '0.002 HIVE', 149 | 'memo': 'Test Transfer 2' 150 | }, 151 | { 152 | 'from': 'richguy', 153 | 'to': 'recipient3', 154 | 'amount': '0.003 HIVE', 155 | 'memo': 'Test Transfer 3' 156 | } 157 | 158 | ] 159 | 160 | # now we can construct the transaction 161 | # we will set no_broadcast to True because 162 | # we don't want to really send funds, just testing. 163 | tb = TransactionBuilder(no_broadcast=True) 164 | 165 | # lets serialize our transfers into a format Hive can understand 166 | operations = [operations.Transfer(**x) for x in transfers] 167 | 168 | # tell TransactionBuilder to use our serialized transfers 169 | tb.appendOps(operations) 170 | 171 | # we need to tell TransactionBuilder about 172 | # everyone who needs to sign the transaction. 173 | # since all payments are made from `richguy`, 174 | # we just need to do this once 175 | tb.appendSigner('richguy', 'active') 176 | 177 | # sign the transaction 178 | tb.sign() 179 | 180 | # broadcast the transaction (publish to hive) 181 | # since we specified no_broadcast=True earlier 182 | # this method won't actually do anything 183 | tx = tb.broadcast() 184 | 185 | Simple Voting Bot 186 | ================= 187 | 188 | Here is a simple bot that will reciprocate by upvoting all new posts that mention us. 189 | Make sure to set ``whoami`` to your Hive username before running. 
190 | 191 | :: 192 | 193 | from hive.blockchain import Blockchain 194 | from hive.post import Post 195 | 196 | 197 | def run(): 198 | # upvote posts with 30% weight 199 | upvote_pct = 30 200 | whoami = 'my-hive-username' 201 | 202 | # stream comments as they are published on the blockchain 203 | # turn them into convenient Post objects while we're at it 204 | b = Blockchain() 205 | stream = map(Post, b.stream(filter_by=['comment'])) 206 | 207 | for post in stream: 208 | if post.json_metadata: 209 | mentions = post.json_metadata.get('users', []) 210 | 211 | # if post mentions more than 10 people its likely spam 212 | if mentions and len(mentions) < 10: 213 | post.upvote(weight=upvote_pct, voter=whoami) 214 | 215 | if __name__ == '__main__': 216 | try: 217 | run() 218 | except KeyboardInterrupt: 219 | pass 220 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to hive-python 2 | ======================= 3 | 4 | ``hive-python`` is an adaption of the official Steem library for Python for Hive. 5 | It comes with a BIP38 encrypted wallet and a practical CLI utility called `hivepy`. 6 | 7 | The Hive library has been designed to allow developers to easily access its routines and make use of the network without dealing with all the releated blockchain technology and cryptography. This library can be used to do anything that is allowed according to the Hive blockchain protocol. 8 | 9 | 10 | Installation 11 | ------------ 12 | 13 | To install the library, simply run: 14 | 15 | :: 16 | 17 | pip install -U hivepy 18 | 19 | 20 | Getting Started 21 | --------------- 22 | .. toctree:: 23 | :maxdepth: 1 24 | 25 | install 26 | cli 27 | examples 28 | 29 | 30 | 31 | Digging Deeper 32 | -------------- 33 | .. toctree:: 34 | :maxdepth: 2 35 | 36 | hive 37 | core 38 | tools 39 | low_level 40 | 41 | 42 | Other 43 | ----- 44 | 45 | * :ref:`genindex` 46 | -------------------------------------------------------------------------------- /docs/install.rst: -------------------------------------------------------------------------------- 1 | ************ 2 | Installation 3 | ************ 4 | 5 | `hive-python` requires Python 3.5 or higher. We don't recommend usage of Python that ships with OS. 6 | If you're just looking for a quick and easy cross-platform solution, feel free to install Python 3.x via easy to use 7 | `Anaconda `_ installer. 
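If you are not sure which interpreter you have, a quick way to confirm the 3.5+ requirement (the interpreter name may differ on your system):

::

    $ python3 -c "import sys; print(sys.version_info >= (3, 5))"
    True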
8 | 9 | 10 | Afterwards, you can install `hive-python` with `pip`: 11 | 12 | :: 13 | 14 | $ pip install hive 15 | 16 | You can also perform the installation manually using `setup.py`: 17 | 18 | :: 19 | 20 | $ git clone https://github.com/pharesim/hive-python 21 | $ cd hive-python 22 | $ make install 23 | 24 | Upgrade 25 | ####### 26 | 27 | :: 28 | 29 | $ pip install -U hive 30 | -------------------------------------------------------------------------------- /docs/issue_template.md: -------------------------------------------------------------------------------- 1 | ### Version of Python you are running ### 2 | 3 | 4 | 5 | ### Version of hive-python you are running ### 6 | 7 | 8 | 9 | ### Expected Behavior ### 10 | 11 | 12 | 13 | ### Actual Behavior ### 14 | 15 | 16 | 17 | ### Steps to reproduce ### 18 | 19 | 20 | 21 | ### Stack Trace ### 22 | -------------------------------------------------------------------------------- /docs/low_level.rst: -------------------------------------------------------------------------------- 1 | Low Level 2 | ~~~~~~~~~ 3 | 4 | HttpClient 5 | ---------- 6 | 7 | A fast ``urllib3`` based HTTP client that features: 8 | 9 | * Connection Pooling 10 | * Concurrent Processing 11 | * Automatic Node Failover 12 | 13 | The functionality of ``HttpClient`` is encapsulated by ``Hive`` class. You shouldn't be using ``HttpClient`` directly, 14 | unless you know exactly what you're doing. 15 | 16 | .. autoclass:: hivebase.http_client.HttpClient 17 | :members: 18 | 19 | ------------- 20 | 21 | hivebase 22 | --------- 23 | 24 | HiveBase contains various primitives for building higher level abstractions. 25 | This module should only be used by library developers or people with deep domain knowledge. 26 | 27 | **Warning:** 28 | Not all methods are documented. Please see source. 29 | 30 | .. image:: https://i.imgur.com/A9urMG9.png 31 | 32 | Account 33 | ======= 34 | 35 | .. automodule:: hivebase.account 36 | :members: 37 | 38 | -------- 39 | 40 | Base58 41 | ====== 42 | 43 | .. automodule:: hivebase.base58 44 | :members: 45 | 46 | -------- 47 | 48 | Bip38 49 | ===== 50 | 51 | .. automodule:: hivebase.bip38 52 | :members: 53 | 54 | 55 | -------- 56 | 57 | Memo 58 | ==== 59 | 60 | .. automodule::hivebase.memo 61 | :members: 62 | 63 | 64 | -------- 65 | 66 | Operations 67 | ========== 68 | 69 | .. automodule:: hivebase.operations 70 | :members: 71 | 72 | 73 | -------- 74 | 75 | Transactions 76 | ============ 77 | 78 | .. automodule:: hivebase.transactions 79 | :members: 80 | 81 | 82 | 83 | -------- 84 | 85 | Types 86 | ===== 87 | 88 | .. automodule:: hivebase.types 89 | :members: 90 | 91 | -------- 92 | 93 | Exceptions 94 | ========== 95 | 96 | .. automodule:: hivebase.exceptions 97 | :members: 98 | -------------------------------------------------------------------------------- /docs/steem.rst: -------------------------------------------------------------------------------- 1 | Hive - Your Starting Point 2 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~ 3 | 4 | Quick Start 5 | ----------- 6 | You can start using the library with just a few lines of code, as seen in this quick example: 7 | 8 | .. code-block:: python 9 | 10 | # first, we initialize Hive class 11 | from hive import Hive 12 | h = Hive() 13 | 14 | .. 
code-block:: python 15 | 16 | # check @pharesim's balance 17 | >>> h.get_account('pharesim')['hbd_balance'] 18 | '980.211 HBD' 19 | 20 | # lets send $1.0 HBD to @pharesim 21 | >>> h.commit.transfer(to='pharesim', amount=1, asset='HBD', account='furion') 22 | {'expiration': '2017-03-12T17:54:43', 23 | 'extensions': [], 24 | 'operations': [['transfer', 25 | {'amount': '1.000 HBD', 'from': 'furion', 'memo': '', 'to': 'pharesim'}]], 26 | 'ref_block_num': 23008, 27 | 'ref_block_prefix': 961695589, 28 | 'signatures': ['1f1322be9ca0c22b27c0385c929c9863901ac78cdaedea2162024ea040e22c4f8b542c02d96cbc761cbe4a188a932bc715bb7bcaf823b6739a44bb29fa85f96d2f']} 29 | 30 | # yup, its there 31 | >>> h.get_account('pharesim')['hbd_balance'] 32 | '981.211 HBD' 33 | 34 | Importing your Hive Account 35 | ============================ 36 | `hive-python` comes with a BIP38 encrypted wallet, which holds your private keys. 37 | 38 | 39 | 40 | Alternatively, you can also pass required WIF's to ``Hive()`` initializer. 41 | 42 | :: 43 | 44 | from hive import Hive 45 | h = Hive(keys=['', '']) 46 | 47 | Using the encrypted wallet is however a recommended way. 48 | 49 | Please check :doc:`cli` to learn how to set up the wallet. 50 | 51 | Interfacing with hived 52 | ======================= 53 | ``Hive()`` inherits API methods from ``Hived``, which can be called like so: 54 | 55 | .. code-block:: python 56 | 57 | h = Hive() 58 | 59 | h.get_account('pharesim') 60 | h.get_block(8888888) 61 | h.get_content('author', 'permlink') 62 | h.broadcast_transaction(...) 63 | # and many more 64 | 65 | You can see the list of available methods by calling ``help(Hive)``. 66 | If a method is not available trough the Python API, we can call it manually using ``h.exec()``: 67 | 68 | .. code-block:: python 69 | 70 | h = Hive() 71 | 72 | # this call 73 | h.get_followers('furion', 'abit', 'blog', 10) 74 | 75 | # is same as 76 | h.exec('get_followers', 77 | 'furion', 'abit', 'blog', 10, 78 | api='follow_api') 79 | 80 | Commit and Wallet 81 | ================= 82 | ``Hive()`` comes equipped with ``Commit`` and ``Wallet``, accessible via dot-notation. 83 | 84 | .. code-block:: python 85 | 86 | h = Hive() 87 | 88 | # accessing Commit methods 89 | h.commit.transfer(...) 90 | 91 | # accessing Wallet methods 92 | h.wallet.get_active_key_for_account(...) 93 | 94 | Please check :doc:`core` documentation to learn more. 95 | 96 | 97 | Hive 98 | ----- 99 | 100 | As displayed in the `Quick Start` above, ``Hive`` is the main class of this library. It acts as a gateway to other components, such as 101 | ``Hived``, ``Commit``, ``Wallet`` and ``HttpClient``. 102 | 103 | Any arguments passed to ``Hive`` as ``kwargs`` will naturally flow to sub-components. For example, if we initialize 104 | Hive with ``hive = Hive(no_broadcast=True)``, the ``Commit`` instance is configured to not broadcast any transactions. 105 | This is very useful for testing. 106 | 107 | .. autoclass:: hive.hive.Hive 108 | :members: 109 | 110 | 111 | Hived API 112 | ---------- 113 | 114 | Hived contains API generating utilities. ``Hived``'s methods will be automatically available to ``Hive()`` classes. 115 | See :doc:`hive`. 116 | 117 | .. _hived-reference: 118 | 119 | .. automodule:: hive.hived 120 | :members: 121 | 122 | 123 | Setting Custom Nodes 124 | -------------------- 125 | 126 | There are 3 ways in which you can set custom ``hived`` nodes to use with ``hive-python``. 127 | 128 | **1. 
Global, permanent override:** 129 | You can use ``hivepy set nodes`` command to set one or more node URLs. The nodes need to be separated with comma (,) 130 | and shall contain no whitespaces. 131 | 132 | :: 133 | 134 | ~ % hivepy config 135 | +---------------------+--------+ 136 | | Key | Value | 137 | +---------------------+--------+ 138 | | default_vote_weight | 100 | 139 | | default_account | furion | 140 | +---------------------+--------+ 141 | ~ % hivepy set nodes https://api.hive.blog 142 | ~ % hivepy config 143 | +---------------------+-------------------------------+ 144 | | Key | Value | 145 | +---------------------+-------------------------------+ 146 | | default_account | furion | 147 | | default_vote_weight | 100 | 148 | | nodes | https://api.hive.blog | 149 | +---------------------+-------------------------------+ 150 | ~ % hivepy set nodes https://api.hive.blog,https://api.hive.network 151 | ~ % hivepy config 152 | +---------------------+----------------------------------------------------------+ 153 | | Key | Value | 154 | +---------------------+----------------------------------------------------------+ 155 | | nodes | https://api.hive.blog,https://api.hive.network | 156 | | default_vote_weight | 100 | 157 | | default_account | furion | 158 | +---------------------+----------------------------------------------------------+ 159 | ~ % 160 | 161 | 162 | To reset this config run ``hivepy set nodes ''``. 163 | 164 | **2. For Current Python Process:** 165 | You can override default `Hived` instance for current Python process, by overriding the `instance` singleton. 166 | You should execute the following code when your program starts, and from there on out, all classes (Blockchain, Account, 167 | Post, etc) will use this as their default instance. 168 | 169 | :: 170 | 171 | from hive.hived import Hived 172 | from hive.instance import set_shared_hived_instance 173 | 174 | hived_nodes = [ 175 | 'https://api.hive.blog', 176 | 'https://api.hive.network', 177 | ] 178 | set_shared_hived_instance(Hived(nodes=hived_nodes)) 179 | 180 | 181 | **3. For Specific Class Instance:** 182 | Every class that depends on hived comes with a ``hived_instance`` argument. 183 | You can override said hived instance, for any class you're initializing (and its children). 184 | 185 | This is useful when you want to contain a modified ``hived`` instance to an explicit piece of code (ie. for testing). 186 | 187 | :: 188 | 189 | from hive.hived import Hived 190 | from hive.account import Account 191 | from hive.Blockchain import Blockchain 192 | 193 | hived_nodes = [ 194 | 'https://api.hive.blog', 195 | 'https://api.hive.network', 196 | ] 197 | custom_instance = Hived(nodes=hived_nodes) 198 | 199 | account = Account('furion', hived_instance=custom_instance) 200 | blockchain = Blockchain('head', hived_instance=custom_instance) 201 | -------------------------------------------------------------------------------- /docs/tools.rst: -------------------------------------------------------------------------------- 1 | Tools 2 | ~~~~~ 3 | 4 | ``hive-python`` comes with batteries included. 5 | 6 | This page lists a collection of convenient tools at your disposal. 7 | 8 | 9 | Account 10 | ======= 11 | 12 | .. autoclass:: hive.account.Account 13 | :members: 14 | 15 | -------- 16 | 17 | Amount 18 | ====== 19 | 20 | .. autoclass:: hive.amount.Amount 21 | :members: 22 | 23 | -------- 24 | 25 | Blockchain 26 | ========== 27 | 28 | .. 
autoclass:: hive.blockchain.Blockchain 29 | :members: 30 | 31 | -------- 32 | 33 | Blog 34 | ==== 35 | 36 | .. autoclass:: hive.blog.Blog 37 | :members: 38 | 39 | -------- 40 | 41 | Converter 42 | ========= 43 | 44 | .. autoclass:: hive.converter.Converter 45 | :members: 46 | 47 | -------- 48 | 49 | Dex 50 | === 51 | 52 | .. autoclass:: hive.dex.Dex 53 | :members: 54 | 55 | -------- 56 | 57 | Post 58 | ==== 59 | 60 | .. autoclass:: hive.post.Post 61 | :members: 62 | 63 | -------- 64 | 65 | 66 | 67 | hive.utils 68 | =========== 69 | 70 | .. automodule:: hive.utils 71 | :members: 72 | 73 | -------- 74 | -------------------------------------------------------------------------------- /hive/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | from .hive import Hive 3 | 4 | __version__ = '0.10' 5 | -------------------------------------------------------------------------------- /hive/account.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import math 3 | import time 4 | 5 | from funcy.colls import walk_values, get_in 6 | from funcy.seqs import take 7 | from funcy import rpartial 8 | from hivebase.exceptions import AccountDoesNotExistsException 9 | from toolz import dissoc 10 | 11 | from .amount import Amount 12 | from .blockchain import Blockchain 13 | from .converter import Converter 14 | from .instance import shared_hived_instance 15 | from .utils import parse_time, json_expand 16 | 17 | 18 | class Account(dict): 19 | """ This class allows to easily access Account data 20 | 21 | :param str account_name: Name of the account 22 | :param Hived hived_instance: Hived() instance to use when 23 | accessing a RPC 24 | 25 | """ 26 | 27 | def __init__(self, account_name, hived_instance=None): 28 | self.hived = hived_instance or shared_hived_instance() 29 | self.name = account_name 30 | 31 | # caches 32 | self._converter = None 33 | 34 | self.refresh() 35 | 36 | def refresh(self): 37 | account = self.hived.get_account(self.name) 38 | if not account: 39 | raise AccountDoesNotExistsException 40 | 41 | # load json_metadata 42 | account = json_expand(account, 'json_metadata') 43 | super(Account, self).__init__(account) 44 | 45 | def __getitem__(self, key): 46 | return super(Account, self).__getitem__(key) 47 | 48 | def items(self): 49 | return super(Account, self).items() 50 | 51 | @property 52 | def converter(self): 53 | if not self._converter: 54 | self._converter = Converter(self.hived) 55 | return self._converter 56 | 57 | @property 58 | def profile(self): 59 | try: 60 | return get_in(self, ['json_metadata', 'profile'], default={}) 61 | except TypeError: 62 | pass 63 | 64 | return {} 65 | 66 | @property 67 | def sp(self): 68 | vests = Amount(self['vesting_shares']).amount 69 | return round(self.converter.vests_to_hp(vests), 3) 70 | 71 | @property 72 | def rep(self): 73 | return self.reputation() 74 | 75 | @property 76 | def balances(self): 77 | return self.get_balances() 78 | 79 | def get_balances(self): 80 | available = { 81 | 'HIVE': Amount(self['balance']).amount, 82 | 'HBD': Amount(self['hbd_balance']).amount, 83 | 'VESTS': Amount(self['vesting_shares']).amount, 84 | } 85 | 86 | savings = { 87 | 'HIVE': Amount(self['savings_balance']).amount, 88 | 'HBD': Amount(self['savings_hbd_balance']).amount, 89 | } 90 | 91 | rewards = { 92 | 'HIVE': Amount(self['reward_hive_balance']).amount, 93 | 'HBD': Amount(self['reward_hbd_balance']).amount, 94 | 'VESTS': 
Amount(self['reward_vesting_balance']).amount, 95 | } 96 | 97 | totals = { 98 | 'HIVE': 99 | sum([available['HIVE'], savings['HIVE'], rewards['HIVE']]), 100 | 'HBD': 101 | sum([available['HBD'], savings['HBD'], rewards['HBD']]), 102 | 'VESTS': 103 | sum([available['VESTS'], rewards['VESTS']]), 104 | } 105 | 106 | total = walk_values(rpartial(round, 3), totals) 107 | 108 | return { 109 | 'available': available, 110 | 'savings': savings, 111 | 'rewards': rewards, 112 | 'total': total, 113 | } 114 | 115 | def reputation(self, precision=2): 116 | rep = int(self['reputation']) 117 | if rep == 0: 118 | return 25 119 | score = (math.log10(abs(rep)) - 9) * 9 + 25 120 | if rep < 0: 121 | score = 50 - score 122 | return round(score, precision) 123 | 124 | def voting_power(self): 125 | return self['voting_power'] / 100 126 | 127 | def get_followers(self): 128 | return [ 129 | x['follower'] for x in self._get_followers(direction="follower") 130 | ] 131 | 132 | def get_following(self): 133 | return [ 134 | x['following'] for x in self._get_followers(direction="following") 135 | ] 136 | 137 | def _get_followers(self, direction="follower", last_user=""): 138 | if direction == "follower": 139 | 140 | followers = self.hived.get_followers(self.name, last_user, "blog", 141 | 100) 142 | elif direction == "following": 143 | followers = self.hived.get_following(self.name, last_user, "blog", 144 | 100) 145 | if len(followers) >= 100: 146 | followers += self._get_followers( 147 | direction=direction, last_user=followers[-1][direction])[1:] 148 | return followers 149 | 150 | def has_voted(self, post): 151 | active_votes = {v["voter"]: v for v in getattr(post, "active_votes")} 152 | return self.name in active_votes 153 | 154 | def curation_stats(self): 155 | trailing_24hr_t = time.time() - datetime.timedelta( 156 | hours=24).total_seconds() 157 | trailing_7d_t = time.time() - datetime.timedelta( 158 | days=7).total_seconds() 159 | 160 | reward_24h = 0.0 161 | reward_7d = 0.0 162 | 163 | for reward in take( 164 | 5000, self.history_reverse(filter_by="curation_reward")): 165 | 166 | timestamp = parse_time(reward['timestamp']).timestamp() 167 | if timestamp > trailing_7d_t: 168 | reward_7d += Amount(reward['reward']).amount 169 | 170 | if timestamp > trailing_24hr_t: 171 | reward_24h += Amount(reward['reward']).amount 172 | 173 | reward_7d = self.converter.vests_to_hp(reward_7d) 174 | reward_24h = self.converter.vests_to_hp(reward_24h) 175 | return { 176 | "24hr": reward_24h, 177 | "7d": reward_7d, 178 | "avg": reward_7d / 7, 179 | } 180 | 181 | def virtual_op_count(self): 182 | try: 183 | last_item = self.hived.get_account_history(self.name, -1, 1)[0][0] 184 | except IndexError: 185 | return 0 186 | else: 187 | return last_item 188 | 189 | def get_account_votes(self): 190 | return self.hived.get_account_votes(self.name) 191 | 192 | def get_withdraw_routes(self): 193 | return self.hived.get_withdraw_routes(self.name, 'all') 194 | 195 | def get_conversion_requests(self): 196 | return self.hived.get_conversion_requests(self.name) 197 | 198 | @staticmethod 199 | def filter_by_date(items, start_time, end_time=None): 200 | start_time = parse_time(start_time).timestamp() 201 | if end_time: 202 | end_time = parse_time(end_time).timestamp() 203 | else: 204 | end_time = time.time() 205 | 206 | filtered_items = [] 207 | for item in items: 208 | if 'time' in item: 209 | item_time = item['time'] 210 | elif 'timestamp' in item: 211 | item_time = item['timestamp'] 212 | timestamp = parse_time(item_time).timestamp() 213 | if 
end_time > timestamp > start_time: 214 | filtered_items.append(item) 215 | 216 | return filtered_items 217 | 218 | def export(self, load_extras=True): 219 | """ This method returns a dictionary that is type-safe to store as 220 | JSON or in a database. 221 | 222 | :param bool load_extras: Fetch extra information related to the 223 | account (this might take a while). 224 | """ 225 | extras = dict() 226 | if load_extras: 227 | followers = self.get_followers() 228 | following = self.get_following() 229 | extras = { 230 | "followers": followers, 231 | "followers_count": len(followers), 232 | "following": following, 233 | "following_count": len(following), 234 | "curation_stats": self.curation_stats(), 235 | "withdrawal_routes": self.get_withdraw_routes(), 236 | "conversion_requests": self.get_conversion_requests(), 237 | } 238 | 239 | composedDict = self.copy() 240 | composedDict.update(extras) 241 | composedDict.update( 242 | { 243 | "profile": self.profile, 244 | "sp": self.sp, 245 | "rep": self.rep, 246 | "balances": self.get_balances(), 247 | } 248 | ) 249 | 250 | return composedDict 251 | 252 | def get_account_history(self, 253 | index, 254 | limit, 255 | start=None, 256 | stop=None, 257 | order=-1, 258 | filter_by=None, 259 | raw_output=False): 260 | """ A generator over hived.get_account_history. 261 | 262 | It offers serialization, filtering and fine grained iteration control. 263 | 264 | Args: 265 | index (int): start index for get_account_history 266 | limit (int): How many items are we interested in. 267 | start (int): (Optional) skip items until this index 268 | stop (int): (Optional) stop iteration early at this index 269 | order: (1, -1): 1 for chronological, -1 for reverse order 270 | filter_by (str, list): filter out all but these operations 271 | raw_output (bool): (Defaults to False). If True, return history in 272 | hived format (unchanged). 273 | """ 274 | history = self.hived.get_account_history(self.name, index, limit) 275 | for item in history[::order]: 276 | index, event = item 277 | 278 | # start and stop utilities for chronological generator 279 | if start and index < start: 280 | continue 281 | 282 | if stop and index > stop: 283 | return 284 | 285 | op_type, op = event['op'] 286 | block_props = dissoc(event, 'op') 287 | 288 | def construct_op(account_name): 289 | # verbatim output from hived 290 | if raw_output: 291 | return item 292 | 293 | # index can change during reindexing in 294 | # future hard-forks. Thus we cannot take it for granted. 295 | immutable = op.copy() 296 | immutable.update(block_props) 297 | immutable.update({ 298 | 'account': account_name, 299 | 'type': op_type, 300 | }) 301 | _id = Blockchain.hash_op(immutable) 302 | immutable.update({ 303 | '_id': _id, 304 | 'index': index, 305 | }) 306 | return immutable 307 | 308 | if filter_by is None: 309 | yield construct_op(self.name) 310 | else: 311 | if type(filter_by) is list: 312 | if op_type in filter_by: 313 | yield construct_op(self.name) 314 | 315 | if type(filter_by) is str: 316 | if op_type == filter_by: 317 | yield construct_op(self.name) 318 | 319 | def history(self, 320 | filter_by=None, 321 | start=0, 322 | batch_size=1000, 323 | raw_output=False): 324 | """ Stream account history in chronological order. 
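        A usage sketch (the account name is a placeholder), walking an
        account's transfers from oldest to newest:

            acct = Account('some-account')
            for op in acct.history(filter_by='transfer'):
                print(op['timestamp'], op['amount'], op['to'])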
325 | """ 326 | max_index = self.virtual_op_count() 327 | if not max_index: 328 | return 329 | 330 | start_index = start + batch_size 331 | i = start_index 332 | while i < max_index + batch_size: 333 | for account_history in self.get_account_history( 334 | index=i, 335 | limit=batch_size, 336 | start=i - batch_size, 337 | stop=max_index, 338 | order=1, 339 | filter_by=filter_by, 340 | raw_output=raw_output, 341 | ): 342 | yield account_history 343 | i += (batch_size + 1) 344 | 345 | def history_reverse(self, 346 | filter_by=None, 347 | batch_size=1000, 348 | raw_output=False): 349 | """ Stream account history in reverse chronological order. 350 | """ 351 | start_index = self.virtual_op_count() 352 | if not start_index: 353 | return 354 | 355 | i = start_index 356 | while i > 0: 357 | if i - batch_size < 0: 358 | batch_size = i 359 | for account_history in self.get_account_history( 360 | index=i, 361 | limit=batch_size, 362 | order=-1, 363 | filter_by=filter_by, 364 | raw_output=raw_output, 365 | ): 366 | yield account_history 367 | i -= (batch_size + 1) 368 | -------------------------------------------------------------------------------- /hive/amount.py: -------------------------------------------------------------------------------- 1 | class Amount(dict): 2 | """ This class helps deal and calculate with the different assets on the 3 | chain. 4 | 5 | :param str amountString: Amount string as used by the backend 6 | (e.g. "10 HBD") 7 | """ 8 | 9 | def __init__(self, amount_string="0 HBD"): 10 | if isinstance(amount_string, Amount): 11 | self["amount"] = amount_string["amount"] 12 | self["asset"] = amount_string["asset"] 13 | elif isinstance(amount_string, str): 14 | self["amount"], self["asset"] = amount_string.split(" ") 15 | elif isinstance(amount_string, unicode): 16 | self["amount"], self["asset"] = amount_string.split(" ") 17 | else: 18 | raise ValueError( 19 | "Need an instance of 'Amount' or a string with amount " + 20 | "and asset") 21 | 22 | self["amount"] = float(self["amount"]) 23 | 24 | @property 25 | def amount(self): 26 | return self["amount"] 27 | 28 | @property 29 | def symbol(self): 30 | return self["asset"] 31 | 32 | @property 33 | def asset(self): 34 | return self["asset"] 35 | 36 | def __str__(self): 37 | # HIVE 38 | if self["asset"] == "HBD": 39 | prec = 3 40 | elif self["asset"] == "HIVE": 41 | prec = 3 42 | elif self["asset"] == "VESTS": 43 | prec = 6 44 | 45 | # default 46 | else: 47 | prec = 6 48 | return "{:.{prec}f} {}".format( 49 | self["amount"], self["asset"], prec=prec) 50 | 51 | def __float__(self): 52 | return self["amount"] 53 | 54 | def __int__(self): 55 | return int(self["amount"]) 56 | 57 | def __add__(self, other): 58 | a = Amount(self) 59 | if isinstance(other, Amount): 60 | assert other["asset"] == self["asset"] 61 | a["amount"] += other["amount"] 62 | else: 63 | a["amount"] += float(other) 64 | return a 65 | 66 | def __sub__(self, other): 67 | a = Amount(self) 68 | if isinstance(other, Amount): 69 | assert other["asset"] == self["asset"] 70 | a["amount"] -= other["amount"] 71 | else: 72 | a["amount"] -= float(other) 73 | return a 74 | 75 | def __mul__(self, other): 76 | a = Amount(self) 77 | if isinstance(other, Amount): 78 | a["amount"] *= other["amount"] 79 | else: 80 | a["amount"] *= other 81 | return a 82 | 83 | def __floordiv__(self, other): 84 | a = Amount(self) 85 | if isinstance(other, Amount): 86 | raise Exception("Cannot divide two Amounts") 87 | else: 88 | a["amount"] //= other 89 | return a 90 | 91 | def __div__(self, other): 92 | a = 
Amount(self) 93 | if isinstance(other, Amount): 94 | raise Exception("Cannot divide two Amounts") 95 | else: 96 | a["amount"] /= other 97 | return a 98 | 99 | def __mod__(self, other): 100 | a = Amount(self) 101 | if isinstance(other, Amount): 102 | a["amount"] %= other["amount"] 103 | else: 104 | a["amount"] %= other 105 | return a 106 | 107 | def __pow__(self, other): 108 | a = Amount(self) 109 | if isinstance(other, Amount): 110 | a["amount"] **= other["amount"] 111 | else: 112 | a["amount"] **= other 113 | return a 114 | 115 | def __iadd__(self, other): 116 | if isinstance(other, Amount): 117 | assert other["asset"] == self["asset"] 118 | self["amount"] += other["amount"] 119 | else: 120 | self["amount"] += other 121 | return self 122 | 123 | def __isub__(self, other): 124 | if isinstance(other, Amount): 125 | assert other["asset"] == self["asset"] 126 | self["amount"] -= other["amount"] 127 | else: 128 | self["amount"] -= other 129 | return self 130 | 131 | def __imul__(self, other): 132 | if isinstance(other, Amount): 133 | self["amount"] *= other["amount"] 134 | else: 135 | self["amount"] *= other 136 | return self 137 | 138 | def __idiv__(self, other): 139 | if isinstance(other, Amount): 140 | assert other["asset"] == self["asset"] 141 | return self["amount"] / other["amount"] 142 | else: 143 | self["amount"] /= other 144 | return self 145 | 146 | def __ifloordiv__(self, other): 147 | if isinstance(other, Amount): 148 | self["amount"] //= other["amount"] 149 | else: 150 | self["amount"] //= other 151 | return self 152 | 153 | def __imod__(self, other): 154 | if isinstance(other, Amount): 155 | self["amount"] %= other["amount"] 156 | else: 157 | self["amount"] %= other 158 | return self 159 | 160 | def __ipow__(self, other): 161 | self["amount"] **= other 162 | return self 163 | 164 | def __lt__(self, other): 165 | if isinstance(other, Amount): 166 | assert other["asset"] == self["asset"] 167 | return self["amount"] < other["amount"] 168 | else: 169 | return self["amount"] < float(other or 0) 170 | 171 | def __le__(self, other): 172 | if isinstance(other, Amount): 173 | assert other["asset"] == self["asset"] 174 | return self["amount"] <= other["amount"] 175 | else: 176 | return self["amount"] <= float(other or 0) 177 | 178 | def __eq__(self, other): 179 | if isinstance(other, Amount): 180 | assert other["asset"] == self["asset"] 181 | return self["amount"] == other["amount"] 182 | else: 183 | return self["amount"] == float(other or 0) 184 | 185 | def __ne__(self, other): 186 | if isinstance(other, Amount): 187 | assert other["asset"] == self["asset"] 188 | return self["amount"] != other["amount"] 189 | else: 190 | return self["amount"] != float(other or 0) 191 | 192 | def __ge__(self, other): 193 | if isinstance(other, Amount): 194 | assert other["asset"] == self["asset"] 195 | return self["amount"] >= other["amount"] 196 | else: 197 | return self["amount"] >= float(other or 0) 198 | 199 | def __gt__(self, other): 200 | if isinstance(other, Amount): 201 | assert other["asset"] == self["asset"] 202 | return self["amount"] > other["amount"] 203 | else: 204 | return self["amount"] > float(other or 0) 205 | 206 | __repr__ = __str__ 207 | __truediv__ = __div__ 208 | __truemul__ = __mul__ 209 | 210 | 211 | if __name__ == "__main__": 212 | a = Amount("2 HBD") 213 | b = Amount("9 HBD") 214 | print(a + b) 215 | print(b) 216 | b **= 2 217 | b += .5 218 | print(b) 219 | print(b > a) 220 | 221 | c = Amount("100 HIVE") 222 | print(c * .10) 223 | 224 | # print(a + c) 225 | # print(a < c) 226 | 
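A side note on the ``Amount`` constructor above: the ``isinstance(amount_string, unicode)`` branch only applies to Python 2; under Python 3 the name ``unicode`` does not exist, so that branch raises ``NameError`` instead of the intended ``ValueError`` whenever a non-string, non-``Amount`` value is passed (string inputs are unaffected, because the ``str`` branch matches first). A minimal, version-agnostic sketch of the same parsing step, assuming the usual ``"<amount> <ASSET>"`` format (the helper name is illustrative, not part of the library):

def parse_amount_string(value):
    # Accepts e.g. "10.000 HBD" and returns (10.0, "HBD").
    try:
        amount, asset = value.split(" ")
    except (AttributeError, ValueError):
        raise ValueError("Need a string with amount and asset, e.g. '10.000 HBD'")
    return float(amount), asset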
-------------------------------------------------------------------------------- /hive/block.py: -------------------------------------------------------------------------------- 1 | from hivebase.exceptions import BlockDoesNotExistsException 2 | 3 | from .instance import shared_hived_instance 4 | from .utils import parse_time 5 | 6 | 7 | class Block(dict): 8 | """ Read a single block from the chain 9 | 10 | :param int block: block number 11 | :param Hived hived_instance: Hived() instance to use when 12 | accessing a RPC 13 | 14 | """ 15 | 16 | def __init__(self, block, hived_instance=None): 17 | self.hived = hived_instance or shared_hived_instance() 18 | self.block = block 19 | 20 | if isinstance(block, Block): 21 | super(Block, self).__init__(block) 22 | else: 23 | self.refresh() 24 | 25 | def refresh(self): 26 | block = self.hived.get_block(self.block) 27 | if not block: 28 | raise BlockDoesNotExistsException 29 | super(Block, self).__init__(block) 30 | 31 | def __getitem__(self, key): 32 | return super(Block, self).__getitem__(key) 33 | 34 | def items(self): 35 | return super(Block, self).items() 36 | 37 | def time(self): 38 | return parse_time(self['timestamp']) 39 | -------------------------------------------------------------------------------- /hive/blockchain.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import json 3 | import time 4 | import warnings 5 | 6 | from .instance import shared_hived_instance, hve 7 | from .utils import parse_time, compat_bytes 8 | 9 | import logging 10 | 11 | logger = logging.getLogger(__name__) 12 | 13 | 14 | class Blockchain(object): 15 | """ Access the blockchain and read data from it. 16 | 17 | Args: 18 | hived_instance (Hived): Hived() instance to use when accessing a RPC 19 | mode (str): `irreversible` or `head`. `irreversible` is default. 20 | """ 21 | 22 | def __init__(self, hived_instance=None, mode="irreversible"): 23 | self.hive = hived_instance or shared_hived_instance() 24 | 25 | if mode == "irreversible": 26 | self.mode = 'last_irreversible_block_num' 27 | elif mode == "head": 28 | self.mode = "head_block_number" 29 | else: 30 | raise ValueError("invalid value for 'mode'!") 31 | 32 | def info(self): 33 | """ This call returns the *dynamic global properties* 34 | """ 35 | return self.hive.get_dynamic_global_properties() 36 | 37 | def config(self): 38 | return self.hive.get_config() 39 | 40 | def get_current_block_num(self): 41 | """ This call returns the current block 42 | """ 43 | return self.info().get(self.mode) 44 | 45 | def get_current_block(self): 46 | """ This call returns the current block 47 | """ 48 | return self.hive.get_block(self.get_current_block_num()) 49 | 50 | def stream_from(self, 51 | start_block=None, 52 | end_block=None, 53 | batch_operations=False, 54 | full_blocks=False, 55 | **kwargs): 56 | """ This call yields raw blocks or operations depending on 57 | ``full_blocks`` param. 58 | 59 | By default, this generator will yield operations, one by one. 60 | You can choose to yield lists of operations, batched to contain 61 | all operations for each block with ``batch_operations=True``. 62 | You can also yield full blocks instead, with ``full_blocks=True``. 63 | 64 | Args: start_block (int): Block to start with. If not provided, current 65 | (head) block is used. 66 | 67 | end_block (int): Stop iterating at this block. If not provided, this 68 | generator will run forever (streaming mode). 
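(Note: once ``end_block`` is passed, the stream is terminated internally with ``raise StopIteration``; under PEP 479, i.e. Python 3.7 and later, that surfaces to the caller as a ``RuntimeError`` rather than a silent stop, so a plain ``return`` is the safer way to end the generator on modern interpreters.)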
69 | 70 | batch_operations (bool): (Defaults to False) Rather than yielding 71 | operations one by one, yield a list of all operations for each block. 72 | 73 | full_blocks (bool): (Defaults to False) Rather than yielding 74 | operations, return raw, unedited blocks as provided by hived. This 75 | mode will NOT include virtual operations. 76 | 77 | """ 78 | 79 | _ = kwargs # we need this 80 | # Let's find out how often blocks are generated! 81 | block_interval = self.config().get("HIVE_BLOCK_INTERVAL") 82 | 83 | if not start_block: 84 | start_block = self.get_current_block_num() 85 | 86 | while True: 87 | head_block = self.get_current_block_num() 88 | 89 | for block_num in range(start_block, head_block + 1): 90 | if end_block and block_num > end_block: 91 | raise StopIteration( 92 | "Reached stop block at: #%s" % end_block) 93 | 94 | if full_blocks: 95 | yield self.hive.get_block(block_num) 96 | elif batch_operations: 97 | yield self.hive.get_ops_in_block(block_num, False) 98 | else: 99 | for ops in self.hive.get_ops_in_block(block_num, False): 100 | yield ops 101 | 102 | # next round 103 | start_block = head_block + 1 104 | time.sleep(block_interval) 105 | 106 | def reliable_stream(self, 107 | start_block=None, 108 | block_interval=None, 109 | update_interval=False, 110 | batch_operations=False, 111 | full_blocks=False, 112 | timeout=None, 113 | **kwargs): 114 | """ 115 | 116 | A version of stream_from() intended for use in services that NEED 117 | reliable (nonstop) streaming 118 | 119 | By default, works same as stream_from() but will also keep trying 120 | until getting a response from hived, allowing catching up after 121 | server downtime. 122 | 123 | Warnings: To ensure reliability, this method does some weird 124 | none-standard things with the hived client 125 | 126 | Args: 127 | 128 | start_block (int): Block to start with. If not provided, current 129 | (head) block is used. 130 | 131 | block_interval (int): Time between block generations. If not 132 | provided, will attempt to query hived for this value 133 | 134 | batch_operations (bool): (Defaults to False) Rather than yielding 135 | operations one by one, yield a list of all operations for each 136 | block. 137 | 138 | full_blocks (bool): (Defaults to False) Rather than yielding 139 | operations, return raw, unedited blocks as provided by hived. This 140 | mode will NOT include virtual operations. 141 | 142 | timeout (int): Time to wait on response from hived before assuming 143 | timeout and retrying queries. 
If not provided, this will default to 144 | block_interval/4 for all queries except get_block_interval() - 145 | where it will default to 2 seconds for initial setup 146 | 147 | """ 148 | 149 | def get_reliable_client(_timeout): 150 | # we want to fail fast and try the next node quickly 151 | return hve.hived.Hived( 152 | nodes=self.hive.nodes, 153 | retries=1, 154 | timeout=_timeout, 155 | re_raise=True) 156 | 157 | def reliable_query(_client, _method, _api, *_args): 158 | # this will ALWAYS eventually return, at all costs 159 | while True: 160 | try: 161 | return _client.call(_method, *_args, api=_api) 162 | except Exception as e: 163 | logger.error( 164 | 'Error: %s' % str(s), 165 | extra=dict( 166 | exc=e, 167 | response=retval, 168 | api_name=_api, 169 | api_method=_method, 170 | api_args=_args)) 171 | time.sleep(1) 172 | 173 | def get_reliable_block_interval(_client): 174 | return reliable_query(_client, 'get_config', 175 | 'database_api').get('HIVE_BLOCK_INTERVAL') 176 | 177 | def get_reliable_current_block(_client): 178 | return reliable_query(_client, 'get_dynamic_global_properties', 179 | 'database_api').get(self.mode) 180 | 181 | def get_reliable_blockdata(_client, _block_num): 182 | return reliable_query(_client, 'get_block', 'database_api', 183 | block_num) 184 | 185 | def get_reliable_ops_in_block(_client, _block_num): 186 | return reliable_query(_client, 'get_ops_in_block', 'database_api', 187 | block_num, False) 188 | 189 | if timeout is None: 190 | if block_interval is None: 191 | _reliable_client = get_reliable_client(2) 192 | block_interval = get_reliable_block_interval(_reliable_client) 193 | else: 194 | timeout = block_interval / 4 195 | _reliable_client = get_reliable_client(timeout) 196 | else: 197 | _reliable_client = get_reliable_client(timeout) 198 | if block_interval is None: 199 | block_interval = get_reliable_block_interval(_reliable_client) 200 | if start_block is None: 201 | start_block = get_reliable_current_block(_reliable_client) 202 | 203 | while True: 204 | sleep_interval = block_interval / 4 205 | head_block = get_reliable_current_block(_reliable_client) 206 | 207 | for block_num in range(start_block, head_block + 1): 208 | if full_blocks: 209 | yield get_reliable_current_block(_reliable_client, 210 | head_block) 211 | elif batch_operations: 212 | yield get_reliable_ops_in_block(_reliable_client, 213 | head_block) 214 | else: 215 | for reliable_ops in get_reliable_ops_in_block( 216 | _reliable_client, head_block): 217 | yield reliable_ops 218 | 219 | sleep_interval = sleep_interval / 2 220 | 221 | time.sleep(sleep_interval) 222 | start_block = head_block + 1 223 | 224 | def stream(self, filter_by=list(), *args, **kwargs): 225 | """ Yield a stream of operations, starting with current head block. 226 | 227 | Args: 228 | filter_by (str, list): List of operations to filter for 229 | """ 230 | if isinstance(filter_by, str): 231 | filter_by = [filter_by] 232 | 233 | for ops in self.stream_from(*args, **kwargs): 234 | 235 | # deal with different self.stream_from() outputs 236 | events = ops 237 | if type(ops) == dict: 238 | if 'witness_signature' in ops: 239 | raise ValueError( 240 | 'Blockchain.stream() is for operation level streams. 
' 241 | 'For block level streaming, use ' 242 | 'Blockchain.stream_from()') 243 | events = [ops] 244 | 245 | for event in events: 246 | op_type, op = event['op'] 247 | if not filter_by or op_type in filter_by: 248 | # return unmodified hived output 249 | if kwargs.get('raw_output'): 250 | yield event 251 | else: 252 | updated_op = op.copy() 253 | updated_op.update({ 254 | "_id": self.hash_op(event), 255 | "type": op_type, 256 | "timestamp": parse_time(event.get("timestamp")), 257 | "block_num": event.get("block"), 258 | "trx_id": event.get("trx_id"), 259 | }) 260 | yield updated_op 261 | 262 | def history(self, 263 | filter_by=list(), 264 | start_block=1, 265 | end_block=None, 266 | raw_output=False, 267 | **kwargs): 268 | """ Yield a stream of historic operations. 269 | 270 | Similar to ``Blockchain.stream()``, but starts at beginning of chain 271 | unless ``start_block`` is set. 272 | 273 | Args: filter_by (str, list): List of operations to filter for 274 | start_block (int): Block to start with. If not provided, start of 275 | blockchain is used (block 1). 276 | end_block (int): Stop iterating at this 277 | block. If not provided, this generator will run forever. 278 | raw_output (bool): (Defaults to False). If True, return ops in a 279 | unmodified hived structure. """ 280 | 281 | return self.stream( 282 | filter_by=filter_by, 283 | start_block=start_block, 284 | end_block=end_block, 285 | raw_output=raw_output, 286 | **kwargs) 287 | 288 | def ops(self, *args, **kwargs): 289 | raise DeprecationWarning('Blockchain.ops() is deprecated. Please use ' 290 | + 'Blockchain.stream_from() instead.') 291 | 292 | def replay(self, **kwargs): 293 | warnings.warn('Blockchain.replay() is deprecated. ' + 294 | 'Please use Blockchain.history() instead.') 295 | return self.history(**kwargs) 296 | 297 | @staticmethod 298 | def hash_op(event): 299 | """ This method generates a hash of blockchain operation. """ 300 | data = json.dumps(event, sort_keys=True) 301 | return hashlib.sha1(compat_bytes(data, 'utf-8')).hexdigest() 302 | 303 | def get_all_usernames(self, *args, **kwargs): 304 | """ Fetch the full list of Hive usernames. """ 305 | _ = args, kwargs 306 | warnings.warn( 307 | 'Blockchain.get_all_usernames() is now Hived.get_all_usernames().' 308 | ) 309 | return self.hive.get_all_usernames() 310 | -------------------------------------------------------------------------------- /hive/blog.py: -------------------------------------------------------------------------------- 1 | from funcy.flow import silent 2 | from funcy.funcs import complement 3 | from funcy.seqs import take, first 4 | 5 | from .account import Account 6 | from .instance import shared_hived_instance 7 | from .post import Post 8 | from .utils import is_comment 9 | 10 | 11 | class Blog: 12 | """ Obtain a list of blog posts for an account 13 | 14 | Args: 15 | account_name (str): Name of the account 16 | comments_only (bool): (Default False). Toggle between posts 17 | and comments. 18 | hived_instance (Hived): Hived instance overload 19 | 20 | Returns: 21 | Generator with Post objects in reverse chronological order. 
22 | 23 | Example: 24 | To get all posts, you can use either generator: 25 | 26 | :: 27 | 28 | gen1 = Blog('furion') 29 | gen2 = b.all() 30 | 31 | next(gen1) 32 | next(gen2) 33 | 34 | To get some posts, you can call `take()`: 35 | 36 | :: 37 | 38 | b = Blog('furion') 39 | posts = b.take(5) 40 | 41 | """ 42 | 43 | def __init__(self, 44 | account_name, 45 | comments_only=False, 46 | hived_instance=None): 47 | self.hive = hived_instance or shared_hived_instance() 48 | self.comments_only = comments_only 49 | self.account = Account(account_name, hived_instance=self.hive) 50 | self.history = self.account.history_reverse(filter_by='comment') 51 | self.seen_items = set() 52 | 53 | def take(self, limit=5): 54 | """ Take up to n (n = limit) posts/comments at a time. 55 | 56 | You can call this method as many times as you want. Once 57 | there are no more posts to take, it will return []. 58 | 59 | Returns: 60 | List of posts/comments in a batch of size up to `limit`. 61 | """ 62 | # get main posts only 63 | comment_filter = is_comment if self.comments_only else complement( 64 | is_comment) 65 | hist = filter(comment_filter, self.history) 66 | 67 | # filter out reblogs 68 | def match_author(x): 69 | return x['author'] == self.account.name 70 | 71 | hist2 = filter(match_author, hist) 72 | 73 | # post edits will re-appear in history 74 | # we should therefore filter out already seen posts 75 | def ensure_unique(post): 76 | if post['permlink'] not in self.seen_items: 77 | self.seen_items.add(post['permlink']) 78 | return True 79 | 80 | unique = filter(ensure_unique, hist2) 81 | 82 | serialized = filter(bool, map(silent(Post), unique)) 83 | 84 | batch = take(limit, serialized) 85 | return batch 86 | 87 | def all(self): 88 | """ A generator that will return ALL of account history. """ 89 | while True: 90 | chunk = self.take(10) 91 | if chunk: 92 | for little_chunk in iter(chunk): 93 | yield little_chunk 94 | else: 95 | break 96 | 97 | def __iter__(self): 98 | return self 99 | 100 | def __next__(self): 101 | next_item = first(self.take(1)) 102 | if not next_item: 103 | raise StopIteration 104 | 105 | return next_item 106 | -------------------------------------------------------------------------------- /hive/converter.py: -------------------------------------------------------------------------------- 1 | import math 2 | 3 | from .amount import Amount 4 | from .instance import shared_hived_instance 5 | 6 | 7 | class Converter(object): 8 | """ Converter simplifies the handling of different metrics of 9 | the blockchain 10 | 11 | :param Hived hived_instance: Hived() instance to 12 | use when accessing a RPC 13 | 14 | """ 15 | 16 | def __init__(self, hived_instance=None): 17 | self.hived = hived_instance or shared_hived_instance() 18 | 19 | self.CONTENT_CONSTANT = 2000000000000 20 | 21 | def hbd_median_price(self): 22 | """ Obtain the hbd price as derived from the median over all 23 | witness feeds. Return value will be HBD 24 | """ 25 | return (Amount(self.hived.get_feed_history()['current_median_history'] 26 | ['base']).amount / Amount(self.hived.get_feed_history( 27 | )['current_median_history']['quote']).amount) 28 | 29 | def hive_per_mvests(self): 30 | """ Obtain HIVE/MVESTS ratio 31 | """ 32 | info = self.hived.get_dynamic_global_properties() 33 | 34 | return (Amount(info["total_vesting_fund_hive"]).amount / 35 | (Amount(info["total_vesting_shares"]).amount / 1e6)) 36 | 37 | def vests_to_hp(self, vests): 38 | """ Obtain HP from VESTS (not MVESTS!) 
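The conversion is ``vests / 1e6 * hive_per_mvests()``; for instance, at an (illustrative) HIVE/MVESTS ratio of 500, 1,000,000 VESTS correspond to 500 HP.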
39 | 40 | :param number vests: Vests to convert to HP 41 | """ 42 | return vests / 1e6 * self.hive_per_mvests() 43 | 44 | def hp_to_vests(self, hp): 45 | """ Obtain VESTS (not MVESTS!) from HP 46 | 47 | :param number sp: HP to convert 48 | """ 49 | return hp * 1e6 / self.hive_per_mvests() 50 | 51 | def hp_to_rshares(self, hp, voting_power=10000, vote_pct=10000): 52 | """ Obtain the r-shares 53 | 54 | :param number hp: Hive Power 55 | :param int voting_power: voting power (100% = 10000) 56 | :param int vote_pct: voting participation (100% = 10000) 57 | """ 58 | # calculate our account voting shares (from vests), mine is 6.08b 59 | vesting_shares = int(self.hp_to_vests(hp) * 1e6) 60 | 61 | # get props 62 | props = self.hived.get_dynamic_global_properties() 63 | 64 | # determine voting power used 65 | used_power = int((voting_power * vote_pct) / 10000); 66 | max_vote_denom = props['vote_power_reserve_rate'] * (5 * 60 * 60 * 24) / (60 * 60 * 24); 67 | used_power = int((used_power + max_vote_denom - 1) / max_vote_denom) 68 | 69 | # calculate vote rshares 70 | rshares = ((vesting_shares * used_power) / 10000) 71 | 72 | return rshares 73 | 74 | def hive_to_hbd(self, amount_hive): 75 | """ Conversion Ratio for given amount of HIVE to HBD at current 76 | price feed 77 | 78 | :param number hive: Amount of HIVE 79 | """ 80 | return self.hbd_median_price() * amount_hive 81 | 82 | def hive(self, amount_hbd): 83 | """ Conversion Ratio for given amount of HBD to HIVE at current 84 | price feed 85 | 86 | :param number amount_hbd: Amount of HBD 87 | """ 88 | return amount_hbd / self.hbd_median_price() 89 | 90 | def hbd_to_rshares(self, hbd_payout): 91 | """ Obtain r-shares from HBD 92 | 93 | :param number hbd_payout: Amount of HBD 94 | """ 95 | hive_payout = self.hbd_to_hive(hbd_payout) 96 | 97 | reward_fund = self.hived.get_reward_fund() 98 | reward_balance = Amount(reward_fund['reward_balance']).amount 99 | recent_claims = int(reward_fund['recent_claims']) 100 | 101 | return int(recent_claims * hive_payout / (reward_balance - hive_payout)) 102 | 103 | def rshares_2_weight(self, rshares): 104 | """ Obtain weight from rshares 105 | 106 | :param number rshares: R-Shares 107 | """ 108 | _max = 2 ** 64 - 1 109 | return (_max * rshares) / (2 * self.CONTENT_CONSTANT + rshares) 110 | -------------------------------------------------------------------------------- /hive/dex.py: -------------------------------------------------------------------------------- 1 | import random 2 | 3 | from hivebase import transactions, operations 4 | from hivebase.storage import configStorage as config 5 | 6 | from .amount import Amount 7 | from .instance import shared_hived_instance 8 | 9 | 10 | class Dex(object): 11 | """ This class allows to access calls specific for the internal 12 | exchange of HIVE. 13 | 14 | :param Hived hived_instance: Hived() instance to use when 15 | accessing a RPC 16 | 17 | """ 18 | assets = ["HIVE", "HBD", "STEEM", "SBD"] 19 | 20 | def __init__(self, hived_instance=None): 21 | self.hived = hived_instance or shared_hived_instance() 22 | 23 | def _get_asset(self, symbol): 24 | """ Return the properties of the assets tradeable on the 25 | network. 26 | 27 | :param str symbol: Symbol to get the data for (i.e. 
HIVE, HBD, 28 | VESTS) 29 | 30 | """ 31 | if symbol == "HIVE" or symbol == "STEEM": 32 | return {"symbol": "STEEM", "precision": 3} 33 | elif symbol == "HBD" or symbol == "SBD": 34 | return {"symbol": "SBD", "precision": 3} 35 | elif symbol == "VESTS": 36 | return {"symbol": "VESTS", "precision": 6} 37 | else: 38 | return None 39 | 40 | def _get_assets(self, quote): 41 | """ Given the `quote` asset, return base. If quote is HBD, then 42 | base is HIVE and vice versa. 43 | """ 44 | assets = self.assets.copy() 45 | assets.remove(quote) 46 | base = assets[0] 47 | return self._get_asset(quote), self._get_asset(base) 48 | 49 | def get_ticker(self): 50 | """ Returns the ticker for all markets. 51 | 52 | Output Parameters: 53 | 54 | * ``latest``: Price of the order last filled 55 | * ``lowest_ask``: Price of the lowest ask 56 | * ``highest_bid``: Price of the highest bid 57 | * ``hbd_volume``: Volume of HBD 58 | * ``hive_volume``: Volume of HIVE 59 | * ``percent_change``: 24h change percentage (in %) 60 | 61 | .. note:: 62 | 63 | Market is HIVE:HBD and prices are HBD per HIVE! 64 | 65 | Sample Output: 66 | 67 | .. code-block:: js 68 | 69 | {'highest_bid': 0.30100226633322913, 70 | 'latest': 0.0, 71 | 'lowest_ask': 0.3249636958897082, 72 | 'percent_change': 0.0, 73 | 'hbd_volume': 108329611.0, 74 | 'hive_volume': 355094043.0} 75 | 76 | 77 | """ 78 | t = self.hived.get_ticker() 79 | return { 80 | 'highest_bid': float(t['highest_bid']), 81 | 'latest': float(t["latest"]), 82 | 'lowest_ask': float(t["lowest_ask"]), 83 | 'percent_change': float(t["percent_change"]), 84 | 'hbd_volume': Amount(t["hbd_volume"]), 85 | 'hive_volume': Amount(t["hive_volume"]) 86 | } 87 | 88 | def trade_history(self, time=1 * 60 * 60, limit=100): 89 | """ Returns the trade history for the internal market 90 | 91 | :param int time: Show the last x seconds of trades (default 1h) 92 | :param int limit: amount of trades to show (<100) (default: 100) 93 | """ 94 | assert limit <= 100, "'limit' has to be smaller than 100" 95 | return self.hived.get_trade_history( 96 | transactions.fmt_time_from_now(-time), 97 | transactions.fmt_time_from_now(), 98 | limit, 99 | ) 100 | 101 | def market_history_buckets(self): 102 | return self.hived.get_market_history_buckets() 103 | 104 | def market_history( 105 | self, 106 | bucket_seconds=60 * 5, 107 | start_age=1 * 60 * 60, 108 | end_age=0, 109 | ): 110 | """ Return the market history (filled orders). 111 | 112 | :param int bucket_seconds: Bucket size in seconds (see 113 | `returnMarketHistoryBuckets()`) 114 | 115 | :param int start_age: Age (in seconds) of the start of the 116 | window (default: 1h/3600) 117 | 118 | :param int end_age: Age (in seconds) of the end of the window 119 | (default: now/0) 120 | 121 | Example: 122 | 123 | .. 
code-block:: js 124 | 125 | {'close_hbd': 2493387, 126 | 'close_hive': 7743431, 127 | 'high_hbd': 1943872, 128 | 'high_hive': 5999610, 129 | 'id': '7.1.5252', 130 | 'low_hbd': 534928, 131 | 'low_hive': 1661266, 132 | 'open': '2016-07-08T11:25:00', 133 | 'open_hbd': 534928, 134 | 'open_hive': 1661266, 135 | 'hbd_volume': 9714435, 136 | 'seconds': 300, 137 | 'hive_volume': 30088443}, 138 | """ 139 | return self.hived.get_market_history( 140 | bucket_seconds, 141 | transactions.fmt_time_from_now(-start_age - end_age), 142 | transactions.fmt_time_from_now(-end_age), 143 | ) 144 | 145 | def buy(self, 146 | amount, 147 | quote_symbol, 148 | rate, 149 | expiration=7 * 24 * 60 * 60, 150 | killfill=False, 151 | account=None, 152 | order_id=None): 153 | """ Places a buy order in a given market (buy ``quote``, sell 154 | ``base`` in market ``quote_base``). If successful, the 155 | method will return the order creating (signed) transaction. 156 | 157 | :param number amount: Amount of ``quote`` to buy 158 | 159 | :param str quote_symbol: HIVE, or HBD 160 | 161 | :param float price: price denoted in ``base``/``quote`` 162 | 163 | :param number expiration: (optional) expiration time of the 164 | order in seconds (defaults to 7 days) 165 | 166 | :param bool killfill: flag that indicates if the order shall be 167 | killed if it is not filled (defaults to False) 168 | 169 | :param str account: (optional) the source account for the 170 | transfer if not ``default_account`` 171 | 172 | :param int order_id: (optional) a 32bit orderid for tracking of 173 | the created order (random by default) 174 | 175 | Prices/Rates are denoted in 'base', i.e. the HIVE:HBD market 176 | is priced in HBD per HIVE. 177 | """ 178 | if not account: 179 | if "default_account" in config: 180 | account = config["default_account"] 181 | if not account: 182 | raise ValueError("You need to provide an account") 183 | 184 | # We buy quote and pay with base 185 | quote, base = self._get_assets(quote=quote_symbol) 186 | op = operations.LimitOrderCreate( 187 | **{ 188 | "owner": 189 | account, 190 | "orderid": 191 | order_id or random.getrandbits(32), 192 | "amount_to_sell": 193 | '{:.{prec}f} {asset}'.format( 194 | amount * rate, 195 | prec=base["precision"], 196 | asset=base["symbol"]), 197 | "min_to_receive": 198 | '{:.{prec}f} {asset}'.format( 199 | amount, prec=quote["precision"], asset=quote["symbol"]), 200 | "fill_or_kill": 201 | killfill, 202 | "expiration": 203 | transactions.fmt_time_from_now(expiration) 204 | }) 205 | return self.hived.commit.finalizeOp(op, account, "active") 206 | 207 | def sell(self, 208 | amount, 209 | quote_symbol, 210 | rate, 211 | expiration=7 * 24 * 60 * 60, 212 | killfill=False, 213 | account=None, 214 | orderid=None): 215 | """ Places a sell order in a given market (sell ``quote``, buy 216 | ``base`` in market ``quote_base``). If successful, the 217 | method will return the order creating (signed) transaction. 
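For example, ``sell(100, 'HIVE', 0.25)`` offers 100 HIVE for sale and asks for at least 25 HBD in return (0.25 HBD per HIVE).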
218 | 219 | :param number amount: Amount of ``quote`` to sell 220 | 221 | :param str quote_symbol: HIVE, or HBD 222 | 223 | :param float price: price denoted in ``base``/``quote`` 224 | 225 | :param number expiration: (optional) expiration time of the 226 | order in seconds (defaults to 7 days) 227 | 228 | :param bool killfill: flag that indicates if the order shall be 229 | killed if it is not filled (defaults to False) 230 | 231 | :param str account: (optional) the source account for the 232 | transfer if not ``default_account`` 233 | 234 | :param int orderid: (optional) a 32bit orderid for tracking of 235 | the created order (random by default) 236 | 237 | Prices/Rates are denoted in 'base', i.e. the HIVE:HBD market 238 | is priced in HBD per HIVE. 239 | """ 240 | if not account: 241 | if "default_account" in config: 242 | account = config["default_account"] 243 | if not account: 244 | raise ValueError("You need to provide an account") 245 | # We buy quote and pay with base 246 | quote, base = self._get_assets(quote=quote_symbol) 247 | op = operations.LimitOrderCreate( 248 | **{ 249 | "owner": 250 | account, 251 | "orderid": 252 | orderid or random.getrandbits(32), 253 | "amount_to_sell": 254 | '{:.{prec}f} {asset}'.format( 255 | amount, prec=quote["precision"], asset=quote["symbol"]), 256 | "min_to_receive": 257 | '{:.{prec}f} {asset}'.format( 258 | amount * rate, 259 | prec=base["precision"], 260 | asset=base["symbol"]), 261 | "fill_or_kill": 262 | killfill, 263 | "expiration": 264 | transactions.fmt_time_from_now(expiration) 265 | }) 266 | return self.hived.commit.finalizeOp(op, account, "active") 267 | 268 | def cancel(self, orderid, account=None): 269 | """ Cancels an order you have placed in a given market. 270 | 271 | :param int orderid: the 32bit orderid 272 | 273 | :param str account: (optional) the source account for the 274 | transfer if not ``default_account`` 275 | 276 | """ 277 | if not account: 278 | if "default_account" in config: 279 | account = config["default_account"] 280 | if not account: 281 | raise ValueError("You need to provide an account") 282 | 283 | op = operations.LimitOrderCancel(**{ 284 | "owner": account, 285 | "orderid": orderid, 286 | }) 287 | return self.hived.commit.finalizeOp(op, account, "active") 288 | -------------------------------------------------------------------------------- /hive/hive.py: -------------------------------------------------------------------------------- 1 | from .commit import Commit 2 | from .hived import Hived 3 | 4 | 5 | class Hive: 6 | """ Connect to the Hive network. 7 | 8 | Args: 9 | 10 | nodes (list): A list of Hive HTTP RPC nodes to connect to. If 11 | not provided, official Hive.blog nodes will be used. 12 | 13 | debug (bool): Elevate logging level to `logging.DEBUG`. 14 | Defaults to `logging.INFO`. 15 | 16 | no_broadcast (bool): If set to ``True``, committal actions like 17 | sending funds will have no effect (simulation only). 18 | 19 | Optional Arguments (kwargs): 20 | 21 | Args: 22 | 23 | keys (list): A list of wif keys. If provided, the Wallet will 24 | use these keys rather than the ones found in BIP38 encrypted 25 | wallet. 26 | 27 | unsigned (bool): (Defaults to False) Use this for offline signing. 28 | 29 | expiration (int): (Defualts to 60) Size of window in seconds 30 | that the transaction needs to be broadcasted in, before it 31 | expires. 32 | 33 | Returns: 34 | 35 | Hived class instance. It can be used to execute commands 36 | against Hive node. 
37 | 38 | Example: 39 | 40 | If you would like to override the official hive.blog nodes 41 | (default), you can pass your own. When currently used node goes 42 | offline, ``Hived`` will automatically fail-over to the next 43 | available node. 44 | 45 | .. code-block:: python 46 | 47 | nodes = [ 48 | 'https://hived.yournode1.com', 49 | 'https://hived.yournode2.com', 50 | ] 51 | 52 | h = Hived(nodes) 53 | 54 | """ 55 | 56 | def __init__(self, nodes=None, no_broadcast=False, **kwargs): 57 | self.hived = Hived(nodes=nodes, **kwargs) 58 | self.commit = Commit( 59 | hived_instance=self.hived, no_broadcast=no_broadcast, **kwargs) 60 | 61 | def __getattr__(self, item): 62 | """ Bind .commit, .hived methods here as a convenience. """ 63 | if hasattr(self.hived, item): 64 | return getattr(self.hived, item) 65 | if hasattr(self.commit, item): 66 | return getattr(self.commit, item) 67 | if item.endswith("_api"): 68 | return Hive.Api(api_name=item, exec_method=self.hived.call) 69 | 70 | raise AttributeError('Hive has no attribute "%s"' % item) 71 | 72 | class Api(object): 73 | def __init__(self, api_name="", exec_method=None): 74 | self.api_name = api_name 75 | self.exec_method = exec_method 76 | return 77 | 78 | def __getattr__(self, method_name): 79 | return Hive.Method( 80 | api_name=self.api_name, 81 | method_name=method_name, 82 | exec_method=self.exec_method, 83 | ) 84 | 85 | class Method(object): 86 | def __init__(self, api_name="", method_name="", exec_method=None): 87 | self.api_name = api_name 88 | self.method_name = method_name 89 | self.exec_method = exec_method 90 | return 91 | 92 | def __call__(self, *args, **kwargs): 93 | assert not (args and kwargs), "specified both args and kwargs" 94 | if len(kwargs) > 0: 95 | return self.exec_method( 96 | self.method_name, kwargs=kwargs, api=self.api_name) 97 | return self.exec_method(self.method_name, *args, api=self.api_name) 98 | 99 | 100 | if __name__ == '__main__': 101 | h = Hive() 102 | print(h.get_account_count()) 103 | -------------------------------------------------------------------------------- /hive/instance.py: -------------------------------------------------------------------------------- 1 | import hive as hve 2 | import sys 3 | 4 | _shared_hived_instance = None 5 | 6 | 7 | def get_config_node_list(): 8 | from hivebase.storage import configStorage 9 | nodes = configStorage.get('nodes', None) 10 | if nodes: 11 | return nodes.split(',') 12 | 13 | 14 | def shared_hived_instance(): 15 | """ This method will initialize _shared_hived_instance and return it. 16 | The purpose of this method is to have offer single default Hive 17 | instance that can be reused by multiple classes. """ 18 | 19 | global _shared_hived_instance 20 | if not _shared_hived_instance: 21 | if sys.version >= '3.0': 22 | _shared_hived_instance = hve.hived.Hived( 23 | nodes=get_config_node_list()) 24 | else: 25 | _shared_hived_instance = hve.Hived( 26 | nodes=get_config_node_list()) 27 | return _shared_hived_instance 28 | 29 | 30 | def set_shared_hived_instance(hived_instance): 31 | """ This method allows us to override default hive instance for all 32 | users of _shared_hived_instance. 
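Typically called once at start-up, e.g. ``set_shared_hived_instance(Hived(nodes=['https://hived.yournode1.com']))``, after which every object created without an explicit ``hived_instance`` reuses that connection.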
""" 33 | 34 | global _shared_hived_instance 35 | _shared_hived_instance = hived_instance 36 | -------------------------------------------------------------------------------- /hive/post.py: -------------------------------------------------------------------------------- 1 | import json 2 | import logging 3 | import re 4 | from datetime import datetime 5 | 6 | from funcy.colls import walk_values, get_in 7 | from funcy.flow import silent 8 | from funcy.seqs import flatten 9 | from hivebase.exceptions import ( 10 | PostDoesNotExist, 11 | VotingInvalidOnArchivedPost, 12 | ) 13 | from hivebase.operations import CommentOptions 14 | 15 | from .amount import Amount 16 | from .commit import Commit 17 | from .instance import shared_hived_instance 18 | from .utils import construct_identifier, resolve_identifier 19 | from .utils import parse_time, remove_from_dict 20 | 21 | log = logging.getLogger(__name__) 22 | 23 | 24 | class Post(dict): 25 | """ This object gets instantiated by Hive.streams and is used as an 26 | abstraction layer for Comments in Hive 27 | 28 | Args: 29 | 30 | post (str or dict): ``author/permlink`` or raw ``comment`` as 31 | dictionary. 32 | 33 | hived_instance (Hived): Hived node to connect to 34 | 35 | """ 36 | 37 | def __init__(self, post, hived_instance=None): 38 | self.hived = hived_instance or shared_hived_instance() 39 | self.commit = Commit(hived_instance=self.hived) 40 | 41 | # will set these during refresh() 42 | self.patched = False 43 | self.category = None 44 | self.root_identifier = None 45 | 46 | if isinstance(post, str): # From identifier 47 | self.identifier = self.parse_identifier(post) 48 | elif isinstance(post, 49 | dict) and "author" in post and "permlink" in post: 50 | 51 | self.identifier = construct_identifier(post["author"], 52 | post["permlink"]) 53 | else: 54 | raise ValueError("Post expects an identifier or a dict " 55 | "with author and permlink!") 56 | 57 | self.refresh() 58 | 59 | @staticmethod 60 | def parse_identifier(uri): 61 | """ Extract canonical post id/url (i.e. strip any leading `@`). 
""" 62 | return uri.split('@')[-1] 63 | 64 | def refresh(self): 65 | post_author, post_permlink = resolve_identifier(self.identifier) 66 | post = self.hived.get_content(post_author, post_permlink) 67 | if not post["permlink"]: 68 | raise PostDoesNotExist("Post does not exist: %s" % self.identifier) 69 | 70 | # If this 'post' comes from an operation, it might carry a patch 71 | if "body" in post and re.match("^@@", post["body"]): 72 | self.patched = True 73 | 74 | # Parse Times 75 | parse_times = [ 76 | "active", "cashout_time", "created", "last_payout", "last_update", 77 | "max_cashout_time" 78 | ] 79 | for p in parse_times: 80 | post[p] = parse_time(post.get(p, "1970-01-01T00:00:00")) 81 | 82 | # Parse Amounts 83 | hbd_amounts = [ 84 | "total_payout_value", 85 | "max_accepted_payout", 86 | "pending_payout_value", 87 | "curator_payout_value", 88 | "total_pending_payout_value", 89 | "promoted", 90 | ] 91 | for p in hbd_amounts: 92 | post[p] = Amount(post.get(p, "0.000 HBD")) 93 | 94 | # turn json_metadata into python dict 95 | meta_str = post.get("json_metadata", "{}") 96 | post['json_metadata'] = silent(json.loads)(meta_str) or {} 97 | 98 | post["tags"] = [] 99 | post['community'] = '' 100 | if isinstance(post['json_metadata'], dict): 101 | if post["depth"] == 0: 102 | tags = [post["parent_permlink"]] 103 | tags += get_in(post, ['json_metadata', 'tags'], default=[]) 104 | post["tags"] = set(tags) 105 | 106 | post['community'] = get_in( 107 | post, ['json_metadata', 'community'], default='') 108 | 109 | # If this post is a comment, retrieve the root comment 110 | self.root_identifier, self.category = self._get_root_identifier(post) 111 | 112 | self._store_post(post) 113 | 114 | def _store_post(self, post): 115 | # Store original values as obtained from the rpc 116 | for key, value in post.items(): 117 | super(Post, self).__setitem__(key, value) 118 | 119 | # Set attributes as well 120 | for key in post: 121 | setattr(self, key, post[key]) 122 | 123 | # also set identifier 124 | super(Post, self).__setitem__("identifier", self.identifier) 125 | 126 | def __getattr__(self, key): 127 | return object.__getattribute__(self, key) 128 | 129 | def __getitem__(self, key): 130 | return super(Post, self).__getitem__(key) 131 | 132 | def __repr__(self): 133 | return "" % self.identifier 134 | 135 | __str__ = __repr__ 136 | 137 | def _get_root_identifier(self, post=None): 138 | if not post: 139 | post = self 140 | m = re.match("/([^/]*)/@([^/]*)/([^#]*).*", post.get("url", "")) 141 | if not m: 142 | return "", "" 143 | else: 144 | category = m.group(1) 145 | author = m.group(2) 146 | permlink = m.group(3) 147 | return construct_identifier(author, permlink), category 148 | 149 | def get_replies(self): 150 | """ Return **first-level** comments of the post. 151 | """ 152 | post_author, post_permlink = resolve_identifier(self.identifier) 153 | replies = self.hived.get_content_replies(post_author, post_permlink) 154 | return map(silent(Post), replies) 155 | 156 | @staticmethod 157 | def get_all_replies(root_post=None, comments=list(), all_comments=list()): 158 | """ Recursively fetch all the child comments, and return them as a list. 
159 | 160 | Usage: all_comments = Post.get_all_replies(Post('foo/bar')) 161 | """ 162 | # see if our root post has any comments 163 | if root_post: 164 | return Post.get_all_replies(comments=list(root_post.get_replies())) 165 | if not comments: 166 | return all_comments 167 | 168 | # recursively scrape children one depth layer at a time 169 | children = list(flatten([list(x.get_replies()) for x in comments])) 170 | if not children: 171 | return all_comments or comments 172 | return Post.get_all_replies( 173 | comments=children, all_comments=comments + children) 174 | 175 | @property 176 | def reward(self): 177 | """Return a float value of estimated total HBD reward. 178 | """ 179 | return Amount(self.get("total_payout_value", "0 HBD")) + \ 180 | Amount(self.get("pending_payout_value", "0 HBD")) 181 | 182 | def time_elapsed(self): 183 | """Return a timedelta on how old the post is. 184 | """ 185 | return datetime.utcnow() - self['created'] 186 | 187 | def is_main_post(self): 188 | """ Retuns True if main post, and False if this is a comment (reply). 189 | """ 190 | return self['depth'] == 0 191 | 192 | def is_comment(self): 193 | """ Retuns True if post is a comment 194 | """ 195 | return self['depth'] > 0 196 | 197 | def curation_reward_pct(self): 198 | """ If post is less than 15 minutes old, it will incur a curation 199 | reward penalty. """ 200 | reward = (self.time_elapsed().seconds / 900) * 100 201 | if reward > 100: 202 | reward = 100 203 | return reward 204 | 205 | def export(self): 206 | """ This method returns a dictionary that is type-safe to store as 207 | JSON or in a database. """ 208 | self.refresh() 209 | 210 | # Remove Hive instance object 211 | safe_dict = remove_from_dict(self, ['hived', 'commit']) 212 | 213 | # Convert Amount class objects into pure dictionaries 214 | def decompose_amounts(item): 215 | if type(item) == Amount: 216 | return dict(item) 217 | return item 218 | 219 | return walk_values(decompose_amounts, safe_dict) 220 | 221 | ###################### 222 | # Commital Properties 223 | ###################### 224 | def upvote(self, weight=+100, voter=None): 225 | """ Upvote the post 226 | 227 | :param float weight: (optional) Weight for posting (-100.0 - 228 | +100.0) defaults to +100.0 229 | :param str voter: (optional) Voting account 230 | """ 231 | return self.vote(weight, voter=voter) 232 | 233 | def downvote(self, weight=-100, voter=None): 234 | """ Downvote the post 235 | 236 | :param float weight: (optional) Weight for posting (-100.0 - 237 | +100.0) defaults to -100.0 238 | :param str voter: (optional) Voting account 239 | """ 240 | return self.vote(weight, voter=voter) 241 | 242 | def vote(self, weight, voter=None): 243 | """ Vote the post 244 | 245 | :param float weight: Weight for posting (-100.0 - +100.0) 246 | :param str voter: Voting account 247 | """ 248 | # Test if post is archived, if so, voting is worthless but just 249 | # pollutes the blockchain and account history 250 | if self.get('net_rshares', None) == None: 251 | raise VotingInvalidOnArchivedPost 252 | return self.commit.vote(self.identifier, weight, account=voter) 253 | 254 | def edit(self, body, meta=None, replace=False): 255 | """ Edit an existing post 256 | 257 | :param str body: Body of the reply 258 | :param json meta: JSON meta object that can be attached to the 259 | post. 
(optional) 260 | :param bool replace: Instead of calculating a *diff*, replace 261 | the post entirely (defaults to ``False``) 262 | """ 263 | if not meta: 264 | meta = {} 265 | original_post = self 266 | 267 | if replace: 268 | newbody = body 269 | else: 270 | import diff_match_patch 271 | dmp = diff_match_patch.diff_match_patch() 272 | patch = dmp.patch_make(original_post["body"], body) 273 | newbody = dmp.patch_toText(patch) 274 | 275 | if not newbody: 276 | log.info("No changes made! Skipping ...") 277 | return 278 | 279 | reply_identifier = construct_identifier( 280 | original_post["parent_author"], original_post["parent_permlink"]) 281 | 282 | new_meta = {} 283 | if meta: 284 | if original_post["json_metadata"]: 285 | import json 286 | new_meta = original_post["json_metadata"].update(meta) 287 | else: 288 | new_meta = meta 289 | 290 | return self.commit.post( 291 | original_post["title"], 292 | newbody, 293 | reply_identifier=reply_identifier, 294 | author=original_post["author"], 295 | permlink=original_post["permlink"], 296 | json_metadata=new_meta, 297 | ) 298 | 299 | def reply(self, body, title="", author="", meta=None): 300 | """ Reply to an existing post 301 | 302 | :param str body: Body of the reply 303 | :param str title: Title of the reply post 304 | :param str author: Author of reply (optional) if not provided 305 | ``default_user`` will be used, if present, else 306 | a ``ValueError`` will be raised. 307 | :param json meta: JSON meta object that can be attached to the 308 | post. (optional) 309 | """ 310 | return self.commit.post( 311 | title, 312 | body, 313 | json_metadata=meta, 314 | author=author, 315 | reply_identifier=self.identifier) 316 | 317 | def set_comment_options(self, options): 318 | op = CommentOptions( 319 | **{ 320 | "author": 321 | self["author"], 322 | "permlink": 323 | self["permlink"], 324 | "max_accepted_payout": 325 | options.get("max_accepted_payout", 326 | str(self["max_accepted_payout"])), 327 | "percent_hive_dollars": 328 | int( 329 | options.get("percent_hive_dollars", 330 | self["percent_hive_dollars"] / 100) * 100), 331 | "allow_votes": 332 | options.get("allow_votes", self["allow_votes"]), 333 | "allow_curation_rewards": 334 | options.get("allow_curation_rewards", self[ 335 | "allow_curation_rewards"]), 336 | }) 337 | return self.commit.finalizeOp(op, self["author"], "posting") 338 | -------------------------------------------------------------------------------- /hive/profile.py: -------------------------------------------------------------------------------- 1 | import json 2 | import collections 3 | 4 | 5 | class DotDict(dict): 6 | def __init__(self, *args): 7 | """ This class simplifies the use of "."-separated 8 | keys when defining a nested dictionary::: 9 | 10 | >>> keys = ['profile.url', 'profile.img'] 11 | >>> values = ["http:", "foobar"] 12 | >>> print(Profile(keys, values)) 13 | 14 | {"profile": {"url": "http:", "img": "foobar"}} 15 | 16 | """ 17 | if len(args) == 2: 18 | for i, item in enumerate(args[0]): 19 | t = self 20 | parts = item.split('.') 21 | for j, part in enumerate(parts): 22 | if j < len(parts) - 1: 23 | t = t.setdefault(part, {}) 24 | else: 25 | t[part] = args[1][i] 26 | elif len(args) == 1 and isinstance(args[0], dict): 27 | for k, v in args[0].items(): 28 | self[k] = v 29 | elif len(args) == 1 and isinstance(args[0], str): 30 | for k, v in json.loads(args[0]).items(): 31 | self[k] = v 32 | 33 | 34 | class Profile(DotDict): 35 | """ This class is a template to model a user's on-chain 36 | profile according to 37 | 
38 | * https://github.com/adcpm/steemscript 39 | """ 40 | 41 | def __init__(self, *args, **kwargs): 42 | super(Profile, self).__init__(*args, **kwargs) 43 | 44 | def __str__(self): 45 | return json.dumps(self) 46 | 47 | def update(self, u): 48 | for k, v in u.items(): 49 | if isinstance(v, collections.Mapping): 50 | self.setdefault(k, {}).update(v) 51 | else: 52 | self[k] = u[k] 53 | 54 | def remove(self, key): 55 | parts = key.split(".") 56 | if len(parts) > 1: 57 | self[parts[0]].pop(".".join(parts[1:])) 58 | else: 59 | super(Profile, self).pop(parts[0], None) 60 | 61 | 62 | if __name__ == '__main__': 63 | keys = ['profile.url', 'profile.img'] 64 | values = ["http:", "foobar"] 65 | print(Profile(keys, values)) 66 | print(Profile({"foo": "bar"})) 67 | -------------------------------------------------------------------------------- /hive/transactionbuilder.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from .wallet import Wallet 4 | from hivebase.account import PrivateKey 5 | from hivebase.exceptions import (InsufficientAuthorityError, MissingKeyError, 6 | InvalidKeyFormat) 7 | from hivebase import operations 8 | from hivebase.operations import Operation 9 | from hivebase.transactions import SignedTransaction, fmt_time_from_now, \ 10 | get_block_params 11 | 12 | from .account import Account 13 | from .instance import shared_hived_instance 14 | 15 | log = logging.getLogger(__name__) 16 | 17 | 18 | class TransactionBuilder(dict): 19 | """ This class simplifies the creation of transactions by adding 20 | operations and signers. 21 | """ 22 | 23 | def __init__(self, 24 | tx=None, 25 | hived_instance=None, 26 | wallet_instance=None, 27 | no_broadcast=False, 28 | expiration=60): 29 | self.hived = hived_instance or shared_hived_instance() 30 | self.no_broadcast = no_broadcast 31 | self.expiration = expiration 32 | self.wallet = wallet_instance or Wallet(self.hived) 33 | 34 | self.op = [] 35 | self.wifs = [] 36 | if tx and not isinstance(tx, dict): 37 | raise ValueError("Invalid Transaction (self.tx) Format") 38 | super(TransactionBuilder, self).__init__(tx or {}) 39 | 40 | def appendOps(self, ops): 41 | if isinstance(ops, list): 42 | for op in ops: 43 | self.op.append(op) 44 | else: 45 | self.op.append(ops) 46 | self.constructTx() 47 | 48 | def appendSigner(self, account, permission): 49 | assert permission in ["active", "owner", 50 | "posting"], "Invalid permission" 51 | account = Account(account, hived_instance=self.hived) 52 | 53 | required_treshold = account[permission]["weight_threshold"] 54 | 55 | def fetchkeys(account, level=0): 56 | if level > 2: 57 | return [] 58 | r = [] 59 | for authority in account[permission]["key_auths"]: 60 | wif = self.wallet.getPrivateKeyForPublicKey(authority[0]) 61 | if wif: 62 | r.append([wif, authority[1]]) 63 | 64 | if sum([x[1] for x in r]) < required_treshold: 65 | # go one level deeper 66 | for authority in account[permission]["account_auths"]: 67 | auth_account = Account( 68 | authority[0], hived_instance=self.hived) 69 | r.extend(fetchkeys(auth_account, level + 1)) 70 | 71 | return r 72 | 73 | keys = fetchkeys(account) 74 | self.wifs.extend([x[0] for x in keys]) 75 | 76 | def appendWif(self, wif): 77 | if wif: 78 | try: 79 | PrivateKey(wif) 80 | self.wifs.append(wif) 81 | except: # noqa FIXME(sneak) 82 | raise InvalidKeyFormat 83 | 84 | def constructTx(self): 85 | if isinstance(self.op, list): 86 | ops = [Operation(o) for o in self.op] 87 | else: 88 | ops = [Operation(self.op)] 89 | 
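# Every transaction needs a recent reference block (ref_block_num / ref_block_prefix)
# and an expiration timestamp; here the window is self.expiration seconds from now.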
expiration = fmt_time_from_now(self.expiration) 90 | ref_block_num, ref_block_prefix = get_block_params(self.hived) 91 | tx = SignedTransaction( 92 | ref_block_num=ref_block_num, 93 | ref_block_prefix=ref_block_prefix, 94 | expiration=expiration, 95 | operations=ops) 96 | super(TransactionBuilder, self).__init__(tx.json()) 97 | 98 | def sign(self): 99 | """ Sign a provided transaction witht he provided key(s) 100 | 101 | :param dict tx: The transaction to be signed and returned 102 | :param string wifs: One or many wif keys to use for signing 103 | a transaction. If not present, the keys will be loaded 104 | from the wallet as defined in "missing_signatures" key 105 | of the transactions. 106 | """ 107 | 108 | # We need to set the default prefix, otherwise pubkeys are 109 | # presented wrongly! 110 | if self.hived: 111 | operations.default_prefix = self.hived.chain_params["prefix"] 112 | elif "blockchain" in self: 113 | operations.default_prefix = self["blockchain"]["prefix"] 114 | 115 | try: 116 | signedtx = SignedTransaction(**self.json()) 117 | except Exception as e: # noqa FIXME(sneak) 118 | raise e 119 | 120 | if not any(self.wifs): 121 | raise MissingKeyError 122 | 123 | signedtx.sign(self.wifs, chain=self.hived.chain_params) 124 | self["signatures"].extend(signedtx.json().get("signatures")) 125 | 126 | def broadcast(self): 127 | """ Broadcast a transaction to the Hive network 128 | 129 | :param tx tx: Signed transaction to broadcast 130 | """ 131 | if self.no_broadcast: 132 | log.warning("Not broadcasting anything!") 133 | return self 134 | 135 | try: 136 | if not self.hived.verify_authority(self.json()): 137 | raise InsufficientAuthorityError 138 | except Exception as e: 139 | # There is an issue with some appbase builds which makes 140 | # `verify_authority` unusable. TODO: remove this case #212 141 | if 'Bad Cast:Invalid cast from string_type to Array' in str(e): 142 | log.error("Ignoring verify_authority failure. See #212.") 143 | else: 144 | print("failing on {}".format(e)) 145 | raise e 146 | 147 | try: 148 | self.hived.broadcast_transaction(self.json()) 149 | except Exception as e: 150 | raise e 151 | 152 | return self 153 | 154 | def addSigningInformation(self, account, permission): 155 | """ This is a private method that adds side information to a 156 | unsigned/partial transaction in order to simplify later 157 | signing (e.g. for multisig or coldstorage) 158 | """ 159 | accountObj = Account(account, hived_instance=self.hived) 160 | authority = accountObj[permission] 161 | # We add a required_authorities to be able to identify 162 | # how to sign later. 
This is an array, because we 163 | # may later want to allow multiple operations per tx 164 | self.update({"required_authorities": {account: authority}}) 165 | for account_auth in authority["account_auths"]: 166 | account_auth_account = Account( 167 | account_auth[0], hived_instance=self.hived) 168 | self["required_authorities"].update({ 169 | account_auth[0]: 170 | account_auth_account.get(permission) 171 | }) 172 | 173 | # Try to resolve required signatures for offline signing 174 | self["missing_signatures"] = [x[0] for x in authority["key_auths"]] 175 | # Add one recursion of keys from account_auths: 176 | for account_auth in authority["account_auths"]: 177 | account_auth_account = Account( 178 | account_auth[0], hived_instance=self.hived) 179 | self["missing_signatures"].extend( 180 | [x[0] for x in account_auth_account[permission]["key_auths"]]) 181 | self["blockchain"] = self.hived.chain_params 182 | 183 | def json(self): 184 | return dict(self) 185 | 186 | def appendMissingSignatures(self, wifs): 187 | missing_signatures = self.get("missing_signatures", []) 188 | for pub in missing_signatures: 189 | wif = self.wallet.getPrivateKeyForPublicKey(pub) 190 | if wif: 191 | self.appendWif(wif) 192 | -------------------------------------------------------------------------------- /hive/witness.py: -------------------------------------------------------------------------------- 1 | from .instance import shared_hived_instance 2 | 3 | from hivebase.exceptions import WitnessDoesNotExistsException 4 | 5 | 6 | class Witness(dict): 7 | """ Read data about a witness in the chain 8 | 9 | :param str witness: Name of the witness 10 | :param Hived hived_instance: Hived() instance to use when 11 | accessing a RPC 12 | 13 | """ 14 | 15 | def __init__(self, witness, hived_instance=None): 16 | self.hived = hived_instance or shared_hived_instance() 17 | self.witness_name = witness 18 | self.witness = None 19 | self.refresh() 20 | 21 | def refresh(self): 22 | witness = self.hived.get_witness_by_account(self.witness_name) 23 | if not witness: 24 | raise WitnessDoesNotExistsException 25 | super(Witness, self).__init__(witness) 26 | 27 | def __getitem__(self, key): 28 | return super(Witness, self).__getitem__(key) 29 | 30 | def items(self): 31 | return super(Witness, self).items() 32 | -------------------------------------------------------------------------------- /hivebase/__init__.py: -------------------------------------------------------------------------------- 1 | __all__ = [ 2 | 'account', 3 | 'base58', 4 | 'bip38', 5 | 'chains', 6 | 'exceptions', 7 | 'http_client', 8 | 'operationids', 9 | 'operations', 10 | 'storage', 11 | 'transactions', 12 | 'types', 13 | # 'memo', 14 | ] 15 | -------------------------------------------------------------------------------- /hivebase/aes.py: -------------------------------------------------------------------------------- 1 | from Crypto import Random 2 | from Crypto.Cipher import AES 3 | import hashlib 4 | import base64 5 | 6 | 7 | class AESCipher(object): 8 | """ 9 | 10 | A classical AES Cipher. Can use any size of data and any size of 11 | password thanks to padding. Also ensure the coherence and the type of 12 | the data with a unicode to byte converter. 
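Round-trip example: ``AESCipher('passphrase').decrypt(AESCipher('passphrase').encrypt('hello')) == 'hello'``; each ``encrypt()`` call prepends a fresh random IV, so the ciphertext differs between calls even for identical input.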
13 | 14 | """ 15 | 16 | def __init__(self, key): 17 | self.bs = 32 18 | self.key = hashlib.sha256(AESCipher.str_to_bytes(key)).digest() 19 | 20 | @staticmethod 21 | def str_to_bytes(data): 22 | u_type = type(b''.decode('utf8')) 23 | if isinstance(data, u_type): 24 | return data.encode('utf8') 25 | return data 26 | 27 | def _pad(self, s): 28 | return s + (self.bs - len(s) % self.bs) * AESCipher.str_to_bytes( 29 | chr(self.bs - len(s) % self.bs)) 30 | 31 | @staticmethod 32 | def _unpad(s): 33 | return s[:-ord(s[len(s) - 1:])] 34 | 35 | def encrypt(self, raw): 36 | raw = self._pad(AESCipher.str_to_bytes(raw)) 37 | iv = Random.new().read(AES.block_size) 38 | cipher = AES.new(self.key, AES.MODE_CBC, iv) 39 | return base64.b64encode(iv + cipher.encrypt(raw)).decode('utf-8') 40 | 41 | def decrypt(self, enc): 42 | enc = base64.b64decode(enc) 43 | iv = enc[:AES.block_size] 44 | cipher = AES.new(self.key, AES.MODE_CBC, iv) 45 | return self._unpad(cipher.decrypt( 46 | enc[AES.block_size:])).decode('utf-8') 47 | -------------------------------------------------------------------------------- /hivebase/base58.py: -------------------------------------------------------------------------------- 1 | from binascii import hexlify, unhexlify 2 | import hashlib 3 | import sys 4 | import string 5 | import logging 6 | from .utils import compat_bytes 7 | 8 | log = logging.getLogger(__name__) 9 | 10 | """ Default Prefix """ 11 | PREFIX = "STM" 12 | 13 | known_prefixes = [ 14 | PREFIX, 15 | "GLS", 16 | "TST", 17 | ] 18 | 19 | 20 | class Base58(object): 21 | """Base58 base class 22 | 23 | This class serves as an abstraction layer to deal with base58 encoded 24 | strings and their corresponding hex and binary representation 25 | throughout the library. 26 | 27 | :param data: Data to initialize object, e.g. pubkey data, address data, 28 | ... 29 | 30 | :type data: hex, wif, bip38 encrypted wif, base58 string 31 | 32 | :param str prefix: Prefix to use for Address/PubKey strings (defaults 33 | to ``GPH``) 34 | 35 | :return: Base58 object initialized with ``data`` 36 | 37 | :rtype: Base58 38 | 39 | :raises ValueError: if data cannot be decoded 40 | 41 | * ``bytes(Base58)``: Returns the raw data 42 | * ``str(Base58)``: Returns the readable ``Base58CheckEncoded`` data. 43 | * ``repr(Base58)``: Gives the hex representation of the data. 44 | 45 | * ``format(Base58,_format)`` Formats the instance according to 46 | ``_format``: 47 | 48 | * ``"btc"``: prefixed with ``0x80``. Yields a valid btc address 49 | * ``"wif"``: prefixed with ``0x00``. Yields a valid wif key 50 | * ``"bts"``: prefixed with ``BTS`` 51 | * etc. 52 | 53 | """ 54 | 55 | def __init__(self, data, prefix=PREFIX): 56 | self._prefix = prefix 57 | if all(c in string.hexdigits for c in data): 58 | self._hex = data 59 | elif data[0] == "5" or data[0] == "6": 60 | self._hex = base58CheckDecode(data) 61 | elif data[0] == "K" or data[0] == "L": 62 | self._hex = base58CheckDecode(data)[:-2] 63 | elif data[:len(self._prefix)] == self._prefix: 64 | self._hex = gphBase58CheckDecode(data[len(self._prefix):]) 65 | else: 66 | raise ValueError("Error loading Base58 object") 67 | 68 | def __format__(self, _format): 69 | """ Format output according to argument _format (wif,btc,...) 
70 | 71 | :param str _format: Format to use 72 | :return: formatted data according to _format 73 | :rtype: str 74 | 75 | """ 76 | if _format.upper() == "WIF": 77 | return base58CheckEncode(0x80, self._hex) 78 | elif _format.upper() == "ENCWIF": 79 | return base58encode(self._hex) 80 | elif _format.upper() == "BTC": 81 | return base58CheckEncode(0x00, self._hex) 82 | elif _format.upper() in known_prefixes: 83 | return _format.upper() + str(self) 84 | else: 85 | log.warn("Format %s unkown. You've been warned!\n" % _format) 86 | return _format.upper() + str(self) 87 | 88 | def __repr__(self): 89 | """ Returns hex value of object 90 | 91 | :return: Hex string of instance's data 92 | :rtype: hex string 93 | """ 94 | return self._hex 95 | 96 | def __str__(self): 97 | """ Return graphene-base58CheckEncoded string of data 98 | 99 | :return: Base58 encoded data 100 | :rtype: str 101 | """ 102 | return gphBase58CheckEncode(self._hex) 103 | 104 | def __bytes__(self): 105 | """ Return raw bytes 106 | 107 | :return: Raw bytes of instance 108 | :rtype: bytes 109 | 110 | """ 111 | return unhexlify(self._hex) 112 | 113 | 114 | # https://github.com/tochev/python3-cryptocoins/raw/master/cryptocoins/base58.py 115 | BASE58_ALPHABET = b"123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz" 116 | 117 | 118 | def base58decode(base58_str): 119 | base58_text = base58_str.encode('ascii') 120 | n = 0 121 | leading_zeroes_count = 0 122 | for b in base58_text: 123 | n = n * 58 + BASE58_ALPHABET.find(b) 124 | if n == 0: 125 | leading_zeroes_count += 1 126 | res = bytearray() 127 | while n >= 256: 128 | div, mod = divmod(n, 256) 129 | res.insert(0, mod) 130 | n = div 131 | else: 132 | res.insert(0, n) 133 | return hexlify(bytearray(1) * leading_zeroes_count + res).decode('ascii') 134 | 135 | 136 | def base58encode(hexstring): 137 | byteseq = compat_bytes(hexstring, 'ascii') 138 | byteseq = unhexlify(byteseq) 139 | byteseq = compat_bytes(byteseq) 140 | 141 | n = 0 142 | leading_zeroes_count = 0 143 | for c in byteseq: 144 | n = n * 256 + c 145 | if n == 0: 146 | leading_zeroes_count += 1 147 | res = bytearray() 148 | while n >= 58: 149 | div, mod = divmod(n, 58) 150 | res.insert(0, BASE58_ALPHABET[mod]) 151 | n = div 152 | else: 153 | res.insert(0, BASE58_ALPHABET[n]) 154 | 155 | return (BASE58_ALPHABET[0:1] * leading_zeroes_count + res).decode('ascii') 156 | 157 | 158 | def ripemd160(s): 159 | ripemd160 = hashlib.new('ripemd160') 160 | ripemd160.update(unhexlify(s)) 161 | return ripemd160.digest() 162 | 163 | 164 | def doublesha256(s): 165 | return hashlib.sha256(hashlib.sha256(unhexlify(s)).digest()).digest() 166 | 167 | 168 | def b58encode(v): 169 | return base58encode(v) 170 | 171 | 172 | def b58decode(v): 173 | return base58decode(v) 174 | 175 | 176 | def base58CheckEncode(version, payload): 177 | s = ('%.2x' % version) + payload 178 | checksum = doublesha256(s)[:4] 179 | result = s + hexlify(checksum).decode('ascii') 180 | return base58encode(result) 181 | 182 | 183 | def base58CheckDecode(s): 184 | s = unhexlify(base58decode(s)) 185 | dec = hexlify(s[:-4]).decode('ascii') 186 | checksum = doublesha256(dec)[:4] 187 | assert (s[-4:] == checksum) 188 | return dec[2:] 189 | 190 | 191 | def gphBase58CheckEncode(s): 192 | checksum = ripemd160(s)[:4] 193 | result = s + hexlify(checksum).decode('ascii') 194 | return base58encode(result) 195 | 196 | 197 | def gphBase58CheckDecode(s): 198 | s = unhexlify(base58decode(s)) 199 | dec = hexlify(s[:-4]).decode('ascii') 200 | checksum = ripemd160(dec)[:4] 201 | assert 
(s[-4:] == checksum) 202 | return dec 203 | -------------------------------------------------------------------------------- /hivebase/bip38.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import logging 3 | import os 4 | import sys 5 | from binascii import hexlify, unhexlify 6 | 7 | from .account import PrivateKey 8 | from .base58 import Base58, base58decode 9 | from .utils import compat_bytes 10 | 11 | log = logging.getLogger(__name__) 12 | 13 | try: 14 | from Crypto.Cipher import AES 15 | except ImportError: 16 | raise ImportError("Missing dependency: pycrypto") 17 | 18 | SCRYPT_MODULE = os.environ.get('SCRYPT_MODULE', None) 19 | if not SCRYPT_MODULE: 20 | try: 21 | import scrypt 22 | 23 | SCRYPT_MODULE = "scrypt" 24 | except ImportError: 25 | try: 26 | import pylibscrypt as scrypt 27 | 28 | SCRYPT_MODULE = "pylibscrypt" 29 | except ImportError: 30 | raise ImportError("Missing dependency: scrypt or pylibscrypt") 31 | elif 'pylibscrypt' in SCRYPT_MODULE: 32 | try: 33 | import pylibscrypt as scrypt 34 | except ImportError: 35 | raise ImportError("Missing dependency: pylibscrypt explicitly set but missing") 36 | elif 'scrypt' in SCRYPT_MODULE: 37 | try: 38 | import scrypt 39 | except ImportError: 40 | raise ImportError("Missing dependency: scrypt explicitly set but missing") 41 | 42 | 43 | log.debug("Using scrypt module: %s" % SCRYPT_MODULE) 44 | 45 | 46 | class SaltException(Exception): 47 | pass 48 | 49 | 50 | def _encrypt_xor(a, b, aes): 51 | """ Returns encrypt(a ^ b). """ 52 | a = unhexlify('%0.32x' % (int((a), 16) ^ int(hexlify(b), 16))) 53 | return aes.encrypt(a) 54 | 55 | 56 | def encrypt(privkey, passphrase): 57 | """ BIP0038 non-ec-multiply encryption. Returns BIP0038 encrypted privkey. 58 | 59 | :param privkey: Private key 60 | :type privkey: Base58 61 | :param str passphrase: UTF-8 encoded passphrase for encryption 62 | :return: BIP0038 non-ec-multiply encrypted wif key 63 | :rtype: Base58 64 | 65 | """ 66 | privkeyhex = repr(privkey) # hex 67 | addr = format(privkey.uncompressed.address, "BTC") 68 | a = compat_bytes(addr, 'ascii') 69 | salt = hashlib.sha256(hashlib.sha256(a).digest()).digest()[0:4] 70 | if SCRYPT_MODULE == "scrypt": 71 | if sys.version >= '3.0.0': 72 | key = scrypt.hash(passphrase, salt, 16384, 8, 8) 73 | else: 74 | key = scrypt.hash(str(passphrase), str(salt), 16384, 8, 8) 75 | elif SCRYPT_MODULE == "pylibscrypt": 76 | key = scrypt.scrypt(compat_bytes(passphrase, "utf-8"), salt, 16384, 8, 8) 77 | else: 78 | raise ValueError("No scrypt module loaded") 79 | (derived_half1, derived_half2) = (key[:32], key[32:]) 80 | aes = AES.new(derived_half2, AES.MODE_ECB) 81 | encrypted_half1 = _encrypt_xor(privkeyhex[:32], derived_half1[:16], aes) 82 | encrypted_half2 = _encrypt_xor(privkeyhex[32:], derived_half1[16:], aes) 83 | " flag byte is forced 0xc0 because Graphene only uses compressed keys " 84 | payload = ( 85 | b'\x01' + b'\x42' + b'\xc0' + salt + encrypted_half1 + encrypted_half2) 86 | " Checksum " 87 | checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4] 88 | privatekey = hexlify(payload + checksum).decode('ascii') 89 | return Base58(privatekey) 90 | 91 | 92 | def decrypt(encrypted_privkey, passphrase): 93 | """BIP0038 non-ec-multiply decryption. Returns WIF privkey. 
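        Example (a minimal sketch; the "6P..." ciphertext and the passphrase
        are placeholders for a real BIP0038 key and its password):

            wif = decrypt("6P...", "my passphrase")
            print(format(wif, "wif"))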
94 | 95 | :param Base58 encrypted_privkey: Private key 96 | :param str passphrase: UTF-8 encoded passphrase for decryption 97 | :return: BIP0038 non-ec-multiply decrypted key 98 | :rtype: Base58 99 | :raises SaltException: if checksum verification failed (e.g. wrong 100 | password) 101 | 102 | """ 103 | 104 | d = unhexlify(base58decode(encrypted_privkey)) 105 | d = d[2:] # remove trailing 0x01 and 0x42 106 | flagbyte = d[0:1] # get flag byte 107 | d = d[1:] # get payload 108 | assert flagbyte == b'\xc0', "Flagbyte has to be 0xc0" 109 | salt = d[0:4] 110 | d = d[4:-4] 111 | if SCRYPT_MODULE == "scrypt": 112 | if sys.version >= '3.0.0': 113 | key = scrypt.hash(passphrase, salt, 16384, 8, 8) 114 | else: 115 | key = scrypt.hash(str(passphrase), str(salt), 16384, 8, 8) 116 | elif SCRYPT_MODULE == "pylibscrypt": 117 | key = scrypt.scrypt(compat_bytes(passphrase, "utf-8"), salt, 16384, 8, 8) 118 | else: 119 | raise ValueError("No scrypt module loaded") 120 | derivedhalf1 = key[0:32] 121 | derivedhalf2 = key[32:64] 122 | encryptedhalf1 = d[0:16] 123 | encryptedhalf2 = d[16:32] 124 | aes = AES.new(derivedhalf2, AES.MODE_ECB) 125 | decryptedhalf2 = aes.decrypt(encryptedhalf2) 126 | decryptedhalf1 = aes.decrypt(encryptedhalf1) 127 | privraw = decryptedhalf1 + decryptedhalf2 128 | privraw = ('%064x' % 129 | (int(hexlify(privraw), 16) ^ int(hexlify(derivedhalf1), 16))) 130 | wif = Base58(privraw) 131 | """ Verify Salt """ 132 | privkey = PrivateKey(format(wif, "wif")) 133 | addr = format(privkey.uncompressed.address, "BTC") 134 | a = compat_bytes(addr, 'ascii') 135 | saltverify = hashlib.sha256(hashlib.sha256(a).digest()).digest()[0:4] 136 | if saltverify != salt: 137 | raise SaltException( 138 | 'checksum verification failed! Password may be incorrect.') 139 | return wif 140 | -------------------------------------------------------------------------------- /hivebase/chains.py: -------------------------------------------------------------------------------- 1 | default_prefix = "STM" 2 | 3 | known_chains = { 4 | "HIVE": { 5 | "chain_id": "beeab0de00000000000000000000000000000000000000000000000000000000", 6 | "prefix": "STM", 7 | "hive_symbol": "HIVE", 8 | "hbd_symbol": "HBD", 9 | "vests_symbol": "VESTS", 10 | }, 11 | "STEEM": { 12 | "chain_id": "0" * int(256 / 4), 13 | "prefix": "STM", 14 | "hive_symbol": "STEEM", 15 | "hbd_symbol": "SBD", 16 | "vests_symbol": "VESTS", 17 | }, 18 | "GOLOS": { 19 | "chain_id": "782a3039b478c839e4cb0c941ff4eaeb7df40bdd68bd441afd444b9da763de12", 20 | "prefix": "GLS", 21 | "hive_symbol": "GOLOS", 22 | "hbd_symbol": "GBG", 23 | "vests_symbol": "GESTS", 24 | }, 25 | "TESTS": { 26 | "chain_id": 27 | "46d82ab7d8db682eb1959aed0ada039a6d49afa1602491f93dde9cac3e8e6c32", 28 | "prefix": 29 | "TST", 30 | "hive_symbol": 31 | "TESTS", 32 | "hbd_symbol": 33 | "TBD", 34 | "vests_symbol": 35 | "VESTS", 36 | }, 37 | } 38 | -------------------------------------------------------------------------------- /hivebase/exceptions.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | 4 | def decodeRPCErrorMsg(e): 5 | """ Helper function to decode the raised Exception and give it a 6 | python Exception class 7 | """ 8 | found = re.search( 9 | ("(10 assert_exception: Assert Exception\n|" 10 | "3030000 tx_missing_posting_auth)" 11 | ".*: (.*)\n"), 12 | str(e), 13 | flags=re.M) 14 | if found: 15 | return found.group(2).strip() 16 | else: 17 | return str(e) 18 | 19 | 20 | class RPCError(Exception): 21 | pass 22 | 23 | 24 | class 
RPCErrorRecoverable(RPCError): 25 | pass 26 | 27 | 28 | class NumRetriesReached(Exception): 29 | pass 30 | 31 | 32 | class NoAccessApi(RPCError): 33 | pass 34 | 35 | 36 | class AlreadyTransactedThisBlock(RPCError): 37 | pass 38 | 39 | 40 | class VoteWeightTooSmall(RPCError): 41 | pass 42 | 43 | 44 | class OnlyVoteOnceEvery3Seconds(RPCError): 45 | pass 46 | 47 | 48 | class AlreadyVotedSimilarily(RPCError): 49 | pass 50 | 51 | 52 | class NoMethodWithName(RPCError): 53 | pass 54 | 55 | 56 | class PostOnlyEvery5Min(RPCError): 57 | pass 58 | 59 | 60 | class DuplicateTransaction(RPCError): 61 | pass 62 | 63 | 64 | class MissingRequiredPostingAuthority(RPCError): 65 | pass 66 | 67 | 68 | class UnhandledRPCError(RPCError): 69 | pass 70 | 71 | 72 | class ExceededAllowedBandwidth(RPCError): 73 | pass 74 | 75 | 76 | class AccountExistsException(Exception): 77 | pass 78 | 79 | 80 | class AccountDoesNotExistsException(Exception): 81 | pass 82 | 83 | 84 | class InsufficientAuthorityError(Exception): 85 | pass 86 | 87 | 88 | class MissingKeyError(Exception): 89 | pass 90 | 91 | 92 | class BlockDoesNotExistsException(Exception): 93 | pass 94 | 95 | 96 | class WitnessDoesNotExistsException(Exception): 97 | pass 98 | 99 | 100 | class InvalidKeyFormat(Exception): 101 | pass 102 | 103 | 104 | class NoWallet(Exception): 105 | pass 106 | 107 | 108 | class InvalidWifError(Exception): 109 | pass 110 | 111 | 112 | class WalletExists(Exception): 113 | pass 114 | 115 | 116 | class PostDoesNotExist(Exception): 117 | pass 118 | 119 | 120 | class VotingInvalidOnArchivedPost(Exception): 121 | pass 122 | -------------------------------------------------------------------------------- /hivebase/memo.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import json 3 | import struct 4 | import sys 5 | from binascii import hexlify, unhexlify 6 | from collections import OrderedDict 7 | 8 | from Crypto.Cipher import AES 9 | 10 | from .base58 import base58encode, base58decode 11 | from .utils import compat_bytes 12 | 13 | default_prefix = "STM" 14 | 15 | 16 | def get_shared_secret(priv, pub): 17 | """ Derive the share secret between ``priv`` and ``pub`` 18 | 19 | :param `Base58` priv: Private Key 20 | :param `Base58` pub: Public Key 21 | :return: Shared secret 22 | :rtype: hex 23 | 24 | The shared secret is generated such that:: 25 | 26 | Pub(Alice) * Priv(Bob) = Pub(Bob) * Priv(Alice) 27 | 28 | """ 29 | pub_point = pub.point() 30 | priv_point = int(repr(priv), 16) 31 | res = pub_point * priv_point 32 | res_hex = '%032x' % res.x() 33 | # Zero padding 34 | res_hex = '0' * (64 - len(res_hex)) + res_hex 35 | return hashlib.sha512(unhexlify(res_hex)).hexdigest() 36 | 37 | 38 | def init_aes(shared_secret, nonce): 39 | """ Initialize AES instance 40 | 41 | :param hex shared_secret: Shared Secret to use as encryption key 42 | :param int nonce: Random nonce 43 | :return: AES instance and checksum of the encryption key 44 | :rtype: length 2 tuple 45 | 46 | """ 47 | " Seed " 48 | ss = unhexlify(shared_secret) 49 | n = struct.pack("= 0x80: 40 | data += compat_bytes([(n & 0x7f) | 0x80]) 41 | n >>= 7 42 | data += compat_bytes([n]) 43 | return data 44 | 45 | 46 | def varintdecode(data): 47 | """ Varint decoding 48 | """ 49 | shift = 0 50 | result = 0 51 | for c in data: 52 | b = ord(c) 53 | result |= ((b & 0x7f) << shift) 54 | if not (b & 0x80): 55 | break 56 | shift += 7 57 | return result 58 | 59 | 60 | def variable_buffer(s): 61 | """ Encode variable length buffer 62 | """ 63 | return 
varint(len(s)) + s 64 | 65 | 66 | def JsonObj(data): 67 | """ Returns json object from data 68 | """ 69 | try: 70 | if sys.version >= '3.0': 71 | return json.loads(str(data)) 72 | else: 73 | return compat_json(json.loads(str(data), object_hook=compat_json), 74 | ignore_dicts=True) 75 | except Exception as e: # noqa FIXME(sneak) 76 | try: 77 | return data.__str__() 78 | except: # noqa FIXME(sneak) 79 | raise ValueError('JsonObj could not parse %s:\n%s' % 80 | (type(data).__name__, data.__class__)) 81 | 82 | 83 | class Uint8: 84 | def __init__(self, d): 85 | self.data = d 86 | 87 | def __bytes__(self): 88 | return struct.pack(">'<__main__.clazz object at 0x105171a90>' 20 | 21 | In this example, when `bytes(c)` is invoked, the interpreter then calls `str(c)`, and prints the above string. 22 | the method `__bytes__` is never invoked. 23 | 24 | Python 3.6: 25 | c = clazz() 26 | bytes(c) 27 | >>b'\x00\x00\x00\x00\x00' 28 | 29 | This is the expected and necessary behavior across both platforms. 30 | 31 | w/ compat_bytes method, we will ensure that the correct bytes method is always invoked, avoiding the `str` alias in 32 | 2.7. 33 | 34 | :param item: this is the object who's bytes method needs to be invoked 35 | :param encoding: optional encoding parameter to handle the Python 3.6 two argument 'bytes' method. 36 | :return: a bytes object that functions the same across 3.6 and 2.7 37 | """ 38 | if hasattr(item, '__bytes__'): 39 | return item.__bytes__() 40 | else: 41 | if encoding: 42 | return bytes(item, encoding) 43 | else: 44 | return bytes(item) 45 | 46 | 47 | def compat_chr(item): 48 | """ 49 | This is necessary to maintain compatibility across Python 2.7 and 3.6. 50 | In 3.6, 'chr' handles any unicode character, whereas in 2.7, `chr` only handles 51 | ASCII characters. Thankfully, the Python 2.7 method `unichr` provides the same 52 | functionality as 3.6 `chr`. 53 | 54 | :param item: a length 1 string who's `chr` method needs to be invoked 55 | :return: the unichr code point of the single character string, item 56 | """ 57 | if sys.version >= '3.0': 58 | return chr(item) 59 | else: 60 | return unichr(item) 61 | 62 | 63 | def compat_json(data, ignore_dicts=False): 64 | """ 65 | :param data: Json Data we want to ensure compatibility on. 66 | :param ignore_dicts: should only be set to true when first called. 67 | :return: Python compatible 2.7 byte-strings when encountering unicode. 68 | """ 69 | # if this is a unicode string, return its string representation 70 | if isinstance(data, unicode): 71 | return data.encode('utf-8') 72 | # if this is a list of values, return list of byte-string values 73 | if isinstance(data, list): 74 | return [compat_json(item, ignore_dicts=True) for item in data] 75 | # if this is a dictionary, return dictionary of byte-string keys and values 76 | # but only if we haven't already byte-string it 77 | if isinstance(data, dict) and not ignore_dicts: 78 | return { 79 | compat_json(key, ignore_dicts=True): compat_json(value, ignore_dicts=True) 80 | for key, value in data.items() 81 | } 82 | # if it's anything else, return it in its original form 83 | return data 84 | -------------------------------------------------------------------------------- /scripts/Dockerfile: -------------------------------------------------------------------------------- 1 | # The purpose of this docker file is to provide a shell with 2 | # hive-python pre-installed for interactive testing purposes. 3 | # 4 | # Usage: 5 | # docker build -t hivepy . 
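#   (assumes the build is run from the scripts/ directory, where this
#    Dockerfile lives)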
6 | # docker run -it hivepy 7 | 8 | FROM python:3.5.3 9 | MAINTAINER pharesim 10 | 11 | # set default password for BIP38 encrypted wallet 12 | ENV UNLOCK test123 13 | 14 | RUN pip install ipython 15 | RUN pip install git+git://github.com/pharesim/hive-python.git 16 | 17 | CMD "ipython" 18 | -------------------------------------------------------------------------------- /scripts/doc_rst_convert.py: -------------------------------------------------------------------------------- 1 | import pypandoc 2 | pypandoc.convert( 3 | source='README.md', 4 | format='markdown_github', 5 | to='rst', 6 | outputfile='README.rst') 7 | -------------------------------------------------------------------------------- /scripts/nuke_legacy.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | pip uninstall piston-steem -y 4 | pip uninstall python-steem -y 5 | pip uninstall steem-python -y 6 | pip uninstall steem-piston -y 7 | pip uninstall steem -y 8 | pip uninstall graphenelib -y 9 | pip uninstall piston-cli -y -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file=README.md 3 | 4 | [tool:pytest] 5 | norecursedirs=dist docs build deploy 6 | addopts = --pep8 7 | testpaths = tests 8 | 9 | [yapf] 10 | indent_width = 4 11 | column_limit = 77 12 | based_on_style = pep8 13 | spaces_before_comment = 2 14 | split_before_logical_operator = true 15 | dedent_closing_brackets = true 16 | i18n_comment = NOQA 17 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | # Note: To use the 'upload' functionality of this file, you must: 5 | # $ pip install twine 6 | 7 | import io 8 | import os 9 | import sys 10 | from shutil import rmtree 11 | 12 | from setuptools import find_packages, setup, Command 13 | from setuptools.command.test import test as TestCommand 14 | 15 | # Package meta-data. 16 | NAME = 'hivepy' 17 | VERSION = '0.10' 18 | DESCRIPTION = 'A python hive library.' 19 | URL = 'https://github.com/pharesim/hive-python' 20 | EMAIL = 'pharesim@protonmail.com' 21 | AUTHOR = 'pharesim' 22 | 23 | # What packages are required for this module to be executed? 24 | REQUIRED = [ 25 | 'appdirs', 26 | 'certifi', 27 | 'ecdsa>=0.13', 28 | 'funcy', 29 | 'futures ; python_version < "3.0.0"', 30 | 'future', 31 | 'langdetect', 32 | 'prettytable', 33 | 'pycrypto>=1.9.1', 34 | 'pylibscrypt>=1.6.1', 35 | 'scrypt>=0.8.0', 36 | 'toolz', 37 | 'ujson', 38 | 'urllib3', 39 | 'voluptuous', 40 | 'w3lib' 41 | ] 42 | TEST_REQUIRED = [ 43 | 'pep8', 44 | 'pytest', 45 | 'pytest-pylint ; python_version >= "3.4.0"', 46 | 'pytest-xdist', 47 | 'pytest-runner', 48 | 'pytest-pep8', 49 | 'pytest-cov', 50 | 'yapf', 51 | 'autopep8' 52 | ] 53 | 54 | BUILD_REQUIRED = [ 55 | 'twine', 56 | 'pypandoc', 57 | 'recommonmark' 58 | 'wheel', 59 | 'setuptools', 60 | 'sphinx', 61 | 'sphinx_rtd_theme' 62 | ] 63 | # The rest you shouldn't have to touch too much :) 64 | # ------------------------------------------------ 65 | # Except, perhaps the License and Trove Classifiers! 66 | # If you do change the License, remember to change the Trove Classifier for that! 67 | 68 | here = os.path.abspath(os.path.dirname(__file__)) 69 | 70 | # Import the README and use it as the long-description. 
71 | # Note: this will only work if 'README.rst' is present in your MANIFEST.in file! 72 | # with io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f: 73 | # long_description = '\n' + f.read() 74 | 75 | 76 | class PyTest(TestCommand): 77 | user_options = [('pytest-args=', 'a', "Arguments to pass into py.test")] 78 | 79 | def initialize_options(self): 80 | TestCommand.initialize_options(self) 81 | try: 82 | from multiprocessing import cpu_count 83 | self.pytest_args = ['-n', str(cpu_count()), '--boxed'] 84 | except (ImportError, NotImplementedError): 85 | self.pytest_args = ['-n', '1', '--boxed'] 86 | 87 | def finalize_options(self): 88 | TestCommand.finalize_options(self) 89 | self.test_args = [] 90 | self.test_suite = True 91 | 92 | def run_tests(self): 93 | import pytest 94 | 95 | errno = pytest.main(self.pytest_args) 96 | sys.exit(errno) 97 | 98 | 99 | class UploadCommand(Command): 100 | """Support setup.py upload.""" 101 | 102 | description = 'Build and publish the package.' 103 | user_options = [] 104 | 105 | @staticmethod 106 | def status(s): 107 | """Prints things in bold.""" 108 | print('\033[1m{0}\033[0m'.format(s)) 109 | 110 | def initialize_options(self): 111 | pass 112 | 113 | def finalize_options(self): 114 | pass 115 | 116 | def run(self): 117 | try: 118 | self.status('Removing previous builds…') 119 | rmtree(os.path.join(here, 'dist')) 120 | except OSError: 121 | pass 122 | 123 | self.status('Building Source and Wheel (universal) distribution…') 124 | os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)) 125 | 126 | self.status('Uploading the package to PyPi via Twine…') 127 | os.system('twine upload dist/*') 128 | 129 | sys.exit() 130 | 131 | 132 | # Where the magic happens: 133 | setup( 134 | name=NAME, 135 | version=VERSION, 136 | description=DESCRIPTION, 137 | keywords=['hive', 'cryptocurrency', 'blockchain'], 138 | # long_description=long_description, 139 | author=AUTHOR, 140 | author_email=EMAIL, 141 | url=URL, 142 | packages=find_packages(exclude=('tests','scripts')), 143 | entry_points={ 144 | 'console_scripts': [ 145 | 'piston=hive.cli:legacyentry', 146 | 'hivepy=hive.cli:legacyentry', 147 | 'hivetail=hive.cli:hivetailentry', 148 | ], 149 | }, 150 | install_requires=REQUIRED, 151 | extras_require={ 152 | 'dev': TEST_REQUIRED + BUILD_REQUIRED, 153 | 'build': BUILD_REQUIRED, 154 | 'test': TEST_REQUIRED 155 | }, 156 | tests_require=TEST_REQUIRED, 157 | include_package_data=True, 158 | license='MIT', 159 | 160 | classifiers=[ 161 | # Trove classifiers 162 | # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers 163 | 'Intended Audience :: Developers', 164 | 'License :: OSI Approved :: MIT License', 165 | 'Natural Language :: English', 'Programming Language :: Python :: 3', 166 | 'Programming Language :: Python :: 3.5', 167 | 'Topic :: Software Development :: Libraries', 168 | 'Topic :: Software Development :: Libraries :: Python Modules', 169 | 'Development Status :: 4 - Beta' 170 | ], 171 | # $ setup.py publish support. 
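# With the cmdclass mapping below (illustrative usage):
#   python setup.py test    -> runs the PyTest command defined above
#   python setup.py upload  -> builds sdist/wheel and uploads via twine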
172 | cmdclass={ 173 | 'upload': UploadCommand, 174 | 'test': PyTest 175 | }, 176 | ) 177 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pharesim/hive-python/33dcc6353e481df926e5dde99ec4a8aba3901c9f/tests/__init__.py -------------------------------------------------------------------------------- /tests/block_data/account_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 3620775392, 3 | "expiration": "2016-03-30T07:05:03", 4 | "operations": [ 5 | [ 6 | "account_create", 7 | { 8 | "owner": { 9 | "weight_threshold": 1, 10 | "account_auths": [], 11 | "key_auths": [ 12 | [ 13 | "STM8MN3FNBa8WbEpxz3wGL3L1mkt6sGnncH8iuto7r8Wa3T9NSSGT", 14 | 1 15 | ] 16 | ] 17 | }, 18 | "memo_key": "STM6Gkj27XMkoGsr4zwEvkjNhh4dykbXmPFzHhT8g86jWsqu3U38X", 19 | "active": { 20 | "weight_threshold": 1, 21 | "account_auths": [], 22 | "key_auths": [ 23 | [ 24 | "STM8HCf7QLUexogEviN8x1SpKRhFwg2sc8LrWuJqv7QsmWrua6ZyR", 25 | 1 26 | ] 27 | ] 28 | }, 29 | "new_account_name": "fabian", 30 | "posting": { 31 | "weight_threshold": 1, 32 | "account_auths": [], 33 | "key_auths": [ 34 | [ 35 | "STM8EhGWcEuQ2pqCKkGHnbmcTNpWYZDjGTT7ketVBp4gUStDr2brz", 36 | 1 37 | ] 38 | ] 39 | }, 40 | "creator": "hello", 41 | "json_metadata": "{}", 42 | "fee": "0.000 HIVE" 43 | } 44 | ] 45 | ], 46 | "signatures": [ 47 | "2051b9c61cdd9df1f04e5d37529a72c9d4419c1e0b466d78c156c383aa951b21eb3f13b5bcbe9d0caf883143a15ff911c2d2cac9c466a7f619618bb3b4d24612b5" 48 | ], 49 | "ref_block_num": 29707, 50 | "extensions": [] 51 | } 52 | -------------------------------------------------------------------------------- /tests/block_data/account_update.json: -------------------------------------------------------------------------------- 1 | { 2 | "account_to_recover": "barrie", 3 | "extensions": [], 4 | "new_recovery_account": "boombastic" 5 | } 6 | -------------------------------------------------------------------------------- /tests/block_data/account_witness_proxy.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 2749880717, 3 | "operations": [ 4 | [ 5 | "account_witness_proxy", 6 | { 7 | "proxy": "abit", 8 | "account": "puppies" 9 | } 10 | ] 11 | ], 12 | "signatures": [ 13 | "2066825bf5033b1a85b3f26c43bc853aa2e1e57ecdd400f61ea0ed444906836c323345c6b04cdbbb39637ed180ddf7a8eacc9d36086158140d1dec5788b73a01b4" 14 | ], 15 | "ref_block_num": 31712, 16 | "expiration": "2016-04-08T15:47:00", 17 | "extensions": [] 18 | } 19 | 20 | -------------------------------------------------------------------------------- /tests/block_data/account_witness_vote.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 575883867, 3 | "operations": [ 4 | [ 5 | "account_witness_vote", 6 | { 7 | "witness": "berniesanders", 8 | "account": "donalddrumpf", 9 | "approve": true 10 | } 11 | ] 12 | ], 13 | "signatures": [ 14 | "1f7782f6c379d14c97718489b5ebca68fa25b3042e781d2f620ccc4927fbf4d3f30e60ba424cd906eb75b87cd4002bf982bc2ba9dc0f2c7b136b566de7416a170b" 15 | ], 16 | "ref_block_num": 57831, 17 | "expiration": "2016-03-28T23:43:36", 18 | "extensions": [] 19 | } 20 | -------------------------------------------------------------------------------- /tests/block_data/block.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "extensions": [], 3 | "timestamp": "2016-08-11T22:00:09", 4 | "transaction_merkle_root": "57e17f40cfa97c260eef365dc599e06acdba8591", 5 | "previous": "003d0900c38ca36625f50fc6724cbb9d82a9a93e", 6 | "witness": "roadscape", 7 | "transactions": [ 8 | { 9 | "signatures": [ 10 | "1f7f99b4e98878ecd2b65bc9e6c8e2fc3a929fdb766411e89b6df2accddf326b901e8bc10c0d0f47738c26c6fdcf15f76a11eb69a12058e96820b2625061d6aa96" 11 | ], 12 | "extensions": [], 13 | "expiration": "2016-08-11T22:00:18", 14 | "ref_block_num": 2203, 15 | "operations": [ 16 | [ 17 | "comment", 18 | { 19 | "body": "@@ -154,16 +154,17 @@\n at coffe\n+e\n deliver\n", 20 | "title": "", 21 | "author": "mindfreak", 22 | "parent_author": "einsteinpotsdam", 23 | "permlink": "re-einsteinpotsdam-tutorial-for-other-shop-owners-how-to-accept-steem-and-steem-usd-payments-setup-time-under-2-minutes-android-20160811t215904898z", 24 | "parent_permlink": "tutorial-for-other-shop-owners-how-to-accept-steem-and-steem-usd-payments-setup-time-under-2-minutes-android", 25 | "json_metadata": "{\"tags\":[\"steemit\"]}" 26 | } 27 | ] 28 | ], 29 | "ref_block_prefix": 3949810370 30 | }, 31 | { 32 | "signatures": [], 33 | "extensions": [], 34 | "expiration": "2016-08-11T22:00:36", 35 | "ref_block_num": 2304, 36 | "operations": [ 37 | [ 38 | "witness_update", 39 | { 40 | "url": "http://fxxk.com", 41 | "props": { 42 | "maximum_block_size": 65536, 43 | "account_creation_fee": "1.000 HIVE", 44 | "hbd_interest_rate": 1000 45 | }, 46 | "block_signing_key": "STM5b3wkzd5cPuW8tYbHpsM6qo26R5eympAQsBaoEfeMDxxUCLvsY", 47 | "fee": "0.000 HIVE", 48 | "owner": "supercomputing06" 49 | } 50 | ] 51 | ], 52 | "ref_block_prefix": 1721994435 53 | }, 54 | { 55 | "signatures": [], 56 | "extensions": [], 57 | "expiration": "2016-08-11T22:00:36", 58 | "ref_block_num": 2304, 59 | "operations": [ 60 | [ 61 | "account_update", 62 | { 63 | "json_metadata": "", 64 | "account": "supercomputing06", 65 | "memo_key": "STM7myUzFgMrc5w2jRc3LH2cTwcs96q74Kj6GJ3DyKHyrHFPDP96N", 66 | "active": { 67 | "key_auths": [ 68 | [ 69 | "STM5sP9GUuExPzK35F1MLjN2dTY7fqqP7dSpMWqnzCoU3je64gm6q", 70 | 2 71 | ], 72 | [ 73 | "STM7t97bmNzbVruhH3yGQ7yFR58UJyPTb7Jh6ugmPfH1zqzJpngQH", 74 | 1 75 | ] 76 | ], 77 | "weight_threshold": 0, 78 | "account_auths": [] 79 | } 80 | } 81 | ] 82 | ], 83 | "ref_block_prefix": 1721994435 84 | }, 85 | { 86 | "signatures": [], 87 | "extensions": [], 88 | "expiration": "2016-08-11T22:00:36", 89 | "ref_block_num": 2304, 90 | "operations": [ 91 | [ 92 | "account_update", 93 | { 94 | "json_metadata": "", 95 | "account": "supercomputing06", 96 | "memo_key": "STM7myUzFgMrc5w2jRc3LH2cTwcs96q74Kj6GJ3DyKHyrHFPDP96N", 97 | "active": { 98 | "key_auths": [ 99 | [ 100 | "STM5sP9GUuExPzK35F1MLjN2dTY7fqqP7dSpMWqnzCoU3je64gm6q", 101 | 2 102 | ], 103 | [ 104 | "STM7t97bmNzbVruhH3yGQ7yFR58UJyPTb7Jh6ugmPfH1zqzJpngQH", 105 | 1 106 | ] 107 | ], 108 | "weight_threshold": 2, 109 | "account_auths": [] 110 | } 111 | } 112 | ] 113 | ], 114 | "ref_block_prefix": 1721994435 115 | } 116 | ], 117 | "witness_signature": "20033915d9ddfca226eeadc57807556f18dd1ace85659774f2b6e620c56426e4560449e07635e9724ad1171a1f49800fe392e047e2a69bfbe9ee06948608fca211" 118 | } 119 | -------------------------------------------------------------------------------- /tests/block_data/cancel_transfer_from_savings.json: -------------------------------------------------------------------------------- 1 | { 2 | "operations": [ 3 | [ 4 | 
"cancel_transfer_from_savings", 5 | { 6 | "request_id": 1, 7 | "from": "jesta" 8 | } 9 | ] 10 | ], 11 | "expiration": "2016-09-21T07:55:45", 12 | "signatures": [ 13 | "1f135b1a9123672cdeb679d57272631858242f569c6066b0f69293963b6b7f7781587478587b843c7f9679855606c37f0ef16707bb03a775d78e2602540626e3d4" 14 | ], 15 | "ref_block_num": 45917, 16 | "ref_block_prefix": 2784823756, 17 | "extensions": [] 18 | } 19 | -------------------------------------------------------------------------------- /tests/block_data/change_recovery_account.json: -------------------------------------------------------------------------------- 1 | { 2 | "account_to_recover": "barrie", 3 | "extensions": [], 4 | "new_recovery_account": "boombastic" 5 | } 6 | -------------------------------------------------------------------------------- /tests/block_data/comment.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 3071757153, 3 | "operations": [ 4 | [ 5 | "comment", 6 | { 7 | "author": "xeldal", 8 | "body": "This is xeldal, an experienced witness. Will you vote for me?", 9 | "json_metadata": "{}", 10 | "title": "xeldal Witness Thread", 11 | "permlink": "xeldal-witness-post", 12 | "parent_author": "", 13 | "parent_permlink": "witness-category" 14 | } 15 | ] 16 | ], 17 | "signatures": [ 18 | "1f332f851112774103c4a12a97941f1c39a1c30a0561e64fbbe756d0860f7e68a206f2f57dfd15b77ecf3ce13fcffd6e66ae4b65a8df29bc01682876e34eb3cecf" 19 | ], 20 | "ref_block_num": 32379, 21 | "expiration": "2016-04-08T16:20:27", 22 | "extensions": [] 23 | } 24 | -------------------------------------------------------------------------------- /tests/block_data/comment_options.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 3739556666, 3 | "expiration": "2016-08-12T07:02:19", 4 | "operations": [ 5 | [ 6 | "comment_options", 7 | { 8 | "author": "testing001", 9 | "allow_curation_rewards": true, 10 | "max_accepted_payout": "1000.000 HBD", 11 | "percent_hive_dollars": 5000, 12 | "allow_votes": true, 13 | "permlink": "testing6", 14 | "extensions": [] 15 | } 16 | ] 17 | ], 18 | "signatures": [ 19 | "1f47ff6711fad1ab07a9e10ce91e1fd84ca49629a6f45af6aca67150705e651b2006b5548520995bedd09daf6f597d2bc68a30c7c161b4600e14f0032e69235fcf" 20 | ], 21 | "ref_block_num": 13118, 22 | "extensions": [] 23 | } 24 | -------------------------------------------------------------------------------- /tests/block_data/convert.json: -------------------------------------------------------------------------------- 1 | { 2 | 'expiration': '2016-07-04T00:29:39', 3 | 'extensions': [], 4 | 'operations': [ 5 | [ 6 | 'convert', 7 | { 8 | 'amount': '5.000 HBD', 9 | 'owner': 'summon', 10 | 'requestid': 1467592156 11 | } 12 | ] 13 | ], 14 | 'ref_block_num': 5864, 15 | 'ref_block_prefix': 521569582, 16 | 'signatures': [ 17 | '1f0cd39195d45d5d40cd92651081670b5a799217d615c311921fc1981a0898703d1864555148c2e1246a19fa8ea1b80b4dd4474df86fc9a9c9d6a9c8576d467687' 18 | ] 19 | } 20 | -------------------------------------------------------------------------------- /tests/block_data/custom.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 1654024379, 3 | "expiration": "2016-06-03T18:37:24", 4 | "operations": [ 5 | [ 6 | "custom_json", 7 | { 8 | "required_posting_auths": [ 9 | "steemit" 10 | ], 11 | "required_auths": [], 12 | "id": "follow", 13 | "json": "{" 14 | follower 15 | ":" 16 | steemit 17 | "," 18 | 
following 19 | ":" 20 | steem 21 | "," 22 | what 23 | ":[" 24 | posts 25 | "]}" 26 | } 27 | ] 28 | ], 29 | "signatures": [ 30 | "1f6019603d73f8c26b92cbdf1c224bf48fb0e600ff9e1689a09e8e4cb1234aeeb92b5eb6f8b8d148bbd3e62a4eb2bc94d1ff2293ec9b957d17d46e9dc11f41735d" 31 | ], 32 | "ref_block_num": 56232, 33 | "extensions": [] 34 | } 35 | -------------------------------------------------------------------------------- /tests/block_data/custom_json.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 1654024379, 3 | "expiration": "2016-06-03T18:37:24", 4 | "operations": [ 5 | [ 6 | "custom_json", 7 | { 8 | "required_posting_auths": [ 9 | "steemit" 10 | ], 11 | "required_auths": [], 12 | "id": "follow", 13 | "json": "{" 14 | follower 15 | ":" 16 | steemit 17 | "," 18 | following 19 | ":" 20 | steem 21 | "," 22 | what 23 | ":[" 24 | posts 25 | "]}" 26 | } 27 | ] 28 | ], 29 | "signatures": [ 30 | "1f6019603d73f8c26b92cbdf1c224bf48fb0e600ff9e1689a09e8e4cb1234aeeb92b5eb6f8b8d148bbd3e62a4eb2bc94d1ff2293ec9b957d17d46e9dc11f41735d" 31 | ], 32 | "ref_block_num": 56232, 33 | "extensions": [] 34 | } 35 | 36 | -------------------------------------------------------------------------------- /tests/block_data/delete_comment.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 3023139187, 3 | "expiration": "2016-06-06T19:34:27", 4 | "operations": [ 5 | [ 6 | "delete_comment", 7 | { 8 | "author": "jsc", 9 | "permlink": "tests-delete" 10 | } 11 | ] 12 | ], 13 | "signatures": [ 14 | "2044602e8a51a6f44827be54fb5fec8b53698fdf608a5fdd5943af71f288229fc078104b9798391989e15153f5f1aeb370d74ec027fefe4b5372a6c90d35b175f3" 15 | ], 16 | "ref_block_num": 12211, 17 | "extensions": [] 18 | } 19 | 20 | -------------------------------------------------------------------------------- /tests/block_data/feed_publish.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 336265640, 3 | "expiration": "2016-04-26T23:08:06", 4 | "operations": [ 5 | [ 6 | "feed_publish", 7 | { 8 | "exchange_rate": { 9 | "quote": "1.000 HIVE", 10 | "base": "0.374 HBD" 11 | }, 12 | "publisher": "smooth.witness" 13 | } 14 | ] 15 | ], 16 | "signatures": [ 17 | "1f45f20c78e105eba93946b4366293f28a1d5b5e6e52e2007e8c0965c19bdd5b1464ba7a6b274d1a483715e3a883125106905c24e57092bc89247689cdc335c3fc" 18 | ], 19 | "ref_block_num": 19946, 20 | "extensions": [] 21 | } 22 | -------------------------------------------------------------------------------- /tests/block_data/limit_order_cancel.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 843461126, 3 | "expiration": "2016-07-01T13:33:03", 4 | "operations": [ 5 | [ 6 | "limit_order_create", 7 | { 8 | "owner": "adm", 9 | "amount_to_sell": "5.000 HIVE", 10 | "min_to_receive": "1.542 HBD", 11 | "orderid": 9, 12 | "fill_or_kill": false, 13 | "expiration": "2016-07-01T13:34:03" 14 | } 15 | ] 16 | ], 17 | "signatures": [ 18 | "1f28e4e49e31cb9f22176fe142b3334d2459ec75cd70e48b2f536f6dc38deb8e8e5402e2cb878e0bc8cee2dc1280c480acdabe5807de5f7bc5c59ccf788920cdeb" 19 | ], 20 | "ref_block_num": 969, 21 | "extensions": [] 22 | } 23 | -------------------------------------------------------------------------------- /tests/block_data/limit_order_create.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 843461126, 3 | 
"expiration": "2016-07-01T13:33:03", 4 | "operations": [ 5 | [ 6 | "limit_order_create", 7 | { 8 | "owner": "adm", 9 | "amount_to_sell": "5.000 HIVE", 10 | "min_to_receive": "1.542 HBD", 11 | "orderid": 9, 12 | "fill_or_kill": false, 13 | "expiration": "2016-07-01T13:34:03" 14 | } 15 | ] 16 | ], 17 | "signatures": [ 18 | "1f28e4e49e31cb9f22176fe142b3334d2459ec75cd70e48b2f536f6dc38deb8e8e5402e2cb878e0bc8cee2dc1280c480acdabe5807de5f7bc5c59ccf788920cdeb" 19 | ], 20 | "ref_block_num": 969, 21 | "extensions": [] 22 | } 23 | -------------------------------------------------------------------------------- /tests/block_data/pow.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 2181793527, 3 | "expiration": "2016-03-24T18:00:21", 4 | "operations": [ 5 | [ 6 | "pow", 7 | { 8 | "props": { 9 | "account_creation_fee": "100.000 HIVE", 10 | "hbd_interest_rate": 1000, 11 | "maximum_block_size": 131072 12 | }, 13 | "work": { 14 | "signature": "202f30b355f4bfe501292d3c3d650de105a1d7053fcefe875a286e79d3e886e7b005e97255b81f4c35e0ca1ad8e9acc4a57d694828231e57ae7e408e8a2f858a99", 15 | "work": "0031b16c3007c425f72c1c32359511fb89ede9980ac807b81f5ab8e5edcce345", 16 | "input": "8a023b6abb7e241ad41594fb0a22afb6832e4c4d68bae99707e20bfc8679b8e6", 17 | "worker": "STM5gzvDurFRmVUUs38TDtTtGVAEz8TcWMt4xLVbxwP2PP8b9q7P4" 18 | }, 19 | "nonce": 326, 20 | "block_id": "00000449f7860b82b4fbe2f317c670e9f01d6d9a", 21 | "worker_account": "nxt6" 22 | } 23 | ] 24 | ], 25 | "signatures": [], 26 | "ref_block_num": 1097, 27 | "extensions": [] 28 | } 29 | -------------------------------------------------------------------------------- /tests/block_data/pow2.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 2030100032, 3 | "expiration": "2017-01-20T17:43:24", 4 | "operations": [ 5 | [ 6 | "pow2", 7 | { 8 | "work": [ 9 | 1, 10 | { 11 | "prev_block": "0083f04940de00790a548572b5f7a09d2a9e6676", 12 | "pow_summary": 3542335882, 13 | "proof": { 14 | "inputs": [ 15 | 2930666, 16 | 3055534, 17 | 16227194, 18 | 1878724, 19 | 3055534, 20 | 3370375, 21 | 10368718, 22 | 8279292, 23 | 1878724, 24 | 12665269, 25 | 13416647, 26 | 14101780, 27 | 14954112, 28 | 16332900, 29 | 7269530, 30 | 13055417, 31 | 16709657, 32 | 14859041, 33 | 8879475, 34 | 3839300, 35 | 8879475, 36 | 14954112, 37 | 3370375, 38 | 7416112, 39 | 15613499, 40 | 15613499, 41 | 6086878, 42 | 9856240, 43 | 587509, 44 | 587509, 45 | 6047993, 46 | 10368718, 47 | 6449363, 48 | 7416112, 49 | 15056305, 50 | 8279292, 51 | 13055417, 52 | 6086878, 53 | 16332900, 54 | 14859041, 55 | 308997, 56 | 13416647, 57 | 14101780, 58 | 2930666, 59 | 2552223, 60 | 12665269, 61 | 2552223, 62 | 6047993, 63 | 308997, 64 | 16709657, 65 | 3654688, 66 | 9885009, 67 | 15056305, 68 | 9856240, 69 | 7269530, 70 | 3654688, 71 | 5757028, 72 | 16227194, 73 | 5757028, 74 | 3839300, 75 | 9885009, 76 | 6449363, 77 | 2141293, 78 | 2141293 79 | ], 80 | "n": 140, 81 | "seed": "3dbe4a5694af55d7bccc622a7b2d41293c26d5290ca43bd9754104d99c52dd2a", 82 | "k": 6 83 | }, 84 | "input": { 85 | "prev_block": "0083f04940de00790a548572b5f7a09d2a9e6676", 86 | "nonce": "11247522470727134118", 87 | "worker_account": "nori" 88 | } 89 | } 90 | ], 91 | "props": { 92 | "account_creation_fee": "0.001 HIVE", 93 | "hbd_interest_rate": 1000, 94 | "maximum_block_size": 131072 95 | } 96 | } 97 | ] 98 | ], 99 | "signatures": [ 100 | 
"1f0e5ef13b709989d1256def83f45dd8a89b821cefdf3f5feefa380508233afb0d2c457d04e2c64937f36ff4d6a86e26303f710db1d92749ac6fc8fa8f95259e95" 101 | ], 102 | "ref_block_num": 61513, 103 | "extensions": [] 104 | } 105 | -------------------------------------------------------------------------------- /tests/block_data/recover_account.json: -------------------------------------------------------------------------------- 1 | { 2 | "operations": [ 3 | [ 4 | "recover_account", 5 | { 6 | "recent_owner_authority": { 7 | "account_auths": [], 8 | "key_auths": [ 9 | [ 10 | "STM6Wf68LVi22QC9eS8LBWykRiSrKKp5RTWXcNqjh3VPNhiT9xFxx", 11 | 1 12 | ] 13 | ], 14 | "weight_threshold": 1 15 | }, 16 | "new_owner_authority": { 17 | "account_auths": [], 18 | "key_auths": [ 19 | [ 20 | "STM82miH8qam2G2WPPjgyquPBrUbenGDHjhZMxqaKqCugWhcuqZzW", 21 | 1 22 | ] 23 | ], 24 | "weight_threshold": 1 25 | }, 26 | "extensions": [], 27 | "account_to_recover": "steemychicken1" 28 | } 29 | ] 30 | ], 31 | "expiration": "2016-07-18T05:46:33", 32 | "signatures": [ 33 | "202c2c3902d513bb7f22e833576ea8418fdf7be3a08b0736d1de03c3289c5db11e1a95af820703e1407b8f3c0b030d857f666132b10be165b7569faba0442790f5", 34 | "2059587d734535c43caf33a706404d813897e8887ad1696750435be63dfae26fde5995a2c6c8cf295c380d89152abe97f4990f9c78a0e9095a96e6e2432dd88e05" 35 | ], 36 | "ref_block_num": 17711, 37 | "ref_block_prefix": 311057647, 38 | "extensions": [] 39 | } 40 | -------------------------------------------------------------------------------- /tests/block_data/request_account_recovery.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 392888852, 3 | "expiration": "2016-07-18T00:14:45", 4 | "operations": [ 5 | [ 6 | "request_account_recovery", 7 | { 8 | "account_to_recover": "gandalf", 9 | "new_owner_authority": { 10 | "weight_threshold": 1, 11 | "account_auths": [], 12 | "key_auths": [ 13 | [ 14 | "STM6LYxj96zdypHYqgDdD6Nyh2NxerN3P1Mp3ddNm7gci63nfrSuZ", 15 | 1 16 | ] 17 | ] 18 | }, 19 | "recovery_account": "steem", 20 | "extensions": [] 21 | } 22 | ] 23 | ], 24 | "signatures": [ 25 | "1f6b0f44985aa8f476385078b69366b0868b45b666f717b34e074b98ca97a767b6209a931e998912f51b2f7d490a6283c3ce9c3d1f2a42a4695bda1e7a6786d0d3" 26 | ], 27 | "ref_block_num": 11112, 28 | "extensions": [] 29 | } 30 | -------------------------------------------------------------------------------- /tests/block_data/set_withdraw_vesting_route.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 1734342499, 3 | "expiration": "2016-07-01T14:12:24", 4 | "operations": [ 5 | [ 6 | "set_withdraw_vesting_route", 7 | { 8 | "from_account": "lin9uxis", 9 | "percent": 10000, 10 | "auto_vest": false, 11 | "to_account": "linouxis9" 12 | } 13 | ] 14 | ], 15 | "signatures": [ 16 | "1f1fb84928c952d6bec647f8180787485165714762591096655b9f44ad8b35742a0b964faa5d40b4ff66602ff5e5d978153414abf166adf90b6926e4791164c76a" 17 | ], 18 | "ref_block_num": 1756, 19 | "extensions": [] 20 | } 21 | -------------------------------------------------------------------------------- /tests/block_data/transaction.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "signatures": [ 4 | "1f7f99b4e98878ecd2b65bc9e6c8e2fc3a929fdb766411e89b6df2accddf326b901e8bc10c0d0f47738c26c6fdcf15f76a11eb69a12058e96820b2625061d6aa96" 5 | ], 6 | "extensions": [], 7 | "expiration": "2016-08-11T22:00:18", 8 | "ref_block_num": 2203, 9 | "operations": [ 10 | [ 11 | "comment", 12 | { 13 
| "body": "@@ -154,16 +154,17 @@\n at coffe\n+e\n deliver\n", 14 | "title": "", 15 | "author": "mindfreak", 16 | "parent_author": "einsteinpotsdam", 17 | "permlink": "re-einsteinpotsdam-tutorial-for-other-shop-owners-how-to-accept-steem-and-steem-usd-payments-setup-time-under-2-minutes-android-20160811t215904898z", 18 | "parent_permlink": "tutorial-for-other-shop-owners-how-to-accept-steem-and-steem-usd-payments-setup-time-under-2-minutes-android", 19 | "json_metadata": "{\"tags\":[\"steemit\"]}" 20 | } 21 | ] 22 | ], 23 | "ref_block_prefix": 3949810370 24 | }, 25 | { 26 | "signatures": [], 27 | "extensions": [], 28 | "expiration": "2016-08-11T22:00:36", 29 | "ref_block_num": 2304, 30 | "operations": [ 31 | [ 32 | "witness_update", 33 | { 34 | "url": "http://fxxk.com", 35 | "props": { 36 | "maximum_block_size": 65536, 37 | "account_creation_fee": "1.000 HIVE", 38 | "hbd_interest_rate": 1000 39 | }, 40 | "block_signing_key": "STM5b3wkzd5cPuW8tYbHpsM6qo26R5eympAQsBaoEfeMDxxUCLvsY", 41 | "fee": "0.000 HIVE", 42 | "owner": "supercomputing06" 43 | } 44 | ] 45 | ], 46 | "ref_block_prefix": 1721994435 47 | }, 48 | { 49 | "signatures": [], 50 | "extensions": [], 51 | "expiration": "2016-08-11T22:00:36", 52 | "ref_block_num": 2304, 53 | "operations": [ 54 | [ 55 | "account_update", 56 | { 57 | "json_metadata": "", 58 | "account": "supercomputing06", 59 | "memo_key": "STM7myUzFgMrc5w2jRc3LH2cTwcs96q74Kj6GJ3DyKHyrHFPDP96N", 60 | "active": { 61 | "key_auths": [ 62 | [ 63 | "STM5sP9GUuExPzK35F1MLjN2dTY7fqqP7dSpMWqnzCoU3je64gm6q", 64 | 2 65 | ], 66 | [ 67 | "STM7t97bmNzbVruhH3yGQ7yFR58UJyPTb7Jh6ugmPfH1zqzJpngQH", 68 | 1 69 | ] 70 | ], 71 | "weight_threshold": 0, 72 | "account_auths": [] 73 | } 74 | } 75 | ] 76 | ], 77 | "ref_block_prefix": 1721994435 78 | }, 79 | { 80 | "signatures": [], 81 | "extensions": [], 82 | "expiration": "2016-08-11T22:00:36", 83 | "ref_block_num": 2304, 84 | "operations": [ 85 | [ 86 | "account_update", 87 | { 88 | "json_metadata": "", 89 | "account": "supercomputing06", 90 | "memo_key": "STM7myUzFgMrc5w2jRc3LH2cTwcs96q74Kj6GJ3DyKHyrHFPDP96N", 91 | "active": { 92 | "key_auths": [ 93 | [ 94 | "STM5sP9GUuExPzK35F1MLjN2dTY7fqqP7dSpMWqnzCoU3je64gm6q", 95 | 2 96 | ], 97 | [ 98 | "STM7t97bmNzbVruhH3yGQ7yFR58UJyPTb7Jh6ugmPfH1zqzJpngQH", 99 | 1 100 | ] 101 | ], 102 | "weight_threshold": 2, 103 | "account_auths": [] 104 | } 105 | } 106 | ] 107 | ], 108 | "ref_block_prefix": 1721994435 109 | } 110 | ] 111 | -------------------------------------------------------------------------------- /tests/block_data/transfer.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 4211555470, 3 | "expiration": "2016-03-25T13:49:33", 4 | "operations": [ 5 | [ 6 | "transfer", 7 | { 8 | "amount": "833.000 HIVE", 9 | "to": "steemit", 10 | "memo": "", 11 | "from": "admin" 12 | } 13 | ] 14 | ], 15 | "signatures": [ 16 | "204ffd40d4feefdf309780a62058e7944b6833595c500603f3bb66ddbbca2ea661391196a97aa7dde53fdcca8aeb31f8c63aee4f47a20238f3749d9f4cb77f03f5" 17 | ], 18 | "ref_block_num": 25501, 19 | "extensions": [] 20 | } 21 | -------------------------------------------------------------------------------- /tests/block_data/transfer_from_savings.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 57927444, 3 | "expiration": "2016-10-11T17:23:06", 4 | "operations": [ 5 | [ 6 | "transfer_from_savings", 7 | { 8 | "amount": "0.051 HIVE", 9 | "to": "knozaki2015", 10 | "request_id": 1476206568, 
11 | "memo": "", 12 | "from": "knozaki2015" 13 | } 14 | ] 15 | ], 16 | "signatures": [ 17 | "205230e01b4def2d4e5a7d0446dd8c41874689155e5c739fc9a6a7d339303a5f135aa89cad21b568ef9f68d15bfaaf85e9fcc78bd544d9831c977a9b1ac578f726" 18 | ], 19 | "ref_block_num": 42559, 20 | "extensions": [] 21 | } 22 | -------------------------------------------------------------------------------- /tests/block_data/transfer_to_savings.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 2803959602, 3 | "expiration": "2016-10-10T16:41:45", 4 | "operations": [ 5 | [ 6 | "transfer_to_savings", 7 | { 8 | "amount": "1000.000 HBD", 9 | "to": "jamesc", 10 | "memo": "", 11 | "from": "jamesc" 12 | } 13 | ] 14 | ], 15 | "signatures": [ 16 | "1f248e64af20e24ad88078b101dba8d565aa1a6bde7ce105bed11a261f5aea9d4b6aca52f3aae23f8b98526ebeede8974407a972a85606036c304020cb2af28afb" 17 | ], 18 | "ref_block_num": 12870, 19 | "extensions": [] 20 | } 21 | -------------------------------------------------------------------------------- /tests/block_data/transfer_to_vesting.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 4131173691, 3 | "expiration": "2016-03-27T06:55:27", 4 | "operations": [ 5 | [ 6 | "transfer_to_vesting", 7 | { 8 | "amount": "20.000 HIVE", 9 | "to": "itsascam", 10 | "from": "james" 11 | } 12 | ] 13 | ], 14 | "signatures": [ 15 | "1f2853e69b7cf718f53e97c637a348115e17ae3995c773c28445c46b12ccf3716664aca8e82963f343a061ce0e097c29fa3e07ee9dc61d372bb14882b3106547a0" 16 | ], 17 | "ref_block_num": 9132, 18 | "extensions": [] 19 | } 20 | -------------------------------------------------------------------------------- /tests/block_data/vote.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 286809142, 3 | "expiration": "2016-12-16T11:31:55", 4 | "operations": [ 5 | [ 6 | "vote", 7 | { 8 | "voter": "a00", 9 | "weight": 10000, 10 | "author": "kibbjez", 11 | "permlink": "t6wv1" 12 | } 13 | ] 14 | ], 15 | "signatures": [ 16 | "20795b036ba95df0b211bc6e79c3a1d0c2363e694aee62e79eeb60f5ed859d21b86dc2205f28e8779d369a8e9a1c898df0e62efbbaf3fc3ae0ac8c8679ed6b2d68" 17 | ], 18 | "ref_block_num": 32469, 19 | "extensions": [] 20 | } 21 | -------------------------------------------------------------------------------- /tests/block_data/withdraw_vesting.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 4265937178, 3 | "expiration": "2016-03-31T18:52:33", 4 | "operations": [ 5 | [ 6 | "withdraw_vesting", 7 | { 8 | "account": "steemit", 9 | "vesting_shares": "260000.000000 VESTS" 10 | } 11 | ] 12 | ], 13 | "signatures": [ 14 | "2056b5be4b9d12f91e3cec198e74dd048bcfded95b92291709815c0afc069e5aa44c1a62e3aca0001a50d57010a870975c576f83de42e435f8634dcde52a8764c5" 15 | ], 16 | "ref_block_num": 7003, 17 | "extensions": [] 18 | } 19 | -------------------------------------------------------------------------------- /tests/block_data/witness_set_properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 82172829, 3 | "expiration": "2018-09-24T19:22:18", 4 | "operations": [ 5 | [ 6 | "witness_set_properties", { 7 | "owner": "init-1", 8 | "props": [ 9 | ["account_creation_fee", "d0070000000000000354455354530000"], 10 | ["key", "032d2a4af3e23294e0a1d9dbc46e0272d8e1977ce2ae3349527cc90fe1cc9c5db9"] 11 | ] 12 | } 13 | ] 14 | ], 15 | 
"signatures": [ 16 | "a1489ddbe5046a39a95b012a06696e69742d3102146163636f756e745f6372656174696f6e5f66656510d0070000000000000354455354530000036b657921032d2a4af3e23294e0a1d9dbc46e0272d8e1977ce2ae3349527cc90fe1cc9c5db9000000" 17 | ], 18 | "ref_block_num": 18593, 19 | "extensions": [] 20 | } 21 | -------------------------------------------------------------------------------- /tests/block_data/witness_update.json: -------------------------------------------------------------------------------- 1 | { 2 | "ref_block_prefix": 1306647607, 3 | "expiration": "2016-04-26T02:53:27", 4 | "operations": [ 5 | [ 6 | "witness_update", 7 | { 8 | "owner": "arhag", 9 | "block_signing_key": "STM5VNk9doxq55YEuyFw6qpNQt7q8neBWHhrau52fjV8N3TjNNUMP", 10 | "props": { 11 | "account_creation_fee": "100.000 HIVE", 12 | "hbd_interest_rate": 1000, 13 | "maximum_block_size": 131072 14 | }, 15 | "url": " ", 16 | "fee": "0.000 HIVE" 17 | } 18 | ] 19 | ], 20 | "signatures": [ 21 | "1f2183af215f6878a080b659c4a302ce2c67f0df4c9914872d90cf129e6d1793b11401715e130af0da60f5a5a95c48b8de30140dd9884cbc812a017aab5c2b8b5c" 22 | ], 23 | "ref_block_num": 64732, 24 | "extensions": [] 25 | } 26 | -------------------------------------------------------------------------------- /tests/steem/test_account.py: -------------------------------------------------------------------------------- 1 | from hive.account import Account 2 | 3 | 4 | def test_history(): 5 | # TODO 1: test is disabled because api.steemit.com account history 6 | # pruning is temporarily in place, breaking assumptions. 7 | # TODO 2: in addition, the current pruning implementation fails 8 | # to remove the very first operation, revealing a bug in 9 | # history_reverse() which causes it to be included once 10 | # on every page, causing an item count mismatch. 
11 | return 12 | 13 | a = Account('barbara2') 14 | h1 = [x['index'] for x in list(a.history())] 15 | h2 = [x['index'] for x in list(a.history_reverse())] 16 | 17 | # pprint(list(zip(h1, h2[::-1]))) 18 | 19 | # various tests of equality should pass 20 | assert len(h1) == len(h2) 21 | assert set(h1) == set(h2) == set(range(a.virtual_op_count() + 1)) 22 | assert h1 == h2[::-1] == list(range(a.virtual_op_count() + 1)) 23 | -------------------------------------------------------------------------------- /tests/steem/test_amount.py: -------------------------------------------------------------------------------- 1 | from hive.amount import Amount 2 | 3 | 4 | def test_amount_init(): 5 | a = Amount('1 HIVE') 6 | assert dict(a) == {'amount': 1.0, 'asset': 'HIVE'} 7 | -------------------------------------------------------------------------------- /tests/steem/test_broadcast.py: -------------------------------------------------------------------------------- 1 | from hive.hived import Hived 2 | from hive.commit import Commit 3 | from hivebase.exceptions import RPCError 4 | 5 | 6 | def test_transfer(): 7 | wif = '5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3' 8 | c = Commit(hived_instance=Hived(nodes=[]), 9 | keys=[wif]) 10 | 11 | rpc_error = None 12 | try: 13 | c.transfer('test2', '1.000', 'HIVE', 'foo', 'test') 14 | except RPCError as e: 15 | rpc_error = str(e) 16 | else: 17 | raise Exception('expected RPCError') 18 | 19 | assert 'tx_missing_active_auth' in rpc_error 20 | 21 | 22 | def test_claim_reward(): 23 | wif = '5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3' 24 | c = Commit(hived_instance=Hived(nodes=[]), 25 | keys=[wif]) 26 | 27 | rpc_error = None 28 | try: 29 | c.claim_reward_balance( 30 | account='test', 31 | reward_hive='1.000 HIVE', 32 | reward_vests='0.000000 VESTS', 33 | reward_hbd='0.000 HBD') 34 | except RPCError as e: 35 | rpc_error = str(e) 36 | else: 37 | raise Exception('expected RPCError') 38 | 39 | assert 'tx_missing_posting_auth' in rpc_error 40 | 41 | 42 | def test_witness_update(): 43 | wif = '5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3' 44 | c = Commit(hived_instance=Hived(nodes=[]), 45 | keys=[wif]) 46 | 47 | signing_key = 'STM1111111111111111111111111111111114T1Anm' 48 | props = { 49 | 'account_creation_fee': '0.500 HIVE', 50 | 'maximum_block_size': 65536, 51 | 'hbd_interest_rate': 0} 52 | 53 | rpc_error = None 54 | try: 55 | c.witness_update( 56 | signing_key=signing_key, 57 | account='test', 58 | props=props, 59 | url='foo') 60 | except RPCError as e: 61 | rpc_error = str(e) 62 | else: 63 | raise Exception('expected RPCError') 64 | 65 | assert 'tx_missing_active_auth' in rpc_error 66 | 67 | 68 | def test_witness_set_properties(): 69 | wif = '5KQwrPbwdL6PhXujxW37FSSQZ1JiwsST4cqQzDeyXtP79zkvFD3' 70 | c = Commit(hived_instance=Hived(nodes=[]), 71 | keys=[wif]) 72 | 73 | signing_key = 'STM1111111111111111111111111111111114T1Anm' 74 | props = [ 75 | ['account_creation_fee', 'd0070000000000000354455354530000'], 76 | ['key', ('032d2a4af3e23294e0a1d9dbc46e0272d' 77 | '8e1977ce2ae3349527cc90fe1cc9c5db9')] 78 | ] 79 | 80 | rpc_error = None 81 | try: 82 | c.witness_set_properties( 83 | signing_key=signing_key, 84 | props=props, 85 | account='test') 86 | except RPCError as e: 87 | rpc_error = str(e) 88 | else: 89 | raise Exception('expected RPCError') 90 | 91 | assert 'tx_missing_other_auth' in rpc_error 92 | -------------------------------------------------------------------------------- /tests/steem/test_memo.py: 
-------------------------------------------------------------------------------- 1 | import random 2 | import unittest 3 | 4 | from hivebase import memo as Memo 5 | from hivebase.account import PrivateKey 6 | 7 | 8 | class Testcases(unittest.TestCase): 9 | def __init__(self, *args, **kwargs): 10 | super(Testcases, self).__init__(*args, **kwargs) 11 | self.maxDiff = None 12 | 13 | def test_memo(self): 14 | from_priv = "5KNK3bejeP3PtQ1Q9EagBmGacYFCZ3qigRAZDbfqcdjDWWmZSMm" 15 | to_priv = "5K2JRPe1iRwD2He5DyDRtHs3Z1wpom3YXguFxEd57kNTHhQuZ2k" 16 | 17 | for msg in [ 18 | "foobar", 19 | "just a donation", 20 | "1124safafASFasc", 21 | ]: 22 | nonce = random.getrandbits(64) 23 | memo = Memo.encode_memo( 24 | PrivateKey(from_priv), 25 | PrivateKey(to_priv).pubkey, nonce, msg) 26 | plain = Memo.decode_memo(PrivateKey(to_priv), memo) 27 | self.assertEqual(msg, plain) 28 | -------------------------------------------------------------------------------- /tests/steem/test_post.py: -------------------------------------------------------------------------------- 1 | from hive.post import Post 2 | 3 | 4 | def test_post_refresh(): 5 | """ Post should load correctly if passed a dict or string identifier. """ 6 | p1 = Post('https://hive.blog/marketing/@steemitblog/' 7 | 'marketing-w-mitchell-a-steem-ecosystem') 8 | p2 = Post({ 9 | 'author': 'steemitblog', 10 | 'permlink': 'marketing-w-mitchell-a-steem-ecosystem' 11 | }) 12 | 13 | # did post load? 14 | assert 'json_metadata' in p1 and 'json_metadata' in p2 15 | 16 | # are posts the same 17 | assert p1.export() == p2.export() 18 | -------------------------------------------------------------------------------- /tests/steem/test_steemd.py: -------------------------------------------------------------------------------- 1 | from funcy.colls import pluck 2 | from hive.hived import Hived 3 | 4 | 5 | def test_get_version(): 6 | """ We should be able to call get_version on hived """ 7 | h = Hived() 8 | response = h.call('get_version', api='login_api') 9 | version = response['blockchain_version'] 10 | assert version[0:4] == '0.20' 11 | 12 | 13 | def test_get_dgp(): 14 | """ We should be able to call get_dynamic_global_properties on hived """ 15 | h = Hived() 16 | response = h.call('get_dynamic_global_properties', api='database_api') 17 | assert response['head_block_number'] > 20e6 18 | 19 | 20 | def test_get_block(): 21 | """ We should be able to fetch some blocks. """ 22 | h = Hived() 23 | 24 | for num in [1000, 1000000, 10000000, 20000000, 21000000]: 25 | b = h.get_block(num) 26 | assert b, 'block %d was blank' % num 27 | assert num == int(b['block_id'][:8], base=16) 28 | 29 | start = 21000000 30 | for num in range(start, start + 50): 31 | b = h.get_block(num) 32 | assert b, 'block %d was blank' % num 33 | assert num == int(b['block_id'][:8], base=16) 34 | 35 | non_existent_block = 99999999 36 | b = h.get_block(non_existent_block) 37 | assert not b, 'block %d expected to be blank' % non_existent_block 38 | 39 | 40 | def test_ensured_block_ranges(): 41 | """ get_blocks_range should return the requested span of blocks, in order.
""" 42 | h = Hived() 43 | assert list(pluck('block_num', h.get_blocks_range(1000, 2000))) == list( 44 | range(1000, 2000)) 45 | 46 | # for fuzzing in h.get_block_range_ensured() use: 47 | # degraded_results = [x for x in results if x['block_num'] % 48 | # random.choice(range(1, 10)) != 0] 49 | -------------------------------------------------------------------------------- /tests/steem/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from hive.utils import ( 4 | construct_identifier, 5 | sanitize_permlink, 6 | derive_permlink, 7 | resolve_identifier, 8 | fmt_time, 9 | ) 10 | 11 | 12 | class Testcases(unittest.TestCase): 13 | def test_constructIdentifier(self): 14 | self.assertEqual(construct_identifier("A", "B"), "A/B") 15 | 16 | def test_sanitizePermlink(self): 17 | self.assertEqual(sanitize_permlink("aAf_0.12"), "aaf-0-12") 18 | self.assertEqual(sanitize_permlink("[](){}"), "") 19 | 20 | def test_derivePermlink(self): 21 | self.assertEqual(derive_permlink("Hello World"), "hello-world") 22 | self.assertEqual(derive_permlink("aAf_0.12"), "aaf-0-12") 23 | self.assertEqual(derive_permlink("[](){}"), "") 24 | 25 | def test_resolveIdentifier(self): 26 | self.assertEqual(resolve_identifier("A/B"), ("A", "B")) 27 | 28 | def test_formatTime(self): 29 | self.assertEqual(fmt_time(1463480746), "20160517t102546") 30 | 31 | 32 | if __name__ == '__main__': 33 | unittest.main() 34 | -------------------------------------------------------------------------------- /tests/steembase/test_base58.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import re 3 | from hivebase.base58 import (Base58, base58decode, base58encode, 4 | base58CheckEncode, base58CheckDecode, 5 | gphBase58CheckEncode, gphBase58CheckDecode) 6 | 7 | 8 | class Testcases(unittest.TestCase): 9 | def test_base58decode(self): 10 | self.assertEqual([ 11 | base58decode( 12 | '5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ'), 13 | base58decode( 14 | '5KYZdUEo39z3FPrtuX2QbbwGnNP5zTd7yyr2SC1j299sBCnWjss'), 15 | base58decode('5KfazyjBBtR2YeHjNqX5D6MXvqTUd2iZmWusrdDSUqoykTyWQZB') 16 | ], [ 17 | '800c28fca386c7a227600b2fe50b7cae11ec86d3bf1fbe471be89827e' 18 | '19d72aa1d507a5b8d', 19 | '80e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991' 20 | 'b7852b8555c5bbb26', 21 | '80f3a375e00cc5147f30bee97bb5d54b31a12eee148a1ac31ac9edc4e' 22 | 'cd13bc1f80cc8148e' 23 | ]) 24 | 25 | def test_base58encode(self): 26 | self.assertEqual([ 27 | '5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ', 28 | '5KYZdUEo39z3FPrtuX2QbbwGnNP5zTd7yyr2SC1j299sBCnWjss', 29 | '5KfazyjBBtR2YeHjNqX5D6MXvqTUd2iZmWusrdDSUqoykTyWQZB' 30 | ], [ 31 | base58encode('800c28fca386c7a227600b2fe50b7cae11ec86d3bf1fbe47' 32 | '1be89827e19d72aa1d507a5b8d'), 33 | base58encode('80e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b93' 34 | '4ca495991b7852b8555c5bbb26'), 35 | base58encode('80f3a375e00cc5147f30bee97bb5d54b31a12eee148a1ac3' 36 | '1ac9edc4ecd13bc1f80cc8148e') 37 | ]) 38 | 39 | def test_gphBase58CheckEncode(self): 40 | self.assertEqual([ 41 | gphBase58CheckEncode( 42 | "02e649f63f8e8121345fd7f47d0d185a3ccaa843115cd2e9" 43 | "392dcd9b82263bc680"), 44 | gphBase58CheckEncode( 45 | "021c7359cd885c0e319924d97e3980206ad64387aff54908" 46 | "241125b3a88b55ca16"), 47 | gphBase58CheckEncode( 48 | "02f561e0b57a552df3fa1df2d87a906b7a9fc33a83d5d15f" 49 | "a68a644ecb0806b49a"), 50 | gphBase58CheckEncode( 51 | "03e7595c3e6b58f907bee951dc29796f3757307e700ecf3d" 52 | 
"09307a0cc4a564eba3") 53 | ], [ 54 | "6dumtt9swxCqwdPZBGXh9YmHoEjFFnNfwHaTqRbQTghGAY2gRz", 55 | "5725vivYpuFWbeyTifZ5KevnHyqXCi5hwHbNU9cYz1FHbFXCxX", 56 | "6kZKHSuxqAwdCYsMvwTcipoTsNE2jmEUNBQufGYywpniBKXWZK", 57 | "8b82mpnH8YX1E9RHnU2a2YgLTZ8ooevEGP9N15c1yFqhoBvJur" 58 | ]) 59 | 60 | def test_gphBase58CheckDecode(self): 61 | self.assertEqual([ 62 | "02e649f63f8e8121345fd7f47d0d185a3ccaa84311" 63 | "5cd2e9392dcd9b82263bc680", 64 | "021c7359cd885c0e319924d97e3980206ad64387af" 65 | "f54908241125b3a88b55ca16", 66 | "02f561e0b57a552df3fa1df2d87a906b7a9fc33a83" 67 | "d5d15fa68a644ecb0806b49a", 68 | "03e7595c3e6b58f907bee951dc29796f3757307e70" 69 | "0ecf3d09307a0cc4a564eba3", 70 | ], [ 71 | gphBase58CheckDecode( 72 | "6dumtt9swxCqwdPZBGXh9YmHoEjFFnNfwHaTqRbQTghGAY2gRz"), 73 | gphBase58CheckDecode( 74 | "5725vivYpuFWbeyTifZ5KevnHyqXCi5hwHbNU9cYz1FHbFXCxX"), 75 | gphBase58CheckDecode( 76 | "6kZKHSuxqAwdCYsMvwTcipoTsNE2jmEUNBQufGYywpniBKXWZK"), 77 | gphBase58CheckDecode( 78 | "8b82mpnH8YX1E9RHnU2a2YgLTZ8ooevEGP9N15c1yFqhoBvJur") 79 | ]) 80 | 81 | def test_btsb58(self): 82 | ml = """ 83 | 02e649f63f8e8121345fd7f47d0d185a3ccaa843115cd2e9392dcd9b82263bc680 84 | 03457298c4b2c56a8d572c051ca3109dabfe360beb144738180d6c964068ea3e58 85 | 021c7359cd885c0e319924d97e3980206ad64387aff54908241125b3a88b55ca16 86 | 02f561e0b57a552df3fa1df2d87a906b7a9fc33a83d5d15fa68a644ecb0806b49a 87 | 03e7595c3e6b58f907bee951dc29796f3757307e700ecf3d09307a0cc4a564eba3""" 88 | 89 | for x in re.split('\s+', ml): 90 | self.assertEqual(x, gphBase58CheckDecode(gphBase58CheckEncode(x))) 91 | 92 | def test_Base58CheckDecode(self): 93 | self.assertEqual([ 94 | "02e649f63f8e8121345fd7f47d0d185a3ccaa84" 95 | "3115cd2e9392dcd9b82263bc680", 96 | "021c7359cd885c0e319924d97e3980206ad6438" 97 | "7aff54908241125b3a88b55ca16", 98 | "02f561e0b57a552df3fa1df2d87a906b7a9fc33" 99 | "a83d5d15fa68a644ecb0806b49a", 100 | "03e7595c3e6b58f907bee951dc29796f3757307" 101 | "e700ecf3d09307a0cc4a564eba3", 102 | "02b52e04a0acfe611a4b6963462aca94b6ae02b24e321eda86507661901adb49", 103 | "5b921f7051be5e13e177a0253229903c40493df410ae04f4a450c85568f19131", 104 | "0e1bfc9024d1f55a7855dc690e45b2e089d2d825a4671a3c3c7e4ea4e74ec00e", 105 | "6e5cc4653d46e690c709ed9e0570a2c75a286ad7c1bc69a648aae6855d919d3e", 106 | "b84abd64d66ee1dd614230ebbe9d9c6d66d78d93927c395196666762e9ad69d8" 107 | ], [ 108 | base58CheckDecode( 109 | "KwKM6S22ZZDYw5dxBFhaRyFtcuWjaoxqDDfyCcBYSevnjdfm9Cjo"), 110 | base58CheckDecode( 111 | "KwHpCk3sLE6VykHymAEyTMRznQ1Uh5ukvFfyDWpGToT7Hf5jzrie"), 112 | base58CheckDecode( 113 | "KwKTjyQbKe6mfrtsf4TFMtqAf5as5bSp526s341PQEQvq5ZzEo5W"), 114 | base58CheckDecode( 115 | "KwMJJgtyBxQ9FEvUCzJmvr8tXxB3zNWhkn14mWMCTGSMt5GwGLgz"), 116 | base58CheckDecode( 117 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd"), 118 | base58CheckDecode( 119 | "5JWcdkhL3w4RkVPcZMdJsjos22yB5cSkPExerktvKnRNZR5gx1S"), 120 | base58CheckDecode( 121 | "5HvVz6XMx84aC5KaaBbwYrRLvWE46cH6zVnv4827SBPLorg76oq"), 122 | base58CheckDecode( 123 | "5Jete5oFNjjk3aUMkKuxgAXsp7ZyhgJbYNiNjHLvq5xzXkiqw7R"), 124 | base58CheckDecode( 125 | "5KDT58ksNsVKjYShG4Ls5ZtredybSxzmKec8juj7CojZj6LPRF7") 126 | ]) 127 | 128 | def test_base58CheckEncodeDecode(self): 129 | ml = """ 130 | 02e649f63f8e8121345fd7f47d0d185a3ccaa843115cd2e9392dcd9b82263bc680 131 | 03457298c4b2c56a8d572c051ca3109dabfe360beb144738180d6c964068ea3e58 132 | 021c7359cd885c0e319924d97e3980206ad64387aff54908241125b3a88b55ca16 133 | 02f561e0b57a552df3fa1df2d87a906b7a9fc33a83d5d15fa68a644ecb0806b49a 134 | 
03e7595c3e6b58f907bee951dc29796f3757307e700ecf3d09307a0cc4a564eba3 135 | """ 136 | 137 | for x in re.split(r'\s+', ml): 138 | self.assertEqual(x, base58CheckDecode(base58CheckEncode(0x80, x))) 139 | 140 | def test_Base58(self): 141 | self.assertEqual([ 142 | format( 143 | Base58( 144 | "02b52e04a0acfe611a4b6963462aca94b6ae02b24e321eda865076619" 145 | "01adb49"), "wif"), 146 | format( 147 | Base58( 148 | "5b921f7051be5e13e177a0253229903c40493df410ae04f4a450c8556" 149 | "8f19131"), "wif"), 150 | format( 151 | Base58( 152 | "0e1bfc9024d1f55a7855dc690e45b2e089d2d825a4671a3c3c7e4ea4e" 153 | "74ec00e"), "wif"), 154 | format( 155 | Base58( 156 | "6e5cc4653d46e690c709ed9e0570a2c75a286ad7c1bc69a648aae6855" 157 | "d919d3e"), "wif"), 158 | format( 159 | Base58( 160 | "b84abd64d66ee1dd614230ebbe9d9c6d66d78d93927c395196666762e" 161 | "9ad69d8"), "wif") 162 | ], [ 163 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd", 164 | "5JWcdkhL3w4RkVPcZMdJsjos22yB5cSkPExerktvKnRNZR5gx1S", 165 | "5HvVz6XMx84aC5KaaBbwYrRLvWE46cH6zVnv4827SBPLorg76oq", 166 | "5Jete5oFNjjk3aUMkKuxgAXsp7ZyhgJbYNiNjHLvq5xzXkiqw7R", 167 | "5KDT58ksNsVKjYShG4Ls5ZtredybSxzmKec8juj7CojZj6LPRF7" 168 | ]) 169 | 170 | 171 | if __name__ == '__main__': 172 | unittest.main() 173 | -------------------------------------------------------------------------------- /tests/steembase/test_bip38.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | import sys 4 | from hivebase.account import PrivateKey 5 | import hivebase.bip38 6 | 7 | 8 | class Testcases(unittest.TestCase): 9 | def test_encrypt(self): 10 | self.assertEqual([ 11 | format( 12 | hivebase.bip38.encrypt( 13 | PrivateKey( 14 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd"), 15 | "TestingOneTwoThree"), "encwif"), 16 | format( 17 | hivebase.bip38.encrypt( 18 | PrivateKey( 19 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR"), 20 | "TestingOneTwoThree"), "encwif"), 21 | format( 22 | hivebase.bip38.encrypt( 23 | PrivateKey( 24 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5"), 25 | "Satoshi"), "encwif") 26 | ], [ 27 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxThxsUW8epQi", 28 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEbn2Nh2ZoGg", 29 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTePPX1dWByq" 30 | ]) 31 | 32 | def test_decrypt(self): 33 | self.assertEqual([ 34 | format( 35 | hivebase.bip38.decrypt( 36 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxTh" 37 | "xsUW8epQi", "TestingOneTwoThree"), "wif"), 38 | format( 39 | hivebase.bip38.decrypt( 40 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEb" 41 | "n2Nh2ZoGg", "TestingOneTwoThree"), "wif"), 42 | format( 43 | hivebase.bip38.decrypt( 44 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTe" 45 | "PPX1dWByq", "Satoshi"), "wif") 46 | ], [ 47 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd", 48 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR", 49 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5" 50 | ]) 51 | 52 | 53 | if __name__ == '__main__': 54 | unittest.main() 55 | -------------------------------------------------------------------------------- /tests/steembase/test_bip38_pylibscrypt.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | from hivebase.account import PrivateKey 4 | 5 | 6 | class Testcases(unittest.TestCase): 7 | 8 | def test_encrypt_pylibscrypt(self): 9 | os.environ['SCRYPT_MODULE'] = 'pylibscrypt' 10 | import hivebase.bip38 11 |
self.assertEqual([ 12 | format( 13 | hivebase.bip38.encrypt( 14 | PrivateKey( 15 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd"), 16 | "TestingOneTwoThree"), "encwif"), 17 | format( 18 | hivebase.bip38.encrypt( 19 | PrivateKey( 20 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR"), 21 | "TestingOneTwoThree"), "encwif"), 22 | format( 23 | hivebase.bip38.encrypt( 24 | PrivateKey( 25 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5"), 26 | "Satoshi"), "encwif") 27 | ], [ 28 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxThxsUW8epQi", 29 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEbn2Nh2ZoGg", 30 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTePPX1dWByq" 31 | ]) 32 | 33 | def test_decrypt_pylibscrypt(self): 34 | os.environ['SCRYPT_MODULE'] = 'pylibscrypt' 35 | import hivebase.bip38 36 | self.assertEqual([ 37 | format( 38 | hivebase.bip38.decrypt( 39 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxTh" 40 | "xsUW8epQi", "TestingOneTwoThree"), "wif"), 41 | format( 42 | hivebase.bip38.decrypt( 43 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEb" 44 | "n2Nh2ZoGg", "TestingOneTwoThree"), "wif"), 45 | format( 46 | hivebase.bip38.decrypt( 47 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTe" 48 | "PPX1dWByq", "Satoshi"), "wif") 49 | ], [ 50 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd", 51 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR", 52 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5" 53 | ]) 54 | 55 | 56 | if __name__ == '__main__': 57 | unittest.main() 58 | -------------------------------------------------------------------------------- /tests/steembase/test_bip38_scrypt.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import os 3 | from hivebase.account import PrivateKey 4 | 5 | 6 | class Testcases(unittest.TestCase): 7 | 8 | def test_encrypt_scrypt(self): 9 | os.environ['SCRYPT_MODULE'] = 'scrypt' 10 | import hivebase.bip38 11 | self.assertEqual([ 12 | format( 13 | hivebase.bip38.encrypt( 14 | PrivateKey( 15 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd"), 16 | "TestingOneTwoThree"), "encwif"), 17 | format( 18 | hivebase.bip38.encrypt( 19 | PrivateKey( 20 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR"), 21 | "TestingOneTwoThree"), "encwif"), 22 | format( 23 | hivebase.bip38.encrypt( 24 | PrivateKey( 25 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5"), 26 | "Satoshi"), "encwif") 27 | ], [ 28 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxThxsUW8epQi", 29 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEbn2Nh2ZoGg", 30 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTePPX1dWByq" 31 | ]) 32 | 33 | def test_decrypt_scrypt(self): 34 | os.environ['SCRYPT_MODULE'] = 'scrypt' 35 | import hivebase.bip38 36 | self.assertEqual([ 37 | format( 38 | hivebase.bip38.decrypt( 39 | "6PRN5mjUTtud6fUXbJXezfn6oABoSr6GSLjMbrGXRZxSUcxTh" 40 | "xsUW8epQi", "TestingOneTwoThree"), "wif"), 41 | format( 42 | hivebase.bip38.decrypt( 43 | "6PRVWUbkzzsbcVac2qwfssoUJAN1Xhrg6bNk8J7Nzm5H7kxEb" 44 | "n2Nh2ZoGg", "TestingOneTwoThree"), "wif"), 45 | format( 46 | hivebase.bip38.decrypt( 47 | "6PRNFFkZc2NZ6dJqFfhRoFNMR9Lnyj7dYGrzdgXXVMXcxoKTe" 48 | "PPX1dWByq", "Satoshi"), "wif") 49 | ], [ 50 | "5HqUkGuo62BfcJU5vNhTXKJRXuUi9QSE6jp8C3uBJ2BVHtB8WSd", 51 | "5KN7MzqK5wt2TP1fQCYyHBtDrXdJuXbUzm4A9rKAteGu3Qi5CVR", 52 | "5HtasZ6ofTHP6HCwTqTkLDuLQisYPah7aUnSKfC7h4hMUVw2gi5" 53 | ]) 54 | 55 | 56 | if __name__ == '__main__': 57 | unittest.main() 58 | 
-------------------------------------------------------------------------------- /tests/test_import.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath('..')) 5 | 6 | from hive import * # noqa 7 | from hivebase import * # noqa 8 | 9 | 10 | # pylint: disable=unused-import,unused-variable 11 | def test_import(): 12 | _ = Hive() 13 | _ = account.PasswordKey 14 | --------------------------------------------------------------------------------
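
A minimal usage sketch (not part of the repository) may help map the tests above to everyday calls. It relies only on APIs exercised in these tests (Hived.get_block, Account.history, Amount) and assumes the package is installed with a reachable default Hive API node; the account name and block number are taken from the tests and are illustrative only.

from hive.account import Account
from hive.amount import Amount
from hive.hived import Hived

# Fetch a block and check that its number is encoded in the block_id prefix,
# mirroring the assertion in test_get_block().
h = Hived()
block = h.get_block(1000)
assert int(block['block_id'][:8], base=16) == 1000

# Parse an asset string, as in test_amount_init().
assert dict(Amount('1 HIVE')) == {'amount': 1.0, 'asset': 'HIVE'}

# Walk the first item of an account's operation history,
# as test_history() would once the API-side pruning issue is resolved.
for op in Account('barbara2').history():
    print(op['index'])
    break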