├── .gitignore ├── .travis.yml ├── LICENSE.md ├── README.md ├── docs └── index.md ├── examples ├── dyor_smart_contracts.ipynb └── dyor_voting.ipynb ├── mkdocs.yml ├── poetry.lock ├── pyproject.toml ├── pytezos ├── __init__.py ├── crypto.py ├── encoding.py ├── micheline │ ├── __init__.py │ ├── grammar.py │ └── schema.py ├── rpc │ ├── __init__.py │ ├── block.py │ ├── chain.py │ ├── context.py │ ├── contract.py │ ├── helpers.py │ ├── node.py │ ├── operation.py │ ├── protocol.py │ ├── search.py │ ├── shell.py │ └── votes.py └── tools │ ├── __init__.py │ ├── diff.py │ ├── keychain.py │ ├── otp.py │ └── templates │ ├── jsondiff.html │ └── unidiff.html └── tests ├── data ├── parameter │ ├── code_0.json │ └── parameters_0.json ├── script │ ├── sample_0.json │ ├── sample_0.tz │ ├── sample_1.json │ ├── sample_1.tz │ ├── sample_2.json │ └── sample_2.tz └── storage │ ├── sample_0.json │ └── sample_1.json ├── test_crypto.py ├── test_encoding.py └── test_micheline.py /.gitignore: -------------------------------------------------------------------------------- 1 | .python-version 2 | .idea/ 3 | __pycache__/ 4 | .pytest_cache/ 5 | dist/ -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | 3 | python: 4 | - "3.6" 5 | 6 | cache: 7 | pip: true 8 | directories: 9 | - $HOME/.cache/pypoetry 10 | 11 | before_install: 12 | - sudo add-apt-repository ppa:chris-lea/libsodium -y 13 | - sudo apt-get update -q 14 | - sudo apt-get install libsodium-dev -y 15 | - curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | python 16 | - source $HOME/.poetry/env 17 | 18 | install: 19 | - poetry install -v 20 | 21 | script: 22 | - poetry run pytest -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | 
Copyright 2018 Arthur Breitman 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # PyTezos 2 | 3 | [![Build Status](https://travis-ci.org/baking-bad/pytezos.svg?branch=master)](https://travis-ci.org/baking-bad/pytezos) 4 | [![Made With](https://img.shields.io/badge/made%20with-python-blue.svg?)](https://www.python.org) 5 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 6 | 7 | Python utils for Tezos. 
8 | 9 | ## Requirements 10 | 11 | * git 12 | * python 3.6+ (can't resist string interpolation) 13 | * pip 19.0.1+ (in order to support poetry packages) 14 | 15 | You will also probably need to install several cryptographic packages: 16 | 17 | ``` 18 | $ sudo apt install libsodium-dev libsecp256k1-dev libgmp-dev 19 | ``` 20 | 21 | ## Installation 22 | 23 | ``` 24 | $ pip install git+https://github.com/murbard/pytezos 25 | ``` 26 | 27 | ## Usage 28 | 29 | ### Crypto: keys and signatures 30 | 31 | All three elliptic curves are now supported: ed25519, secp256k1, p256 (secp256r1). 32 | 33 | ```python 34 | from pytezos.crypto import Key 35 | 36 | private_key = 'edsk3nM41ygNfSxVU4w1uAW3G9EnTQEB5rjojeZedLTGmiGRcierVv' 37 | 38 | Key(private_key).public_key() 39 | >>> 'edpku976gpuAD2bXyx1XGraeKuCo1gUZ3LAJcHM12W1ecxZwoiu22R' 40 | 41 | Key(private_key).public_key_hash() 42 | >>> 'tz1eKkWU5hGtfLUiqNpucHrXymm83z3DG9Sq' 43 | 44 | Key(private_key).sign('test') 45 | >>> 'edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN' 46 | 47 | public_key = 'edpku976gpuAD2bXyx1XGraeKuCo1gUZ3LAJcHM12W1ecxZwoiu22R' 48 | signature = 'edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN' 49 | 50 | Key(public_key).verify(signature, 'fake') 51 | >>> Exception('Signature is not valid.') 52 | ``` 53 | 54 | ### RPC: query builder and a little bit more 55 | 56 | Tezos node API was designed with REST in mind and this package reflects it as well. Basically it's a query building machine, like sqlalchemy for SQL. In addition to this functionality it also offers smart caching, shortcuts, and autocomplete, which is very helpful for doing research in jupyter notebook for instance. 
57 | 58 | ```python 59 | from pytezos.rpc import mainnet 60 | 61 | mainnet.head.hash 62 | >>> chains/main/blocks/head/hash 63 | 64 | mainnet.head.hash() 65 | >>> 'BKiWhfLw4Qc49pzimVZkvUW5UKbhcbEDNc8UXsbuLsztu92RG8U' 66 | 67 | mainnet.blocks(length=2, head='BLyvi5G4i6zaqLPL2r1k2SLKwQp8tsYXEf4mAVrwRjF9w4qVVSv') 68 | >>> [['BLyvi5G4i6zaqLPL2r1k2SLKwQp8tsYXEf4mAVrwRjF9w4qVVSv', 69 | 'BKiWhfLw4Qc49pzimVZkvUW5UKbhcbEDNc8UXsbuLsztu92RG8U']] 70 | 71 | mainnet.head.operations[0, 0] 72 | >>> chains/main/blocks/head/operations/0/0 73 | 74 | mainnet.context.contracts['tz1TNWtofRofCU11YwCNwTMWNFBodYi6eNqU']() 75 | >>> {'manager': 'tz1TNWtofRofCU11YwCNwTMWNFBodYi6eNqU', 76 | 'balance': '384854285987', 77 | 'spendable': True, 78 | 'delegate': {'setable': False, 79 | 'value': 'tz1TNWtofRofCU11YwCNwTMWNFBodYi6eNqU'}, 80 | 'counter': '2317'} 81 | ``` 82 | 83 | There are also some syntax sugar for convenient collection iteration: 84 | 85 | ```python 86 | mainnet.blocks[329830:329836] 87 | >>> [['BLiZM5t1cQSDde9Acv4JGYNetA3rS6tAEinvkwYhaFvMvsQE9XB', 88 | 'BLZ1R83AHKknKzmNPqF22aMYL6jQcPU1EBQ8W1PagV3ag6pysWQ', 89 | 'BLXCMEHUpHtwB1RoFGnzVnoVf7FEmeEwz8Ad5dxtrRcq5tD3DeB', 90 | 'BLVrBJyGdm9j4su7exVtkTNi4aJ1D9ephUfPfZFzZwGoRDc8T5z', 91 | 'BLuNPrD4QxjBWc26eDnuoCg7qFSutVURoZpToHshLbbuhZBsb5X', 92 | 'BKujfxvZvqrVmz4anJz6JNQgKe5w7sudcb7awBKUYY39XhReMQp']] 93 | 94 | mainnet.blocks[-2:] 95 | >>> [['BKkAhaDbzaFEm99wWedeELxzXHrozQrG4B4p8BU56ttmDjsMdge', 96 | 'BL8u6Ny9naUnVWAtNJQ4P93LfgW8x9LDUCGoW9TEEzqHiG4mPsu']] 97 | ``` 98 | 99 | And several methods for operations manipulation: 100 | 101 | ```python 102 | ops = mainnet.mempool.pending_operations.applied(kind='endorsement') 103 | 104 | ops[0].forge() 105 | >>> '6b96a5df309727b4cd9a2aee24a56a83565db00d7dce158d5b55400d92f5022c0000050888' 106 | 107 | ops[0].preapply() 108 | >>> [{'contents': [{'kind': 'endorsement', 109 | 'level': 329864, 110 | 'metadata': {'balance_updates': [{'kind': 'contract', 111 | 'contract': 
'tz3RDC3Jdn4j15J7bBHZd29EUee9gVB1CxD9', 112 | 'change': '-128000000'}, 113 | {'kind': 'freezer', 114 | 'category': 'deposits', 115 | 'delegate': 'tz3RDC3Jdn4j15J7bBHZd29EUee9gVB1CxD9', 116 | 'level': 80, 117 | 'change': '128000000'}, 118 | {'kind': 'freezer', 119 | 'category': 'rewards', 120 | 'delegate': 'tz3RDC3Jdn4j15J7bBHZd29EUee9gVB1CxD9', 121 | 'level': 80, 122 | 'change': '4000000'}], 123 | 'delegate': 'tz3RDC3Jdn4j15J7bBHZd29EUee9gVB1CxD9', 124 | 'slots': [15, 5]}}], 125 | 'signature': 'sigsfBsrxKVcS8btuik6vgqR1TRNundZD36ph2tEgjUQdMqHjrNYziJ6godapYMCKq483XqS7rcvfPD61StZ63TE5Jchujs4'}] 126 | 127 | ops[0].verify_signature() 128 | >>> No exceptions 129 | ``` 130 | 131 | The best thing about this wrapper is that you can use not yet wrapped RPC endpoints: 132 | 133 | ```python 134 | mainnet.context.delegates(active=True) 135 | >>> ['tz2KuCcKSyMzs8wRJXzjqoHgojPkSUem8ZBS', 136 | 'tz2JMPu9yVKuX2Au8UUbp7YrKBZJSdYhgwwu', 137 | 'tz2E3BvcMiGvFEgNVdsAiwVvPHcwJDTA8wLt', ... ] 138 | ``` 139 | 140 | Or just use raw requests: 141 | 142 | ```python 143 | from pytezos.rpc.node import Node 144 | 145 | node = Node() 146 | node.get('chains/main/blocks', params={'length': 1}) 147 | >>> [['BKvRWnbPeFzFNJ9mkUEcxCzYk68fLa3mPYweqcFVo7TNLAJAz2G'], ... 
] 148 | 149 | node.post('chains/main/mempool/filter', json={'minimal_fees': 0}) 150 | >>> {} 151 | ``` 152 | 153 | -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # Welcome to PyTezos -------------------------------------------------------------------------------- /examples/dyor_smart_contracts.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Inspecting smart contracts with PyTezos" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import sys\n", 17 | "assert sys.version_info.major == 3 and sys.version_info.minor >= 6" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "import os\n", 27 | "sys.path.insert(1, os.path.abspath('..'))" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "## Searching for origination\n", 35 | "\n", 36 | "As was shown in the previous post, thanks to Tezos statefulness we can rather quickly look for transactions of a special kind. \n", 37 | "First af all let's try to find a smart contract origination." 
38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "from pytezos.rpc import alphanet, mainnet\n", 47 | "from pytezos.rpc.search import SearchChain" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": null, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "sc = SearchChain.from_chain(mainnet.main)" 57 | ] 58 | }, 59 | { 60 | "cell_type": "markdown", 61 | "metadata": {}, 62 | "source": [ 63 | "We will look for one of the TezVote smart contracts. \n", 64 | "The algorithm checks for the contract existence, before the origination RPC query have to return 404." 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": null, 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "origination = sc.find_contract_origination_operation('KT1ExvG3EjTrvDcAU7EqLNb77agPa5u6KvnY')" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": { 80 | "scrolled": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "origination()" 85 | ] 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "metadata": {}, 90 | "source": [ 91 | "## Finding all storage altering transactions\n", 92 | "\n", 93 | "We can also use step&bisect algo to retrieve all transactions that have changed the SC storage." 
94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": { 100 | "scrolled": true 101 | }, 102 | "outputs": [], 103 | "source": [ 104 | "for op in sc.find_storage_change_operations('KT1ExvG3EjTrvDcAU7EqLNb77agPa5u6KvnY', origination_level=293492):\n", 105 | " break" 106 | ] 107 | }, 108 | { 109 | "cell_type": "code", 110 | "execution_count": null, 111 | "metadata": { 112 | "scrolled": true 113 | }, 114 | "outputs": [], 115 | "source": [ 116 | "op()" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "## Decoding storage\n", 124 | "\n", 125 | "Alright, now let's view some data, I'll take some random SC from alphanet for demonstration" 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": null, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 | "alphanet.context.contracts['KT19iGCL4YrVpT6ezEzbDH37Yxbas8jWQz4s'].storage()" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": null, 140 | "metadata": {}, 141 | "outputs": [], 142 | "source": [ 143 | "alphanet.context.contracts['KT1VnRY6UuWH89P8DQtC7Sd178jkckff8o8d'].storage()" 144 | ] 145 | }, 146 | { 147 | "cell_type": "code", 148 | "execution_count": null, 149 | "metadata": { 150 | "scrolled": true 151 | }, 152 | "outputs": [], 153 | "source": [ 154 | "alphanet.context.contracts['KT1FEDVALSfQLZwVZbF1hRxJ9c8MTPe7azCZ'].storage()" 155 | ] 156 | }, 157 | { 158 | "cell_type": "markdown", 159 | "metadata": {}, 160 | "source": [ 161 | "Okay that won't work, let's do some magic:" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": null, 167 | "metadata": {}, 168 | "outputs": [], 169 | "source": [ 170 | "alphanet.context.contracts['KT19iGCL4YrVpT6ezEzbDH37Yxbas8jWQz4s'].decode_storage()" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "metadata": {}, 177 | "outputs": [], 178 | "source": [ 179 | 
"alphanet.context.contracts['KT1VnRY6UuWH89P8DQtC7Sd178jkckff8o8d'].decode_storage()" 180 | ] 181 | }, 182 | { 183 | "cell_type": "code", 184 | "execution_count": null, 185 | "metadata": {}, 186 | "outputs": [], 187 | "source": [ 188 | "alphanet.context.contracts['KT1FEDVALSfQLZwVZbF1hRxJ9c8MTPe7azCZ'].decode_storage()" 189 | ] 190 | }, 191 | { 192 | "cell_type": "markdown", 193 | "metadata": {}, 194 | "source": [ 195 | "Oh yeah, much better :) \n", 196 | "Note that we can handle both annotated and not annotated storage." 197 | ] 198 | }, 199 | { 200 | "cell_type": "markdown", 201 | "metadata": {}, 202 | "source": [ 203 | "## Encoding storage\n", 204 | "\n", 205 | "We can easily encode storage data back, this functional is quite useful for unit testing." 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [ 214 | "contract = alphanet.context.contracts['KT1VnRY6UuWH89P8DQtC7Sd178jkckff8o8d']" 215 | ] 216 | }, 217 | { 218 | "cell_type": "markdown", 219 | "metadata": {}, 220 | "source": [ 221 | "Let's look at the schema first" 222 | ] 223 | }, 224 | { 225 | "cell_type": "code", 226 | "execution_count": null, 227 | "metadata": {}, 228 | "outputs": [], 229 | "source": [ 230 | "contract.storage_schema()" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": null, 236 | "metadata": {}, 237 | "outputs": [], 238 | "source": [ 239 | "contract.encode_storage({\n", 240 | " 'accounts': {},\n", 241 | " 'version': 2,\n", 242 | " 'totalSupply': 100000000,\n", 243 | " 'decimals': 8,\n", 244 | " 'name': 'AbcCoin',\n", 245 | " 'symbol': 'ABC',\n", 246 | " 'owner': 'KT1GE2AZhazRxGsAjRVkQccHcB2pvANXQWd7'\n", 247 | "})" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "metadata": {}, 253 | "source": [ 254 | "## Decoding transaction parameters\n", 255 | "\n", 256 | "Great! But what about parameters we call smart contracts with? 
\n", 257 | "Take some complicated example" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "content = alphanet.blocks[216905].operations.managers.contents()[0]" 267 | ] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": null, 272 | "metadata": {}, 273 | "outputs": [], 274 | "source": [ 275 | "contract = alphanet.context.contracts['KT1FU74GimCeEVRAEZGURb6TWU8jK1N6zFJy']" 276 | ] 277 | }, 278 | { 279 | "cell_type": "code", 280 | "execution_count": null, 281 | "metadata": {}, 282 | "outputs": [], 283 | "source": [ 284 | "content['parameters']" 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": null, 290 | "metadata": {}, 291 | "outputs": [], 292 | "source": [ 293 | "contract.decode_parameters(content['parameters'])" 294 | ] 295 | }, 296 | { 297 | "cell_type": "markdown", 298 | "metadata": {}, 299 | "source": [ 300 | "As you can see, we combine type and field annotations from the code with the transaction data." 
301 | ] 302 | }, 303 | { 304 | "cell_type": "markdown", 305 | "metadata": {}, 306 | "source": [ 307 | "## Encoding transaction parameters\n", 308 | "\n", 309 | "If we do not know exactly the SC interface, we can look at the internal schema representation:" 310 | ] 311 | }, 312 | { 313 | "cell_type": "code", 314 | "execution_count": null, 315 | "metadata": {}, 316 | "outputs": [], 317 | "source": [ 318 | "contract.parameter_schema()" 319 | ] 320 | }, 321 | { 322 | "cell_type": "code", 323 | "execution_count": null, 324 | "metadata": {}, 325 | "outputs": [], 326 | "source": [ 327 | "contract.encode_parameters({'Redeem': {'rdHSec': '12', 'rdSec': '34'}})" 328 | ] 329 | }, 330 | { 331 | "cell_type": "markdown", 332 | "metadata": {}, 333 | "source": [ 334 | "## Visualize storage diff" 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": null, 340 | "metadata": {}, 341 | "outputs": [], 342 | "source": [ 343 | "storage_1 = mainnet.blocks[328451].context.contracts['KT1ExvG3EjTrvDcAU7EqLNb77agPa5u6KvnY'].decode_storage()\n", 344 | "storage_1" 345 | ] 346 | }, 347 | { 348 | "cell_type": "code", 349 | "execution_count": null, 350 | "metadata": {}, 351 | "outputs": [], 352 | "source": [ 353 | "storage_2 = mainnet.blocks[334288].context.contracts['KT1ExvG3EjTrvDcAU7EqLNb77agPa5u6KvnY'].decode_storage()\n", 354 | "storage_2" 355 | ] 356 | }, 357 | { 358 | "cell_type": "code", 359 | "execution_count": null, 360 | "metadata": {}, 361 | "outputs": [], 362 | "source": [ 363 | "from pytezos.tools.diff import generate_jsondiff_html" 364 | ] 365 | }, 366 | { 367 | "cell_type": "code", 368 | "execution_count": null, 369 | "metadata": {}, 370 | "outputs": [], 371 | "source": [ 372 | "generate_jsondiff_html(storage_1, storage_2, output_path='storage_diff.html')" 373 | ] 374 | }, 375 | { 376 | "cell_type": "markdown", 377 | "metadata": {}, 378 | "source": [ 379 | "[view diff](./storage_diff.html)" 380 | ] 381 | }, 382 | { 383 | "cell_type": "markdown", 384 | 
"metadata": {}, 385 | "source": [ 386 | "This is pretty much what we need when analyzing a smart contract's behaviour: its input and side effects." 387 | ] 388 | }, 389 | { 390 | "cell_type": "markdown", 391 | "metadata": {}, 392 | "source": [ 393 | "## Accessing BigMap" 394 | ] 395 | }, 396 | { 397 | "cell_type": "code", 398 | "execution_count": null, 399 | "metadata": {}, 400 | "outputs": [], 401 | "source": [ 402 | "contract.big_map_get('52c5bcbf9cb4dcaacd8689b42726c6f11e6eb575ade913923b6b1420b5b65eb9')" 403 | ] 404 | }, 405 | { 406 | "cell_type": "markdown", 407 | "metadata": {}, 408 | "source": [ 409 | "We can also decode the big map delta from the transaction" 410 | ] 411 | }, 412 | { 413 | "cell_type": "code", 414 | "execution_count": null, 415 | "metadata": { 416 | "scrolled": false 417 | }, 418 | "outputs": [], 419 | "source": [ 420 | "big_map_diff = alphanet.blocks[216895].operations.contents('transaction')[0]['metadata']['operation_result']['big_map_diff']\n", 421 | "big_map_diff" 422 | ] 423 | }, 424 | { 425 | "cell_type": "code", 426 | "execution_count": null, 427 | "metadata": {}, 428 | "outputs": [], 429 | "source": [ 430 | "contract.big_map_diff_decode(big_map_diff)" 431 | ] 432 | }, 433 | { 434 | "cell_type": "markdown", 435 | "metadata": {}, 436 | "source": [ 437 | "## Bonus: parsing .tz files\n", 438 | "\n", 439 | "You can also initialize a `Contract` instance from Micheline, Michelson source string or file." 
440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": null, 445 | "metadata": {}, 446 | "outputs": [], 447 | "source": [ 448 | "source = '''\n", 449 | "parameter nat;\n", 450 | "storage (map address nat);\n", 451 | "code { DUP ;\n", 452 | " CDR ;\n", 453 | " SWAP ;\n", 454 | " CAR ;\n", 455 | " DUP ;\n", 456 | " PUSH nat 2 ;\n", 457 | " { { COMPARE ; GE } ; IF {} { { UNIT ; FAILWITH } } } ;\n", 458 | " SOME ;\n", 459 | " SENDER ;\n", 460 | " UPDATE ;\n", 461 | " NIL operation ;\n", 462 | " PAIR }\n", 463 | "'''" 464 | ] 465 | }, 466 | { 467 | "cell_type": "code", 468 | "execution_count": null, 469 | "metadata": {}, 470 | "outputs": [], 471 | "source": [ 472 | "from pytezos.rpc.contract import Contract" 473 | ] 474 | }, 475 | { 476 | "cell_type": "code", 477 | "execution_count": null, 478 | "metadata": {}, 479 | "outputs": [], 480 | "source": [ 481 | "contract = Contract.from_string(source)\n", 482 | "contract" 483 | ] 484 | }, 485 | { 486 | "cell_type": "code", 487 | "execution_count": null, 488 | "metadata": {}, 489 | "outputs": [], 490 | "source": [] 491 | } 492 | ], 493 | "metadata": { 494 | "kernelspec": { 495 | "display_name": "python 3.7", 496 | "language": "python", 497 | "name": "python36" 498 | }, 499 | "language_info": { 500 | "codemirror_mode": { 501 | "name": "ipython", 502 | "version": 3 503 | }, 504 | "file_extension": ".py", 505 | "mimetype": "text/x-python", 506 | "name": "python", 507 | "nbconvert_exporter": "python", 508 | "pygments_lexer": "ipython3", 509 | "version": "3.7.0" 510 | } 511 | }, 512 | "nbformat": 4, 513 | "nbformat_minor": 2 514 | } 515 | -------------------------------------------------------------------------------- /examples/dyor_voting.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Check what you are voting for with PyTezos" 8 | ] 9 | }, 10 | { 11 | "cell_type": 
"code", 12 | "execution_count": 1, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "import sys\n", 17 | "assert sys.version_info.major == 3 and sys.version_info.minor >= 6" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 2, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "import os\n", 27 | "sys.path.insert(1, os.path.abspath('..'))" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": {}, 33 | "source": [ 34 | "## Current voting phase\n", 35 | "\n", 36 | "Say, you don't read any news and all you have is public node access and documentation. \n", 37 | "How to determine what is the current voting phase? " 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 3, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "from pytezos.rpc import mainnet" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 4, 52 | "metadata": {}, 53 | "outputs": [ 54 | { 55 | "data": { 56 | "text/plain": [ 57 | "'proposal'" 58 | ] 59 | }, 60 | "execution_count": 4, 61 | "metadata": {}, 62 | "output_type": "execute_result" 63 | } 64 | ], 65 | "source": [ 66 | "mainnet.head.votes.current_period_kind()" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": {}, 72 | "source": [ 73 | "Great, we are at the first stage, now we want to know when it started and when it will end. 
\n", 74 | "We can get this information from the block metadata:" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 5, 80 | "metadata": {}, 81 | "outputs": [ 82 | { 83 | "data": { 84 | "text/plain": [ 85 | "{'level': 349243,\n", 86 | " 'level_position': 349242,\n", 87 | " 'cycle': 85,\n", 88 | " 'cycle_position': 1082,\n", 89 | " 'voting_period': 10,\n", 90 | " 'voting_period_position': 21562,\n", 91 | " 'expected_commitment': False}" 92 | ] 93 | }, 94 | "execution_count": 5, 95 | "metadata": {}, 96 | "output_type": "execute_result" 97 | } 98 | ], 99 | "source": [ 100 | "level_info = mainnet.head.metadata.get('level')\n", 101 | "level_info" 102 | ] 103 | }, 104 | { 105 | "cell_type": "markdown", 106 | "metadata": {}, 107 | "source": [ 108 | "Get precise boundaries in levels and rough estimation in days" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 6, 114 | "metadata": {}, 115 | "outputs": [], 116 | "source": [ 117 | "import pendulum" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 7, 123 | "metadata": {}, 124 | "outputs": [ 125 | { 126 | "data": { 127 | "text/plain": [ 128 | "327681" 129 | ] 130 | }, 131 | "execution_count": 7, 132 | "metadata": {}, 133 | "output_type": "execute_result" 134 | } 135 | ], 136 | "source": [ 137 | "start_level = level_info['level'] - level_info['voting_period_position']\n", 138 | "start_level" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": 9, 144 | "metadata": {}, 145 | "outputs": [ 146 | { 147 | "data": { 148 | "text/plain": [ 149 | "360448" 150 | ] 151 | }, 152 | "execution_count": 9, 153 | "metadata": {}, 154 | "output_type": "execute_result" 155 | } 156 | ], 157 | "source": [ 158 | "end_level = start_level + 8 * 4096 - 1 # eight cycles of 4096 blocks\n", 159 | "end_level" 160 | ] 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": 10, 165 | "metadata": {}, 166 | "outputs": [ 167 | { 168 | "name": 
"stdout", 169 | "output_type": "stream", 170 | "text": [ 171 | "15.6 days passed\n" 172 | ] 173 | } 174 | ], 175 | "source": [ 176 | "start_dt = pendulum.parse(mainnet.blocks[start_level].header.get('timestamp'))\n", 177 | "time_past = (pendulum.now() - start_dt)\n", 178 | "print(round(time_past.total_days(), 1), 'days passed')" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 11, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "name": "stdout", 188 | "output_type": "stream", 189 | "text": [ 190 | "8.1 days left\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "time_left = (end_level - level_info['level']) / (level_info['level'] - start_level) * time_past\n", 196 | "print(round(time_left.total_days(), 1), 'days left')" 197 | ] 198 | }, 199 | { 200 | "cell_type": "markdown", 201 | "metadata": {}, 202 | "source": [ 203 | "## Current proposals" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": 12, 209 | "metadata": {}, 210 | "outputs": [ 211 | { 212 | "data": { 213 | "text/plain": [ 214 | "[['Psd1ynUBhMZAeajwcZJAeq5NrxorM6UCU4GJqxZ7Bx2e9vUWB6z', 5176],\n", 215 | " ['Pt24m4xiPbLDhVgVfABUjirbmda3yohdN82Sp9FeuAXJ4eV9otd', 13183]]" 216 | ] 217 | }, 218 | "execution_count": 12, 219 | "metadata": {}, 220 | "output_type": "execute_result" 221 | } 222 | ], 223 | "source": [ 224 | "proposals = mainnet.head.votes.proposals()\n", 225 | "proposals" 226 | ] 227 | }, 228 | { 229 | "cell_type": "markdown", 230 | "metadata": {}, 231 | "source": [ 232 | "Let's examine one of proposals" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": 13, 238 | "metadata": {}, 239 | "outputs": [ 240 | { 241 | "data": { 242 | "text/plain": [ 243 | "'Pt24m4xiPbLDhVgVfABUjirbmda3yohdN82Sp9FeuAXJ4eV9otd'" 244 | ] 245 | }, 246 | "execution_count": 13, 247 | "metadata": {}, 248 | "output_type": "execute_result" 249 | } 250 | ], 251 | "source": [ 252 | "proposal_id = proposals[1][0]\n", 253 | "proposal_id" 254 | ] 255 
| }, 256 | { 257 | "cell_type": "markdown", 258 | "metadata": {}, 259 | "source": [ 260 | "### Injection operation\n", 261 | "\n", 262 | "It's interesting to find the author of the proposal. \n", 263 | "In order to do that we have to search for the first `proposals` operation for this particular proposal_id." 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": 14, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "from pytezos.rpc.search import SearchChain" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 15, 278 | "metadata": {}, 279 | "outputs": [], 280 | "source": [ 281 | "sc = SearchChain.from_chain(mainnet.main)" 282 | ] 283 | }, 284 | { 285 | "cell_type": "markdown", 286 | "metadata": {}, 287 | "source": [ 288 | "Thanks to the statefulness of the Tezos blockchain we can perform a binary search inside the voting period. \n", 289 | "The algorithm searches for the first level where the number of votes changed from 0 to non-zero." 
290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 16, 295 | "metadata": {}, 296 | "outputs": [ 297 | { 298 | "name": "stderr", 299 | "output_type": "stream", 300 | "text": [ 301 | "2019-03-12 16:21:03.348 | DEBUG | pytezos.rpc.search:bisect:29 - 714 at level 338464\n", 302 | "2019-03-12 16:21:04.196 | DEBUG | pytezos.rpc.search:bisect:29 - 23 at level 333072\n", 303 | "2019-03-12 16:21:04.985 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 330376\n", 304 | "2019-03-12 16:21:05.763 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 331724\n", 305 | "2019-03-12 16:21:06.585 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 332398\n", 306 | "2019-03-12 16:21:07.351 | DEBUG | pytezos.rpc.search:bisect:29 - 11 at level 332735\n", 307 | "2019-03-12 16:21:07.957 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 332566\n", 308 | "2019-03-12 16:21:08.570 | DEBUG | pytezos.rpc.search:bisect:29 - 11 at level 332650\n", 309 | "2019-03-12 16:21:09.646 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 332608\n", 310 | "2019-03-12 16:21:10.210 | DEBUG | pytezos.rpc.search:bisect:29 - 11 at level 332629\n", 311 | "2019-03-12 16:21:10.786 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 332618\n", 312 | "2019-03-12 16:21:11.661 | DEBUG | pytezos.rpc.search:bisect:29 - 0 at level 332623\n", 313 | "2019-03-12 16:21:12.220 | DEBUG | pytezos.rpc.search:bisect:29 - 11 at level 332626\n", 314 | "2019-03-12 16:21:12.782 | DEBUG | pytezos.rpc.search:bisect:29 - 11 at level 332624\n" 315 | ] 316 | } 317 | ], 318 | "source": [ 319 | "operation = sc.find_proposal_inject_operation(proposal_id)" 320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": 17, 325 | "metadata": {}, 326 | "outputs": [ 327 | { 328 | "data": { 329 | "text/plain": [ 330 | "{'protocol': 'PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP',\n", 331 | " 'chain_id': 'NetXdQprcVkpaWU',\n", 332 | " 'hash': 'onydFJLWdGhfKNBfbnSLmqDu93j9NRimkbQm9WqLWYG8eyZUyTF',\n", 
333 | " 'branch': 'BL53WJx6xPn6rnTnWZmpNaWGAQqU8HTwRTDqVDmthYsxUTBewo9',\n", 334 | " 'contents': [{'kind': 'proposals',\n", 335 | " 'source': 'tz1fNdh4YftsUasbB1BWBpqDmr4sFZaPNZVL',\n", 336 | " 'period': 10,\n", 337 | " 'proposals': ['Pt24m4xiPbLDhVgVfABUjirbmda3yohdN82Sp9FeuAXJ4eV9otd',\n", 338 | " 'Psd1ynUBhMZAeajwcZJAeq5NrxorM6UCU4GJqxZ7Bx2e9vUWB6z'],\n", 339 | " 'metadata': {}}],\n", 340 | " 'signature': 'sigvUqvh7rBS8yAoE5RMieQaD5hvg9NsLeJ4kTnXdK1tXXyrHL8mX7E3KCm9q9YgYbJn3edhcUiZjdU3xNhVPEUPkSGVNbi9'}" 341 | ] 342 | }, 343 | "execution_count": 17, 344 | "metadata": {}, 345 | "output_type": "execute_result" 346 | } 347 | ], 348 | "source": [ 349 | "operation()" 350 | ] 351 | }, 352 | { 353 | "cell_type": "markdown", 354 | "metadata": {}, 355 | "source": [ 356 | "We can perform some checks on this operation, such as signature validation:" 357 | ] 358 | }, 359 | { 360 | "cell_type": "code", 361 | "execution_count": 18, 362 | "metadata": {}, 363 | "outputs": [], 364 | "source": [ 365 | "operation.verify_signature()" 366 | ] 367 | }, 368 | { 369 | "cell_type": "markdown", 370 | "metadata": {}, 371 | "source": [ 372 | "The only thing we can learn about the submitter's identity is his public key:" 373 | ] 374 | }, 375 | { 376 | "cell_type": "code", 377 | "execution_count": 19, 378 | "metadata": {}, 379 | "outputs": [ 380 | { 381 | "data": { 382 | "text/plain": [ 383 | "'edpktyybTTrLm2rk6bn7xtgY9t2Kgt9GnqZxEcSrunm4vKWTF9ES9o'" 384 | ] 385 | }, 386 | "execution_count": 19, 387 | "metadata": {}, 388 | "output_type": "execute_result" 389 | } 390 | ], 391 | "source": [ 392 | "mainnet.get_public_key(operation.source())" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": 20, 398 | "metadata": {}, 399 | "outputs": [ 400 | { 401 | "data": { 402 | "text/plain": [ 403 | "'Proposed on the 4th day of the voting period'" 404 | ] 405 | }, 406 | "execution_count": 20, 407 | "metadata": {}, 408 | "output_type": "execute_result" 409 | } 410 | ], 411 | 
"source": [ 412 | "timestamp = mainnet.blocks[operation.get('branch')].header.get('timestamp')\n", 413 | "inject_dt = pendulum.parse(timestamp)\n", 414 | "f'Proposed on the {(inject_dt - start_dt).days + 1}th day of the voting period'" 415 | ] 416 | }, 417 | { 418 | "cell_type": "markdown", 419 | "metadata": {}, 420 | "source": [ 421 | "### Source code\n", 422 | "\n", 423 | "Unfortunately it's practically impossible to get proposal source from the blockchain. \n", 424 | "We could get lucky and find the submitter's node (in case it's public) which has to know this proto (according to the docs). \n", 425 | "The other option is trying to find a node in zeronet which has participated in the voting rehearsal. \n", 426 | "\n", 427 | "We will come back to this issue later; for now, let's download the proposal sources." 428 | ] 429 | }, 430 | { 431 | "cell_type": "code", 432 | "execution_count": 21, 433 | "metadata": {}, 434 | "outputs": [], 435 | "source": [ 436 | "tar_url = 'https://blog.nomadic-labs.com/files/Athens_proposal_A.tar'" 437 | ] 438 | }, 439 | { 440 | "cell_type": "code", 441 | "execution_count": 22, 442 | "metadata": {}, 443 | "outputs": [], 444 | "source": [ 445 | "from pytezos.rpc.protocol import Protocol" 446 | ] 447 | }, 448 | { 449 | "cell_type": "markdown", 450 | "metadata": {}, 451 | "source": [ 452 | "Loading the sources and converting them to the internal format (as in the blockchain)" 453 | ] 454 | }, 455 | { 456 | "cell_type": "code", 457 | "execution_count": 23, 458 | "metadata": {}, 459 | "outputs": [ 460 | { 461 | "name": "stderr", 462 | "output_type": "stream", 463 | "text": [ 464 | "1218560it [03:36, 5620.72it/s]\n" 465 | ] 466 | } 467 | ], 468 | "source": [ 469 | "athens_a = Protocol.from_uri(tar_url)" 470 | ] 471 | }, 472 | { 473 | "cell_type": "markdown", 474 | "metadata": {}, 475 | "source": [ 476 | "Check that the sources we have downloaded are original. 
\n", 477 | "In order to do that we have to obtain binary representation of the sources according to the http://tezos.gitlab.io/mainnet/api/rpc.html#get-protocols-protocol-hash (Binary output tab)." 478 | ] 479 | }, 480 | { 481 | "cell_type": "code", 482 | "execution_count": 24, 483 | "metadata": {}, 484 | "outputs": [ 485 | { 486 | "data": { 487 | "text/plain": [ 488 | "'Pt24m4xiPbLDhVgVfABUjirbmda3yohdN82Sp9FeuAXJ4eV9otd'" 489 | ] 490 | }, 491 | "execution_count": 24, 492 | "metadata": {}, 493 | "output_type": "execute_result" 494 | } 495 | ], 496 | "source": [ 497 | "proto_id = athens_a.calculate_hash()\n", 498 | "proto_id" 499 | ] 500 | }, 501 | { 502 | "cell_type": "code", 503 | "execution_count": 25, 504 | "metadata": {}, 505 | "outputs": [], 506 | "source": [ 507 | "assert proposal_id == proto_id" 508 | ] 509 | }, 510 | { 511 | "cell_type": "markdown", 512 | "metadata": {}, 513 | "source": [ 514 | "### Protocol update diff\n", 515 | "\n", 516 | "First of all we need to get sources of the current protocol:" 517 | ] 518 | }, 519 | { 520 | "cell_type": "code", 521 | "execution_count": 26, 522 | "metadata": {}, 523 | "outputs": [ 524 | { 525 | "data": { 526 | "text/plain": [ 527 | "protocols/PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP" 528 | ] 529 | }, 530 | "execution_count": 26, 531 | "metadata": {}, 532 | "output_type": "execute_result" 533 | } 534 | ], 535 | "source": [ 536 | "current_proto = mainnet.protocols[operation.get('protocol')]\n", 537 | "current_proto" 538 | ] 539 | }, 540 | { 541 | "cell_type": "code", 542 | "execution_count": 27, 543 | "metadata": {}, 544 | "outputs": [ 545 | { 546 | "data": { 547 | "text/plain": [ 548 | "'PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP'" 549 | ] 550 | }, 551 | "execution_count": 27, 552 | "metadata": {}, 553 | "output_type": "execute_result" 554 | } 555 | ], 556 | "source": [ 557 | "current_hash = current_proto.calculate_hash()\n", 558 | "current_hash" 559 | ] 560 | }, 561 | { 562 | "cell_type": 
"code", 563 | "execution_count": 28, 564 | "metadata": {}, 565 | "outputs": [], 566 | "source": [ 567 | "assert current_hash == operation.get('protocol')" 568 | ] 569 | }, 570 | { 571 | "cell_type": "markdown", 572 | "metadata": {}, 573 | "source": [ 574 | "Now we can generate a patch file in the standard diff format. \n", 575 | "We can optionally specify the number of lines before and after the change: this is useful for review." 576 | ] 577 | }, 578 | { 579 | "cell_type": "code", 580 | "execution_count": 29, 581 | "metadata": {}, 582 | "outputs": [], 583 | "source": [ 584 | "patch = current_proto.diff(athens_a, context_size=3)" 585 | ] 586 | }, 587 | { 588 | "cell_type": "markdown", 589 | "metadata": {}, 590 | "source": [ 591 | "Generate GitHub-like side-by-side patch views, powered by diff2html.js" 592 | ] 593 | }, 594 | { 595 | "cell_type": "code", 596 | "execution_count": 30, 597 | "metadata": {}, 598 | "outputs": [], 599 | "source": [ 600 | "patch.export_html(output_path='report.html')" 601 | ] 602 | }, 603 | { 604 | "cell_type": "markdown", 605 | "metadata": {}, 606 | "source": [ 607 | "[View report](./report.html)" 608 | ] 609 | }, 610 | { 611 | "cell_type": "markdown", 612 | "metadata": {}, 613 | "source": [ 614 | "## Bonus: get all voting operations for a proposal\n", 615 | "\n", 616 | "As an alternative to TzScan and other indexed-blockchain solutions."
617 | ] 618 | }, 619 | { 620 | "cell_type": "code", 621 | "execution_count": 31, 622 | "metadata": {}, 623 | "outputs": [ 624 | { 625 | "name": "stderr", 626 | "output_type": "stream", 627 | "text": [ 628 | "2019-03-12 16:27:29.640 | DEBUG | pytezos.rpc.search:find_state_change_intervals:14 - 13179 at level 349194\n", 629 | "2019-03-12 16:27:30.438 | DEBUG | pytezos.rpc.search:bisect:29 - 13183 at level 349224\n", 630 | "2019-03-12 16:27:31.570 | DEBUG | pytezos.rpc.search:bisect:29 - 13183 at level 349209\n", 631 | "2019-03-12 16:27:32.367 | DEBUG | pytezos.rpc.search:bisect:29 - 13183 at level 349201\n", 632 | "2019-03-12 16:27:33.171 | DEBUG | pytezos.rpc.search:bisect:29 - 13179 at level 349197\n", 633 | "2019-03-12 16:27:33.973 | DEBUG | pytezos.rpc.search:bisect:29 - 13183 at level 349199\n", 634 | "2019-03-12 16:27:35.187 | DEBUG | pytezos.rpc.search:bisect:29 - 13179 at level 349198\n" 635 | ] 636 | }, 637 | { 638 | "name": "stdout", 639 | "output_type": "stream", 640 | "text": [ 641 | "{'protocol': 'PsddFKi32cMJ2qPjf43Qv5GDWLDPZb3T3bF6fLKiF5HtvHNU7aP', 'chain_id': 'NetXdQprcVkpaWU', 'hash': 'oomv4x6eVaa2EbT37h7MvCZEMyKBiqf3L4yuEoFE95n9aKYfGxF', 'branch': 'BLDU8tQwnZEt3So7Utbwxzj7WMLQdyNqu7UkzuuAJKUnnNmenHK', 'contents': [{'kind': 'proposals', 'source': 'tz1Z3KCf8CLGAYfvVWPEr562jDDyWkwNF7sT', 'period': 10, 'proposals': ['Pt24m4xiPbLDhVgVfABUjirbmda3yohdN82Sp9FeuAXJ4eV9otd'], 'metadata': {}}], 'signature': 'sigqAsAk9wWFSz5zDt3nn3M5yf14ynxNSw23MAwDmwv5kifGtNyqDbi1aoxiAAF5byyzqjgu3ftWYxc8DHKsB6cGWr1nHh5S'}\n" 642 | ] 643 | } 644 | ], 645 | "source": [ 646 | "for operation in sc.find_proposal_votes_operations(proposal_id):\n", 647 | " print(operation())\n", 648 | " break # this can take a while" 649 | ] 650 | }, 651 | { 652 | "cell_type": "markdown", 653 | "metadata": {}, 654 | "source": [ 655 | "Search algorithm works as follows:\n", 656 | "1. Split block interval into equal chunks\n", 657 | "2. 
Determine which of the intervals contain state changes\n", 658 | "3. For each interval run binary search\n", 659 | "4. If there are several changes inside a single interval, run binary search again\n", 660 | "\n", 661 | "It's obvious that the search space can be easily split and processed independently, i.e. parallelized. " 662 | ] 663 | }, 664 | { 665 | "cell_type": "markdown", 666 | "metadata": {}, 667 | "source": [ 668 | "## Improvement: proposal sources on-chain\n", 669 | "\n", 670 | "As we pointed out earlier, there is no convenient way to get the proposal source from the blockchain. This can be implemented via a smart contract. But it's more reasonable to store a compressed code diff rather than the full source." 671 | ] 672 | }, 673 | { 674 | "cell_type": "code", 675 | "execution_count": 32, 676 | "metadata": {}, 677 | "outputs": [], 678 | "source": [ 679 | "ctxless_patch = current_proto.diff(athens_a, context_size=0)" 680 | ] 681 | }, 682 | { 683 | "cell_type": "code", 684 | "execution_count": 33, 685 | "metadata": {}, 686 | "outputs": [], 687 | "source": [ 688 | "ctxless_patch.export_tar('diff.tar.bz2')" 689 | ] 690 | }, 691 | { 692 | "cell_type": "code", 693 | "execution_count": 34, 694 | "metadata": {}, 695 | "outputs": [ 696 | { 697 | "data": { 698 | "text/plain": [ 699 | "12839" 700 | ] 701 | }, 702 | "execution_count": 34, 703 | "metadata": {}, 704 | "output_type": "execute_result" 705 | } 706 | ], 707 | "source": [ 708 | "os.path.getsize('diff.tar.bz2')" 709 | ] 710 | }, 711 | { 712 | "cell_type": "markdown", 713 | "metadata": {}, 714 | "source": [ 715 | "### Applying protocol diff" 716 | ] 717 | }, 718 | { 719 | "cell_type": "code", 720 | "execution_count": 35, 721 | "metadata": {}, 722 | "outputs": [], 723 | "source": [ 724 | "proto = current_proto.apply(ctxless_patch)" 725 | ] 726 | }, 727 | { 728 | "cell_type": "code", 729 | "execution_count": 36, 730 | "metadata": {}, 731 | "outputs": [], 732 | "source": [ 733 | "assert proposal_id == proto.calculate_hash()" 734 
| ] 735 | }, 736 | { 737 | "cell_type": "code", 738 | "execution_count": null, 739 | "metadata": {}, 740 | "outputs": [], 741 | "source": [] 742 | } 743 | ], 744 | "metadata": { 745 | "kernelspec": { 746 | "display_name": "python 3.7", 747 | "language": "python", 748 | "name": "python36" 749 | }, 750 | "language_info": { 751 | "codemirror_mode": { 752 | "name": "ipython", 753 | "version": 3 754 | }, 755 | "file_extension": ".py", 756 | "mimetype": "text/x-python", 757 | "name": "python", 758 | "nbconvert_exporter": "python", 759 | "pygments_lexer": "ipython3", 760 | "version": "3.7.0" 761 | } 762 | }, 763 | "nbformat": 4, 764 | "nbformat_minor": 2 765 | } 766 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: PyTezos 2 | nav: 3 | - Home: index.md 4 | theme: readthedocs 5 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | category = "main" 3 | description = "Produce colored terminal text with an xml-like markup" 4 | name = "ansimarkup" 5 | optional = false 6 | python-versions = "*" 7 | version = "1.4.0" 8 | 9 | [package.dependencies] 10 | colorama = "*" 11 | 12 | [[package]] 13 | category = "dev" 14 | description = "Atomic file writes." 
15 | name = "atomicwrites" 16 | optional = false 17 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 18 | version = "1.3.0" 19 | 20 | [[package]] 21 | category = "dev" 22 | description = "Classes Without Boilerplate" 23 | name = "attrs" 24 | optional = false 25 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 26 | version = "19.1.0" 27 | 28 | [[package]] 29 | category = "main" 30 | description = "Base58 and Base58Check implementation" 31 | name = "base58" 32 | optional = false 33 | python-versions = "*" 34 | version = "1.0.3" 35 | 36 | [[package]] 37 | category = "main" 38 | description = "Pretty and helpful exceptions, automatically" 39 | name = "better-exceptions-fork" 40 | optional = false 41 | python-versions = "*" 42 | version = "0.2.1.post6" 43 | 44 | [package.dependencies] 45 | ansimarkup = ">=1.3.0" 46 | colorama = "*" 47 | pygments = ">=2.2.0" 48 | 49 | [[package]] 50 | category = "main" 51 | description = "Python package for providing Mozilla's CA Bundle." 52 | name = "certifi" 53 | optional = false 54 | python-versions = "*" 55 | version = "2019.3.9" 56 | 57 | [[package]] 58 | category = "main" 59 | description = "Foreign Function Interface for Python calling C code." 60 | name = "cffi" 61 | optional = false 62 | python-versions = "*" 63 | version = "1.12.2" 64 | 65 | [package.dependencies] 66 | pycparser = "*" 67 | 68 | [[package]] 69 | category = "main" 70 | description = "Universal encoding detector for Python 2 and 3" 71 | name = "chardet" 72 | optional = false 73 | python-versions = "*" 74 | version = "3.0.4" 75 | 76 | [[package]] 77 | category = "main" 78 | description = "Cross-platform colored terminal text." 
79 | name = "colorama" 80 | optional = false 81 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 82 | version = "0.4.1" 83 | 84 | [[package]] 85 | category = "dev" 86 | description = "Code coverage measurement for Python" 87 | name = "coverage" 88 | optional = false 89 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" 90 | version = "4.5.3" 91 | 92 | [[package]] 93 | category = "main" 94 | description = "Fast elliptic curve digital signatures" 95 | name = "fastecdsa" 96 | optional = false 97 | python-versions = "*" 98 | version = "1.7.1" 99 | 100 | [[package]] 101 | category = "main" 102 | description = "Internationalized Domain Names in Applications (IDNA)" 103 | name = "idna" 104 | optional = false 105 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 106 | version = "2.8" 107 | 108 | [[package]] 109 | category = "main" 110 | description = "Python logging made (stupidly) simple" 111 | name = "loguru" 112 | optional = false 113 | python-versions = ">=3.5" 114 | version = "0.2.5" 115 | 116 | [package.dependencies] 117 | ansimarkup = ">=1.4.0" 118 | better-exceptions-fork = ">=0.2.1.post6" 119 | colorama = ">=0.3.4" 120 | 121 | [[package]] 122 | category = "dev" 123 | description = "More routines for operating on iterables, beyond itertools" 124 | name = "more-itertools" 125 | optional = false 126 | python-versions = ">=3.4" 127 | version = "6.0.0" 128 | 129 | [[package]] 130 | category = "main" 131 | description = "Packed binary data for networking." 
132 | name = "netstruct" 133 | optional = false 134 | python-versions = "*" 135 | version = "1.1.2" 136 | 137 | [[package]] 138 | category = "dev" 139 | description = "Parameterized testing with any Python test framework" 140 | name = "parameterized" 141 | optional = false 142 | python-versions = "*" 143 | version = "0.7.0" 144 | 145 | [[package]] 146 | category = "dev" 147 | description = "Object-oriented filesystem paths" 148 | marker = "python_version < \"3.6\"" 149 | name = "pathlib2" 150 | optional = false 151 | python-versions = "*" 152 | version = "2.3.3" 153 | 154 | [package.dependencies] 155 | six = "*" 156 | 157 | [[package]] 158 | category = "main" 159 | description = "Python datetimes made easy" 160 | name = "pendulum" 161 | optional = false 162 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 163 | version = "2.0.4" 164 | 165 | [package.dependencies] 166 | python-dateutil = ">=2.6,<3.0" 167 | pytzdata = ">=2018.3" 168 | 169 | [[package]] 170 | category = "dev" 171 | description = "plugin and hook calling mechanisms for python" 172 | name = "pluggy" 173 | optional = false 174 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 175 | version = "0.9.0" 176 | 177 | [[package]] 178 | category = "main" 179 | description = "Python Lex & Yacc" 180 | name = "ply" 181 | optional = false 182 | python-versions = "*" 183 | version = "3.11" 184 | 185 | [[package]] 186 | category = "dev" 187 | description = "library with cross-python path, ini-parsing, io, code, log facilities" 188 | name = "py" 189 | optional = false 190 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 191 | version = "1.8.0" 192 | 193 | [[package]] 194 | category = "main" 195 | description = "BLAKE2 hash function extension module" 196 | name = "pyblake2" 197 | optional = false 198 | python-versions = "*" 199 | version = "1.1.2" 200 | 201 | [[package]] 202 | category = "main" 203 | description = "C parser in Python" 204 | name = "pycparser" 205 | optional = 
false 206 | python-versions = "*" 207 | version = "2.19" 208 | 209 | [[package]] 210 | category = "main" 211 | description = "Pygments is a syntax highlighting package written in Python." 212 | name = "pygments" 213 | optional = false 214 | python-versions = "*" 215 | version = "2.3.1" 216 | 217 | [[package]] 218 | category = "main" 219 | description = "python libsodium wrapper" 220 | name = "pysodium" 221 | optional = false 222 | python-versions = "*" 223 | version = "0.7.1" 224 | 225 | [[package]] 226 | category = "dev" 227 | description = "pytest: simple powerful testing with Python" 228 | name = "pytest" 229 | optional = false 230 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 231 | version = "3.10.1" 232 | 233 | [package.dependencies] 234 | atomicwrites = ">=1.0" 235 | attrs = ">=17.4.0" 236 | colorama = "*" 237 | more-itertools = ">=4.0.0" 238 | pluggy = ">=0.7" 239 | py = ">=1.5.0" 240 | setuptools = "*" 241 | six = ">=1.10.0" 242 | 243 | [package.dependencies.pathlib2] 244 | python = "<3.6" 245 | version = ">=2.2.0" 246 | 247 | [[package]] 248 | category = "dev" 249 | description = "Pytest plugin for measuring coverage." 250 | name = "pytest-cov" 251 | optional = false 252 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 253 | version = "2.6.1" 254 | 255 | [package.dependencies] 256 | coverage = ">=4.4" 257 | pytest = ">=3.6" 258 | 259 | [[package]] 260 | category = "main" 261 | description = "Extensions to the standard Python datetime module" 262 | name = "python-dateutil" 263 | optional = false 264 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 265 | version = "2.8.0" 266 | 267 | [package.dependencies] 268 | six = ">=1.5" 269 | 270 | [[package]] 271 | category = "main" 272 | description = "The Olson timezone database for Python." 
273 | name = "pytzdata" 274 | optional = false 275 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 276 | version = "2018.9" 277 | 278 | [[package]] 279 | category = "main" 280 | description = "Python HTTP for Humans." 281 | name = "requests" 282 | optional = false 283 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 284 | version = "2.21.0" 285 | 286 | [package.dependencies] 287 | certifi = ">=2017.4.17" 288 | chardet = ">=3.0.2,<3.1.0" 289 | idna = ">=2.5,<2.9" 290 | urllib3 = ">=1.21.1,<1.25" 291 | 292 | [[package]] 293 | category = "main" 294 | description = "FFI bindings to libsecp256k1" 295 | name = "secp256k1" 296 | optional = false 297 | python-versions = "*" 298 | version = "0.13.2" 299 | 300 | [package.dependencies] 301 | cffi = ">=1.3.0" 302 | 303 | [[package]] 304 | category = "main" 305 | description = "Simple, fast, extensible JSON encoder/decoder for Python" 306 | name = "simplejson" 307 | optional = false 308 | python-versions = "*" 309 | version = "3.16.0" 310 | 311 | [[package]] 312 | category = "main" 313 | description = "Python 2 and 3 compatibility utilities" 314 | name = "six" 315 | optional = false 316 | python-versions = ">=2.6, !=3.0.*, !=3.1.*" 317 | version = "1.12.0" 318 | 319 | [[package]] 320 | category = "main" 321 | description = "Fast, Extensible Progress Meter" 322 | name = "tqdm" 323 | optional = false 324 | python-versions = ">=2.6, !=3.0.*, !=3.1.*" 325 | version = "4.31.1" 326 | 327 | [[package]] 328 | category = "main" 329 | description = "HTTP library with thread-safe connection pooling, file post, and more." 
330 | name = "urllib3" 331 | optional = false 332 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" 333 | version = "1.24.1" 334 | 335 | [metadata] 336 | content-hash = "03bb82cfbc2bc41ea8d61c7678f9743b1f32be1f255a34e9d3808e7dd4c8344f" 337 | python-versions = "^3.5" 338 | 339 | [metadata.hashes] 340 | ansimarkup = ["06365e3ef89a12734fc408b2449cb4642d5fe2e603e95e7296eff9e98a0fe0b4", "174d920481416cec8d5a707af542d6fba25a1df1c21d8996479c32ba453649a4"] 341 | atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] 342 | attrs = ["69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", "f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399"] 343 | base58 = ["1e42993c0628ed4f898c03b522b26af78fb05115732549b21a028bc4633d19ab", "6aa0553e477478993588303c54659d15e3c17ae062508c854a8b752d07c716bd", "9a793c599979c497800eb414c852b80866f28daaed5494703fc129592cc83e60"] 344 | better-exceptions-fork = ["5f0983da51e956dbdaf8b9a3d10e2774b382ce6c6ff2e54685c33e2dbe8f1472"] 345 | certifi = ["59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", "b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae"] 346 | cffi = ["00b97afa72c233495560a0793cdc86c2571721b4271c0667addc83c417f3d90f", "0ba1b0c90f2124459f6966a10c03794082a2f3985cd699d7d63c4a8dae113e11", "0bffb69da295a4fc3349f2ec7cbe16b8ba057b0a593a92cbe8396e535244ee9d", "21469a2b1082088d11ccd79dd84157ba42d940064abbfa59cf5f024c19cf4891", "2e4812f7fa984bf1ab253a40f1f4391b604f7fc424a3e21f7de542a7f8f7aedf", "2eac2cdd07b9049dd4e68449b90d3ef1adc7c759463af5beb53a84f1db62e36c", "2f9089979d7456c74d21303c7851f158833d48fb265876923edcb2d0194104ed", "3dd13feff00bddb0bd2d650cdb7338f815c1789a91a6f68fdc00e5c5ed40329b", "4065c32b52f4b142f417af6f33a5024edc1336aa845b9d5a8d86071f6fcaac5a", "51a4ba1256e9003a3acf508e3b4f4661bebd015b8180cc31849da222426ef585", 
"59888faac06403767c0cf8cfb3f4a777b2939b1fbd9f729299b5384f097f05ea", "59c87886640574d8b14910840327f5cd15954e26ed0bbd4e7cef95fa5aef218f", "610fc7d6db6c56a244c2701575f6851461753c60f73f2de89c79bbf1cc807f33", "70aeadeecb281ea901bf4230c6222af0248c41044d6f57401a614ea59d96d145", "71e1296d5e66c59cd2c0f2d72dc476d42afe02aeddc833d8e05630a0551dad7a", "8fc7a49b440ea752cfdf1d51a586fd08d395ff7a5d555dc69e84b1939f7ddee3", "9b5c2afd2d6e3771d516045a6cfa11a8da9a60e3d128746a7fe9ab36dfe7221f", "9c759051ebcb244d9d55ee791259ddd158188d15adee3c152502d3b69005e6bd", "b4d1011fec5ec12aa7cc10c05a2f2f12dfa0adfe958e56ae38dc140614035804", "b4f1d6332339ecc61275bebd1f7b674098a66fea11a00c84d1c58851e618dc0d", "c030cda3dc8e62b814831faa4eb93dd9a46498af8cd1d5c178c2de856972fd92", "c2e1f2012e56d61390c0e668c20c4fb0ae667c44d6f6a2eeea5d7148dcd3df9f", "c37c77d6562074452120fc6c02ad86ec928f5710fbc435a181d69334b4de1d84", "c8149780c60f8fd02752d0429246088c6c04e234b895c4a42e1ea9b4de8d27fb", "cbeeef1dc3c4299bd746b774f019de9e4672f7cc666c777cd5b409f0b746dac7", "e113878a446c6228669144ae8a56e268c91b7f1fafae927adc4879d9849e0ea7", "e21162bf941b85c0cda08224dade5def9360f53b09f9f259adb85fc7dd0e7b35", "fb6934ef4744becbda3143d30c6604718871495a5e36c408431bf33d9c146889"] 347 | chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] 348 | colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] 349 | coverage = ["0c5fe441b9cfdab64719f24e9684502a59432df7570521563d7b1aff27ac755f", "2b412abc4c7d6e019ce7c27cbc229783035eef6d5401695dccba80f481be4eb3", "3684fabf6b87a369017756b551cef29e505cb155ddb892a7a29277b978da88b9", "39e088da9b284f1bd17c750ac672103779f7954ce6125fd4382134ac8d152d74", "3c205bc11cc4fcc57b761c2da73b9b72a59f8d5ca89979afb0c1c6f9e53c7390", "42692db854d13c6c5e9541b6ffe0fe921fe16c9c446358d642ccae1462582d3b", 
"465ce53a8c0f3a7950dfb836438442f833cf6663d407f37d8c52fe7b6e56d7e8", "48020e343fc40f72a442c8a1334284620f81295256a6b6ca6d8aa1350c763bbe", "4ec30ade438d1711562f3786bea33a9da6107414aed60a5daa974d50a8c2c351", "5296fc86ab612ec12394565c500b412a43b328b3907c0d14358950d06fd83baf", "5f61bed2f7d9b6a9ab935150a6b23d7f84b8055524e7be7715b6513f3328138e", "6899797ac384b239ce1926f3cb86ffc19996f6fa3a1efbb23cb49e0c12d8c18c", "68a43a9f9f83693ce0414d17e019daee7ab3f7113a70c79a3dd4c2f704e4d741", "6b8033d47fe22506856fe450470ccb1d8ba1ffb8463494a15cfc96392a288c09", "7ad7536066b28863e5835e8cfeaa794b7fe352d99a8cded9f43d1161be8e9fbd", "7bacb89ccf4bedb30b277e96e4cc68cd1369ca6841bde7b005191b54d3dd1034", "839dc7c36501254e14331bcb98b27002aa415e4af7ea039d9009409b9d2d5420", "8e679d1bde5e2de4a909efb071f14b472a678b788904440779d2c449c0355b27", "8f9a95b66969cdea53ec992ecea5406c5bd99c9221f539bca1e8406b200ae98c", "932c03d2d565f75961ba1d3cec41ddde00e162c5b46d03f7423edcb807734eab", "93f965415cc51604f571e491f280cff0f5be35895b4eb5e55b47ae90c02a497b", "988529edadc49039d205e0aa6ce049c5ccda4acb2d6c3c5c550c17e8c02c05ba", "998d7e73548fe395eeb294495a04d38942edb66d1fa61eb70418871bc621227e", "9de60893fb447d1e797f6bf08fdf0dbcda0c1e34c1b06c92bd3a363c0ea8c609", "9e80d45d0c7fcee54e22771db7f1b0b126fb4a6c0a2e5afa72f66827207ff2f2", "a545a3dfe5082dc8e8c3eb7f8a2cf4f2870902ff1860bd99b6198cfd1f9d1f49", "a5d8f29e5ec661143621a8f4de51adfb300d7a476224156a39a392254f70687b", "a9abc8c480e103dc05d9b332c6cc9fb1586330356fc14f1aa9c0ca5745097d19", "aca06bfba4759bbdb09bf52ebb15ae20268ee1f6747417837926fae990ebc41d", "bb23b7a6fd666e551a3094ab896a57809e010059540ad20acbeec03a154224ce", "bfd1d0ae7e292105f29d7deaa9d8f2916ed8553ab9d5f39ec65bcf5deadff3f9", "c22ab9f96cbaff05c6a84e20ec856383d27eae09e511d3e6ac4479489195861d", "c62ca0a38958f541a73cf86acdab020c2091631c137bd359c4f5bddde7b75fd4", "c709d8bda72cf4cd348ccec2a4881f2c5848fd72903c185f363d361b2737f773", "c968a6aa7e0b56ecbd28531ddf439c2ec103610d3e2bf3b75b813304f8cb7723", 
"ca58eba39c68010d7e87a823f22a081b5290e3e3c64714aac3c91481d8b34d22", "df785d8cb80539d0b55fd47183264b7002077859028dfe3070cf6359bf8b2d9c", "f406628ca51e0ae90ae76ea8398677a921b36f0bd71aab2099dfed08abd0322f", "f46087bbd95ebae244a0eda01a618aff11ec7a069b15a3ef8f6b520db523dcf1", "f8019c5279eb32360ca03e9fac40a12667715546eed5c5eb59eb381f2f501260", "fc5f4d209733750afd2714e9109816a29500718b32dd9a5db01c0cb3a019b96a"] 350 | fastecdsa = ["018c5aed286ccc81d858593a08fb5600bead4183969f934fec05aaa2249f0c3f", "3e493b03050484c4f48ba2a96933908baea49d64a12358e22fd9bff38c483bbd", "ca3b70122a95a310020758924f0772527bf4758b830460b87fa948d87ca2205d"] 351 | idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] 352 | loguru = ["68297d9f23064c2f4764bb5d0c5c767f3ed7f9fc1218244841878f5fc7c94add", "ebac59630946721fd6207264679b267a8bdc290b086226067d6aad86830e3123"] 353 | more-itertools = ["0125e8f60e9e031347105eb1682cef932f5e97d7b9a1a28d9bf00c22a5daef40", "590044e3942351a1bdb1de960b739ff4ce277960f2425ad4509446dbace8d9d1"] 354 | netstruct = ["70b6a5c73f5bbc7ab57b019369642adfb34dd8af41b948c400ce95f952b7df9a"] 355 | parameterized = ["020343a281efcfe9b71b9028a91817f981202c14d72104b5a2fbe401dee25a18", "d8c8837fb677ed2d5a93b9e2308ce0da3aeb58cf513120d501e0b7af14da78d5"] 356 | pathlib2 = ["25199318e8cc3c25dcb45cbe084cc061051336d5a9ea2a12448d3d8cb748f742", "5887121d7f7df3603bca2f710e7219f3eca0eb69e0b7cc6e0a022e155ac931a7"] 357 | pendulum = ["0f43d963b27e92b04047ce8352e4c277db99f20d0b513df7d0ceafe674a2f727", "14e60d26d7400980123dbb6e3f2a90b70d7c18c63742ffe5bd6d6a643f8c6ef1", "5035a4e17504814a679f138374269cc7cc514aeac7ba6d9dc020abc224f25dbc", "8c0b3d655c1e9205d4dacf42fffc929cde3b19b5fb544a7f7561e6896eb8a000", "bfc7b33ae193a204ec0bec12ad0d2d3300cd7e51d91d992da525ba3b28f0d265", "cd70b75800439794e1ad8dbfa24838845e171918df81fa98b68d0d5a6f9b8bf2", "cf535d36c063575d4752af36df928882b2e0e31541b4482c97d63752785f9fcb"] 358 | pluggy 
= ["19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", "84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746"] 359 | ply = ["00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3", "096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"] 360 | py = ["64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53"] 361 | pyblake2 = ["3757f7ad709b0e1b2a6b3919fa79fe3261f166fc375cd521f2be480f8319dde9", "407e02c7f8f36fcec1b7aa114ddca0c1060c598142ea6f6759d03710b946a7e3", "4d47b4a2c1d292b1e460bde1dda4d13aa792ed2ed70fcc263b6bc24632c8e902", "5ccc7eb02edb82fafb8adbb90746af71460fbc29aa0f822526fc976dff83e93f", "8043267fbc0b2f3748c6920591cd0b8b5609dcce60c504c32858aa36206386f2", "982295a87907d50f4723db6bc724660da76b6547826d52160171d54f95b919ac", "baa2190bfe549e36163aa44664d4ee3a9080b236fc5d42f50dc6fd36bbdc749e", "c53417ee0bbe77db852d5fd1036749f03696ebc2265de359fe17418d800196c4", "fbc9fcde75713930bc2a91b149e97be2401f7c9c56d735b46a109210f58d7358"] 362 | pycparser = ["a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"] 363 | pygments = ["5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a", "e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d"] 364 | pysodium = ["59bc77d7c5a00990129ff7a07cb723a3bdb895de698afc628514bc3403428afd", "ce53537524dd52b021c3ff0a2f8133e8eaa7fec19bbfe15b65daace0ec2c32bb"] 365 | pytest = ["3f193df1cfe1d1609d4c583838bea3d532b18d6160fd3f55c9447fdca30848ec", "e246cf173c01169b9617fc07264b7b1316e78d7a650055235d6d897bc80d9660"] 366 | pytest-cov = ["0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f"] 367 | python-dateutil = ["7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb", "c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"] 368 | pytzdata = 
["9626e42fd9df77b16aedbd909d1e5fda839be47966adb7089b990f3452c45dd8", "dddaaf4f1717820a6fdcac94057e03c1a15b3829a44d9eaf19988917977db408"] 369 | requests = ["502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"] 370 | secp256k1 = ["29521dda0f3ad51afc26141d9a564b40bb39e84d27046164c502774b22ea2f9c", "2ad20bb8553425be795ecaf6641a510470701fa6a7aa9c54395d3989c070ab4c", "5cded8e5d241dd92f6e55fd7462ff74e4edcbe97850b7bf4378cc56e48e446d7", "a3b43e02d321c09eafa769a6fc2c156f555cab3a7db62175ef2fd21e16cdf20c", "a598f2223ed3bf55bb7e317c5ebd1a316b3b4d65db26de32554bb84adf026eae", "a755c85947b70c69cf318412bfd12889863e70ddf76b588701835a4da5322d55", "df29091e6ac74bb0683a44d57411bd59c6c99d04d79ef4e396e4a67d91f6bd65", "e86eab58d0c2e93c08483c8a8546fe54ade8150ff4a1f320417d647635cb133b", "e9842fdc3b24c3174f49794d93b10b2013aa7b5d7029d12989458482c95ee56a"] 371 | simplejson = ["067a7177ddfa32e1483ba5169ebea1bc2ea27f224853211ca669325648ca5642", "2b8cb601d9ba0381499db719ccc9dfbb2fbd16013f5ff096b1a68a4775576a04", "2c139daf167b96f21542248f8e0a06596c9b9a7a41c162cc5c9ee9f3833c93cd", "2fc546e6af49fb45b93bbe878dea4c48edc34083729c0abd09981fe55bdf7f91", "354fa32b02885e6dae925f1b5bbf842c333c1e11ea5453ddd67309dc31fdb40a", "37e685986cf6f8144607f90340cff72d36acf654f3653a6c47b84c5c38d00df7", "3af610ee72efbe644e19d5eaad575c73fb83026192114e5f6719f4901097fce2", "3b919fc9cf508f13b929a9b274c40786036b31ad28657819b3b9ba44ba651f50", "3dd289368bbd064974d9a5961101f080e939cbe051e6689a193c99fb6e9ac89b", "491de7acc423e871a814500eb2dcea8aa66c4a4b1b4825d18f756cdf58e370cb", "495511fe5f10ccf4e3ed4fc0c48318f533654db6c47ecbc970b4ed215c791968", "65b41a5cda006cfa7c66eabbcf96aa704a6be2a5856095b9e2fd8c293bad2b46", "6c3258ffff58712818a233b9737fe4be943d306c40cf63d14ddc82ba563f483a", "75e3f0b12c28945c08f54350d91e624f8dd580ab74fd4f1bbea54bc6b0165610", "79b129fe65fdf3765440f7a73edaffc89ae9e7885d4e2adafe6aa37913a00fbb", 
"b1f329139ba647a9548aa05fb95d046b4a677643070dc2afc05fa2e975d09ca5", "c206f47cbf9f32b573c9885f0ec813d2622976cf5effcf7e472344bc2e020ac1", "d8e238f20bcf70063ee8691d4a72162bcec1f4c38f83c93e6851e72ad545dabb", "ee9625fc8ee164902dfbb0ff932b26df112da9f871c32f0f9c1bcf20c350fe2a", "fb2530b53c28f0d4d84990e945c2ebb470edb469d63e389bf02ff409012fe7c5", "feadb95170e45f439455354904768608e356c5b174ca30b3d11b0e3f24b5c0df"] 372 | six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] 373 | tqdm = ["d385c95361699e5cf7622485d9b9eae2d4864b21cd5a2374a9c381ffed701021", "e22977e3ebe961f72362f6ddfb9197cc531c9737aaf5f607ef09740c849ecd05"] 374 | urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] 375 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pytezos" 3 | version = "1.2.1" 4 | description = "Python utils for Tezos" 5 | authors = ["Arthur Breitman, Michael Zaikin "] 6 | license = "MIT" 7 | readme = "README.md" 8 | repository = "https://github.com/baking-bad/pytezos" 9 | keywords = ['tezos', 'crypto', 'blockchain', 'xtz'] 10 | 11 | [tool.poetry.dependencies] 12 | python = "^3.5" 13 | pysodium = "^0.7.1" 14 | pyblake2 = "^1.1.2" 15 | base58 = "^1.0.3" 16 | secp256k1 = "^0.13.2" 17 | requests = "^2.21.0" 18 | pendulum = "*" 19 | fastecdsa = "^1.7.1" 20 | loguru = "*" 21 | netstruct = "*" 22 | tqdm = "*" 23 | ply = "*" 24 | simplejson = "*" 25 | 26 | [tool.poetry.dev-dependencies] 27 | parameterized = "^0.7.0" 28 | pytest = "^3.0" 29 | pytest-cov = "^2.4" 30 | 31 | [build-system] 32 | requires = ["poetry>=0.12"] 33 | build-backend = "poetry.masonry.api" 34 | -------------------------------------------------------------------------------- 
/pytezos/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/murbard/pytezos/a228a67fbc94b11dd7dbc7ff0df9e996d0ff5f01/pytezos/__init__.py -------------------------------------------------------------------------------- /pytezos/crypto.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import pysodium 3 | import secp256k1 4 | import unicodedata 5 | from fastecdsa.ecdsa import sign, verify 6 | from fastecdsa.keys import get_public_key 7 | from fastecdsa.curve import P256 8 | from fastecdsa.encoding.util import int_to_bytes, bytes_to_int 9 | from fastecdsa.encoding.sec1 import SEC1Encoder 10 | from pyblake2 import blake2b 11 | 12 | from pytezos.encoding import scrub_input, base58_decode, base58_encode 13 | 14 | 15 | def blake2b_32(v=b''): 16 | return blake2b(scrub_input(v), digest_size=32) 17 | 18 | 19 | class Key(object): 20 | """ 21 | Represents a public or secret key for Tezos. Ed25519, Secp256k1 and P256 22 | are supported. 23 | """ 24 | def __init__(self, key: str, passphrase: str = None, email: str = None): 25 | """ 26 | Creates a key object from a base58 encoded key. 
27 | :param key: a public or secret key in base58 encoding, or a 15 word bip39 english mnemonic 28 | :param passphrase: the passphrase used if the key provided is an encrypted private key or a fundraiser key 29 | :param email: email used if a fundraiser key is passed 30 | """ 31 | key = scrub_input(key) 32 | 33 | if email: 34 | if not passphrase: 35 | raise Exception("Fundraiser key provided without a passphrase.") 36 | 37 | mnemonic = u' '.join(key).lower() 38 | passphrase = scrub_input(passphrase) 39 | # TODO check wordlist and checksum 40 | salt = unicodedata.normalize( 41 | "NFKD", (email + passphrase).decode("utf8")).encode("utf8") 42 | seed = hashlib.pbkdf2_hmac("sha512", mnemonic, "mnemonic" + salt, iterations=2048, dklen=64) 43 | 44 | self._public_key, self._secret_key = pysodium.crypto_sign_seed_keypair(seed[:32]) 45 | self.curve = b"ed" 46 | self.is_secret = True 47 | del passphrase 48 | return 49 | 50 | self.curve = key[:2] # "sp", "p2" "ed" 51 | if self.curve not in [b"sp", b"p2", b"ed"]: 52 | raise ValueError("Invalid prefix for a key encoding.") 53 | 54 | if not len(key) in [54, 55, 88, 98]: 55 | raise ValueError("Invalid length for a key encoding.") 56 | 57 | encrypted = (key[2:3] == b'e') 58 | 59 | public_or_secret = key[3:5] if encrypted else key[2:4] 60 | if public_or_secret not in [b"pk", b"sk"]: 61 | raise Exception("Invalid prefix for a key encoding.") 62 | 63 | self.is_secret = (public_or_secret == b"sk") 64 | 65 | key = base58_decode(key) 66 | 67 | if encrypted: 68 | if not passphrase: 69 | raise ValueError("Encrypted key provided without a passphrase.") 70 | 71 | salt, encrypted_sk = key[:8], key[8:] 72 | encryption_key = hashlib.pbkdf2_hmac( 73 | hash_name="sha512", 74 | password=scrub_input(passphrase), 75 | salt=salt, 76 | iterations=32768, 77 | dklen=32 78 | ) 79 | key = pysodium.crypto_secretbox_open( 80 | c=encrypted_sk, nonce=b'\000' * 24, k=encryption_key) 81 | del passphrase 82 | 83 | if not self.is_secret: 84 | self._public_key = 
key 85 | self._secret_key = None 86 | else: 87 | self._secret_key = key 88 | # Ed25519 89 | if self.curve == b"ed": 90 | # Dealing with secret key or seed? 91 | if len(key) == 64: 92 | self._public_key = pysodium.crypto_sign_sk_to_pk(sk=key) 93 | else: 94 | self._public_key, self._secret_key = pysodium.crypto_sign_seed_keypair(seed=key) 95 | # Secp256k1 96 | elif self.curve == b"sp": 97 | sk = secp256k1.PrivateKey(key) 98 | self._public_key = sk.pubkey.serialize() 99 | # P256 100 | elif self.curve == b"p2": 101 | pk = get_public_key(bytes_to_int(self._secret_key), curve=P256) 102 | self._public_key = SEC1Encoder.encode_public_key(pk) 103 | else: 104 | assert False 105 | 106 | def public_key(self): 107 | """ 108 | :return: the public key associated with the private key 109 | """ 110 | return base58_encode(self._public_key, self.curve + b'pk').decode() 111 | 112 | def secret_key(self, passphrase=None): 113 | """ 114 | :param passphrase: encryption phrase for the private key 115 | :return: the secret key associated with this key, if available 116 | """ 117 | if not self._secret_key: 118 | raise ValueError("Secret key not known.") 119 | 120 | if self.curve == b'ed': 121 | key = pysodium.crypto_sign_sk_to_seed(self._secret_key) 122 | else: 123 | key = self._secret_key 124 | 125 | if passphrase: 126 | salt = pysodium.randombytes(8) 127 | encryption_key = hashlib.pbkdf2_hmac( 128 | hash_name="sha512", 129 | password=scrub_input(passphrase), 130 | salt=salt, 131 | iterations=32768, 132 | dklen=32 133 | ) 134 | encrypted_sk = pysodium.crypto_secretbox( 135 | msg=key, nonce=b'\000' * 24, k=encryption_key) 136 | key = salt + encrypted_sk # we have to combine salt and encrypted key in order to decrypt later 137 | prefix = self.curve + b'esk' 138 | else: 139 | prefix = self.curve + b'sk' 140 | 141 | return base58_encode(key, prefix).decode() 142 | 143 | def public_key_hash(self): 144 | """ 145 | Public key hash for this key. 
146 | :return: the public key hash for this key 147 | """ 148 | pkh = blake2b(data=self._public_key, digest_size=20).digest() 149 | prefix = {b'ed': b'tz1', b'sp': b'tz2', b'p2': b'tz3'}[self.curve] 150 | return base58_encode(pkh, prefix).decode() 151 | 152 | def sign(self, message, generic=False): 153 | """ 154 | Sign a raw sequence of bytes 155 | :param message: sequence of bytes, raw format or hexadecimal notation 156 | :param generic: do not specify elliptic curve if set to True 157 | :return: signature in base58 encoding 158 | """ 159 | message = scrub_input(message) 160 | 161 | if not self.is_secret: 162 | raise ValueError("Cannot sign without a secret key.") 163 | 164 | # Ed25519 165 | if self.curve == b"ed": 166 | digest = pysodium.crypto_generichash(message) 167 | signature = pysodium.crypto_sign_detached(digest, self._secret_key) 168 | # Secp256k1 169 | elif self.curve == b"sp": 170 | pk = secp256k1.PrivateKey(self._secret_key) 171 | signature = pk.ecdsa_serialize_compact( 172 | pk.ecdsa_sign(message, digest=blake2b_32)) 173 | # P256 174 | elif self.curve == b"p2": 175 | r, s = sign(msg=message, d=bytes_to_int(self._secret_key), hashfunc=blake2b_32) 176 | signature = int_to_bytes(r) + int_to_bytes(s) 177 | else: 178 | assert False 179 | 180 | if generic: 181 | prefix = b'sig' 182 | else: 183 | prefix = self.curve + b'sig' 184 | 185 | return base58_encode(signature, prefix).decode() 186 | 187 | def verify(self, signature, message): 188 | """ 189 | Verify signature, raise exception if it is not valid 190 | :param message: sequance of bytes, raw format or hexadecimal notation 191 | :param signature: a signature in base58 encoding 192 | """ 193 | signature = scrub_input(signature) 194 | message = scrub_input(message) 195 | 196 | if not self._public_key: 197 | raise ValueError("Cannot verify without a public key") 198 | 199 | if signature[:3] != b'sig': # not generic 200 | if self.curve != signature[:2]: # "sp", "p2" "ed" 201 | raise ValueError("Signature and 
public key curves mismatch.") 202 | 203 | signature = base58_decode(signature) 204 | 205 | # Ed25519 206 | if self.curve == b"ed": 207 | digest = pysodium.crypto_generichash(message) 208 | try: 209 | pysodium.crypto_sign_verify_detached(signature, digest, self._public_key) 210 | except ValueError: 211 | raise ValueError('Signature is invalid.') 212 | # Secp256k1 213 | elif self.curve == b"sp": 214 | pk = secp256k1.PublicKey(self._public_key, raw=True) 215 | sig = pk.ecdsa_deserialize_compact(signature) 216 | if not pk.ecdsa_verify(message, sig, digest=blake2b_32): 217 | raise ValueError('Signature is invalid.') 218 | # P256 219 | elif self.curve == b"p2": 220 | pk = SEC1Encoder.decode_public_key(self._public_key, curve=P256) 221 | r, s = bytes_to_int(signature[:32]), bytes_to_int(signature[32:]) 222 | if not verify(sig=(r, s), msg=message, Q=pk, hashfunc=blake2b_32): 223 | raise ValueError('Signature is invalid.') 224 | else: 225 | assert False 226 | -------------------------------------------------------------------------------- /pytezos/encoding.py: -------------------------------------------------------------------------------- 1 | import base58 2 | 3 | 4 | def tb(l): 5 | return b''.join(map(lambda x: x.to_bytes(1, 'big'), l)) 6 | 7 | 8 | base58_encodings = [ 9 | # Encoded | Decoded | 10 | # prefix | len | prefix | len | Data type 11 | (b"B", 51, tb([1, 52]), 32, u"block hash"), 12 | (b"o", 51, tb([5, 116]), 32, u"operation hash"), 13 | (b"Lo", 52, tb([133, 233]), 32, u"operation list hash"), 14 | (b"LLo", 53, tb([29, 159, 109]), 32, u"operation list list hash"), 15 | (b"P", 51, tb([2, 170]), 32, u"protocol hash"), 16 | (b"Co", 52, tb([79, 199]), 32, u"context hash"), 17 | 18 | (b"tz1", 36, tb([6, 161, 159]), 20, u"ed25519 public key hash"), 19 | (b"tz2", 36, tb([6, 161, 161]), 20, u"secp256k1 public key hash"), 20 | (b"tz3", 36, tb([6, 161, 164]), 20, u"p256 public key hash"), 21 | (b"KT1", 36, tb([2, 90, 121]), 20, u"Originated address"), 22 | 23 | (b"id", 30, 
tb([153, 103]), 16, u"cryptobox public key hash"), 24 | 25 | (b"edsk", 54, tb([13, 15, 58, 7]), 32, u"ed25519 seed"), 26 | (b"edpk", 54, tb([13, 15, 37, 217]), 32, u"ed25519 public key"), 27 | (b"spsk", 54, tb([17, 162, 224, 201]), 32, u"secp256k1 secret key"), 28 | (b"p2sk", 54, tb([16, 81, 238, 189]), 32, u"p256 secret key"), 29 | 30 | (b"edesk", 88, tb([7, 90, 60, 179, 41]), 56, u"ed25519 encrypted seed"), 31 | (b"spesk", 88, tb([9, 237, 241, 174, 150]), 56, u"secp256k1 encrypted secret key"), 32 | (b"p2esk", 88, tb([9, 48, 57, 115, 171]), 56, u"p256_encrypted_secret_key"), 33 | 34 | (b"sppk", 55, tb([3, 254, 226, 86]), 33, u"secp256k1 public key"), 35 | (b"p2pk", 55, tb([3, 178, 139, 127]), 33, u"p256 public key"), 36 | (b"SSp", 53, tb([38, 248, 136]), 33, u"secp256k1 scalar"), 37 | (b"GSp", 53, tb([5, 92, 0]), 33, u"secp256k1 element"), 38 | 39 | (b"edsk", 98, tb([43, 246, 78, 7]), 64, u"ed25519 secret key"), 40 | (b"edsig", 99, tb([9, 245, 205, 134, 18]), 64, u"ed25519 signature"), 41 | (b"spsig", 99, tb([13, 115, 101, 19, 63]), 64, u"secp256k1 signature"), 42 | (b"p2sig", 98, tb([54, 240, 44, 52]), 64, u"p256 signature"), 43 | (b"sig", 96, tb([4, 130, 43]), 64, u"generic signature"), 44 | 45 | (b'Net', 15, tb([87, 82, 0]), 4, u"chain id") 46 | ] 47 | 48 | 49 | def scrub_input(v) -> bytes: 50 | if isinstance(v, str) and not isinstance(v, bytes): 51 | try: 52 | _ = int(v, 16) 53 | except ValueError: 54 | v = v.encode('ascii') 55 | else: 56 | if v.startswith('0x'): 57 | v = v[2:] 58 | v = bytes.fromhex(v) 59 | 60 | if not isinstance(v, bytes): 61 | raise TypeError( 62 | "a bytes-like object is required (also str), not '%s'" % 63 | type(v).__name__) 64 | 65 | return v 66 | 67 | 68 | def base58_decode(v: bytes) -> bytes: 69 | try: 70 | prefix_len = next( 71 | len(encoding[2]) 72 | for encoding in base58_encodings 73 | if len(v) == encoding[1] and v.startswith(encoding[0]) 74 | ) 75 | except StopIteration: 76 | raise ValueError('Invalid encoding, prefix or length 
mismatch.') 77 | 78 | return base58.b58decode_check(v)[prefix_len:] 79 | 80 | 81 | def base58_encode(v: bytes, prefix: bytes) -> bytes: 82 | try: 83 | encoding = next( 84 | encoding 85 | for encoding in base58_encodings 86 | if len(v) == encoding[3] and prefix == encoding[0] 87 | ) 88 | except StopIteration: 89 | raise ValueError('Invalid encoding, prefix or length mismatch.') 90 | 91 | return base58.b58encode_check(encoding[2] + v) 92 | 93 | 94 | def _validate(v, prefixes: list): 95 | v = scrub_input(v) 96 | if any(map(lambda x: v.startswith(x), prefixes)): 97 | base58_decode(v) 98 | else: 99 | raise ValueError('Unknown prefix.') 100 | 101 | 102 | def validate_pkh(v): 103 | return _validate(v, prefixes=[b'tz1', b'tz2', b'tz3']) 104 | 105 | 106 | def validate_sig(v): 107 | return _validate(v, prefixes=[b'edsig', b'spsig', b'p2sig', b'sig']) 108 | 109 | 110 | def is_pkh(v) -> bool: 111 | try: 112 | validate_pkh(v) 113 | except ValueError: 114 | return False 115 | return True 116 | 117 | 118 | def is_sig(v) -> bool: 119 | try: 120 | validate_sig(v) 121 | except ValueError: 122 | return False 123 | return True 124 | 125 | 126 | def is_bh(v) -> bool: 127 | try: 128 | _validate(v, prefixes=[b'B']) 129 | except ValueError: 130 | return False 131 | return True 132 | -------------------------------------------------------------------------------- /pytezos/micheline/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/murbard/pytezos/a228a67fbc94b11dd7dbc7ff0df9e996d0ff5f01/pytezos/micheline/__init__.py -------------------------------------------------------------------------------- /pytezos/micheline/grammar.py: -------------------------------------------------------------------------------- 1 | from ply.lex import Lexer, lex 2 | from ply.yacc import yacc 3 | import re 4 | 5 | # Inspired by https://github.com/jansorg/tezos-intellij/blob/master/grammar/michelson.bnf 6 | 7 | 8 | class 
SimpleMichelsonLexer(Lexer): 9 | tokens = ( 10 | 'PRIM', 'INT', 'BYTE', 'STR', 11 | 'LEFT_CURLY', 'RIGHT_CURLY', 'LEFT_PAREN', 'RIGHT_PAREN', 'SEMI', 12 | 'COMMENT', 'MULTI_COMMENT', 'ANNOT' 13 | ) 14 | t_PRIM = r'[A-Za-z_]+' 15 | t_INT = r'-?[0-9]+' 16 | t_BYTE = r'0x[A-Fa-f0-9]+' 17 | t_STR = r'\"[^\"]*\"' 18 | t_LEFT_CURLY = r'\{' 19 | t_RIGHT_CURLY = r'\}' 20 | t_LEFT_PAREN = r'\(' 21 | t_RIGHT_PAREN = r'\)' 22 | t_SEMI = r';' 23 | t_COMMENT = r'#[^\n]+' 24 | t_MULTI_COMMENT = r'/"\* ~\*"/' 25 | t_ANNOT = r'[:@%](@|%|%%|[_a-zA-Z][_0-9a-zA-Z\.]*)?' 26 | t_ignore = ' \r\n\t\f' 27 | 28 | def __init__(self): 29 | super(SimpleMichelsonLexer, self).__init__() 30 | self.lexer = lex(module=self, reflags=re.DOTALL) 31 | 32 | 33 | class MichelineParser(object): 34 | tokens = SimpleMichelsonLexer.tokens 35 | 36 | def p_instr(self, p): 37 | '''instr : expr 38 | | empty 39 | ''' 40 | p[0] = p[1] 41 | 42 | def p_instr_list(self, p): 43 | '''instr : instr SEMI instr''' 44 | p[0] = list() 45 | for i in [p[1], p[3]]: 46 | if isinstance(i, list): 47 | p[0].extend(i) 48 | elif i is not None: 49 | p[0].append(i) 50 | 51 | def p_instr_subseq(self, p): 52 | '''instr : LEFT_CURLY instr RIGHT_CURLY''' 53 | p[0] = list() 54 | if p[2] is not None: 55 | p[0].append(p[2]) 56 | 57 | def p_expr(self, p): 58 | '''expr : PRIM annots args''' 59 | p[0] = {'prim': p[1]} 60 | if p[2]: 61 | p[0]['annots'] = p[2] 62 | if p[3]: 63 | p[0]['args'] = p[3] 64 | 65 | def p_annots(self, p): 66 | '''annots : annot 67 | | empty 68 | ''' 69 | if p[1] is not None: 70 | p[0] = [p[1]] 71 | 72 | def p_annots_list(self, p): 73 | '''annots : annots annot''' 74 | p[0] = list() 75 | if isinstance(p[1], list): 76 | p[0].extend(p[1]) 77 | if p[2] is not None: 78 | p[0].append(p[2]) 79 | 80 | def p_annot(self, p): 81 | '''annot : ANNOT''' 82 | p[0] = p[1] 83 | 84 | def p_args(self, p): 85 | '''args : arg 86 | | empty 87 | ''' 88 | p[0] = list() 89 | if p[1] is not None: 90 | p[0].append(p[1]) 91 | 92 | def 
p_args_list(self, p): 93 | '''args : args arg''' 94 | p[0] = list() 95 | if isinstance(p[1], list): 96 | p[0].extend(p[1]) 97 | if p[2] is not None: 98 | p[0].append(p[2]) 99 | 100 | def p_arg_prim(self, p): 101 | '''arg : PRIM''' 102 | p[0] = {'prim': p[1]} 103 | 104 | def p_arg_int(self, p): 105 | '''arg : INT''' 106 | p[0] = {'int': p[1]} 107 | 108 | def p_arg_byte(self, p): 109 | '''arg : BYTE''' 110 | p[0] = {'bytes': p[1][2:]} # strip 0x prefix 111 | 112 | def p_arg_str(self, p): 113 | '''arg : STR''' 114 | p[0] = {'string': p[1].strip('"')} 115 | 116 | def p_arg_subseq(self, p): 117 | '''arg : LEFT_CURLY instr RIGHT_CURLY''' 118 | if isinstance(p[2], list): 119 | p[0] = p[2] 120 | elif p[2] is not None: 121 | p[0] = [p[2]] 122 | else: 123 | p[0] = [] 124 | 125 | def p_arg_group(self, p): 126 | '''arg : LEFT_PAREN expr RIGHT_PAREN''' 127 | p[0] = p[2] 128 | 129 | def p_empty(self, p): 130 | '''empty :''' 131 | 132 | def __init__(self, debug=False, write_tables=False): 133 | self.lexer = SimpleMichelsonLexer() 134 | self.parser = yacc( 135 | module=self, 136 | debug=debug, 137 | write_tables=write_tables, 138 | ) 139 | 140 | def parse(self, code): 141 | return self.parser.parse(code) 142 | -------------------------------------------------------------------------------- /pytezos/micheline/schema.py: -------------------------------------------------------------------------------- 1 | import pendulum 2 | from decimal import Decimal 3 | from collections import namedtuple 4 | 5 | from pytezos.encoding import base58_encode 6 | 7 | Schema = namedtuple('Schema', ['type_map', 'collapsed_tree']) 8 | 9 | 10 | def flatten(items, itemtype): 11 | if isinstance(items, itemtype): 12 | if len(items) == 0: 13 | return itemtype() 14 | first, rest = items[0], items[1:] 15 | return flatten(first, itemtype) + flatten(rest, itemtype) 16 | else: 17 | return itemtype([items]) 18 | 19 | 20 | def make_dict(**kwargs) -> dict: 21 | return {k: v for k, v in kwargs.items() if v} 22 | 23 | 
24 | def decode_literal(node, prim): 25 | core_type, value = next(iter(node.items())) 26 | if prim in ['int', 'nat']: 27 | return int(value) 28 | if prim == 'timestamp': 29 | if core_type == 'int': 30 | return pendulum.from_timestamp(int(value)) 31 | else: 32 | return pendulum.parse(value) 33 | if prim == 'mutez': 34 | return Decimal(value) / 10 ** 6 35 | if prim == 'bool': 36 | return value == 'True' 37 | if prim == 'address' and core_type == 'bytes': 38 | prefix = {'0000': b'tz1', '0001': b'tz2', '0002': b'tz3'} # TODO: check it's ttr 39 | return base58_encode(bytes.fromhex(value[4:]), prefix[value[:4]]).decode() 40 | return value 41 | 42 | 43 | def encode_literal(value, prim, binary=False): 44 | if prim in ['int', 'nat']: 45 | core_type = 'int' 46 | value = str(value) 47 | elif prim == 'timestamp': 48 | core_type = 'string' 49 | if isinstance(value, int): 50 | value = pendulum.from_timestamp(value) 51 | if isinstance(value, pendulum.DateTime): 52 | value = value.strftime('%Y-%m-%dT%H:%M:%SZ') 53 | elif prim == 'mutez': 54 | core_type = 'int' 55 | if isinstance(value, Decimal): 56 | value = int(value * 10 ** 6) 57 | if isinstance(value, int): 58 | value = str(value) 59 | elif prim == 'bool': 60 | core_type = 'prim' 61 | value = 'True' if value else 'False' 62 | elif prim == 'bytes': 63 | core_type = 'bytes' 64 | else: 65 | core_type = 'string' 66 | value = str(value) 67 | 68 | return {core_type: value} 69 | 70 | 71 | def build_schema(code) -> Schema: 72 | type_map = dict() 73 | 74 | def get_annotation(x, prefix, default=None): 75 | return next((a[1:] for a in x.get('annots', []) if a[0] == prefix), default) 76 | 77 | def parse_node(node, path='0', nested=None): 78 | if node['prim'] in ['storage', 'parameter']: 79 | return parse_node(node['args'][0]) 80 | 81 | type_map[path] = dict(prim=node['prim']) 82 | typename = get_annotation(node, ':') 83 | 84 | args = [ 85 | parse_node(arg, path=path + str(i), nested=node['prim']) 86 | for i, arg in 
enumerate(node.get('args', [])) 87 | ] 88 | 89 | if node['prim'] in ['pair', 'or']: 90 | if typename or nested != node['prim']: 91 | args = flatten(args, list) # TODO: pair/or conflicts? 92 | props = list(map(lambda x: x.get('name'), args)) 93 | if all(props): 94 | type_map[path]['props'] = props 95 | if typename: 96 | type_map[path]['name'] = typename 97 | else: 98 | return args 99 | 100 | return make_dict( 101 | prim=node['prim'], 102 | path=path, 103 | args=args, 104 | name=get_annotation(node, '%', typename), 105 | ) 106 | 107 | collapsed_tree = parse_node(code) 108 | return Schema(type_map, collapsed_tree) 109 | 110 | 111 | def decode_data(data, schema: Schema, annotations=True, literals=True, root='0'): 112 | def decode_node(node, path='0'): 113 | type_info = schema.type_map.get(path, {}) 114 | if isinstance(node, dict): 115 | args = ( 116 | decode_node(arg, path + str(i)) 117 | for i, arg in enumerate(node.get('args', [])) 118 | ) 119 | if node.get('prim') == 'Pair': 120 | res = flatten(tuple(args), tuple) 121 | if type_info.get('props') and annotations: 122 | res = dict(zip(type_info['props'], res)) 123 | 124 | elif node.get('prim') in ['Left', 'Right']: 125 | index = {'Left': 0, 'Right': 1}[node['prim']] 126 | value = decode_node(node['args'][0], path + str(index)) 127 | if type_info.get('props') and annotations: 128 | res = {type_info['props'][index]: value} 129 | else: 130 | res = {index: value} 131 | 132 | elif node.get('prim') == 'Elt': 133 | res = list(args) 134 | elif node.get('prim') == 'Right': 135 | res = decode_node(node['args'][0], path + '1') 136 | elif node.get('prim') == 'Some': 137 | res = next(iter(args)) 138 | elif node.get('prim') == 'None': 139 | res = None 140 | elif literals: 141 | res = decode_literal(node, type_info['prim']) 142 | else: 143 | _, res = next(iter(node.items())) 144 | 145 | elif isinstance(node, list): 146 | if type_info['prim'] in ['map', 'big_map']: 147 | res = dict(decode_node(item, path) for item in node) 148 | 
else: 149 | args = (decode_node(item, path + '0') for item in node) 150 | if type_info['prim'] == 'set': 151 | res = set(args) 152 | elif type_info['prim'] == 'list': 153 | res = list(args) 154 | else: 155 | raise ValueError(node, type_info) 156 | else: 157 | raise ValueError(node, type_info) 158 | 159 | return res 160 | 161 | return decode_node(data, root) 162 | 163 | 164 | def build_value_map(data, schema: Schema, root='0') -> dict: 165 | value_map = dict() 166 | 167 | def find_root(node): 168 | if node['path'] == root: 169 | return node 170 | for arg in node.get('args', []): 171 | res = find_root(arg) 172 | if res: 173 | return res 174 | return None 175 | 176 | def parse_value(node, node_info, is_element=False): 177 | if node_info['prim'] == 'pair': 178 | values = node 179 | if isinstance(node, dict): # props 180 | values = [node[arg['name']] for arg in node_info['args']] 181 | for i, arg_info in enumerate(node_info['args']): 182 | parse_value(values[i], arg_info, is_element) 183 | 184 | elif node_info['prim'] in ['map', 'big_map']: 185 | value_map[node_info['path']] = len(node) 186 | for key, value in node.items(): 187 | parse_value(key, node_info['args'][0], True) 188 | parse_value(value, node_info['args'][1], True) 189 | 190 | elif node_info['prim'] in ['set', 'list']: 191 | value_map[node_info['path']] = len(node) 192 | for value in node: 193 | parse_value(value, node_info['args'][0], True) 194 | 195 | elif node_info['prim'] == 'or': 196 | key, value = next(iter(node.items())) 197 | if isinstance(key, str): 198 | key, arg_info = next( 199 | (i, arg) 200 | for i, arg in enumerate(node_info['args']) 201 | if arg['name'] == key 202 | ) 203 | else: 204 | arg_info = node_info['args'][key] 205 | parse_value(value, arg_info, is_element) 206 | 207 | elif node_info['prim'] == 'contract': 208 | parse_value(node, node_info['args'][0], is_element) 209 | 210 | elif node_info['prim'] == 'option': 211 | parse_value(node, node_info['args'][0], is_element) 212 | 213 | elif 
is_element: 214 | value_map[node_info['path']] = value_map.get(node_info['path'], []) + [node] 215 | else: 216 | value_map[node_info['path']] = node 217 | 218 | root_node = find_root(schema.collapsed_tree) 219 | parse_value(data, root_node) 220 | return value_map 221 | 222 | 223 | def encode_data(data, schema: Schema, binary=False, root='0'): 224 | value_map = build_value_map(data, schema, root=root) 225 | 226 | def get_value(path, index=None): 227 | if path not in value_map: 228 | raise KeyError(path) 229 | value = value_map[path] 230 | if index is not None: 231 | return value[index] 232 | return value 233 | 234 | def encode_node(path='0', index=None): 235 | type_info = schema.type_map[path] 236 | if type_info['prim'] == 'pair': 237 | return dict( 238 | prim='Pair', 239 | args=list(map(lambda x: encode_node(path + x, index), '01')) 240 | ) 241 | elif type_info['prim'] in ['map', 'big_map']: 242 | return [ 243 | dict( 244 | prim='Elt', 245 | args=[encode_node(path + '0', i), encode_node(path + '1', i)] 246 | ) 247 | for i in range(get_value(path)) 248 | ] 249 | elif type_info['prim'] in ['set', 'list']: 250 | return [ 251 | encode_node(path + '0', i) 252 | for i in range(get_value(path)) 253 | ] 254 | elif type_info['prim'] == 'or': 255 | for i in [0, 1]: 256 | try: 257 | return dict( 258 | prim={0: 'Left', 1: 'Right'}[i], 259 | args=[encode_node(path + str(i), index)] 260 | ) 261 | except KeyError: 262 | continue 263 | elif type_info['prim'] == 'option': 264 | arg = encode_node(path + '0', index) 265 | if arg is None: 266 | return dict(prim='None') 267 | else: 268 | return dict(prim='Some', args=[arg]) 269 | 270 | return encode_literal( 271 | value=get_value(path, index), 272 | prim=type_info['prim'], 273 | binary=binary 274 | ) 275 | 276 | return encode_node(root) 277 | 278 | 279 | def decode_schema(schema: Schema): 280 | def decode_node(node): 281 | if node['prim'] == 'or': 282 | return { 283 | arg.get('name', str(i)): decode_node(arg) 284 | for i, arg in 
enumerate(node['args']) 285 | } 286 | if node['prim'] == 'pair': 287 | args = list(map(lambda x: (x.get('name'), decode_node(x)), node['args'])) 288 | names, values = zip(*args) 289 | return dict(args) if all(names) else values 290 | if node['prim'] == 'set': 291 | return {decode_node(node['args'][0])} 292 | if node['prim'] == 'list': 293 | return [decode_node(node['args'][0])] 294 | if node['prim'] in {'map', 'big_map'}: 295 | return {decode_node(node['args'][0]): decode_node(node['args'][1])} 296 | 297 | return f'#{node["prim"]}' 298 | 299 | return decode_node(schema.collapsed_tree) 300 | -------------------------------------------------------------------------------- /pytezos/rpc/__init__.py: -------------------------------------------------------------------------------- 1 | from pytezos.rpc.shell import Shell 2 | from pytezos.rpc.node import Node, public_nodes 3 | 4 | mainnet = Shell(Node(public_nodes['mainnet'][0])) 5 | alphanet = Shell(Node(public_nodes['alphanet'][0])) 6 | zeronet = Shell(Node(public_nodes['zeronet'][0])) 7 | -------------------------------------------------------------------------------- /pytezos/rpc/block.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | from functools import lru_cache 3 | from pendulum.parsing.exceptions import ParserError 4 | import pendulum 5 | import os 6 | 7 | from pytezos.rpc.context import Context 8 | from pytezos.rpc.node import RpcQuery, urljoin 9 | from pytezos.rpc.operation import Operation, OperationListList 10 | from pytezos.rpc.votes import Votes 11 | from pytezos.rpc.helpers import HelpersMixin 12 | from pytezos.crypto import blake2b_32, Key 13 | from pytezos.encoding import base58_encode, is_bh 14 | 15 | 16 | def to_timestamp(v): 17 | try: 18 | v = pendulum.parse(v) 19 | except ParserError: 20 | pass 21 | if isinstance(v, datetime): 22 | v = int(v.timestamp()) 23 | return v 24 | 25 | 26 | class BlockListList(RpcQuery): 27 | 28 | def 
__call__(self, length=1, head=None, min_date=None): 29 | if isinstance(head, str) and not is_bh(head): 30 | head = self.__getitem__(head).calculate_hash() 31 | 32 | if min_date and not isinstance(min_date, int): 33 | min_date = to_timestamp(min_date) 34 | 35 | return super(BlockListList, self).__call__(length=length, head=head, min_date=min_date) 36 | 37 | def __getitem__(self, item): 38 | if isinstance(item, slice): 39 | if not isinstance(item.start, int): 40 | raise NotImplementedError('Slice start should be an integer.') 41 | 42 | if item.stop is None: 43 | block_id = 'head' 44 | elif isinstance(item.stop, int): 45 | if item.stop < 0: 46 | block_id = f'head~{abs(item.stop)}' 47 | else: 48 | block_id = item.stop 49 | else: 50 | raise NotImplementedError('Slice stop can be an integer or None.') 51 | 52 | header = self.__getitem__(block_id).header() 53 | 54 | if item.start < 0: 55 | length = abs(item.start) 56 | if isinstance(item.stop, int) and item.stop < 0: 57 | length -= abs(item.stop) 58 | else: 59 | length = header['level'] - item.start 60 | 61 | return self.__call__(length=length, head=header['hash']) 62 | 63 | return super(BlockListList, self).__getitem__(item) 64 | 65 | 66 | class BlockHeader(RpcQuery, HelpersMixin): 67 | 68 | def __init__(self, *args, **kwargs): 69 | super(BlockHeader, self).__init__( 70 | properties=['shell', 'protocol_data', 'raw'], 71 | *args, **kwargs) 72 | 73 | def watermark(self): 74 | return '01' + self.get_chain_watermark() 75 | 76 | def unsigned_data(self): 77 | data = self.shell() 78 | data['protocol_data'] = self.protocol_data.signed_bytes()[:-128] 79 | return data 80 | 81 | def unsigned_bytes(self): 82 | return self.watermark() + self.forge() 83 | 84 | def calculate_hash(self): 85 | hash_digest = blake2b_32(self.raw()).digest() 86 | return base58_encode(hash_digest, b'B').decode() 87 | 88 | def calculate_pow_stamp(self): 89 | hash_digest = blake2b_32(self.forge() + '0' * 128).digest() 90 | return int.from_bytes(hash_digest, 
byteorder='big') 91 | 92 | def forge(self): 93 | data = self._node.post( 94 | path='chains/main/blocks/head/helpers/forge_block_header', 95 | json=self.unsigned_data() 96 | ) 97 | return data['block'] 98 | 99 | 100 | class Block(RpcQuery, HelpersMixin): 101 | 102 | def __init__(self, *args, **kwargs): 103 | kwargs.update( 104 | cache='head' not in kwargs.get('path', ''), 105 | block_id=os.path.basename(kwargs.get('path', '')) 106 | ) 107 | super(Block, self).__init__( 108 | properties={ 109 | 'hash': RpcQuery, 110 | 'header': BlockHeader, 111 | 'context': Context, 112 | 'metadata': RpcQuery 113 | }, 114 | *args, **kwargs) 115 | 116 | @property 117 | @lru_cache(maxsize=None) 118 | def operations(self): 119 | return OperationListList( 120 | path=f'{self._path}/operations', 121 | node=self._node, 122 | child_class=Operation, 123 | **self._kwargs 124 | ) 125 | 126 | @property 127 | @lru_cache(maxsize=None) 128 | def operation_hashes(self): 129 | return OperationListList( 130 | path=f'{self._path}/operation_hashes', 131 | node=self._node, 132 | child_class=RpcQuery, 133 | **self._kwargs 134 | ) 135 | 136 | @property 137 | def votes(self): 138 | return Votes( 139 | path=f'{self._path}/votes', 140 | node=self._node, 141 | **self._kwargs 142 | ) 143 | 144 | def freeze(self): 145 | """ 146 | Returns fixed-hash block, useful for aliases, like head, head~1, etc. 
147 | :return: Block instance with hash initialized 148 | """ 149 | return Block( 150 | path=urljoin(os.path.dirname(self._path), self.hash()), 151 | node=self._node, 152 | **self._kwargs 153 | ) 154 | 155 | @property 156 | def predecessor(self): 157 | return Block( 158 | path=urljoin(os.path.dirname(self._path), self.header.get('predecessor')), 159 | node=self._node, 160 | **self._kwargs 161 | ) 162 | 163 | def level(self) -> int: 164 | return self.metadata.get('level')['level'] 165 | 166 | def cycle(self) -> int: 167 | return self.metadata.get('level')['cycle'] 168 | 169 | def create_endorsement(self) -> Operation: 170 | header = self.header() 171 | return Operation(data={ 172 | 'branch': header['hash'], 173 | 'contents': [{ 174 | 'kind': 'endorsement', 175 | 'level': header['level'] 176 | }] 177 | }) 178 | 179 | def verify_signature(self): 180 | pk = self.get_public_key(self.metadata.get('baker')) 181 | Key(pk).verify(self.header.get('signature'), self.header.unsigned_bytes()) 182 | -------------------------------------------------------------------------------- /pytezos/rpc/chain.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | from typing import List 3 | import os 4 | 5 | from pytezos.rpc.node import RpcQuery 6 | from pytezos.rpc.block import Block, BlockListList 7 | from pytezos.rpc.operation import Operation 8 | 9 | 10 | class OperationsDict(RpcQuery): 11 | 12 | def _get_operations_list(self, key, kind=None): 13 | operations = self.get(key) 14 | 15 | if kind: 16 | if isinstance(kind, str): 17 | kind = {kind} 18 | elif isinstance(kind, list): 19 | kind = set(kind) 20 | 21 | operations = filter( 22 | lambda op: any(map( 23 | lambda x: x['kind'] in kind, op['contents'])), operations) 24 | 25 | return list(map( 26 | lambda x: Operation(data=x, node=self._node, **self._kwargs), 27 | operations 28 | )) 29 | 30 | def applied(self, kind=None) -> List[Operation]: 31 | """ 32 | :param kind: 
endorsement, seed_nonce_revelation, double_endorsement_evidence, double_baking_evidence, 33 | activate_account, proposals, ballot, reveal, transaction, origination, delegation 34 | :return: 35 | """ 36 | return self._get_operations_list('applied', kind=kind) 37 | 38 | def refused(self) -> List[Operation]: 39 | return self._get_operations_list('refused') 40 | 41 | def branch_delayed(self) -> List[Operation]: 42 | return self._get_operations_list('branch_delayed') 43 | 44 | def unprocessed(self) -> List[Operation]: 45 | return self._get_operations_list('unprocessed') 46 | 47 | 48 | class Mempool(RpcQuery): 49 | 50 | def __init__(self, *args, **kwargs): 51 | super(Mempool, self).__init__( 52 | properties={'pending_operations': OperationsDict}, 53 | *args, **kwargs) 54 | 55 | @property 56 | def filter(self, **kwargs): 57 | """ 58 | minimal_fees, minimal_nanotez_per_gas_unit, minimal_nanotez_per_byte 59 | :return: 60 | """ 61 | if kwargs: 62 | return self._node.post(f'{self._path}/filter', json=kwargs) 63 | return self._node.get(f'{self._path}/filter') 64 | 65 | 66 | class Chain(RpcQuery): 67 | 68 | def __init__(self, *args, **kwargs): 69 | kwargs.update( 70 | chain_id=os.path.basename(kwargs.get('path', '')) 71 | ) 72 | super(Chain, self).__init__( 73 | properties={'mempool': Mempool}, 74 | *args, **kwargs) 75 | 76 | @property 77 | @lru_cache(maxsize=None) 78 | def blocks(self): 79 | return BlockListList( 80 | path=f'{self._path}/blocks', 81 | node=self._node, 82 | child_class=Block, 83 | properties=['head', 'genesis'], 84 | **self._kwargs 85 | ) 86 | 87 | @property 88 | def head(self) -> Block: 89 | return self.blocks.head 90 | 91 | @property 92 | def genesis(self) -> Block: 93 | return self.blocks.genesis 94 | -------------------------------------------------------------------------------- /pytezos/rpc/context.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | 3 | from pytezos.rpc.contract 
class Context(RpcQuery):
    """Query wrapper for the block context (.../context)."""

    def __init__(self, *args, **kwargs):
        super(Context, self).__init__(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        # Shallow dump of the raw context; depth=1 keeps the response small.
        path = f'{self._path}/raw/json?depth=1'
        return self._node.get(path, cache=self._cache)

    @property
    @lru_cache(maxsize=None)
    def contracts(self):
        """
        Lazily built query over all contracts in the context.
        Attention: enumerating the result is very slow.
        :return: RpcQuery whose children are Contract instances
        """
        return RpcQuery(
            node=self._node,
            path=f'{self._path}/contracts',
            child_class=Contract,
            **self._kwargs
        )
for k, v in schema.type_map.items() if v['prim'] == 'big_map') 42 | key_path, val_path = root + '0', root + '1' 43 | return schema, key_path, val_path 44 | 45 | @classmethod 46 | def from_code(cls, code): 47 | return Contract(code=code) 48 | 49 | @classmethod 50 | def from_string(cls, text): 51 | code = micheline_parser.parse(text) 52 | return cls.from_code(code) 53 | 54 | @classmethod 55 | def from_file(cls, path): 56 | with open(path) as f: 57 | return cls.from_string(f.read()) 58 | 59 | def decode_storage(self, data=None, annotations=True, literals=True): 60 | if data is None: 61 | data = self.get('script')['storage'] 62 | 63 | return decode_data( 64 | data=data, 65 | schema=self._get_schema('storage'), 66 | annotations=annotations, 67 | literals=literals 68 | ) 69 | 70 | def encode_storage(self, data): 71 | return encode_data( 72 | data=data, 73 | schema=self._get_schema('storage') 74 | ) 75 | 76 | def decode_parameters(self, data, annotations=True, literals=True): 77 | return decode_data( 78 | data=data, 79 | schema=self._get_schema('parameter'), 80 | annotations=annotations, 81 | literals=literals 82 | ) 83 | 84 | def encode_parameters(self, data): 85 | return encode_data( 86 | data=data, 87 | schema=self._get_schema('parameter') 88 | ) 89 | 90 | def storage_schema(self): 91 | return decode_schema(self._get_schema('storage')) 92 | 93 | def parameter_schema(self): 94 | return decode_schema(self._get_schema('parameter')) 95 | 96 | def big_map_get(self, key): 97 | schema, key_path, val_path = self._get_big_map_schema() 98 | query = dict( 99 | key=encode_data(key, schema, root=key_path), 100 | type=schema.type_map[key_path] 101 | ) 102 | value = self._node.post(f'{self._path}/big_map_get', json=query) 103 | return decode_data(value, schema, root=val_path) 104 | 105 | def big_map_diff_decode(self, data): 106 | schema, key_path, val_path = self._get_big_map_schema() 107 | return { 108 | decode_data(item['key'], schema, root=key_path): 109 | 
decode_data(item['value'], schema, root=val_path) 110 | for item in data 111 | } 112 | -------------------------------------------------------------------------------- /pytezos/rpc/helpers.py: -------------------------------------------------------------------------------- 1 | from binascii import hexlify 2 | 3 | from pytezos.encoding import base58_decode 4 | from pytezos.rpc.node import RpcQuery 5 | 6 | 7 | class HelpersMixin: 8 | 9 | def get_chain_id(self, default='main'): 10 | assert isinstance(self, RpcQuery) 11 | return self._kwargs.get('chain_id', default) 12 | 13 | def get_block_id(self, default='head'): 14 | assert isinstance(self, RpcQuery) 15 | return self._kwargs.get('block_id', default) 16 | 17 | def get_manager_key(self, contract_id): 18 | assert isinstance(self, RpcQuery) 19 | return self._node.get(f'chains/{self.get_chain_id()}/blocks/{self.get_block_id()}' 20 | f'/context/contracts/{contract_id}/manager_key') 21 | 22 | def get_public_key(self, pkh): 23 | """ 24 | Public key by the public key hash 25 | :param pkh: public key hash, base58 encoded, i.e. 
def urljoin(*args):
    """
    Join URL fragments with single slashes.
    Each argument is stringified and stripped of leading/trailing slashes
    before joining, so duplicate separators never appear.
    :return: joined path; empty string when called with no arguments
    """
    parts = [str(arg).strip('/') for arg in args]
    return '/'.join(parts)
self._session = requests.Session() 32 | 33 | def __repr__(self): 34 | return f'{self._uri}' 35 | 36 | def _request(self, method, path, **kwargs): 37 | res = self._session.request( 38 | method=method, 39 | url=urljoin(self._uri, path), 40 | headers={'content-type': 'application/json'}, 41 | **kwargs 42 | ) 43 | if res.status_code != 200: 44 | raise RpcError(res) 45 | 46 | return res.json() 47 | 48 | def get(self, path, params=None, cache=False): 49 | if cache and path in self._cache: 50 | return self._cache[path] 51 | 52 | res = self._request('GET', path, params=params) 53 | if cache: 54 | self._cache[path] = res 55 | 56 | return res 57 | 58 | def post(self, path, json=None, cache=False): 59 | key = None 60 | if cache: 61 | key = sha1((path + str(json)).encode()).hexdigest() 62 | if key in self._cache: 63 | return self._cache[key] 64 | 65 | res = self._request('POST', path, json=json) 66 | if cache: 67 | self._cache[key] = res 68 | 69 | return res 70 | 71 | 72 | class RpcQuery: 73 | 74 | def __init__(self, path='', node=Node(), cache=False, child_class=None, properties=None, **kwargs): 75 | self._node = node 76 | self._path = path 77 | self._cache = cache 78 | self._child_class = child_class if child_class else RpcQuery 79 | self._kwargs = kwargs 80 | 81 | if isinstance(properties, dict): 82 | self._properties = properties 83 | elif isinstance(properties, list): 84 | self._properties = {x: self._child_class for x in properties} 85 | else: 86 | self._properties = dict() 87 | 88 | def __repr__(self): 89 | return self._path 90 | 91 | def __dir__(self): 92 | return sorted(list(super(RpcQuery, self).__dir__()) 93 | + list(self._properties.keys())) 94 | 95 | def __call__(self, *args, **kwargs): 96 | return self._node.get( 97 | path=self._path, 98 | params=kwargs, 99 | cache=self._cache 100 | ) 101 | 102 | @lru_cache(maxsize=None) 103 | def __getattr__(self, item): 104 | if not item.startswith('_'): 105 | child_class = self._properties.get(item, RpcQuery) 106 | return 
child_class( 107 | path=f'{self._path}/{item}', 108 | node=self._node, 109 | cache=self._cache, 110 | **self._kwargs 111 | ) 112 | raise AttributeError(item) 113 | 114 | @lru_cache(maxsize=None) 115 | def __getitem__(self, item): 116 | return self._child_class( 117 | path=f'{self._path}/{item}', 118 | node=self._node, 119 | cache=self._cache, 120 | **self._kwargs 121 | ) 122 | 123 | def get(self, key, default=None): 124 | data = self() 125 | if key in data: 126 | return data[key] 127 | if default is not None: 128 | return default 129 | raise KeyError(f'{key} is missing.') 130 | -------------------------------------------------------------------------------- /pytezos/rpc/operation.py: -------------------------------------------------------------------------------- 1 | from binascii import hexlify 2 | from functools import lru_cache 3 | from typing import List 4 | 5 | from pytezos.crypto import Key, blake2b_32 6 | from pytezos.encoding import base58_decode, base58_encode 7 | from pytezos.rpc.node import RpcQuery 8 | from pytezos.rpc.helpers import HelpersMixin 9 | 10 | 11 | def filter_contents(entity, kind): 12 | if isinstance(kind, str): 13 | kind = {kind} 14 | elif isinstance(kind, list): 15 | kind = set(kind) 16 | 17 | def iter_contents(op): 18 | for content in op['contents']: 19 | internal = content.get('metadata', {}).get('internal_operation_results', []) 20 | 21 | if not kind or content['kind'] in kind: 22 | if internal: 23 | del content['metadata']['internal_operation_results'] 24 | yield content 25 | 26 | for result in internal: 27 | if not kind or result['kind'] in kind: 28 | yield result 29 | 30 | if isinstance(entity, dict): 31 | return list(iter_contents(entity)) 32 | 33 | if isinstance(entity, list): 34 | contents = list() 35 | if len(entity) and isinstance(entity[0], list): 36 | for op_list in entity: 37 | for op in op_list: 38 | contents.extend(iter_contents(op)) 39 | else: 40 | for op in entity: 41 | contents.extend(iter_contents(op)) 42 | return 
    # NOTE(review): lru_cache on an instance method keys on `self` and keeps
    # every OperationListList alive for the cache's lifetime (ruff B019); it
    # also makes repeated lookups return the *same* child object — confirm
    # whether callers rely on that identity before changing the caching.
    @lru_cache(maxsize=None)
    def __getitem__(self, item):
        """
        Resolve a child query of a block's operations listing.
        :param item: (list_index, operation_index) tuple addressing a single
            operation group, or an int addressing one of the operation lists
        :raises NotImplementedError: for any other index type
        """
        if isinstance(item, tuple):
            # Single operation group: <path>/<list>/<index>, wrapped in child_class.
            # NOTE(review): unlike RpcQuery.__getitem__, **self._kwargs is not
            # propagated to the child here — confirm this is intentional.
            return self._child_class(
                path=f'{self._path}/{item[0]}/{item[1]}',
                node=self._node,
                cache=self._cache
            )
        elif isinstance(item, int):
            # Whole operation list; its children are single operation groups.
            return OperationList(
                path=f'{self._path}/{item}',
                node=self._node,
                cache=self._cache,
                child_class=self._child_class
            )
        else:
            raise NotImplementedError(item)
str(self._data) 119 | return super(Operation, self).__repr__() 120 | 121 | def __call__(self, *args, **kwargs): 122 | if self._data: 123 | return self._data 124 | return super(Operation, self).__call__(*args, **kwargs) 125 | 126 | @classmethod 127 | def from_data(cls, data: dict): 128 | return Operation(data) 129 | 130 | def watermark(self): 131 | content = self.get('contents')[0] 132 | kind = content['kind'] 133 | if kind in ['endorsement', 'seed_nonce_revelation']: 134 | return '02' + self.get_chain_watermark() 135 | if kind in ['transaction', 'origination', 'delegation', 'reveal', 'ballot', 'proposals', 'activate_account']: 136 | return '03' 137 | raise NotImplementedError(kind) 138 | 139 | def source(self): 140 | content = self.get('contents')[0] 141 | kind = content['kind'] 142 | if kind in ['endorsement']: 143 | return content['metadata']['delegate'] 144 | if kind == 'activate_account': 145 | return content['pkh'] 146 | if kind in ['transaction', 'origination', 'delegation', 'reveal', 'ballot', 'proposals']: 147 | return content['source'] 148 | raise NotImplementedError(f'Operation `{kind}` is anonymous.') 149 | 150 | def protocol(self): 151 | try: 152 | protocol = self.get('protocol') 153 | except KeyError: 154 | branch = self.get("branch") 155 | protocol = self._node.get(f'chains/main/blocks/{branch}/header').get('protocol') 156 | return protocol 157 | 158 | def unsigned_data(self) -> dict: 159 | operation = self() 160 | return { 161 | 'branch': operation['branch'], 162 | 'contents': [ 163 | {k: v for k, v in c.items() if k != 'metadata'} 164 | for c in operation['contents'] 165 | ] 166 | } 167 | 168 | def signed_data(self) -> dict: 169 | return { 170 | 'protocol': self.protocol(), 171 | 'signature': self.get('signature'), 172 | **self.unsigned_data() 173 | } 174 | 175 | def unsigned_bytes(self): 176 | return self.watermark() + self.forge() 177 | 178 | def signed_bytes(self): 179 | signature_bytes = hexlify(base58_decode(self.get('signature'))).decode() 180 
| return self.forge() + signature_bytes 181 | 182 | def calculate_hash(self): 183 | hash_digest = blake2b_32(self.signed_bytes()).digest() 184 | return base58_encode(hash_digest, b'o').decode() 185 | 186 | def forge(self): 187 | return self._node.post( 188 | path='chains/main/blocks/head/helpers/forge/operations', 189 | json=self.unsigned_data(), 190 | cache=True 191 | ) 192 | 193 | def sign(self, key): 194 | if isinstance(key, str): 195 | key = Key(key) 196 | if not isinstance(key, Key): 197 | raise ValueError('Base58 encoded secret key or Key instance required.') 198 | 199 | self._data['signature'] = key.sign(self.unsigned_bytes(), generic=True) 200 | return self._data['signature'] 201 | 202 | def preapply(self, branch=None): 203 | operation = self.signed_data() 204 | if branch is None: 205 | branch = operation['branch'] 206 | 207 | data = self._node.post( 208 | path=f'chains/main/blocks/{branch}/helpers/preapply/operations', 209 | json=[operation], 210 | cache=True 211 | ) 212 | self._data['contents'] = data[0]['contents'] 213 | return data 214 | 215 | def verify_signature(self): 216 | pk = self.get_public_key(self.source()) 217 | Key(pk).verify(self.get('signature'), self.unsigned_bytes()) 218 | 219 | def contents(self, kind=None): 220 | return filter_contents(self(), kind) 221 | -------------------------------------------------------------------------------- /pytezos/rpc/protocol.py: -------------------------------------------------------------------------------- 1 | import os 2 | import tarfile 3 | import requests 4 | import io 5 | import netstruct 6 | import simplejson as json 7 | from tempfile import TemporaryDirectory 8 | from functools import lru_cache 9 | from binascii import hexlify 10 | from collections import OrderedDict 11 | from typing import List, Tuple 12 | from tqdm import tqdm 13 | 14 | from pytezos.rpc.node import RpcQuery 15 | from pytezos.crypto import blake2b_32 16 | from pytezos.encoding import base58_encode 17 | from pytezos.tools.diff 
def files_to_tar(files: List[Tuple[str, str]], output_path=None):
    """
    Pack (filename, text) pairs into a tar archive.
    :param files: list of (filename, text) tuples
    :param output_path: path to write the archive to; a .gz/.bz2/.xz extension
        enables the corresponding compression. If None, the uncompressed
        archive is returned as bytes instead.
    :return: archive bytes when output_path is None, otherwise nothing
    """
    fileobj = io.BytesIO() if output_path is None else None

    mode = 'w'
    if output_path is not None:
        # Fix: os.path.basename(output_path) was previously called even when
        # output_path was None, raising TypeError on the in-memory path.
        # Also only map known compression suffixes to a tarfile mode, so a
        # name like `proto.v1.tar` no longer produces an invalid `w:tar`.
        ext = os.path.basename(output_path).split('.')[-1]
        if ext in ('gz', 'bz2', 'xz'):
            mode = f'w:{ext}'

    with tarfile.open(name=output_path, fileobj=fileobj, mode=mode) as tar:
        for filename, text in files:
            payload = io.BytesIO(text.encode())
            info = tarfile.TarInfo(filename)
            info.size = len(payload.getvalue())
            tar.addfile(info, payload)

    if fileobj:
        return fileobj.getvalue()
implementation from various sources and converts it to the RPC-like format 158 | :param uri: link/path to a tar archive or path to a folder with extracted contents 159 | :return: Protocol instance 160 | """ 161 | if uri.startswith('http'): 162 | files = url_to_files(uri) 163 | elif os.path.isfile(uri): 164 | files = tar_to_files(uri) 165 | elif os.path.isdir(uri): 166 | files = dir_to_files(uri) 167 | else: 168 | raise ValueError(uri) 169 | 170 | return Protocol(data=files_to_proto(files)) 171 | 172 | def index(self) -> dict: 173 | """ 174 | Generates TEZOS_PROTOCOL file 175 | :return: dict with protocol hash and modules 176 | """ 177 | proto = self() 178 | data = { 179 | 'hash': self.calculate_hash(), 180 | 'modules': list(map(lambda x: x['name'], proto.get('components', []))) 181 | } 182 | return data 183 | 184 | def export_tar(self, output_path=None): 185 | """ 186 | Creates a tarball and dumps to a file or returns bytes 187 | :param output_path: Path to the tarball [optional]. You can add .bz2 or .gz extension to make it compressed 188 | :return: bytes if path is None or nothing 189 | """ 190 | files = proto_to_files(self()) 191 | files.append(('TEZOS_PROTOCOL', json.dumps(self.index()))) 192 | return files_to_tar(files, output_path) 193 | 194 | def export_html(self, output_path=None): 195 | """ 196 | Generates github-like side-by-side diff viewe, powered by diff2html.js 197 | :param output_path: will write to this file if specified 198 | :return: html string if path is not specified 199 | """ 200 | diffs = [text for filename, text in self if text] 201 | return generate_unidiff_html(diffs, output_path=output_path) 202 | 203 | def diff(self, proto, context_size=3): 204 | """ 205 | Calculates file diff between two protocol versions 206 | :param proto: an instance of Protocol 207 | :param context_size: number of context lines before and after the change 208 | :return: patch in proto format 209 | """ 210 | files = list() 211 | yours = dict(iter(self)) 212 | theirs 
def find_state_change_intervals(head: int, tail: int, get: Callable, equals: Callable,
                                step=60) -> Generator:
    """
    Scan levels downwards from `head` towards `tail` in strides of `step`
    and yield every coarse interval within which the observed value changed.
    :param head: upper level (scan starts here)
    :param tail: lower bound (exclusive)
    :param get: callable level -> value
    :param equals: callable (a, b) -> bool comparing two values
    :param step: stride size in levels
    :yields: (upper_level, upper_value, lower_level, lower_value) tuples
    """
    last_value = get(head)

    level = head - step
    while level > tail:
        current = get(level)
        logger.debug(f'{current} at level {level}')

        if not equals(current, last_value):
            yield level + step, last_value, level, current
            last_value = current
        level -= step
def walk_state_change_interval(head: int, tail: int, get: Callable, equals: Callable,
                               head_value: Any, tail_value: Any) -> Generator:
    """
    Enumerate every state change between `tail` and `head`, walking upwards:
    repeatedly bisect for the next change until the head value is reached.
    :param head: upper level bound
    :param tail: lower level bound
    :param get: callable level -> value
    :param equals: value comparison callable
    :param head_value: value observed at `head`
    :param tail_value: value observed at `tail`
    :yields: (level, value) for each detected change
    """
    current_level, current_value = tail, tail_value
    while not equals(current_value, head_value):
        current_level, current_value = find_state_change(
            head, current_level, get, equals,
            pred_value=current_value)
        yield current_level, current_value
self.blocks[x].votes.roll_count(proposal_id), 92 | equals=lambda x, y: x == y): 93 | yield level 94 | 95 | def find_proposal_inject_operation(self, proposal_id) -> Operation: 96 | level = self.find_proposal_inject_level(proposal_id) 97 | operations = self.blocks[level].operations.votes() 98 | if not operations: 99 | raise ValueError('Injection operation not found.') 100 | 101 | return Operation.from_data(operations[0]) 102 | 103 | def find_proposal_votes_operations(self, proposal_id) -> Generator: 104 | for level in self.find_proposal_votes_levels(proposal_id): 105 | for operation in self.blocks[level].operations.votes(): 106 | yield Operation.from_data(operation) 107 | 108 | def find_contract_origination_level(self, contract_id) -> int: 109 | def get_counter(x): 110 | try: 111 | return self.blocks[x].context.contracts[contract_id].counter() 112 | except RpcError: 113 | return None 114 | 115 | level, _ = find_state_change( 116 | head=self.head.level(), 117 | tail=0, 118 | get=get_counter, 119 | equals=lambda x, y: x == y, 120 | pred_value=None 121 | ) 122 | return level 123 | 124 | def find_contract_origination_operation(self, contract_id) -> Operation: 125 | level = self.find_contract_origination_level(contract_id) 126 | operations = self.blocks[level].operations.managers() 127 | 128 | for operation in operations: 129 | for content in filter_contents(operation, kind='origination'): 130 | if content.get('metadata'): 131 | result = content['metadata']['operation_result'] 132 | else: 133 | result = content['result'] 134 | if contract_id in result['originated_contracts']: 135 | return Operation.from_data(operation) 136 | 137 | raise ValueError('Origination operation not found.') 138 | 139 | def find_storage_change_levels(self, contract_id, origination_level=None) -> Generator: 140 | if origination_level is None: 141 | origination_level = self.find_contract_origination_level(contract_id) 142 | 143 | for level, _ in find_state_changes( 144 | head=self.head.level(), 145 
| tail=origination_level, 146 | get=lambda x: hash(json.dumps(self.blocks[x].context.contracts[contract_id].storage())), 147 | equals=lambda x, y: x == y, 148 | step=720): 149 | yield level 150 | 151 | def find_storage_change_operations(self, contract_id, origination_level=None) -> Generator: 152 | for level in self.find_storage_change_levels(contract_id, origination_level): 153 | for operation in self.blocks[level].operations.managers(): 154 | if any(map(lambda x: x.get('destination') == contract_id, operation['contents'])): 155 | yield Operation.from_data(operation) 156 | -------------------------------------------------------------------------------- /pytezos/rpc/shell.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | 3 | from pytezos.rpc.node import Node, RpcQuery 4 | from pytezos.rpc.chain import Chain, OperationsDict, Mempool 5 | from pytezos.rpc.block import Block 6 | from pytezos.rpc.context import Context 7 | from pytezos.rpc.helpers import HelpersMixin 8 | from pytezos.rpc.protocol import Protocol 9 | 10 | 11 | class Shell(RpcQuery, HelpersMixin): 12 | 13 | def __init__(self, node=Node()): 14 | super(Shell, self).__init__(node=node) 15 | 16 | @property 17 | @lru_cache(maxsize=None) 18 | def chains(self): 19 | return RpcQuery( 20 | path='chains', 21 | node=self._node, 22 | child_class=Chain, 23 | properties=['main'] 24 | ) 25 | 26 | @property 27 | def main(self) -> Chain: 28 | return self.chains.main 29 | 30 | @property 31 | def test(self) -> Chain: 32 | return self.chains.test 33 | 34 | @property 35 | def blocks(self): 36 | return self.main.blocks 37 | 38 | @property 39 | def head(self) -> Block: 40 | return self.main.head 41 | 42 | @property 43 | def context(self) -> Context: 44 | return self.head.context 45 | 46 | @property 47 | def mempool(self) -> Mempool: 48 | return self.main.mempool 49 | 50 | @property 51 | def pending_operations(self) -> OperationsDict: 52 | return 
class Votes(RpcQuery):
    """Query wrapper for the voting endpoints (.../votes)."""

    def __init__(self, *args, **kwargs):
        voting_endpoints = [
            'ballot_list', 'ballots', 'current_period_kind', 'current_proposal', 'current_quorum',
            'listings', 'proposals'
        ]
        super(Votes, self).__init__(properties=voting_endpoints, *args, **kwargs)

    def roll_count(self, proposal_id) -> int:
        """
        Number of rolls cast for a proposal in the current voting period.
        :param proposal_id: base58 encoded proposal hash
        :return: roll count, 0 if the proposal is not listed
        """
        for entry in self.proposals():
            if entry[0] == proposal_id:
                return entry[1]
        return 0
def apply_patch(source, patch, revert=False):
    """
    Apply a unified diff to a string.

    If revert is False, `source` is the older text and the newer one is
    returned; if revert is True, `source` is the newer text and the older
    one is recovered.

    :param source: text to patch (split internally, keeping line endings)
    :param patch: unified diff, e.g. as produced by `make_patch`
    :param revert: if True, apply the patch in reverse
    :return: the patched text
    :raises ValueError: on a malformed hunk header or inconsistent line numbers
    """
    source = source.splitlines(True)
    patch = patch.splitlines(True)
    target = ''

    # i: cursor into the patch lines; sl: cursor into the source lines.
    i = sl = 0
    # Forward: take hunk offsets from the old-file side (regex group 1) and
    # emit '+' lines. Revert: offsets from the new-file side (group 3), emit
    # '-' lines.
    (midx, sign) = (1, '+') if not revert else (3, '-')

    while i < len(patch) and patch[i].startswith(("---", "+++")):
        i += 1  # skip filename header

    while i < len(patch):
        # Each iteration consumes one `@@ -a,b +c,d @@` header plus its hunk body.
        match = _hdr_pat.match(patch[i])
        if not match:
            raise ValueError(f'Regex mismatch on line {i}, `{patch[i]}`')

        # Convert the 1-based hunk start to a 0-based index; a zero-length
        # range (count `0`) designates the position *after* the stated line,
        # hence the +1 correction.
        l = int(match.group(midx)) - 1 + (match.group(midx + 1) == '0')
        if sl > l or l > len(source):
            raise ValueError(f'Bad line num {i}: `{patch[i]}`')

        # Copy the untouched region between the previous hunk and this one.
        target += ''.join(source[sl:l])
        sl = l
        i += 1

        # Consume body lines until the next `@@` header (or end of patch).
        while i < len(patch) and patch[i][0] != '@':
            if i + 1 < len(patch) and patch[i + 1][0] == '\\':
                # Next line is the `\ No newline at end of file` marker:
                # drop the synthetic trailing newline and skip the marker.
                line = patch[i][:-1]
                i += 2
            else:
                line = patch[i]
                i += 1

            if len(line) > 0:
                # Emit context (' ') and to-be-added (`sign`) lines; advance
                # the source cursor for every line except the added ones.
                if line[0] == sign or line[0] == ' ':
                    target += line[1:]
                sl += (line[0] != sign)

    # Copy whatever remains of the source after the last hunk.
    target += ''.join(source[sl:])
    return target
class Keychain:
    """Read-only accessor for a `tezos-client` style secret key store.

    The store is a JSON file containing a list of ``{"name": ..., "value": ...}``
    items, where ``value`` has the form ``<prefix>:<base58 key>`` (prefix is
    e.g. ``encrypted`` or ``unencrypted``).
    """

    def __init__(self, path='~/.tezos-client/secret_keys'):
        """
        :param path: path to the secret keys file (`~` is expanded)
        """
        self._path = os.path.expanduser(path)
        self._secret_keys = list()  # cached parsed entries
        self._last_modified = 0  # mtime of the file at last successful load

    def reload(self):
        """Re-read the key file if it changed on disk since the last load."""
        last_modified = os.path.getmtime(self._path)
        if last_modified > self._last_modified:
            self._last_modified = last_modified
            with open(self._path, 'r') as f:
                self._secret_keys = json.load(f)

    def get_key(self, name) -> Key:
        """Return the `Key` stored under the given alias.

        Prompts for a passphrase on stdin if the key is encrypted.

        :param name: key alias as stored in the file
        :raises KeyError: if no key with that alias exists
        """
        self.reload()

        value = next((item['value'] for item in self._secret_keys if item['name'] == name), None)
        if value is None:
            # Previously an unknown alias leaked a bare StopIteration out of
            # next(); raise an explicit, descriptive error instead.
            raise KeyError(f'No secret key found under the name `{name}`')

        prefix, key = value.split(':', maxsplit=1)

        if prefix == 'encrypted':
            password = getpass(f'Please, enter passphrase for `{name}`:\n')
            key = Key(key, passphrase=password)
        else:
            key = Key(key)

        return key

    def list_keys(self) -> list:
        """Return a summary dict (name, type, curve) for every stored key."""
        self.reload()

        def format_item(item: dict):
            # maxsplit=1 tolerates extra colons inside the encoded key,
            # consistent with the parsing in `get_key`.
            prefix, key = item['value'].split(':', maxsplit=1)
            return dict(
                name=item['name'],
                type=prefix,
                curve={'ed': 'ed25519', 'sp': 'secp256k1', 'p2': 'p256'}[key[:2]]
            )

        return list(map(format_item, self._secret_keys))
class OTP:
    """One-time-password helper: proves key ownership by signing recent block hashes."""

    def __init__(self, key, interval=5, shell=mainnet):
        """
        :param key: secret key (encrypted/unencrypted), public key or public key hash, all base58 encoded
        :param interval: number of blocks to check (tolerance)
        :param shell: Shell instance
        """
        if not isinstance(key, Key):
            if is_pkh(key):
                # Only a hash was given: fetch the corresponding public key first.
                key = shell.get_public_key(key)
            key = Key(key)

        self._key = key
        self._interval = interval
        self._shell = shell

    def now(self) -> str:
        """Produce the current OTP: a signature over the latest block hash."""
        if not self._key.is_secret:
            raise ValueError('Cannot generate OTP without a secret key')

        message = self._shell.head.calculate_hash()
        logger.debug(f'block hash: {message}')
        return self._key.sign(message)

    def verify(self, signature) -> bool:
        """Check a signature against each of the last `interval` block hashes.

        :return: True as soon as one hash verifies, False otherwise
        """
        block_hashes = self._shell.blocks(length=self._interval)

        for row in block_hashes:
            message = row[0]
            logger.debug(f'try {message}')
            try:
                self._key.verify(signature, message)
            except ValueError as e:
                # Signature does not match this block hash; keep trying.
                logger.debug(str(e))
            else:
                return True

        return False
10 | 16 | 17 | -------------------------------------------------------------------------------- /pytezos/tools/templates/unidiff.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 |
10 | 20 | 21 | -------------------------------------------------------------------------------- /tests/data/parameter/code_0.json: -------------------------------------------------------------------------------- 1 | {"prim": "parameter", 2 | "args": [{"prim": "or", 3 | "args": [{"prim": "pair", 4 | "args": [{"prim": "key_hash"}, 5 | {"prim": "pair", 6 | "args": [{"prim": "key_hash"}, 7 | {"prim": "option", "args": [{"prim": "key_hash"}]}]}], 8 | "annots": ["%_Liq_entry_createIdentity"]}, 9 | {"prim": "pair", 10 | "args": [{"prim": "bytes"}, {"prim": "signature"}], 11 | "annots": ["%_Liq_entry_forwardTranx"]}], 12 | "annots": [":_entries"]}]} -------------------------------------------------------------------------------- /tests/data/parameter/parameters_0.json: -------------------------------------------------------------------------------- 1 | {"prim": "Left", 2 | "args": [{"prim": "Pair", 3 | "args": [{"string": "tz1PKKC9NTsxxfwYpg1bVAY5k5AKuPYe9gpg"}, 4 | {"prim": "Pair", 5 | "args": [{"string": "tz1PKKC9NTsxxfwYpg1bVAY5k5AKuPYe9gpg"}, 6 | {"prim": "Some", 7 | "args": [{"string": "tz1PKKC9NTsxxfwYpg1bVAY5k5AKuPYe9gpg"}]}]}]}]} -------------------------------------------------------------------------------- /tests/data/script/sample_0.json: -------------------------------------------------------------------------------- 1 | [{"args": [{"prim": "nat"}], "prim": "parameter"}, 2 | {"args": [{"args": [{"prim": "address"}, {"prim": "nat"}], "prim": "map"}], 3 | "prim": "storage"}, 4 | {"args": [[{"prim": "DUP"}, 5 | {"prim": "CDR"}, 6 | {"prim": "SWAP"}, 7 | {"prim": "CAR"}, 8 | {"prim": "DUP"}, 9 | {"args": [{"prim": "nat"}, {"int": "2"}], "prim": "PUSH"}, 10 | [[{"prim": "COMPARE"}, {"prim": "GE"}], 11 | {"args": [[], [[{"prim": "UNIT"}, {"prim": "FAILWITH"}]]], "prim": "IF"}], 12 | {"prim": "SOME"}, 13 | {"prim": "SENDER"}, 14 | {"prim": "UPDATE"}, 15 | {"args": [{"prim": "operation"}], "prim": "NIL"}, 16 | {"prim": "PAIR"}]], 17 | "prim": "code"}] 
-------------------------------------------------------------------------------- /tests/data/script/sample_0.tz: -------------------------------------------------------------------------------- 1 | parameter nat; 2 | storage (map address nat); 3 | code { DUP ; 4 | CDR ; 5 | SWAP ; 6 | CAR ; 7 | DUP ; 8 | PUSH nat 2 ; 9 | { { COMPARE ; GE } ; IF {} { { UNIT ; FAILWITH } } } ; 10 | SOME ; 11 | SENDER ; 12 | UPDATE ; 13 | NIL operation ; 14 | PAIR } -------------------------------------------------------------------------------- /tests/data/script/sample_1.json: -------------------------------------------------------------------------------- 1 | [{"args": [{"args": [{"prim": "address"}, 2 | {"args": [{"prim": "string"}, 3 | {"args": [{"prim": "nat"}, 4 | {"args": [{"prim": "string"}, 5 | {"args": [{"annots": [":LiqidPool"], 6 | "args": [{"args": [{"annots": [":Assets"], 7 | "args": [{"annots": [":_entries"], 8 | "args": [{"annots": ["%_Liq_entry_assets_cb"], 9 | "prim": "bytes"}, 10 | {"args": [{"annots": ["%_Liq_entry_assets_list"], 11 | "prim": "unit"}, 12 | {"args": [{"annots": ["%_Liq_entry_assets_add"], 13 | "prim": "nat"}, 14 | {"args": [{"annots": ["%_Liq_entry_assets_remove"], 15 | "prim": "nat"}, 16 | {"args": [{"annots": ["%_Liq_entry_assets_buy"], 17 | "prim": "nat"}, 18 | {"annots": ["%_Liq_entry_assets_updatePrice"], 19 | "prim": "nat"}], 20 | "prim": "or"}], 21 | "prim": "or"}], 22 | "prim": "or"}], 23 | "prim": "or"}], 24 | "prim": "or"}], 25 | "prim": "contract"}, 26 | {"args": [{"prim": "string"}, 27 | {"args": [{"prim": "nat"}, {"prim": "address"}], 28 | "prim": "pair"}], 29 | "prim": "pair"}], 30 | "prim": "pair"}], 31 | "prim": "contract"}, 32 | {"annots": [":Assets"], 33 | "args": [{"annots": [":_entries"], 34 | "args": [{"annots": ["%_Liq_entry_assets_cb"], 35 | "prim": "bytes"}, 36 | {"args": [{"annots": ["%_Liq_entry_assets_list"], 37 | "prim": "unit"}, 38 | {"args": [{"annots": ["%_Liq_entry_assets_add"], 39 | "prim": "nat"}, 40 | {"args": 
[{"annots": ["%_Liq_entry_assets_remove"], 41 | "prim": "nat"}, 42 | {"args": [{"annots": ["%_Liq_entry_assets_buy"], 43 | "prim": "nat"}, 44 | {"annots": ["%_Liq_entry_assets_updatePrice"], 45 | "prim": "nat"}], 46 | "prim": "or"}], 47 | "prim": "or"}], 48 | "prim": "or"}], 49 | "prim": "or"}], 50 | "prim": "or"}], 51 | "prim": "contract"}], 52 | "prim": "pair"}], 53 | "prim": "pair"}], 54 | "prim": "pair"}], 55 | "prim": "pair"}], 56 | "prim": "pair"}], 57 | "prim": "parameter"}, 58 | {"args": [{"annots": [":storage"], 59 | "args": [{"annots": ["%single_sachet"], 60 | "args": [{"prim": "string"}, {"prim": "nat"}], 61 | "prim": "map"}, 62 | {"annots": ["%account_sachets"], 63 | "args": [{"prim": "string"}, 64 | {"args": [{"prim": "string"}, {"prim": "nat"}], "prim": "map"}], 65 | "prim": "map"}], 66 | "prim": "pair"}], 67 | "prim": "storage"}, 68 | {"args": [[{"prim": "DUP"}, 69 | {"args": [[{"annots": ["@storage_slash_5"], "prim": "CDR"}]], 70 | "prim": "DIP"}, 71 | {"annots": ["@_user_identifier_asset_identifier_asset_quantity_sachet_identifier_addr1_addr2_slash_6"], 72 | "prim": "CAR"}, 73 | {"prim": "DUP"}, 74 | [{"prim": "CDR"}, {"annots": ["@asset_identifier"], "prim": "CAR"}], 75 | [{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, {"prim": "SWAP"}], 76 | [{"prim": "CDR"}, 77 | {"prim": "CDR"}, 78 | {"annots": ["@asset_quantity"], "prim": "CAR"}], 79 | [{"args": [[[{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 80 | {"prim": "SWAP"}]]], 81 | "prim": "DIP"}, 82 | {"prim": "SWAP"}], 83 | [{"prim": "CDR"}, 84 | {"prim": "CDR"}, 85 | {"prim": "CDR"}, 86 | {"prim": "CDR"}, 87 | {"annots": ["@addr1"], "prim": "CAR"}], 88 | {"args": [{"prim": "mutez"}, {"int": "0"}], "prim": "PUSH"}, 89 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[{"prim": "DUP"}]], 90 | "prim": "DIP"}, 91 | {"prim": "SWAP"}]]], 92 | "prim": "DIP"}, 93 | {"prim": "SWAP"}]]], 94 | "prim": "DIP"}, 95 | {"prim": "SWAP"}]]], 96 | "prim": "DIP"}, 97 | {"prim": "SWAP"}], 98 | {"annots": 
["@user_identifier"], "prim": "CAR"}, 99 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@asset_quantity"], 100 | "prim": "DUP"}]], 101 | "prim": "DIP"}, 102 | {"prim": "SWAP"}]]], 103 | "prim": "DIP"}, 104 | {"prim": "SWAP"}]]], 105 | "prim": "DIP"}, 106 | {"prim": "SWAP"}], 107 | {"prim": "PAIR"}, 108 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@asset_identifier"], 109 | "prim": "DUP"}]], 110 | "prim": "DIP"}, 111 | {"prim": "SWAP"}]]], 112 | "prim": "DIP"}, 113 | {"prim": "SWAP"}]]], 114 | "prim": "DIP"}, 115 | {"prim": "SWAP"}]]], 116 | "prim": "DIP"}, 117 | {"prim": "SWAP"}], 118 | {"prim": "PAIR"}, 119 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"prim": "DUP"}]], 120 | "prim": "DIP"}, 121 | {"prim": "SWAP"}]]], 122 | "prim": "DIP"}, 123 | {"prim": "SWAP"}]]], 124 | "prim": "DIP"}, 125 | {"prim": "SWAP"}]]], 126 | "prim": "DIP"}, 127 | {"prim": "SWAP"}]]], 128 | "prim": "DIP"}, 129 | {"prim": "SWAP"}], 130 | [{"prim": "CDR"}, 131 | {"prim": "CDR"}, 132 | {"prim": "CDR"}, 133 | {"prim": "CDR"}, 134 | {"annots": ["@addr2"], "prim": "CDR"}], 135 | {"prim": "PAIR"}, 136 | {"annots": ["@op"], "prim": "TRANSFER_TOKENS"}, 137 | [{"args": [[[{"args": [[[{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 138 | {"prim": "SWAP"}]]], 139 | "prim": "DIP"}, 140 | {"prim": "SWAP"}]]], 141 | "prim": "DIP"}, 142 | {"prim": "SWAP"}], 143 | [{"prim": "CDR"}, 144 | {"prim": "CDR"}, 145 | {"prim": "CDR"}, 146 | {"annots": ["@sachet_identifier"], "prim": "CAR"}], 147 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 148 | "prim": "DUP"}]], 149 | "prim": "DIP"}, 150 | {"prim": "SWAP"}]]], 151 | "prim": "DIP"}, 152 | {"prim": "SWAP"}]]], 153 | "prim": "DIP"}, 154 | {"prim": "SWAP"}]]], 155 | "prim": "DIP"}, 156 | {"prim": "SWAP"}]]], 157 | "prim": "DIP"}, 158 | {"prim": "SWAP"}], 159 | {"annots": ["%single_sachet"], "prim": "CAR"}, 160 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": 
[[{"annots": ["@storage"], 161 | "prim": "DUP"}]], 162 | "prim": "DIP"}, 163 | {"prim": "SWAP"}]]], 164 | "prim": "DIP"}, 165 | {"prim": "SWAP"}]]], 166 | "prim": "DIP"}, 167 | {"prim": "SWAP"}]]], 168 | "prim": "DIP"}, 169 | {"prim": "SWAP"}]]], 170 | "prim": "DIP"}, 171 | {"prim": "SWAP"}]]], 172 | "prim": "DIP"}, 173 | {"prim": "SWAP"}], 174 | {"annots": ["%account_sachets"], "prim": "CDR"}, 175 | [{"args": [[[{"args": [[{"annots": ["@new_sachet_id"], "prim": "DUP"}]], 176 | "prim": "DIP"}, 177 | {"prim": "SWAP"}]]], 178 | "prim": "DIP"}, 179 | {"prim": "SWAP"}], 180 | {"prim": "GET"}, 181 | {"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 182 | "prim": "DUP"}]], 183 | "prim": "DIP"}, 184 | {"prim": "SWAP"}]]], 185 | "prim": "DIP"}, 186 | {"prim": "SWAP"}]]], 187 | "prim": "DIP"}, 188 | {"prim": "SWAP"}]]], 189 | "prim": "DIP"}, 190 | {"prim": "SWAP"}]]], 191 | "prim": "DIP"}, 192 | {"prim": "SWAP"}]]], 193 | "prim": "DIP"}, 194 | {"prim": "SWAP"}], 195 | {"annots": ["%account_sachets"], "prim": "CDR"}, 196 | {"args": [{"args": [{"prim": "string"}, {"prim": "nat"}], 197 | "prim": "map"}, 198 | []], 199 | "prim": "PUSH"}, 200 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@asset_quantity"], 201 | "prim": "DUP"}]], 202 | "prim": "DIP"}, 203 | {"prim": "SWAP"}]]], 204 | "prim": "DIP"}, 205 | {"prim": "SWAP"}]]], 206 | "prim": "DIP"}, 207 | {"prim": "SWAP"}]]], 208 | "prim": "DIP"}, 209 | {"prim": "SWAP"}]]], 210 | "prim": "DIP"}, 211 | {"prim": "SWAP"}], 212 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@asset_identifier"], 213 | "prim": "DUP"}]], 214 | "prim": "DIP"}, 215 | {"prim": "SWAP"}]]], 216 | "prim": "DIP"}, 217 | {"prim": "SWAP"}]]], 218 | "prim": "DIP"}, 219 | {"prim": "SWAP"}]]], 220 | "prim": "DIP"}, 221 | {"prim": "SWAP"}]]], 222 | "prim": "DIP"}, 223 | {"prim": "SWAP"}]]], 224 | "prim": "DIP"}, 225 | {"prim": 
"SWAP"}]]], 226 | "prim": "DIP"}, 227 | {"prim": "SWAP"}], 228 | {"args": [[{"prim": "SOME"}]], "prim": "DIP"}, 229 | {"annots": ["@new_sachet"], "prim": "UPDATE"}, 230 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@new_sachet_id"], 231 | "prim": "DUP"}]], 232 | "prim": "DIP"}, 233 | {"prim": "SWAP"}]]], 234 | "prim": "DIP"}, 235 | {"prim": "SWAP"}]]], 236 | "prim": "DIP"}, 237 | {"prim": "SWAP"}], 238 | {"args": [[{"prim": "SOME"}]], "prim": "DIP"}, 239 | {"prim": "UPDATE"}], 240 | [[{"args": [[[{"args": [[{"annots": ["@new_sachet_id"], "prim": "DUP"}]], 241 | "prim": "DIP"}, 242 | {"prim": "SWAP"}]]], 243 | "prim": "DIP"}, 244 | {"prim": "SWAP"}], 245 | {"args": [{"prim": "string"}, {"string": "id exists"}], "prim": "PUSH"}, 246 | {"prim": "PAIR"}, 247 | {"prim": "FAILWITH"}]], 248 | "prim": "IF_NONE"}, 249 | {"args": [[{"args": [[{"prim": "DROP"}]], "prim": "DIP"}]], "prim": "DIP"}, 250 | {"args": [[{"args": [[{"args": [[{"prim": "DROP"}, 251 | {"prim": "DROP"}, 252 | {"prim": "DROP"}, 253 | {"prim": "DROP"}]], 254 | "prim": "DIP"}]], 255 | "prim": "DIP"}]], 256 | "prim": "DIP"}, 257 | {"prim": "SWAP"}, 258 | {"annots": ["@storage", "%single_sachet", "%account_sachets"], 259 | "prim": "PAIR"}, 260 | {"args": [{"prim": "operation"}], "prim": "NIL"}, 261 | [{"args": [[[{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 262 | {"prim": "SWAP"}]]], 263 | "prim": "DIP"}, 264 | {"prim": "SWAP"}], 265 | {"args": [[{"args": [[{"args": [[{"prim": "DROP"}]], "prim": "DIP"}]], 266 | "prim": "DIP"}]], 267 | "prim": "DIP"}, 268 | {"prim": "CONS"}, 269 | {"prim": "PAIR"}]], 270 | "prim": "code"}] -------------------------------------------------------------------------------- /tests/data/script/sample_1.tz: -------------------------------------------------------------------------------- 1 | parameter 2 | (pair address 3 | (pair string 4 | (pair nat 5 | (pair string 6 | (pair (contract :LiqidPool 7 | (pair (contract :Assets 8 | (or :_entries 9 | (bytes %_Liq_entry_assets_cb) 10 
| (or (unit %_Liq_entry_assets_list) 11 | (or (nat %_Liq_entry_assets_add) 12 | (or (nat %_Liq_entry_assets_remove) 13 | (or (nat %_Liq_entry_assets_buy) (nat %_Liq_entry_assets_updatePrice))))))) 14 | (pair string (pair nat address)))) 15 | (contract :Assets 16 | (or :_entries 17 | (bytes %_Liq_entry_assets_cb) 18 | (or (unit %_Liq_entry_assets_list) 19 | (or (nat %_Liq_entry_assets_add) 20 | (or (nat %_Liq_entry_assets_remove) 21 | (or (nat %_Liq_entry_assets_buy) (nat %_Liq_entry_assets_updatePrice)))))))))))); 22 | storage 23 | (pair :storage 24 | (map %single_sachet string nat) 25 | (map %account_sachets string (map string nat))); 26 | code { DUP ; 27 | DIP { CDR @storage_slash_5 } ; 28 | CAR @_user_identifier_asset_identifier_asset_quantity_sachet_identifier_addr1_addr2_slash_6 ; 29 | DUP ; 30 | { CDR ; CAR @asset_identifier } ; 31 | { DIP { DUP } ; SWAP } ; 32 | { CDR ; CDR ; CAR @asset_quantity } ; 33 | { DIP { { DIP { DUP } ; SWAP } } ; SWAP } ; 34 | { CDR ; CDR ; CDR ; CDR ; CAR @addr1 } ; 35 | PUSH mutez 0 ; 36 | { DIP { { DIP { { DIP { { DIP { DUP } ; SWAP } } ; SWAP } } ; SWAP } } ; 37 | SWAP } ; 38 | CAR @user_identifier ; 39 | { DIP { { DIP { { DIP { DUP @asset_quantity } ; SWAP } } ; SWAP } } ; 40 | SWAP } ; 41 | PAIR ; 42 | { DIP { { DIP { { DIP { { DIP { DUP @asset_identifier } ; SWAP } } ; SWAP } } ; 43 | SWAP } } ; 44 | SWAP } ; 45 | PAIR ; 46 | { DIP { { DIP { { DIP { { DIP { { DIP { DUP } ; SWAP } } ; SWAP } } ; SWAP } } ; 47 | SWAP } } ; 48 | SWAP } ; 49 | { CDR ; CDR ; CDR ; CDR ; CDR @addr2 } ; 50 | PAIR ; 51 | TRANSFER_TOKENS @op ; 52 | { DIP { { DIP { { DIP { DUP } ; SWAP } } ; SWAP } } ; SWAP } ; 53 | { CDR ; CDR ; CDR ; CAR @sachet_identifier } ; 54 | { DIP { { DIP { { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 55 | SWAP } } ; 56 | SWAP } } ; 57 | SWAP } ; 58 | CAR %single_sachet ; 59 | { DIP { { DIP { { DIP { { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 60 | SWAP } } ; 61 | SWAP } } ; 62 | SWAP } } ; 
63 | SWAP } ; 64 | CDR %account_sachets ; 65 | { DIP { { DIP { DUP @new_sachet_id } ; SWAP } } ; SWAP } ; 66 | GET ; 67 | IF_NONE 68 | { { DIP { { DIP { { DIP { { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 69 | SWAP } } ; 70 | SWAP } } ; 71 | SWAP } } ; 72 | SWAP } ; 73 | CDR %account_sachets ; 74 | PUSH (map string nat) {} ; 75 | { DIP { { DIP { { DIP { { DIP { { DIP { DUP @asset_quantity } ; SWAP } } ; SWAP } } ; 76 | SWAP } } ; 77 | SWAP } } ; 78 | SWAP } ; 79 | { DIP { { DIP { { DIP { { DIP { { DIP { { DIP { { DIP { DUP @asset_identifier } ; SWAP } } ; SWAP } } ; 80 | SWAP } } ; 81 | SWAP } } ; 82 | SWAP } } ; 83 | SWAP } } ; 84 | SWAP } ; 85 | DIP { SOME } ; 86 | UPDATE @new_sachet ; 87 | { DIP { { DIP { { DIP { DUP @new_sachet_id } ; SWAP } } ; SWAP } } ; 88 | SWAP } ; 89 | DIP { SOME } ; 90 | UPDATE } 91 | { { DIP { { DIP { DUP @new_sachet_id } ; SWAP } } ; SWAP } ; 92 | PUSH string "id exists" ; 93 | PAIR ; 94 | FAILWITH } ; 95 | DIP { DIP { DROP } } ; 96 | DIP { DIP { DIP { DROP ; DROP ; DROP ; DROP } } } ; 97 | SWAP ; 98 | PAIR @storage %single_sachet %account_sachets ; 99 | NIL operation ; 100 | { DIP { { DIP { DUP } ; SWAP } } ; SWAP } ; 101 | DIP { DIP { DIP { DROP } } } ; 102 | CONS ; 103 | PAIR } -------------------------------------------------------------------------------- /tests/data/script/sample_2.json: -------------------------------------------------------------------------------- 1 | [{"args": [{"annots": [":parameter"], 2 | "args": [{"annots": ["%Pay"], "prim": "unit"}, 3 | {"annots": ["%Manage"], 4 | "args": [{"annots": [":proposition"], 5 | "args": [{"annots": ["%destination"], "prim": "key_hash"}, 6 | {"annots": ["%amount"], "prim": "mutez"}], 7 | "prim": "pair"}], 8 | "prim": "option"}], 9 | "prim": "or"}], 10 | "prim": "parameter"}, 11 | {"args": [{"annots": [":storage"], 12 | "args": [{"annots": ["%owners"], 13 | "args": [{"prim": "address"}], 14 | "prim": "set"}, 15 | {"args": [{"annots": ["%actions"], 16 | "args": 
[{"prim": "address"}, 17 | {"annots": [":proposition"], 18 | "args": [{"annots": ["%destination"], "prim": "key_hash"}, 19 | {"annots": ["%amount"], "prim": "mutez"}], 20 | "prim": "pair"}], 21 | "prim": "map"}, 22 | {"args": [{"annots": ["%owners_length"], "prim": "nat"}, 23 | {"annots": ["%min_agree"], "prim": "nat"}], 24 | "prim": "pair"}], 25 | "prim": "pair"}], 26 | "prim": "pair"}], 27 | "prim": "storage"}, 28 | {"args": [[{"prim": "DUP"}, 29 | {"args": [[{"annots": ["@storage_slash_1"], "prim": "CDR"}]], 30 | "prim": "DIP"}, 31 | {"annots": ["@parameter_slash_2"], "prim": "CAR"}, 32 | {"annots": ["@parameter"], "prim": "DUP"}, 33 | {"args": [[{"prim": "DROP"}, 34 | [{"args": [[{"annots": ["@storage"], "prim": "DUP"}]], "prim": "DIP"}, 35 | {"prim": "SWAP"}], 36 | {"args": [{"prim": "operation"}], "prim": "NIL"}, 37 | {"prim": "PAIR"}], 38 | [{"annots": ["@action_slash_18"], "prim": "RENAME"}, 39 | {"annots": ["@owner"], "prim": "SOURCE"}, 40 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 41 | "prim": "DUP"}]], 42 | "prim": "DIP"}, 43 | {"prim": "SWAP"}]]], 44 | "prim": "DIP"}, 45 | {"prim": "SWAP"}]]], 46 | "prim": "DIP"}, 47 | {"prim": "SWAP"}], 48 | {"annots": ["%owners"], "prim": "CAR"}, 49 | [{"args": [[{"annots": ["@owner"], "prim": "DUP"}]], "prim": "DIP"}, 50 | {"prim": "SWAP"}], 51 | {"prim": "MEM"}, 52 | {"prim": "NOT"}, 53 | {"args": [[{"prim": "UNIT"}, {"prim": "FAILWITH"}], [{"prim": "UNIT"}]], 54 | "prim": "IF"}, 55 | {"prim": "DROP"}, 56 | {"args": [{"prim": "mutez"}, {"int": "0"}], "prim": "PUSH"}, 57 | {"prim": "AMOUNT"}, 58 | {"prim": "COMPARE"}, 59 | {"prim": "NEQ"}, 60 | {"args": [[{"args": [{"prim": "string"}, 61 | {"string": "Don't send money while managing multisig"}], 62 | "prim": "PUSH"}, 63 | {"prim": "FAILWITH"}], 64 | [{"prim": "UNIT"}]], 65 | "prim": "IF"}, 66 | {"prim": "DROP"}, 67 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 68 | "prim": "DUP"}]], 69 | "prim": "DIP"}, 70 | {"prim": "SWAP"}]]], 
71 | "prim": "DIP"}, 72 | {"prim": "SWAP"}]]], 73 | "prim": "DIP"}, 74 | {"prim": "SWAP"}], 75 | {"prim": "DUP"}, 76 | {"annots": ["%owners"], "prim": "CAR"}, 77 | {"prim": "SWAP"}, 78 | {"prim": "CDR"}, 79 | {"prim": "CDR"}, 80 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 81 | "prim": "DUP"}]], 82 | "prim": "DIP"}, 83 | {"prim": "SWAP"}]]], 84 | "prim": "DIP"}, 85 | {"prim": "SWAP"}]]], 86 | "prim": "DIP"}, 87 | {"prim": "SWAP"}]]], 88 | "prim": "DIP"}, 89 | {"prim": "SWAP"}]]], 90 | "prim": "DIP"}, 91 | {"prim": "SWAP"}], 92 | [{"prim": "CDR"}, {"annots": ["%actions"], "prim": "CAR"}], 93 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@action"], 94 | "prim": "DUP"}]], 95 | "prim": "DIP"}, 96 | {"prim": "SWAP"}]]], 97 | "prim": "DIP"}, 98 | {"prim": "SWAP"}]]], 99 | "prim": "DIP"}, 100 | {"prim": "SWAP"}]]], 101 | "prim": "DIP"}, 102 | {"prim": "SWAP"}], 103 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[{"annots": ["@owner"], 104 | "prim": "DUP"}]], 105 | "prim": "DIP"}, 106 | {"prim": "SWAP"}]]], 107 | "prim": "DIP"}, 108 | {"prim": "SWAP"}]]], 109 | "prim": "DIP"}, 110 | {"prim": "SWAP"}]]], 111 | "prim": "DIP"}, 112 | {"prim": "SWAP"}], 113 | {"prim": "UPDATE"}, 114 | {"annots": ["%actions"], "prim": "PAIR"}, 115 | {"prim": "SWAP"}, 116 | {"annots": ["@storage", "%owners"], "prim": "PAIR"}, 117 | {"annots": ["@storage"], "prim": "DUP"}, 118 | {"args": [{"prim": "operation"}], "prim": "NIL"}, 119 | {"annots": ["@do_nothing"], "prim": "PAIR"}, 120 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@action"], 121 | "prim": "DUP"}]], 122 | "prim": "DIP"}, 123 | {"prim": "SWAP"}]]], 124 | "prim": "DIP"}, 125 | {"prim": "SWAP"}]]], 126 | "prim": "DIP"}, 127 | {"prim": "SWAP"}], 128 | {"args": [[{"annots": ["@do_nothing"], "prim": "DUP"}], 129 | [{"annots": ["@p"], "prim": "DUP"}, 130 | {"prim": "BALANCE"}, 131 | [{"args": [[{"annots": ["@p"], "prim": "DUP"}]], "prim": "DIP"}, 132 | {"prim": "SWAP"}], 133 | 
{"annots": ["%amount"], "prim": "CDR"}, 134 | {"prim": "COMPARE"}, 135 | {"prim": "GT"}, 136 | {"args": [[{"args": [{"prim": "string"}, 137 | {"string": "Balance to low for withdrawal"}], 138 | "prim": "PUSH"}, 139 | {"prim": "FAILWITH"}], 140 | [{"prim": "UNIT"}]], 141 | "prim": "IF"}, 142 | {"args": [[{"prim": "DROP"}]], "prim": "DIP"}, 143 | {"prim": "DROP"}, 144 | [{"args": [[[{"args": [[{"annots": ["@storage"], "prim": "DUP"}]], 145 | "prim": "DIP"}, 146 | {"prim": "SWAP"}]]], 147 | "prim": "DIP"}, 148 | {"prim": "SWAP"}], 149 | [{"args": [[{"annots": ["@p"], "prim": "DUP"}]], "prim": "DIP"}, 150 | {"prim": "SWAP"}], 151 | {"prim": "PAIR"}, 152 | {"prim": "DUP"}, 153 | {"annots": ["@storage"], "prim": "CDR"}, 154 | {"annots": ["@storage"], "prim": "DUP"}, 155 | [{"prim": "CDR"}, 156 | {"prim": "CDR"}, 157 | {"annots": ["%min_agree"], "prim": "CDR"}], 158 | {"args": [{"prim": "nat"}, {"int": "0"}], "prim": "PUSH"}, 159 | [{"args": [[[{"args": [[{"annots": ["@storage"], "prim": "DUP"}]], 160 | "prim": "DIP"}, 161 | {"prim": "SWAP"}]]], 162 | "prim": "DIP"}, 163 | {"prim": "SWAP"}], 164 | [{"prim": "CDR"}, {"annots": ["%actions"], "prim": "CAR"}], 165 | {"args": [[{"annots": ["@___p_prim__cpt_slash_12"], 166 | "prim": "RENAME"}, 167 | {"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 168 | {"prim": "PAIR"}, 169 | {"prim": "DUP"}, 170 | {"annots": ["@cpt"], "prim": "CDR"}, 171 | [{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, {"prim": "SWAP"}], 172 | {"prim": "CAR"}, 173 | {"annots": ["@p_prim_"], "prim": "CDR"}, 174 | [{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[[{"args": [[{"prim": "DUP"}]], 175 | "prim": "DIP"}, 176 | {"prim": "SWAP"}]]], 177 | "prim": "DIP"}, 178 | {"prim": "SWAP"}]]], 179 | "prim": "DIP"}, 180 | {"prim": "SWAP"}]]], 181 | "prim": "DIP"}, 182 | {"prim": "SWAP"}]]], 183 | "prim": "DIP"}, 184 | {"prim": "SWAP"}]]], 185 | "prim": "DIP"}, 186 | {"prim": "SWAP"}], 187 | {"annots": ["@p"], "prim": "CAR"}, 188 | {"prim": "PAIR"}, 189 | 
{"prim": "DUP"}, 190 | {"annots": ["@p1"], "prim": "CAR"}, 191 | {"prim": "SWAP"}, 192 | {"annots": ["@p2"], "prim": "CDR"}, 193 | {"prim": "DUP"}, 194 | {"annots": ["%amount"], "prim": "CDR"}, 195 | [{"args": [[[{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 196 | {"prim": "SWAP"}]]], 197 | "prim": "DIP"}, 198 | {"prim": "SWAP"}], 199 | {"annots": ["%amount"], "prim": "CDR"}, 200 | {"prim": "COMPARE"}, 201 | {"prim": "EQ"}, 202 | {"prim": "SWAP"}, 203 | {"annots": ["%destination"], "prim": "CAR"}, 204 | [{"args": [[[{"args": [[{"prim": "DUP"}]], "prim": "DIP"}, 205 | {"prim": "SWAP"}]]], 206 | "prim": "DIP"}, 207 | {"prim": "SWAP"}], 208 | {"args": [[{"args": [[{"args": [[{"prim": "DROP"}]], 209 | "prim": "DIP"}]], 210 | "prim": "DIP"}]], 211 | "prim": "DIP"}, 212 | {"annots": ["%destination"], "prim": "CAR"}, 213 | {"prim": "COMPARE"}, 214 | {"prim": "EQ"}, 215 | {"prim": "AND"}, 216 | {"args": [[{"args": [{"prim": "nat"}, {"int": "1"}], 217 | "prim": "PUSH"}, 218 | [{"args": [[{"annots": ["@cpt"], "prim": "DUP"}]], 219 | "prim": "DIP"}, 220 | {"prim": "SWAP"}], 221 | {"prim": "ADD"}], 222 | [{"annots": ["@cpt"], "prim": "DUP"}]], 223 | "prim": "IF"}, 224 | {"args": [[{"prim": "DROP"}, {"prim": "DROP"}, {"prim": "DROP"}]], 225 | "prim": "DIP"}]], 226 | "prim": "ITER"}, 227 | {"args": [[{"args": [[{"prim": "DROP"}, {"prim": "DROP"}]], 228 | "prim": "DIP"}]], 229 | "prim": "DIP"}, 230 | {"annots": ["@nb_agree"], "prim": "RENAME"}, 231 | {"prim": "COMPARE"}, 232 | {"prim": "GE"}, 233 | {"args": [[{"annots": ["@p"], "prim": "DUP"}, 234 | {"annots": ["%destination"], "prim": "CAR"}, 235 | {"annots": ["@c_dest"], "prim": "IMPLICIT_ACCOUNT"}, 236 | [{"args": [[{"annots": ["@p"], "prim": "DUP"}]], "prim": "DIP"}, 237 | {"prim": "SWAP"}], 238 | {"annots": ["%amount"], "prim": "CDR"}, 239 | {"prim": "UNIT"}, 240 | {"annots": ["@op"], "prim": "TRANSFER_TOKENS"}, 241 | [{"args": [[[{"args": [[[{"args": [[{"annots": ["@storage"], 242 | "prim": "DUP"}]], 243 | "prim": "DIP"}, 
244 | {"prim": "SWAP"}]]], 245 | "prim": "DIP"}, 246 | {"prim": "SWAP"}]]], 247 | "prim": "DIP"}, 248 | {"prim": "SWAP"}], 249 | {"prim": "DUP"}, 250 | {"annots": ["%owners"], "prim": "CAR"}, 251 | {"prim": "SWAP"}, 252 | {"prim": "CDR"}, 253 | {"prim": "CDR"}, 254 | {"args": [{"args": [{"prim": "address"}, 255 | {"annots": [":proposition"], 256 | "args": [{"annots": ["%destination"], "prim": "key_hash"}, 257 | {"annots": ["%amount"], "prim": "mutez"}], 258 | "prim": "pair"}], 259 | "prim": "map"}, 260 | []], 261 | "prim": "PUSH"}, 262 | {"annots": ["%actions"], "prim": "PAIR"}, 263 | {"prim": "SWAP"}, 264 | {"annots": ["%owners"], "prim": "PAIR"}, 265 | {"args": [{"prim": "operation"}], "prim": "NIL"}, 266 | [{"args": [[[{"args": [[{"annots": ["@op"], "prim": "DUP"}]], 267 | "prim": "DIP"}, 268 | {"prim": "SWAP"}]]], 269 | "prim": "DIP"}, 270 | {"prim": "SWAP"}], 271 | {"args": [[{"args": [[{"args": [[{"prim": "DROP"}]], 272 | "prim": "DIP"}]], 273 | "prim": "DIP"}]], 274 | "prim": "DIP"}, 275 | {"prim": "CONS"}, 276 | {"prim": "PAIR"}], 277 | [[{"args": [[{"annots": ["@do_nothing"], "prim": "DUP"}]], 278 | "prim": "DIP"}, 279 | {"prim": "SWAP"}]]], 280 | "prim": "IF"}, 281 | {"args": [[{"prim": "DROP"}]], "prim": "DIP"}]], 282 | "prim": "IF_NONE"}, 283 | {"args": [[{"prim": "DROP"}, 284 | {"prim": "DROP"}, 285 | {"prim": "DROP"}, 286 | {"prim": "DROP"}]], 287 | "prim": "DIP"}]], 288 | "prim": "IF_LEFT"}, 289 | {"args": [[{"prim": "DROP"}, {"prim": "DROP"}]], "prim": "DIP"}]], 290 | "prim": "code"}] -------------------------------------------------------------------------------- /tests/data/script/sample_2.tz: -------------------------------------------------------------------------------- 1 | parameter 2 | (or :parameter 3 | (unit %Pay) 4 | (option %Manage (pair :proposition (key_hash %destination) (mutez %amount)))); 5 | storage 6 | (pair :storage 7 | (set %owners address) 8 | (pair (map %actions address (pair :proposition (key_hash %destination) (mutez 
%amount))) 9 | (pair (nat %owners_length) (nat %min_agree)))); 10 | code { DUP ; 11 | DIP { CDR @storage_slash_1 } ; 12 | CAR @parameter_slash_2 ; 13 | DUP @parameter ; 14 | IF_LEFT 15 | { DROP ; { DIP { DUP @storage } ; SWAP } ; NIL operation ; PAIR } 16 | { RENAME @action_slash_18 ; 17 | SOURCE @owner ; 18 | { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 19 | SWAP } ; 20 | CAR %owners ; 21 | { DIP { DUP @owner } ; SWAP } ; 22 | MEM ; 23 | NOT ; 24 | IF { UNIT ; FAILWITH } { UNIT } ; 25 | DROP ; 26 | PUSH mutez 0 ; 27 | AMOUNT ; 28 | COMPARE ; 29 | NEQ ; 30 | IF { PUSH string "Don't send money while managing multisig" ; FAILWITH } 31 | { UNIT } ; 32 | DROP ; 33 | { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 34 | SWAP } ; 35 | DUP ; 36 | CAR %owners ; 37 | SWAP ; 38 | CDR ; 39 | CDR ; 40 | { DIP { { DIP { { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 41 | SWAP } } ; 42 | SWAP } } ; 43 | SWAP } ; 44 | { CDR ; CAR %actions } ; 45 | { DIP { { DIP { { DIP { { DIP { DUP @action } ; SWAP } } ; SWAP } } ; 46 | SWAP } } ; 47 | SWAP } ; 48 | { DIP { { DIP { { DIP { { DIP { DUP @owner } ; SWAP } } ; SWAP } } ; 49 | SWAP } } ; 50 | SWAP } ; 51 | UPDATE ; 52 | PAIR %actions ; 53 | SWAP ; 54 | PAIR @storage %owners ; 55 | DUP @storage ; 56 | NIL operation ; 57 | PAIR @do_nothing ; 58 | { DIP { { DIP { { DIP { DUP @action } ; SWAP } } ; SWAP } } ; 59 | SWAP } ; 60 | IF_NONE 61 | { DUP @do_nothing } 62 | { DUP @p ; 63 | BALANCE ; 64 | { DIP { DUP @p } ; SWAP } ; 65 | CDR %amount ; 66 | COMPARE ; 67 | GT ; 68 | IF { PUSH string "Balance to low for withdrawal" ; FAILWITH } { UNIT } ; 69 | DIP { DROP } ; 70 | DROP ; 71 | { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } ; 72 | { DIP { DUP @p } ; SWAP } ; 73 | PAIR ; 74 | DUP ; 75 | CDR @storage ; 76 | DUP @storage ; 77 | { CDR ; CDR ; CDR %min_agree } ; 78 | PUSH nat 0 ; 79 | { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } ; 80 | { CDR ; CAR %actions } ; 81 | ITER { RENAME 
@___p_prim__cpt_slash_12 ; 82 | DIP { DUP } ; 83 | PAIR ; 84 | DUP ; 85 | CDR @cpt ; 86 | { DIP { DUP } ; SWAP } ; 87 | CAR ; 88 | CDR @p_prim_ ; 89 | { DIP { { DIP { { DIP { { DIP { { DIP { { DIP { DUP } ; SWAP } } ; SWAP } } ; SWAP } } ; 90 | SWAP } } ; 91 | SWAP } } ; 92 | SWAP } ; 93 | CAR @p ; 94 | PAIR ; 95 | DUP ; 96 | CAR @p1 ; 97 | SWAP ; 98 | CDR @p2 ; 99 | DUP ; 100 | CDR %amount ; 101 | { DIP { { DIP { DUP } ; SWAP } } ; SWAP } ; 102 | CDR %amount ; 103 | COMPARE ; 104 | EQ ; 105 | SWAP ; 106 | CAR %destination ; 107 | { DIP { { DIP { DUP } ; SWAP } } ; SWAP } ; 108 | DIP { DIP { DIP { DROP } } } ; 109 | CAR %destination ; 110 | COMPARE ; 111 | EQ ; 112 | AND ; 113 | IF { PUSH nat 1 ; { DIP { DUP @cpt } ; SWAP } ; ADD } 114 | { DUP @cpt } ; 115 | DIP { DROP ; DROP ; DROP } } ; 116 | DIP { DIP { DROP ; DROP } } ; 117 | RENAME @nb_agree ; 118 | COMPARE ; 119 | GE ; 120 | IF { DUP @p ; 121 | CAR %destination ; 122 | IMPLICIT_ACCOUNT @c_dest ; 123 | { DIP { DUP @p } ; SWAP } ; 124 | CDR %amount ; 125 | UNIT ; 126 | TRANSFER_TOKENS @op ; 127 | { DIP { { DIP { { DIP { DUP @storage } ; SWAP } } ; SWAP } } ; 128 | SWAP } ; 129 | DUP ; 130 | CAR %owners ; 131 | SWAP ; 132 | CDR ; 133 | CDR ; 134 | PUSH (map address (pair :proposition (key_hash %destination) (mutez %amount))) {} ; 135 | PAIR %actions ; 136 | SWAP ; 137 | PAIR %owners ; 138 | NIL operation ; 139 | { DIP { { DIP { DUP @op } ; SWAP } } ; SWAP } ; 140 | DIP { DIP { DIP { DROP } } } ; 141 | CONS ; 142 | PAIR } 143 | { { DIP { DUP @do_nothing } ; SWAP } } ; 144 | DIP { DROP } } ; 145 | DIP { DROP ; DROP ; DROP ; DROP } } ; 146 | DIP { DROP ; DROP } } -------------------------------------------------------------------------------- /tests/data/storage/sample_0.json: -------------------------------------------------------------------------------- 1 | {"code": [{"prim": "parameter", "args": [{"prim": "bytes"}]}, 2 | {"prim": "storage", 3 | "args": [{"prim": "pair", 4 | "args": [{"prim": "map", 5 | "args": 
[{"prim": "int"}, 6 | {"prim": "pair", 7 | "args": [{"prim": "string"}, 8 | {"prim": "pair", 9 | "args": [{"prim": "string"}, 10 | {"prim": "pair", 11 | "args": [{"prim": "mutez"}, 12 | {"prim": "pair", 13 | "args": [{"prim": "address"}, 14 | {"prim": "pair", 15 | "args": [{"prim": "address"}, 16 | {"prim": "pair", 17 | "args": [{"prim": "pair", 18 | "args": [{"prim": "bool"}, 19 | {"prim": "pair", 20 | "args": [{"prim": "string"}, {"prim": "string"}]}]}, 21 | {"prim": "pair", 22 | "args": [{"prim": "timestamp"}, 23 | {"prim": "timestamp"}]}]}]}]}]}]}]}]}, 24 | {"prim": "int"}]}]}], 25 | "storage": {"prim": "Pair", 26 | "args": [[{"prim": "Elt", 27 | "args": [{"int": "0"}, 28 | {"prim": "Pair", 29 | "args": [{"string": ""}, 30 | {"prim": "Pair", 31 | "args": [{"string": ""}, 32 | {"prim": "Pair", 33 | "args": [{"int": "0"}, 34 | {"prim": "Pair", 35 | "args": [{"string": "tz1gH29qAVaNfv7imhPthCwpUBcqmMdLWxPG"}, 36 | {"prim": "Pair", 37 | "args": [{"string": "tz1gH29qAVaNfv7imhPthCwpUBcqmMdLWxPG"}, 38 | {"prim": "Pair", 39 | "args": [{"prim": "Pair", 40 | "args": [{"prim": "True"}, 41 | {"prim": "Pair", 42 | "args": [{"string": ""}, {"string": ""}]}]}, 43 | {"prim": "Pair", 44 | "args": [{"string": "1970-01-01T00:00:00Z"}, 45 | {"string": "1970-01-01T00:00:00Z"}]}]}]}]}]}]}]}]}, 46 | {"prim": "Elt", 47 | "args": [{"int": "1"}, 48 | {"prim": "Pair", 49 | "args": [{"string": "2035 Moon Lander"}, 50 | {"prim": "Pair", 51 | "args": [{"string": "A shuttle like no other. 
Experiance a ride to the moon, retro style."}, 52 | {"prim": "Pair", 53 | "args": [{"int": "1000000"}, 54 | {"prim": "Pair", 55 | "args": [{"string": "tz1KwNFuzryhY7ZFyZVcdnHFEZCT9xgrRmWT"}, 56 | {"prim": "Pair", 57 | "args": [{"string": "tz1gH29qAVaNfv7imhPthCwpUBcqmMdLWxPG"}, 58 | {"prim": "Pair", 59 | "args": [{"prim": "Pair", 60 | "args": [{"prim": "False"}, 61 | {"prim": "Pair", 62 | "args": [{"string": "Moon Lander was damaged."}, 63 | {"string": "nil"}]}]}, 64 | {"prim": "Pair", 65 | "args": [{"string": "2019-04-20T04:00:00Z"}, 66 | {"string": "2019-05-20T04:00:00Z"}]}]}]}]}]}]}]}]}], 67 | {"int": "1"}]}} -------------------------------------------------------------------------------- /tests/data/storage/sample_1.json: -------------------------------------------------------------------------------- 1 | {"code": [{"prim": "parameter", 2 | "args": [{"prim": "or", 3 | "args": [{"prim": "pair", 4 | "args": [{"prim": "address"}, {"prim": "nat"}], 5 | "annots": ["%_Liq_entry_transfer"]}, 6 | {"prim": "or", 7 | "args": [{"prim": "pair", 8 | "args": [{"prim": "address"}, {"prim": "nat"}], 9 | "annots": ["%_Liq_entry_approve"]}, 10 | {"prim": "or", 11 | "args": [{"prim": "pair", 12 | "args": [{"prim": "address"}, 13 | {"prim": "pair", "args": [{"prim": "address"}, {"prim": "nat"}]}], 14 | "annots": ["%_Liq_entry_transferFrom"]}, 15 | {"prim": "or", 16 | "args": [{"prim": "pair", 17 | "args": [{"prim": "address"}, 18 | {"prim": "contract", 19 | "args": [{"prim": "nat"}], 20 | "annots": [":NatContract"]}], 21 | "annots": ["%_Liq_entry_balanceOf"]}, 22 | {"prim": "or", 23 | "args": [{"prim": "pair", 24 | "args": [{"prim": "address"}, 25 | {"prim": "pair", 26 | "args": [{"prim": "address"}, 27 | {"prim": "contract", 28 | "args": [{"prim": "pair", 29 | "args": [{"prim": "nat"}, {"prim": "nat"}]}], 30 | "annots": [":NatNatContract"]}]}], 31 | "annots": ["%_Liq_entry_allowance"]}, 32 | {"prim": "or", 33 | "args": [{"prim": "pair", 34 | "args": [{"prim": "address"}, {"prim": 
"nat"}], 35 | "annots": ["%_Liq_entry_createAccount"]}, 36 | {"prim": "list", 37 | "args": [{"prim": "pair", 38 | "args": [{"prim": "address"}, {"prim": "nat"}]}], 39 | "annots": ["%_Liq_entry_createAccounts"]}]}]}]}]}]}], 40 | "annots": [":_entries"]}]}, 41 | {"prim": "storage", 42 | "args": [{"prim": "pair", 43 | "args": [{"prim": "big_map", 44 | "args": [{"prim": "address"}, 45 | {"prim": "pair", 46 | "args": [{"prim": "nat", "annots": ["%balance"]}, 47 | {"prim": "map", 48 | "args": [{"prim": "address"}, {"prim": "nat"}], 49 | "annots": ["%allowances"]}], 50 | "annots": [":account"]}], 51 | "annots": [":accounts"]}, 52 | {"prim": "pair", 53 | "args": [{"prim": "nat", "annots": ["%version"]}, 54 | {"prim": "pair", 55 | "args": [{"prim": "nat", "annots": ["%totalSupply"]}, 56 | {"prim": "pair", 57 | "args": [{"prim": "nat", "annots": ["%decimals"]}, 58 | {"prim": "pair", 59 | "args": [{"prim": "string", "annots": ["%name"]}, 60 | {"prim": "pair", 61 | "args": [{"prim": "string", "annots": ["%symbol"]}, 62 | {"prim": "address", "annots": ["%owner"]}]}]}]}]}]}], 63 | "annots": [":storage"]}]}], 64 | "storage": {"prim": "Pair", 65 | "args": [[], 66 | {"prim": "Pair", 67 | "args": [{"int": "1"}, 68 | {"prim": "Pair", 69 | "args": [{"int": "111111111111111110"}, 70 | {"prim": "Pair", 71 | "args": [{"int": "18"}, 72 | {"prim": "Pair", 73 | "args": [{"string": "test1"}, 74 | {"prim": "Pair", 75 | "args": [{"string": "TST"}, 76 | {"string": "KT1GE2AZhazRxGsAjRVkQccHcB2pvANXQWd7"}]}]}]}]}]}]}} -------------------------------------------------------------------------------- /tests/test_crypto.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from unittest.mock import patch 3 | from parameterized import parameterized 4 | 5 | from pytezos.crypto import Key 6 | 7 | 8 | class TestCrypto(TestCase): 9 | """ 10 | Test data generation: 11 | ./tezos-client gen keys test_ed25519 -s ed25519 --force (--encrypted) 
12 | ./tezos-client gen keys test_secp256k1 -s secp256k1 --force (--encrypted) 13 | ./tezos-client gen keys test_p256 -s p256 --force (--encrypted) 14 | ./tezos-client show address test_ed25519 -S 15 | ./tezos-client show address test_secp256k1 -S 16 | ./tezos-client show address test_p256 -S 17 | ./tezos-client sign bytes 0x74657374 for test_ed25519 18 | ./tezos-client sign bytes 0x74657374 for test_secp256k1 19 | ./tezos-client sign bytes 0x74657374 for test_p256 20 | 21 | Issues: 22 | * `tezos-client sign bytes` does not support P256 curve 23 | """ 24 | 25 | @parameterized.expand([ 26 | ('edsk3nM41ygNfSxVU4w1uAW3G9EnTQEB5rjojeZedLTGmiGRcierVv', 27 | 'edpku976gpuAD2bXyx1XGraeKuCo1gUZ3LAJcHM12W1ecxZwoiu22R', 28 | 'tz1eKkWU5hGtfLUiqNpucHrXymm83z3DG9Sq'), 29 | ('spsk1zkqrmst1yg2c4xi3crWcZPqgdc9KtPtb9SAZWYHAdiQzdHy7j', 30 | 'sppk7aMNM3xh14haqEyaxNjSt7hXanCDyoWtRcxF8wbtya859ak6yZT', 31 | 'tz28YZoayJjVz2bRgGeVjxE8NonMiJ3r2Wdu'), 32 | ('p2sk3PM77YMR99AvD3fSSxeLChMdiQ6kkEzqoPuSwQqhPsh29irGLC', 33 | 'p2pk679D18uQNkdjpRxuBXL5CqcDKTKzsiXVtc9oCUT6xb82zQmgUks', 34 | 'tz3agP9LGe2cXmKQyYn6T68BHKjjktDbbSWX') 35 | ]) 36 | def test_derive_key_data(self, sk, pk, hash): 37 | public_key = Key(pk) 38 | self.assertFalse(public_key.is_secret) 39 | self.assertEqual(pk, public_key.public_key()) 40 | self.assertEqual(hash, public_key.public_key_hash()) 41 | 42 | secret_key = Key(sk) 43 | self.assertTrue(secret_key.is_secret) 44 | self.assertEqual(pk, secret_key.public_key()) 45 | self.assertEqual(sk, secret_key.secret_key()) 46 | 47 | @parameterized.expand([ 48 | ('edpku976gpuAD2bXyx1XGraeKuCo1gUZ3LAJcHM12W1ecxZwoiu22R', b'test', 49 | 'edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN'), 50 | ('sppk7aMNM3xh14haqEyaxNjSt7hXanCDyoWtRcxF8wbtya859ak6yZT', b'test', 51 | 'spsig1RriZtYADyRhyNoQMa6AiPuJJ7AUDcrxWZfgqexzgANqMv4nXs6qsXDoXcoChBgmCcn2t7Y3EkJaVRuAmNh2cDDxWTdmsz'), 52 | ('p2pk67wVncLFS1DQDm2gVR45sYCzQSXTtqn3bviNYXVCq6WRoqtxHXL', 53 | 
'017a06a770000508440322bf4860a065d1c8747a08f7685be9c79da2b21d5930c12fff86b230081d223b000000005c752b3' 54 | 'a04bc5b950ff781580616c12a646af98285da66232b232661f179c98d6f8c8912ae00000011000000010000000008000000' 55 | '00009b55bda7ad9debcd2657b76d444b14807c7b5dc13e06f754e2b43186d0fb22b3d3332c0000000000031048815b00', 56 | 'sigqWxz3GKFXg6G8ndSzJF8JD9j7m12kPWZj6bHLqdKw6XpxhVLwGm26hVqMdEfgPdoz8qoA5QkM9mvnMyMFmYny9sqjb5bE'), 57 | ('p2pk66n1NmhPDEkcf9sXEKe9kBoTwBoTYxke1hx16aTRVq8MoXuwNqo', 58 | '027a06a770ad828485977947451e23e99f5040ead0f09ef89f58be2583640edcb1e295d0cb000005085e', 59 | 'sigQVTY9CkYw8qL6Xa7QWestkLSdtPv6HZ4ToSMHDcRot3BwRGwZhSwXd9jJwKkDvvotTLSNWQdUqiDSfXuCNUfjbEaY2j6j') 60 | ]) 61 | def test_verify_ext_signatures(self, pk, msg, sig): 62 | key = Key(pk) 63 | key.verify(sig, msg) 64 | self.assertRaises(ValueError, key.verify, sig, b'fake') 65 | 66 | @parameterized.expand([ 67 | ('edsk3nM41ygNfSxVU4w1uAW3G9EnTQEB5rjojeZedLTGmiGRcierVv', '0xdeadbeaf'), 68 | ('spsk1zkqrmst1yg2c4xi3crWcZPqgdc9KtPtb9SAZWYHAdiQzdHy7j', b'hello'), 69 | ('p2sk3PM77YMR99AvD3fSSxeLChMdiQ6kkEzqoPuSwQqhPsh29irGLC', b'test') 70 | ]) 71 | def test_sign_and_verify(self, sk, msg): 72 | key = Key(sk) 73 | sig = key.sign(msg) 74 | key.verify(sig, msg) 75 | self.assertRaises(ValueError, key.verify, sig, b'fake') 76 | 77 | @parameterized.expand([ 78 | ('edsk3nM41ygNfSxVU4w1uAW3G9EnTQEB5rjojeZedLTGmiGRcierVv', b'test', 79 | 'edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN'), 80 | ('spsk1zkqrmst1yg2c4xi3crWcZPqgdc9KtPtb9SAZWYHAdiQzdHy7j', b'test', 81 | 'spsig1RriZtYADyRhyNoQMa6AiPuJJ7AUDcrxWZfgqexzgANqMv4nXs6qsXDoXcoChBgmCcn2t7Y3EkJaVRuAmNh2cDDxWTdmsz'), 82 | ]) 83 | def test_deterministic_signatures(self, sk, msg, sig): 84 | """ 85 | See RFC6979 for explanation 86 | https://tools.ietf.org/html/rfc6979#section-3.2 87 | """ 88 | key = Key(sk) 89 | signature = key.sign(msg) 90 | self.assertEqual(sig, signature) 91 | 92 | @parameterized.expand([ 93 | 
('edesk1zxaPJkhNGSzgZDDSphvPzSNrnbmqes8xzUrw1wdFxdRT7ePiQz8D2Q18fMjn6fC9ZRS2rUbg8d8snxxznE', 94 | 'qqq', b'\xf2h\xbb\xf5\xc7\xe2\xb9\x97', 'edpktmNJub2v7tVjSU8nA9jZrdV5JezmFtZA4yd3jj18i6VKcCJzdo'), 95 | ('spesk21cruoqtYmxfq5fpkXiZZRLRw4vh7VFJauGCAgHxZf3q6Q5LTv9m9dnMxyVjna6RzWQL45q4ppGLh97xZpV', 96 | 'qqq', b'\xbe\xb8\xeefi\x14\\T', 'sppk7Zbcqfy67b6pRMAKax5QKzAxTQUxmfQcCuvn1QMFQsXqy1NkSkz'), 97 | ('p2esk1rqdHRPz4xQh8uP8JaWSVnGFTKxkh2utdjK5CPDTXAzzh5sXnnobLkGrXEZzGhCKFDSjv8Ggrjt7PnobRzs', 98 | 'qqq', b'"\xf8\x0e \x0f]hc', 'p2pk68Ky2h9UZZ4jUYws8mU8Cazhu4H1LdK22wD8HgDPRSvsJPBDtJ7'), 99 | ]) 100 | def test_encrypted_keys(self, sk, passphrase, salt, pk): 101 | key = Key(sk, passphrase=passphrase) 102 | self.assertEqual(pk, key.public_key()) 103 | 104 | with patch('pytezos.crypto.pysodium.randombytes', return_value=salt): 105 | self.assertEqual(sk, key.secret_key(passphrase)) 106 | -------------------------------------------------------------------------------- /tests/test_encoding.py: -------------------------------------------------------------------------------- 1 | from unittest import TestCase 2 | from parameterized import parameterized 3 | 4 | from pytezos.encoding import scrub_input, base58_encode, base58_decode, is_pkh, is_sig, is_bh 5 | 6 | 7 | class TestEncoding(TestCase): 8 | 9 | @parameterized.expand([ 10 | (b'NetXdQprcVkpaWU', b'Net'), 11 | (b'BKjWN8ALguCJ3oAjzMjZCNcFfUf1p9BfVAwYiVHs1QW3yMB9RNb', b'B'), 12 | (b'oop1fbAVi2ZwEt3vpu4uKpYGbbxumyMBSWwWf9qbByeM4JYAu92', b'o'), 13 | (b'LLoabcny4pVg1k6x3AktnNhwe1KSVBZh5Di45JeZPhUCmCu5Xj6ND', b'LLo'), 14 | (b'PtCJ7pwoxe8JasnHY8YonnLYjcVHmhiARPJvqcC6VfHT5s8k8sY', b'P'), 15 | (b'CoUeRwFZbV7NaAYRTz6n4ZLUkwiWcm7oKYdKCGcsEYHgVxSQxa4h', b'Co'), 16 | (b'tz1eKkWU5hGtfLUiqNpucHrXymm83z3DG9Sq', b'tz1'), 17 | (b'tz28YZoayJjVz2bRgGeVjxE8NonMiJ3r2Wdu', b'tz2'), 18 | (b'tz3agP9LGe2cXmKQyYn6T68BHKjjktDbbSWX', b'tz3'), 19 | (b'edpku976gpuAD2bXyx1XGraeKuCo1gUZ3LAJcHM12W1ecxZwoiu22R', b'edpk'), 20 | 
(b'sppk7aMNM3xh14haqEyaxNjSt7hXanCDyoWtRcxF8wbtya859ak6yZT', b'sppk'), 21 | (b'p2pk679D18uQNkdjpRxuBXL5CqcDKTKzsiXVtc9oCUT6xb82zQmgUks', b'p2pk'), 22 | (b'edsk3nM41ygNfSxVU4w1uAW3G9EnTQEB5rjojeZedLTGmiGRcierVv', b'edsk'), 23 | (b'spsk1zkqrmst1yg2c4xi3crWcZPqgdc9KtPtb9SAZWYHAdiQzdHy7j', b'spsk'), 24 | (b'p2sk3PM77YMR99AvD3fSSxeLChMdiQ6kkEzqoPuSwQqhPsh29irGLC', b'p2sk'), 25 | (b'edesk1zxaPJkhNGSzgZDDSphvPzSNrnbmqes8xzUrw1wdFxdRT7ePiQz8D2Q18fMjn6fC9ZRS2rUbg8d8snxxznE', b'edesk'), 26 | (b'spesk21cruoqtYmxfq5fpkXiZZRLRw4vh7VFJauGCAgHxZf3q6Q5LTv9m9dnMxyVjna6RzWQL45q4ppGLh97xZpV', b'spesk'), 27 | (b'p2esk1rqdHRPz4xQh8uP8JaWSVnGFTKxkh2utdjK5CPDTXAzzh5sXnnobLkGrXEZzGhCKFDSjv8Ggrjt7PnobRzs', b'p2esk'), 28 | (b'edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN', 29 | b'edsig'), 30 | (b'spsig1RriZtYADyRhyNoQMa6AiPuJJ7AUDcrxWZfgqexzgANqMv4nXs6qsXDoXcoChBgmCcn2t7Y3EkJaVRuAmNh2cDDxWTdmsz', 31 | b'spsig'), 32 | (b'sigUdRdXYCXW14xqT8mFTMkX4wSmDMBmcW1Vuz1vanGWqYTmuBodueUHGPUsbxgn73AroNwpEBHwPdhXUswzmvCzquiqtcHC', 33 | b'sig') 34 | ]) 35 | def test_b58_decode_encode(self, string, prefix): 36 | data = base58_decode(string) 37 | result = base58_encode(data, prefix) 38 | self.assertEqual(string, result) 39 | 40 | @parameterized.expand([ 41 | ('test', b'test'), 42 | (b'test', b'test'), 43 | ('0x74657374', b'test'), 44 | ]) 45 | def test_scrub_input(self, input_data, expected): 46 | self.assertEqual(expected, scrub_input(input_data)) 47 | 48 | @parameterized.expand([ 49 | ('tz1eKkWU5hGtfLUiqNpucHrXymm83z3DG9Sq', True), 50 | ('tz28YZoayJjVz2bRgGeVjxE8NonMiJ3r2Wdu', True), 51 | ('tz3agP9LGe2cXmKQyYn6T68BHKjjktDbbSWX', True), 52 | ('KT1ExvG3EjTrvDcAU7EqLNb77agPa5u6KvnY', False), 53 | ('qwerty', False), 54 | ('tz1eKkWU5hGtfLUiq', False) 55 | ]) 56 | def test_is_pkh(self, value, expected): 57 | self.assertEqual(expected, is_pkh(value)) 58 | 59 | @parameterized.expand([ 60 | 
('edsigtzLBGCyadERX1QsYHKpwnxSxEYQeGLnJGsSkHEsyY8vB5GcNdnvzUZDdFevJK7YZQ2ujwVjvQZn62ahCEcy74AwtbA8HuN', True), 61 | ('spsig1RriZtYADyRhyNoQMa6AiPuJJ7AUDcrxWZfgqexzgANqMv4nXs6qsXDoXcoChBgmCcn2t7Y3EkJaVRuAmNh2cDDxWTdmsz', True), 62 | ('sigUdRdXYCXW14xqT8mFTMkX4wSmDMBmcW1Vuz1vanGWqYTmuBodueUHGPUsbxgn73AroNwpEBHwPdhXUswzmvCzquiqtcHC', True), 63 | ('qwerty', False), 64 | ('sigUdRdXYCXW14xqT8mFTMkX4wSmDMBmcW1Vuz1vanGWqYT', False), 65 | ]) 66 | def test_is_sig(self, value, expected): 67 | self.assertEqual(expected, is_sig(value)) 68 | 69 | @parameterized.expand([ 70 | ('BLrbVv8rUfkpDZZ6efByhgjyDgPUFeKAfTMq8mWPmjXb9c5m8LJ', True), 71 | ('qwerty', False), 72 | ]) 73 | def test_is_bh(self, value, expected): 74 | self.assertEqual(expected, is_bh(value)) 75 | -------------------------------------------------------------------------------- /tests/test_micheline.py: -------------------------------------------------------------------------------- 1 | import os 2 | import simplejson as json 3 | from unittest import TestCase 4 | from parameterized import parameterized 5 | 6 | from pytezos.micheline.grammar import MichelineParser 7 | from pytezos.micheline.schema import build_schema, decode_data, encode_data 8 | 9 | 10 | def get_data(filename): 11 | path = os.path.join(os.path.dirname(__file__), 'data', filename) 12 | with open(path) as f: 13 | return f.read() 14 | 15 | 16 | class TestMichelineParser(TestCase): 17 | 18 | def setUp(self): 19 | self.parser = MichelineParser() 20 | self.maxDiff = None 21 | 22 | @parameterized.expand([ 23 | ('script/sample_0.tz', 'script/sample_0.json'), 24 | ('script/sample_1.tz', 'script/sample_1.json'), 25 | ('script/sample_2.tz', 'script/sample_2.json'), 26 | ]) 27 | def test_parser(self, source_name, expected_name): 28 | source = get_data(source_name) 29 | res = self.parser.parse(source) 30 | expected = json.loads(get_data(expected_name)) 31 | self.assertListEqual(expected, res) 32 | 33 | @parameterized.expand([ 34 | ('storage/sample_0.json',), 35 
| ('storage/sample_1.json',), 36 | ]) 37 | def test_storage_parsing(self, source_name): 38 | script = json.loads(get_data(source_name)) 39 | storage = next(s for s in script['code'] if s['prim'] == 'storage') 40 | schema = build_schema(storage) 41 | data = decode_data(script['storage'], schema) 42 | res = encode_data(data, schema) 43 | self.assertDictEqual(script['storage'], res) 44 | 45 | @parameterized.expand([ 46 | ('parameter/code_0.json', 'parameter/parameters_0.json'), 47 | ]) 48 | def test_parameter_parsing(self, code_name, parameters_name): 49 | code = json.loads(get_data(code_name)) 50 | parameters = json.loads(get_data(parameters_name)) 51 | schema = build_schema(code) 52 | data = decode_data(parameters, schema) 53 | res = encode_data(data, schema) 54 | self.assertDictEqual(parameters, res) 55 | --------------------------------------------------------------------------------