├── .flake8 ├── .github └── workflows │ ├── codeql.yml │ ├── test.yml │ └── upload.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── Makefile ├── README.md ├── flake.lock ├── flake.nix ├── poetry.lock ├── pyproject.toml ├── pystarport ├── __init__.py ├── app.py ├── bot.py ├── cli.py ├── cluster.py ├── cosmoscli.py ├── default.nix ├── expansion.py ├── ledger.py ├── ports.py ├── proto_python │ ├── api_util.py │ └── grpc_util.py ├── tests │ ├── test_expansion │ │ ├── base.jsonnet │ │ ├── base.yaml │ │ ├── cronos_has_dotenv.jsonnet │ │ ├── cronos_has_dotenv.yaml │ │ ├── cronos_has_posix_no_dotenv.jsonnet │ │ ├── cronos_has_posix_no_dotenv.yaml │ │ ├── cronos_no_dotenv.jsonnet │ │ ├── cronos_no_dotenv.yaml │ │ ├── dotenv │ │ ├── dotenv1 │ │ ├── test_expansion.py │ │ └── yaml_doc.jsonnet │ └── test_utils.py └── utils.py └── setup.cfg /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 88 3 | extend-ignore = E203 4 | exclude = .git,__pycache__,./integration_tests/contracts,./integration_tests/**/*_pb2.py 5 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "main" ] 6 | pull_request: 7 | branches: [ "main" ] 8 | schedule: 9 | - cron: "15 13 * * 3" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ python ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v3 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v2 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v2 37 | 38 | - 
name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v2 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | on: 3 | pull_request: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - "*" 9 | 10 | jobs: 11 | lint: 12 | name: lint 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: cachix/install-nix-action@v30 17 | - uses: DeterminateSystems/magic-nix-cache-action@main 18 | - name: Run linting 19 | run: nix develop -c make lint-ci 20 | tests: 21 | runs-on: ubuntu-latest 22 | steps: 23 | - uses: actions/checkout@v2 24 | - uses: cachix/install-nix-action@v30 25 | - uses: DeterminateSystems/magic-nix-cache-action@main 26 | - name: Run tests 27 | run: nix develop -c python -mpytest -vv -s 28 | nix-flake: 29 | runs-on: ubuntu-latest 30 | steps: 31 | - uses: actions/checkout@v2 32 | - uses: cachix/install-nix-action@v15 33 | - run: nix run 34 | -------------------------------------------------------------------------------- /.github/workflows/upload.yml: -------------------------------------------------------------------------------- 1 | name: upload 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*.*.*' 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | upload: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: install dependencies 17 | run: python3 -m pip install --user --upgrade poetry 18 | - name: build 19 | run: poetry build 20 | - name: release 21 | uses: softprops/action-gh-release@v1 22 | with: 23 | files: | 24 | dist/* 25 | env: 26 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 27 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # 
Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 
95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | 140 | # VS Code configuration 141 | .vscode 142 | 143 | # macos 144 | .DS_Store 145 | 146 | /.direnv 147 | /.envrc 148 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog 2 | 3 | ## Unreleased 4 | 5 | - [#86](https://github.com/crypto-com/pystarport/pull/86) support golang relayer 6 | - [#91](https://github.com/crypto-com/pystarport/pull/91) replace block with sync broadcast mode 7 | - [#93](https://github.com/crypto-com/pystarport/pull/93) add default gas prices for golang relayer 8 | - [#94](https://github.com/crypto-com/pystarport/pull/94) allow custom broadcast mode when create node 9 | - [#95](https://github.com/crypto-com/pystarport/pull/95) make golang relayer optional 10 | - [#96](https://github.com/crypto-com/pystarport/pull/96) change default listen address to 127.0.0.1 11 | - [#97](https://github.com/crypto-com/pystarport/pull/97) update `websocket_addr` to `event_source` for hermes v1.6.0 12 | - [#99](https://github.com/crypto-com/pystarport/pull/99) add relayer related flag when init cluster 13 | - [#100](https://github.com/crypto-com/pystarport/pull/100) manage 
golang relayer under supervisord 14 | - [#101](https://github.com/crypto-com/pystarport/pull/101) support legacy hermes before v1.6.0 15 | - [#104](https://github.com/crypto-com/pystarport/pull/104) support json_rpc_addr in relayer config 16 | - [#109](https://github.com/crypto-com/pystarport/pull/109) add feegrants in relayer config 17 | - [#110](https://github.com/crypto-com/pystarport/pull/110) add event_query_tx_for to allow subscribe and wait for transaction. 18 | - [#112](https://github.com/crypto-com/pystarport/pull/112) add cmd for migrate keystore. 19 | - [#113](https://github.com/crypto-com/pystarport/pull/113) support ibc related cmd. 20 | - [#115](https://github.com/crypto-com/pystarport/pull/115) avoid cli redundant migrated key log in stdout. 21 | - [#117](https://github.com/crypto-com/pystarport/pull/117) make event_query_tx_for optional. 22 | - [#121](https://github.com/crypto-com/pystarport/pull/121), [#122](https://github.com/crypto-com/pystarport/pull/122), [#125](https://github.com/crypto-com/pystarport/pull/125) Support sdk 0.50. 23 | - [#127](https://github.com/crypto-com/pystarport/pull/127) Support adding new key when patching config 24 | - [#128](https://github.com/crypto-com/pystarport/pull/128) fix wrong description on empty flag when create validator and align flags for edit validator. 25 | - [#129](https://github.com/crypto-com/pystarport/pull/129) create and get validator are incompatible. 26 | - [#137](https://github.com/crypto-com/pystarport/pull/137) support ica and icaauth cmd. 27 | - [#139](https://github.com/crypto-com/pystarport/pull/139) support ibc channel upgrade related methods. 28 | - [#141](https://github.com/crypto-com/pystarport/pull/141) make cmd flag support multiple chains. 29 | - [#142](https://github.com/crypto-com/pystarport/pull/142) add coin type when create account. 30 | - [#145](https://github.com/crypto-com/pystarport/pull/145) Backward compatible with binary that don't have event-query-tx-for. 
- [#147](https://github.com/crypto-com/pystarport/pull/147) support query exposed by the external community pool.
59 | 60 | *Dec 3, 2021* 61 | 62 | ## v0.2.3 63 | 64 | - [29](https://github.com/crypto-com/pystarport/pull/29) Allow vesting portion of the allocated fund in account 65 | - [28](https://github.com/crypto-com/pystarport/pull/28) Support overwrite default relayer config with configs used to setup chains 66 | - [13](https://github.com/crypto-com/pystarport/issues/13) Support configure start command flags 67 | - [19](https://github.com/crypto-com/pystarport/issues/19) Support `config` to patch `config.toml` for each validator 68 | - [37](https://github.com/crypto-com/pystarport/pull/37) Add expansion feature 69 | 70 | *Jul 6, 2021* 71 | 72 | ## v0.2.2 73 | 74 | - [12](https://github.com/crypto-com/pystarport/issues/12) Add back mnemonics 75 | 76 | - [11](https://github.com/crypto-com/pystarport/pull/11) 77 | Add min-gas prices 78 | Add quotes on validator pubkey, as ProtoJSON is used in 0.43 79 | 80 | 81 | *Jun 17, 2021* 82 | ## v0.2.1 83 | 84 | - [5](https://github.com/crypto-com/pystarport/issues/5) Add `query_denom_by_name` to check existing denom before issuing 85 | - [2](https://github.com/crypto-com/pystarport/issues/2) Add mnemonic option field for accounts 86 | 87 | ## v0.2.0 88 | 89 | - Support `app-config` to patch `app.toml` for each validator 90 | 91 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2018-present Crypto.com 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
12 | See the License for the specific language governing permissions and 13 | limitations under the License. -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | test: 2 | @(cd pystarport/tests && poetry run pytest) 3 | 4 | lint: 5 | @poetry run flake8 --show-source --count --statistics 6 | @poetry run isort --check-only . 7 | 8 | lint-ci: 9 | @flake8 --show-source --count --statistics \ 10 | --format="::error file=%(path)s,line=%(row)d,col=%(col)d::%(path)s:%(row)d:%(col)d: %(code)s %(text)s" 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | pystarport is like a [cosmos starport](https://github.com/tendermint/starport) 2 | without the scaffolding feature. it's mainly used for development and testing. It's developed for the Crypto.org Chain, but 3 | it can also be used for any cosmos-sdk based projects. 4 | 5 | ## Configuration 6 | 7 | a typical configuration for a devnet is like this: 8 | 9 | ``` 10 | chainmaind: 11 | cmd: chain-maind # chain binary to use, optional 12 | validators: # genesis validators 13 | - coins: 10cro 14 | staked: 10cro 15 | - coins: 10cro 16 | staked: 10cro 17 | accounts: # genesis accounts 18 | - name: community 19 | coins: 100cro 20 | - name: ecosystem 21 | coins: 200cro 22 | - name: reserve 23 | coins: 200cro 24 | vesting_coins: 100cro # if not provided, the all of coins will be vested 25 | vesting: "1d" 26 | - name: launch 27 | coins: 100cro 28 | genesis: # patch genesis states 29 | app_state: 30 | staking: 31 | params: 32 | unbonding_time: "10s" 33 | ``` 34 | 35 | The `validators` section defines how many nodes to run, for each node, a home directory is initialized in 36 | `data/node{i}`, and a validator account with specified coins is created. 
37 | 38 | The `accounts` defines other non-validator accounts, they are created in `node0`'s keyring. 39 | 40 | In the `genesis` section you can override any genesis configuration with the same json path. 41 | 42 | ## Usage 43 | 44 | ``` 45 | NAME 46 | pystarport serve - prepare and start a devnet from scatch 47 | 48 | SYNOPSIS 49 | pystarport serve 50 | 51 | DESCRIPTION 52 | prepare and start a devnet from scatch 53 | 54 | FLAGS 55 | --data=DATA 56 | Type: str 57 | Default: './data' 58 | path to the root data directory 59 | --config=CONFIG 60 | Type: str 61 | Default: './config.yaml' 62 | path to the configuration file 63 | --base_port=BASE_PORT 64 | Type: int 65 | Default: 26650 66 | the base port to use, the service ports of different nodes are calculated based on this 67 | --cmd=CMD 68 | Type: str 69 | Default: 'chain-maind' 70 | the chain binary to use 71 | ``` 72 | 73 | ## Port rules 74 | 75 | The rules to calculate service ports based on base port is defined in the 76 | [`ports.py`](https://github.com/crypto-org-chain/chain-main/blob/master/pystarport/pystarport/ports.py) module. 77 | 78 | For example, with default base port `26650`, the url of api servers of the nodes would be: 79 | 80 | - Node0: http://127.0.0.1:26654 81 | - Node1: http://127.0.0.1:26664 82 | 83 | > The swagger doc of node0 is http://127.0.0.1:26654/swagger/ 84 | > 85 | > The default rpc port used by `chain-maind` is `26657`, that's the default node0's rpc port, so you can use 86 | > `chain-maind` without change to access node0's rpc. 
87 | 88 | ## Supervisor 89 | 90 | `pystarport` embeded a [supervisor](http://supervisord.org/) to manage processes of multiple nodes, you can use 91 | `pystarport supervisorctl` to manage the processes: 92 | 93 | ``` 94 | $ pystarport supervisorctl status 95 | node0 RUNNING pid 35210, uptime 0:00:29 96 | node1 RUNNING pid 35211, uptime 0:00:29 97 | $ pystarport supervisorctl help 98 | 99 | default commands (type help ): 100 | ===================================== 101 | add exit open reload restart start tail 102 | avail fg pid remove shutdown status update 103 | clear maintail quit reread signal stop version 104 | ``` 105 | 106 | Or enter an interactive shell: 107 | 108 | ``` 109 | $ pystarport supervisorctl 110 | node0 RUNNING pid 35210, uptime 0:01:53 111 | node1 RUNNING pid 35211, uptime 0:01:53 112 | supervisor> 113 | ``` 114 | 115 | ## Cli 116 | 117 | After started the chain, you can use `chain-maind` cli directly, there are also some wrapper commands provided by 118 | `pystarport cli`. It understands the directory structure and port rules, also assuming `keyring-backend=test`, and there 119 | are shortcuts for commonly used commands, so arguments are shorter. 120 | 121 | ``` 122 | $ pystarport cli - --help 123 | ... 124 | ``` 125 | 126 | ## Transaction Bot 127 | 128 | A simple transaction bot that works for cluster created by pystarport as well as a local node 129 | 130 | Copy and modify `bot.yaml.sample` to `bot.yaml` with your desired bot configurations. 131 | 132 | ### If you are running on a pystarport created cluster: 133 | 1. Make sure you have provide the `node` for each job in the `bot.yaml` 134 | 2. 
Run the command 135 | ``` 136 | $ pystarport bot --chain-id=[cluster_chain_id] - start 137 | ``` 138 | 139 | ### If you are running on a local node 140 | ``` 141 | $ pstarport bot --node_rpc=tcp://127.0.0.1:26657 --data=/path/to/your/local/node/home/ - start 142 | ``` 143 | 144 | ## docker-compose 145 | 146 | When used with `docker-compose` or multiple machines, you need to config hostnames, and you probabely want to use a same 147 | `base_port` since you don't have port conflicts, you can config the `validators` like this: 148 | 149 | ```yaml 150 | validators: 151 | - coins: 10cro 152 | staked: 10cro 153 | base_port: 26650 154 | hostname: node0 155 | - coins: 10cro 156 | staked: 10cro 157 | base_port: 26650 158 | hostname: node1 159 | ``` 160 | 161 | `pystarport init --gen_compose_file` will also generate a `docker-compose.yml` file for you. 162 | 163 | ## IBC 164 | 165 | It can setup multiple devnets at once, and connect them with ibc relayer. 166 | 167 | ``` 168 | ibc-0: 169 | validators: 170 | - coins: 10cro 171 | staked: 10cro 172 | base_port: 26650 173 | - coins: 10cro 174 | staked: 10cro 175 | accounts: 176 | - name: relayer 177 | coins: 100cro 178 | genesis: 179 | app_state: 180 | transfer: 181 | params: 182 | receive_enabled: true 183 | send_enabled: true 184 | ibc-1: 185 | validators: 186 | - coins: 10cro 187 | staked: 10cro 188 | base_port: 26750 189 | - coins: 10cro 190 | staked: 10cro 191 | base_port: 26760 192 | accounts: 193 | - name: relayer 194 | coins: 100cro 195 | genesis: 196 | app_state: 197 | transfer: 198 | params: 199 | receive_enabled: true 200 | send_enabled: true 201 | relayer: # refer here (https://hermes.informal.systems/config.html) for more configs 202 | global: 203 | strategy: 'packets' 204 | log_level: 'info' 205 | chains: 206 | - id: "ibc-0" # id is needed to match chain id 207 | trusting_period: "20s" 208 | - id: "ibc-1" # id is needed to match chain id 209 | trusting_period: "20s" 210 | 211 | ``` 212 | 213 | With following 
commands to setup ibc, you are ready to play with ibc functionalities: 214 | 215 | ``` 216 | # spawn the devnets 217 | pystarport serve --config ibc.yaml 218 | # setup ibc channel 219 | hermes -c data/relayer.toml create channel ibc-0 ibc-1 --port-a transfer --port-b transfer 220 | # start relayer process 221 | supervisorctl -c data/tasks.ini start relayer-demo 222 | ``` 223 | 224 | ## Development 225 | ### Set up development environment 226 | More about [poetry](https://python-poetry.org/docs/). 227 | ``` 228 | poetry install 229 | ``` 230 | ### Recommended VS Code workspace settings 231 | Remember to run `poetry env info` after `poetry install` and update this `python.defaultInterpreterPath` 232 | ```json 233 | { 234 | "python.formatting.provider": "black", 235 | // after running `poetry env info`, change python.defaultInterpreterPath to Path under Virtualenv 236 | "python.defaultInterpreterPath": "~/Library/Caches/pypoetry/virtualenvs/to_be_updated", 237 | "editor.formatOnSave": true, 238 | "python.linting.flake8Enabled": true, 239 | "python.formatting.blackArgs": [ 240 | "--line-length=88" 241 | ], 242 | "python.sortImports.args": [ 243 | "--profile", 244 | "black" 245 | ], 246 | "[python]": { 247 | "editor.codeActionsOnSave": { 248 | "source.organizeImports": true 249 | } 250 | } 251 | } 252 | ``` 253 | 254 | ## Test 255 | ### Install jsonnet 256 | More about [jsonnet](https://jsonnet.org). 257 | ``` 258 | make test 259 | ``` 260 | 261 | 262 | ## FAQ 263 | 264 | ### Have this error on Mac: `AF_UNIX path too long´ 265 | 266 | Set environment variable `TMPPATH=/tmp` to shorten the unix domain socket path. 
267 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1653893745, 6 | "narHash": "sha256-0jntwV3Z8//YwuOjzhV2sgJJPt+HY6KhU7VZUL0fKZQ=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "1ed9fb1935d260de5fe1c2f7ee0ebaae17ed2fa1", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "nix-github-actions": { 19 | "inputs": { 20 | "nixpkgs": [ 21 | "poetry2nix", 22 | "nixpkgs" 23 | ] 24 | }, 25 | "locked": { 26 | "lastModified": 1703863825, 27 | "narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=", 28 | "owner": "nix-community", 29 | "repo": "nix-github-actions", 30 | "rev": "5163432afc817cf8bd1f031418d1869e4c9d5547", 31 | "type": "github" 32 | }, 33 | "original": { 34 | "owner": "nix-community", 35 | "repo": "nix-github-actions", 36 | "type": "github" 37 | } 38 | }, 39 | "nixpkgs": { 40 | "locked": { 41 | "lastModified": 1716715802, 42 | "narHash": "sha256-usk0vE7VlxPX8jOavrtpOqphdfqEQpf9lgedlY/r66c=", 43 | "owner": "NixOS", 44 | "repo": "nixpkgs", 45 | "rev": "e2dd4e18cc1c7314e24154331bae07df76eb582f", 46 | "type": "github" 47 | }, 48 | "original": { 49 | "owner": "NixOS", 50 | "ref": "nixpkgs-unstable", 51 | "repo": "nixpkgs", 52 | "type": "github" 53 | } 54 | }, 55 | "poetry2nix": { 56 | "inputs": { 57 | "flake-utils": [ 58 | "flake-utils" 59 | ], 60 | "nix-github-actions": "nix-github-actions", 61 | "nixpkgs": [ 62 | "nixpkgs" 63 | ], 64 | "systems": "systems", 65 | "treefmt-nix": "treefmt-nix" 66 | }, 67 | "locked": { 68 | "lastModified": 1716813403, 69 | "narHash": "sha256-9+G8tEOh3QkjSUV2UMC+TpvzKOR8IUFlkJJTMpVQMkc=", 70 | "owner": "nix-community", 71 | "repo": "poetry2nix", 72 | "rev": "12599ecaa9ec641c29dc8fd07f8267b23874bf3a", 73 | "type": 
"github" 74 | }, 75 | "original": { 76 | "owner": "nix-community", 77 | "repo": "poetry2nix", 78 | "type": "github" 79 | } 80 | }, 81 | "root": { 82 | "inputs": { 83 | "flake-utils": "flake-utils", 84 | "nixpkgs": "nixpkgs", 85 | "poetry2nix": "poetry2nix" 86 | } 87 | }, 88 | "systems": { 89 | "locked": { 90 | "lastModified": 1681028828, 91 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", 92 | "owner": "nix-systems", 93 | "repo": "default", 94 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", 95 | "type": "github" 96 | }, 97 | "original": { 98 | "id": "systems", 99 | "type": "indirect" 100 | } 101 | }, 102 | "treefmt-nix": { 103 | "inputs": { 104 | "nixpkgs": [ 105 | "poetry2nix", 106 | "nixpkgs" 107 | ] 108 | }, 109 | "locked": { 110 | "lastModified": 1715940852, 111 | "narHash": "sha256-wJqHMg/K6X3JGAE9YLM0LsuKrKb4XiBeVaoeMNlReZg=", 112 | "owner": "numtide", 113 | "repo": "treefmt-nix", 114 | "rev": "2fba33a182602b9d49f0b2440513e5ee091d838b", 115 | "type": "github" 116 | }, 117 | "original": { 118 | "owner": "numtide", 119 | "repo": "treefmt-nix", 120 | "type": "github" 121 | } 122 | } 123 | }, 124 | "root": "root", 125 | "version": 7 126 | } 127 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | inputs = { 3 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable"; 4 | flake-utils.url = "github:numtide/flake-utils"; 5 | poetry2nix = { 6 | url = "github:nix-community/poetry2nix"; 7 | inputs.nixpkgs.follows = "nixpkgs"; 8 | inputs.flake-utils.follows = "flake-utils"; 9 | }; 10 | }; 11 | outputs = 12 | { 13 | self, 14 | nixpkgs, 15 | flake-utils, 16 | poetry2nix, 17 | }: 18 | (flake-utils.lib.eachDefaultSystem ( 19 | system: 20 | let 21 | pkgs = ( 22 | import nixpkgs { 23 | inherit system; 24 | config = { }; 25 | overlays = [ 26 | poetry2nix.overlays.default 27 | ]; 28 | } 29 | ); 30 | overrides = 
pkgs.poetry2nix.overrides.withDefaults ( 31 | self: super: 32 | let 33 | buildSystems = { 34 | durations = [ "setuptools" ]; 35 | multitail2 = [ "setuptools" ]; 36 | pytest-github-actions-annotate-failures = [ "setuptools" ]; 37 | flake8-black = [ "setuptools" ]; 38 | flake8-isort = [ "hatchling" ]; 39 | docker = [ 40 | "hatchling" 41 | "hatch-vcs" 42 | ]; 43 | }; 44 | in 45 | pkgs.lib.mapAttrs ( 46 | attr: systems: 47 | super.${attr}.overridePythonAttrs (old: { 48 | nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ map (a: self.${a}) systems; 49 | }) 50 | ) buildSystems 51 | ); 52 | in 53 | rec { 54 | packages.default = pkgs.poetry2nix.mkPoetryApplication { 55 | projectDir = ./.; 56 | inherit overrides; 57 | }; 58 | apps.default = { 59 | type = "app"; 60 | program = "${packages.default}/bin/pystarport"; 61 | }; 62 | devShells.default = pkgs.mkShell { 63 | buildInputs = [ 64 | (pkgs.poetry2nix.mkPoetryEnv { 65 | projectDir = ./.; 66 | inherit overrides; 67 | }) 68 | (pkgs.poetry2nix.mkPoetryEditablePackage { 69 | projectDir = ./.; 70 | editablePackageSources = { 71 | pystarport = ./pystarport; 72 | }; 73 | }) 74 | ]; 75 | }; 76 | } 77 | )); 78 | } 79 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "pystarport" 3 | version = "0.2.5" 4 | description = "Spawn local devnets for cosmos-sdk chains" 5 | authors = ["chain-dev-team "] 6 | license = "Apache" 7 | keywords = ["CRO", "blockchain", "crypto.com", "cosmos"] 8 | readme = "README.md" 9 | homepage = "https://github.com/crypto-com/pystarport" 10 | repository = "https://github.com/crypto-com/pystarport" 11 | 12 | [tool.poetry.dependencies] 13 | python = "^3.9" 14 | fire = "^0" 15 | tomlkit = "^0" 16 | jsonmerge = "^1.7" 17 | python-dateutil = "^2.8" 18 | durations = "^0" 19 | supervisor = "^4.2" 20 | docker = "^7.0" 21 | bech32 = "^1.1" 22 | multitail2 = 
"^1.5" 23 | python-dotenv = "^1.0" 24 | pyyaml-include = "^1.3" 25 | jsonnet = "^0" 26 | pyyaml = "^6.0" 27 | 28 | [tool.poetry.dev-dependencies] 29 | pytest = "^8.0" 30 | deepdiff = "^6.7" 31 | flake8 = "^7" 32 | flake8-black = "^0" 33 | flake8-isort = "^6.1" 34 | 35 | [tool.poetry.scripts] 36 | pystarport = "pystarport.cli:main" 37 | 38 | [build-system] 39 | requires = ["poetry-core>=1.0.0"] 40 | build-backend = "poetry.core.masonry.api" 41 | 42 | [tool.black] 43 | line-length = 88 44 | target-version = ['py39'] 45 | include = '\.pyi?$' 46 | exclude = ''' 47 | 48 | ( 49 | /( 50 | \.eggs # exclude a few common directories in the 51 | | \.git # root of the project 52 | | \.hg 53 | | \.mypy_cache 54 | | \.tox 55 | | \.venv 56 | | _build 57 | | buck-out 58 | | build 59 | | dist 60 | )/ 61 | | foo.py # also separately exclude a file named foo.py in 62 | # the root of the project 63 | ) 64 | ''' 65 | 66 | [tool.isort] 67 | profile = "black" 68 | -------------------------------------------------------------------------------- /pystarport/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | from pathlib import Path 4 | 5 | proto_folder = Path(os.path.abspath(__file__)).parent.joinpath("proto_python") 6 | sys.path.append(str(proto_folder)) 7 | -------------------------------------------------------------------------------- /pystarport/app.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | CHAIN = "" # edit by nix-build 4 | if not CHAIN: 5 | CHAIN = os.environ.get("CHAIN_MAIND", "chain-maind") 6 | IMAGE = "docker.pkg.github.com/crypto-org-chain/chain-main/chain-main-pystarport:latest" 7 | 8 | SUPERVISOR_CONFIG_FILE = "tasks.ini" 9 | -------------------------------------------------------------------------------- /pystarport/bot.py: -------------------------------------------------------------------------------- 1 | import random 2 | import sys 3 
class TxJobThread(threading.Thread):
    """Background worker that repeatedly executes one configured tx job.

    The job dict drives everything: its "type" selects the action, its
    interval keys control pacing, and its amount keys control the tx size.
    """

    def __init__(self, label, job, cosmos_cli: "CosmosCLI"):
        """
        :param label: human-readable tag used as a prefix in log lines
        :param job: job configuration dict (one entry of bot.yaml "jobs")
        :param cosmos_cli: CLI wrapper used to sign and broadcast txs
        """
        threading.Thread.__init__(self)
        self.label = label
        self.job = job
        self.cosmos_cli = cosmos_cli

    def _job_amount(self):
        """Resolve the tx amount: uniform random if "random_amount" is
        configured as [min, max, denom], otherwise the fixed "amount"."""
        if "random_amount" in self.job:
            spec = self.job["random_amount"]
            return random_amount(spec[0], spec[1], spec[2])
        return self.job["amount"]

    def transfer_tx_job(self):
        """Send a bank transfer from `from_account` to `to_address`."""
        from_address = self.cosmos_cli.address(
            self.job["from_account"],
        )
        to_address = self.job["to_address"]
        amount = self._job_amount()

        print(
            "[%s] Transfer %s from %s to %s"
            % (self.label, amount, from_address, to_address)
        )
        result = self.cosmos_cli.transfer(from_address, to_address, amount)
        print(result)

    def delegate_tx_job(self):
        """Delegate from `from_account` to `to_validator_address`."""
        from_address = self.cosmos_cli.address(self.job["from_account"])
        to_address = self.job["to_validator_address"]
        amount = self._job_amount()

        print(
            "[%s] Delegate %s from %s to %s"
            % (self.label, amount, from_address, to_address)
        )
        result = self.cosmos_cli.delegate_amount(to_address, amount, from_address)
        print(result)

    def withdraw_all_rewards_job(self):
        """Withdraw all staking rewards accrued by `from_account`."""
        from_address = self.cosmos_cli.address(self.job["from_account"])
        print("[%s] Withdraw all rewards from %s" % (self.label, from_address))
        result = self.cosmos_cli.withdraw_all_rewards(from_address)
        print(result)

    def next_interval(self):
        """Seconds to wait before the next run: uniform random if
        "random_interval" is configured as [lo, hi], else fixed "interval"."""
        if "random_interval" in self.job:
            lo = self.job["random_interval"][0]
            hi = self.job["random_interval"][1]
            return random.randint(lo, hi)
        return self.job["interval"]

    def run(self):
        """Loop forever, executing the configured job then sleeping so that
        consecutive runs are spaced by next_interval() seconds."""
        job_type = self.job["type"]
        handlers = {
            "transfer": self.transfer_tx_job,
            "delegate": self.delegate_tx_job,
            "withdraw_all_rewards": self.withdraw_all_rewards_job,
        }
        while True:
            begin = time.time()

            try:
                handler = handlers.get(job_type)
                if handler is None:
                    # BUG FIX: original did print("Unknown job type: %s", job_type),
                    # passing the job type as a second print argument instead of
                    # interpolating it into the message.
                    print("Unknown job type: %s" % job_type)
                    # SystemExit is not caught by `except Exception` below,
                    # so this terminates the thread as before.
                    sys.exit()
                handler()
            except Exception as e:
                print("error executing job:", sys.exc_info(), str(e))

            interval = self.next_interval()

            # subtract the time the job itself took from the sleep budget
            duration = time.time() - begin
            if duration < interval:
                sleep = interval - duration
                print("Next %s in %ds ...\n" % (job_type, sleep))
                time.sleep(sleep)


def random_amount(lower, upper, denom):
    """Return "<n><denom>" with n drawn uniformly from [lower, upper].

    Parameters renamed from the original (min, max, denom), which shadowed
    the builtins; all in-module callers pass positionally.
    """
    return "%d%s" % (random.randint(lower, upper), denom)


class BotClusterCLI:
    "transaction bot Cluster CLI"

    def __init__(self, config_path, cluster_cli: "ClusterCLI"):
        """
        :param config_path: path to the bot yaml configuration
        :param cluster_cli: cluster wrapper used to build per-node CLIs
        """
        with open(config_path) as f:
            self.config = yaml.safe_load(f.read())
        self.cluster_cli = cluster_cli

    def start(self):
        """
        prepare and start a transaction bot from configuration
        """
        threads = []
        for i, job in enumerate(self.config["jobs"], start=1):
            # each job may target a specific node; default to node 0
            node_i = job.get("node", 0)
            cli = CosmosCLI(
                self.cluster_cli.home(node_i),
                self.cluster_cli.node_rpc(node_i),
                chain_id=self.cluster_cli.chain_id,
                cmd=self.cluster_cli.cmd,
            )
            thread = TxJobThread(job.get("label", i), job, cli)

            threads.append(thread)
            thread.start()

        for thread in threads:
            thread.join()


class BotCLI:
    "transaction bot CLI"

    def __init__(self, config_path, cosmos_cli=None):
        """
        :param config_path: path to the bot yaml configuration
        :param cosmos_cli: CLI wrapper for the single target node
        """
        with open(config_path) as f:
            self.config = yaml.safe_load(f.read())
        self.cosmos_cli = cosmos_cli

    def start(self):
        """
        prepare and start a transaction bot from configuration
        """
        threads = []
        for i, job in enumerate(self.config["jobs"], start=1):
            thread = TxJobThread(job.get("label", i), job, self.cosmos_cli)

            threads.append(thread)
            thread.start()

        for thread in threads:
            thread.join()
def init(
    data,
    config,
    base_port,
    dotenv,
    *args,
    no_remove=False,
    relayer=Relayer.HERMES.value,
    **kwargs,
):
    """Initialize the devnet data directory.

    Unless no_remove is set, any previous data directory is removed first
    (best-effort), then a fresh one is created and populated.
    """
    if not no_remove:
        # best-effort cleanup of any previous run
        interact(
            f"rm -r {data}; mkdir {data}",
            ignore_error=True,
        )
    return init_cluster(
        data,
        config,
        base_port,
        dotenv,
        relayer=relayer,
        *args,
        **kwargs,
    )


def start(data, quiet):
    """Run supervisord for the prepared devnet; tail logs unless quiet."""
    supervisord = start_cluster(data)

    # make SIGINT/SIGTERM shut supervisord down cleanly
    for sig in (signal.SIGINT, signal.SIGTERM):
        signal.signal(sig, lambda *_: supervisord.terminate())

    tailer = None if quiet else start_tail_logs_thread(data)

    supervisord.wait()

    if tailer is not None:
        tailer.stop()
        tailer.join()


def serve(
    data,
    config,
    base_port,
    dotenv,
    cmd,
    quiet,
    no_remove=False,
    relayer=Relayer.HERMES.value,
):
    """Initialize then start a devnet in one shot."""
    init(data, config, base_port, dotenv, cmd=cmd, no_remove=no_remove, relayer=relayer)
    start(data, quiet)
class CLI:
    """pystarport command line interface (dispatched via python-fire)."""

    def __init__(self, /, cmd=None):
        """
        :param cmd: path to the chain binary
        """
        self.cmd = cmd

    def init(
        self,
        data: str = "./data",
        config: str = "./config.yaml",
        base_port: int = 26650,
        dotenv: str = None,
        image: str = IMAGE,
        gen_compose_file: bool = False,
        no_remove: bool = False,
        relayer: str = Relayer.HERMES.value,
    ):
        """
        prepare all the configurations of a devnet

        :param data: path to the root data directory
        :param config: path to the configuration file
        :param base_port: the base port to use, the service ports of different nodes
            are calculated based on this
        :param dotenv: path to .env file
        :param image: the image used in the generated docker-compose.yml
        :param gen_compose_file: generate a docker-compose.yml
        :param no_remove: don't remove existing data directory
        """
        init(
            Path(data),
            config,
            base_port,
            dotenv,
            image,
            self.cmd,
            gen_compose_file,
            no_remove=no_remove,
            relayer=relayer,
        )

    def start(self, data: str = "./data", quiet: bool = False):
        """
        start the prepared devnet

        :param data: path to the root data directory
        :param quiet: don't print logs of subprocesses
        """
        start(Path(data), quiet)

    def chaind(self, *args, **kwargs):
        """
        start one node whose home directory is already initialized
        can be used to launch chain-maind

        :param home: home directory
        """
        # replace the current process with the chain binary
        os.execvp(self.cmd, [self.cmd] + build_cli_args(*args, **kwargs))

    def serve(
        self,
        data: str = "./data",
        config: str = "./config.yaml",
        base_port: int = 26650,
        dotenv: str = None,
        quiet: bool = False,
        no_remove: bool = False,
        relayer: str = Relayer.HERMES.value,
    ):
        """
        prepare and start a devnet from scatch

        :param data: path to the root data directory
        :param config: path to the configuration file
        :param base_port: the base port to use, the service ports of different nodes
            are calculated based on this
        :param dotenv: path to .env file
        :param quiet: don't print logs of subprocesses
        :param no_remove: don't remove existing data directory
        """
        serve(
            Path(data),
            config,
            base_port,
            dotenv,
            self.cmd,
            quiet,
            no_remove=no_remove,
            relayer=relayer,
        )

    def supervisorctl(self, *args, data: str = "./data"):
        """forward arguments to supervisorctl against the devnet's config"""
        from supervisor.supervisorctl import main

        main(("-c", Path(data) / SUPERVISOR_CONFIG_FILE, *args))

    def cli(self, *args, data: str = "./data", chain_id: str = "chainmaind"):
        """
        pystarport CLI

        :param data: path to the root data directory
        :param chain_id: chain id of the cluster
        """
        return ClusterCLI(Path(data), chain_id=chain_id, cmd=self.cmd)

    def bot(
        self,
        *args,
        data: str = "./data",
        config: str = "./bot.yaml",
        chain_id: str = "chainmaind",
        node_rpc: str = None,
    ):
        """
        transaction bot CLI

        :param data: path to the root data directory if connecting to pystarport
            cluster. Path to the home directory if connecting to a node
        :param config: path to the bot configuration file
            (copy bot.yaml.example for reference)
        :param chain_id: chain id of the cluster
        :param node_rpc: custom Tendermint RPC endpoint to the node
        """
        root = Path(data)
        cfg = Path(config)
        # a custom RPC endpoint means we target one node directly
        if node_rpc is not None:
            return BotCLI(cfg, CosmosCLI(root, node_rpc, cmd=self.cmd))
        return BotClusterCLI(cfg, ClusterCLI(root, chain_id=chain_id, cmd=self.cmd))


def main():
    fire.Fire(CLI)


if __name__ == "__main__":
    main()
import ports 26 | from .app import CHAIN, IMAGE, SUPERVISOR_CONFIG_FILE 27 | from .cosmoscli import ChainCommand, CosmosCLI, ModuleAccount, module_address 28 | from .expansion import expand_jsonnet, expand_yaml 29 | from .ledger import ZEMU_BUTTON_PORT, ZEMU_HOST 30 | from .utils import format_doc_string, get_sync_info, interact, write_ini 31 | 32 | COMMON_PROG_OPTIONS = { 33 | # redirect to supervisord's stdout, easier to collect all logs 34 | "autostart": "true", 35 | "autorestart": "true", 36 | "redirect_stderr": "true", 37 | "startsecs": "3", 38 | } 39 | 40 | 41 | def home_dir(data_dir, i): 42 | return data_dir / f"node{i}" 43 | 44 | 45 | class ClusterCLI: 46 | "the apis to interact with wallet and blockchain prepared with Cluster" 47 | 48 | def __init__( 49 | self, 50 | data, 51 | chain_id="chainmaind", 52 | cmd=None, 53 | zemu_address=ZEMU_HOST, 54 | zemu_button_port=ZEMU_BUTTON_PORT, 55 | ): 56 | self.data_root = data 57 | self.zemu_address = zemu_address 58 | self.zemu_button_port = zemu_button_port 59 | self.chain_id = chain_id 60 | self.data_dir = data / self.chain_id 61 | self.config = json.load((self.data_dir / "config.json").open()) 62 | self.cmd = cmd or self.config.get("cmd") or CHAIN 63 | 64 | self._supervisorctl = None 65 | self.output = None 66 | self.error = None 67 | 68 | def cosmos_cli(self, i=0): 69 | return CosmosCLI( 70 | self.home(i), 71 | self.node_rpc(i), 72 | chain_id=self.chain_id, 73 | cmd=self.cmd, 74 | zemu_address=self.zemu_address, 75 | zemu_button_port=self.zemu_button_port, 76 | ) 77 | 78 | @property 79 | def supervisor(self): 80 | "http://supervisord.org/api.html" 81 | # copy from: 82 | # https://github.com/Supervisor/supervisor/blob/76df237032f7d9fbe80a0adce3829c8b916d5b58/supervisor/options.py#L1718 83 | if self._supervisorctl is None: 84 | self._supervisorctl = xmlrpclib.ServerProxy( 85 | # dumbass ServerProxy won't allow us to pass in a non-HTTP url, 86 | # so we fake the url we pass into it and 87 | # always use the 
    def reload_supervisor(self):
        """Run `supervisorctl update` so supervisord re-reads its config file
        and applies added/removed program sections."""
        subprocess.run(
            [
                sys.executable,
                "-msupervisor.supervisorctl",
                "-c",
                self.data_root / SUPERVISOR_CONFIG_FILE,
                "update",
            ],
            check=True,
        )

    def nodes_len(self):
        "find how many 'node{i}' sub-directories"
        # counts only exact `node<digits>` entries, ignoring other files
        return len(
            [p for p in self.data_dir.iterdir() if re.match(r"^node\d+$", p.name)]
        )

    def copy_validator_key(self, from_node=1, to_node=2):
        "Copy the validator key file (priv_validator_key.json) from from_node to to_node"
        from_key_file = "{}/node{}/config/priv_validator_key.json".format(
            self.data_dir, from_node
        )
        to_key_file = "{}/node{}/config/priv_validator_key.json".format(
            self.data_dir, to_node
        )
        with open(from_key_file, "r") as f:
            key = f.read()
        with open(to_key_file, "w") as f:
            f.write(key)

    def update_genesis(self, i, genesis_data):
        """Overwrite the i-th node's genesis.json with `genesis_data` (a dict)."""
        home = self.home(i)
        genesis_file = home / "config/genesis.json"
        with open(genesis_file, "w") as f:
            f.write(json.dumps(genesis_data, indent=4))

    def stop_node(self, i=0):
        """Stop the i-th node's chain process via supervisorctl."""
        subprocess.run(
            [
                sys.executable,
                "-msupervisor.supervisorctl",
                "-c",
                self.data_root / SUPERVISOR_CONFIG_FILE,
                "stop",
                "{}-node{}".format(self.chain_id, i),
            ]
        )

    def stop_relayer(self):
        """Stop the relayer demo process via supervisorctl."""
        subprocess.run(
            [
                sys.executable,
                "-msupervisor.supervisorctl",
                "-c",
                self.data_root / SUPERVISOR_CONFIG_FILE,
                "stop",
                "program:relayer-demo",
            ]
        )
self.data_root / SUPERVISOR_CONFIG_FILE, 164 | "restart", 165 | "program:relayer-demo", 166 | ] 167 | ) 168 | 169 | def start_node(self, i): 170 | subprocess.run( 171 | [ 172 | sys.executable, 173 | "-msupervisor.supervisorctl", 174 | "-c", 175 | self.data_root / SUPERVISOR_CONFIG_FILE, 176 | "start", 177 | "{}-node{}".format(self.chain_id, i), 178 | ] 179 | ) 180 | 181 | def create_node( 182 | self, 183 | base_port=None, 184 | moniker=None, 185 | hostname="127.0.0.1", 186 | statesync=False, 187 | mnemonic=None, 188 | broadcastmode="sync", 189 | coin_type=None, 190 | ): 191 | """create new node in the data directory, 192 | process information is written into supervisor config 193 | start it manually with supervisor commands 194 | 195 | :return: new node index and config 196 | """ 197 | i = self.nodes_len() 198 | 199 | # default configs 200 | if base_port is None: 201 | # use the node0's base_port + i * 10 as default base port for new ndoe 202 | base_port = self.config["validators"][0]["base_port"] + i * 10 203 | if moniker is None: 204 | moniker = f"node{i}" 205 | 206 | # add config 207 | assert len(self.config["validators"]) == i 208 | self.config["validators"].append( 209 | { 210 | "base_port": base_port, 211 | "hostname": hostname, 212 | "moniker": moniker, 213 | } 214 | ) 215 | (self.data_dir / "config.json").write_text(json.dumps(self.config)) 216 | 217 | # init home directory 218 | self.init(i) 219 | home = self.home(i) 220 | (home / "config/genesis.json").unlink() 221 | (home / "config/genesis.json").symlink_to("../../genesis.json") 222 | (home / "config/client.toml").write_text( 223 | tomlkit.dumps( 224 | { 225 | "chain-id": self.chain_id, 226 | "keyring-backend": "test", 227 | "output": "json", 228 | "node": self.node_rpc(i), 229 | "broadcast-mode": broadcastmode, 230 | } 231 | ) 232 | ) 233 | 234 | # use p2p peers from node0's config 235 | node0 = tomlkit.parse((self.data_dir / "node0/config/config.toml").read_text()) 236 | 237 | def custom_edit_tm(doc): 
238 | if statesync: 239 | info = get_sync_info(self.status()) 240 | doc["statesync"].update( 241 | { 242 | "enable": True, 243 | "rpc_servers": ",".join(self.node_rpc(i) for i in range(2)), 244 | "trust_height": int(info["latest_block_height"]), 245 | "trust_hash": info["latest_block_hash"], 246 | "temp_dir": str(self.data_dir), 247 | "discovery_time": "5s", 248 | } 249 | ) 250 | 251 | edit_tm_cfg( 252 | home / "config/config.toml", 253 | base_port, 254 | node0["p2p"]["persistent_peers"], 255 | {}, 256 | custom_edit=custom_edit_tm, 257 | ) 258 | edit_app_cfg(home / "config/app.toml", base_port, {}) 259 | 260 | # create validator account 261 | self.create_account("validator", i, mnemonic, coin_type=coin_type) 262 | 263 | # add process config into supervisor 264 | path = self.data_dir / SUPERVISOR_CONFIG_FILE 265 | ini = configparser.RawConfigParser() 266 | ini.read(path) 267 | chain_id = self.chain_id 268 | prgname = f"{chain_id}-node{i}" 269 | section = f"program:{prgname}" 270 | ini.add_section(section) 271 | directory = f"%(here)s/node{i}" 272 | ini[section].update( 273 | dict( 274 | COMMON_PROG_OPTIONS, 275 | directory=directory, 276 | command=f"{self.cmd} start --home .", 277 | autostart="false", 278 | stdout_logfile=f"{directory}.log", 279 | ) 280 | ) 281 | with path.open("w") as fp: 282 | ini.write(fp) 283 | self.reload_supervisor() 284 | return i 285 | 286 | def home(self, i): 287 | "home directory of i-th node" 288 | return home_dir(self.data_dir, i) 289 | 290 | def base_port(self, i): 291 | return self.config["validators"][i]["base_port"] 292 | 293 | def node_rpc(self, i): 294 | "rpc url of i-th node" 295 | return "tcp://127.0.0.1:%d" % ports.rpc_port(self.base_port(i)) 296 | 297 | # for query 298 | def ipport_grpc(self, i): 299 | "grpc url of i-th node" 300 | return "127.0.0.1:%d" % ports.grpc_port(self.base_port(i)) 301 | 302 | # tx broadcast only 303 | def ipport_grpc_tx(self, i): 304 | "grpc url of i-th node" 305 | return "127.0.0.1:%d" % 
ports.grpc_port_tx_only(self.base_port(i)) 306 | 307 | def node_id(self, i): 308 | "get i-th node's tendermint node id" 309 | return self.cosmos_cli(i).node_id() 310 | 311 | def delete_account(self, name, i=0): 312 | "delete account in i-th node's keyring" 313 | return self.cosmos_cli(i).delete_account(name) 314 | 315 | def create_account(self, name, i=0, mnemonic=None, **kwargs): 316 | "create new keypair in i-th node's keyring" 317 | return self.cosmos_cli(i).create_account(name, mnemonic, **kwargs) 318 | 319 | def create_account_ledger(self, name, i=0, **kwargs): 320 | "create new ledger keypair" 321 | return self.cosmos_cli(i).create_account_ledger(name, **kwargs) 322 | 323 | def init(self, i): 324 | "the i-th node's config is already added" 325 | return self.cosmos_cli(i).init(self.config["validators"][i]["moniker"]) 326 | 327 | def export(self, i=0): 328 | return self.cosmos_cli(i).export() 329 | 330 | def validate_genesis(self, *args, i=0): 331 | return self.cosmos_cli(i).validate_genesis(*args) 332 | 333 | def add_genesis_account(self, addr, coins, i=0, **kwargs): 334 | return self.cosmos_cli(i).add_genesis_account(addr, coins, **kwargs) 335 | 336 | def gentx( 337 | self, name, coins, *args, i=0, min_self_delegation=1, pubkey=None, **kwargs 338 | ): 339 | return self.cosmos_cli(i).gentx( 340 | name, 341 | coins, 342 | *args, 343 | min_self_delegation=min_self_delegation, 344 | pubkey=pubkey, 345 | **kwargs, 346 | ) 347 | 348 | def collect_gentxs(self, gentx_dir, i=0): 349 | return self.cosmos_cli(i).collect_gentxs(gentx_dir) 350 | 351 | def status(self, i=0): 352 | return self.cosmos_cli(i).status() 353 | 354 | def block_height(self, i=0): 355 | return self.cosmos_cli(i).block_height() 356 | 357 | def block_time(self, i=0): 358 | return self.cosmos_cli(i).block_time() 359 | 360 | def balances(self, addr, height=0, i=0): 361 | return self.cosmos_cli(i).balances(addr, height) 362 | 363 | def balance(self, addr, denom=None, height=0, i=0): 364 | return 
self.cosmos_cli(i).balance(addr, denom, height) 365 | 366 | def query_all_txs(self, addr, i=0): 367 | return self.cosmos_cli(i).query_all_txs(addr) 368 | 369 | def distribution_commission(self, addr, i=0): 370 | return self.cosmos_cli(i).distribution_commission(addr) 371 | 372 | def distribution_community(self, i=0, **kwargs): 373 | return self.cosmos_cli(i).distribution_community(**kwargs) 374 | 375 | def distribution_reward(self, delegator_addr, i=0): 376 | return self.cosmos_cli(i).distribution_reward(delegator_addr) 377 | 378 | def address(self, name, i=0, bech="acc"): 379 | return self.cosmos_cli(i).address(name, bech) 380 | 381 | @format_doc_string( 382 | options=",".join(v.value for v in ModuleAccount.__members__.values()) 383 | ) 384 | def module_address(self, name): 385 | """ 386 | get address of module accounts 387 | 388 | :param name: name of module account, values: {options} 389 | """ 390 | return module_address(name) 391 | 392 | def account(self, addr, i=0): 393 | return self.cosmos_cli(i).account(addr) 394 | 395 | def supply(self, supply_type, i=0): 396 | return self.cosmos_cli(i).supply(supply_type) 397 | 398 | def validator(self, addr, i=0): 399 | return self.cosmos_cli(i).validator(addr) 400 | 401 | def validators(self, i=0): 402 | return self.cosmos_cli(i).validators() 403 | 404 | def staking_params(self, i=0): 405 | return self.cosmos_cli(i).staking_params() 406 | 407 | def staking_pool(self, bonded=True, i=0): 408 | return self.cosmos_cli(i).staking_pool(bonded) 409 | 410 | def transfer_offline(self, from_, to, coins, sequence, i=0, fees=None): 411 | return self.cosmos_cli(i).transfer_offline(from_, to, coins, sequence, fees) 412 | 413 | def transfer( 414 | self, 415 | from_, 416 | to, 417 | coins, 418 | i=0, 419 | generate_only=False, 420 | event_query_tx=True, 421 | **kwargs, 422 | ): 423 | return self.cosmos_cli(i).transfer( 424 | from_, 425 | to, 426 | coins, 427 | generate_only, 428 | event_query_tx=event_query_tx, 429 | **kwargs, 430 | ) 
431 | 432 | def transfer_from_ledger( 433 | self, 434 | from_, 435 | to, 436 | coins, 437 | i=0, 438 | generate_only=False, 439 | fees=None, 440 | event_query_tx=True, 441 | **kwargs, 442 | ): 443 | return self.cosmos_cli(i).transfer_from_ledger( 444 | from_, 445 | to, 446 | coins, 447 | generate_only, 448 | fees, 449 | event_query_tx=event_query_tx, 450 | **kwargs, 451 | ) 452 | 453 | def get_delegated_amount(self, which_addr, i=0): 454 | return self.cosmos_cli(i).get_delegated_amount(which_addr) 455 | 456 | def delegate_amount( 457 | self, 458 | to_addr, 459 | amount, 460 | from_addr, 461 | i=0, 462 | gas_price=None, 463 | event_query_tx=True, 464 | **kwargs, 465 | ): 466 | return self.cosmos_cli(i).delegate_amount( 467 | to_addr, 468 | amount, 469 | from_addr, 470 | gas_price, 471 | event_query_tx=event_query_tx, 472 | **kwargs, 473 | ) 474 | 475 | # to_addr: croclcl1... , from_addr: cro1... 476 | def unbond_amount( 477 | self, to_addr, amount, from_addr, i=0, event_query_tx=True, **kwargs 478 | ): 479 | return self.cosmos_cli(i).unbond_amount( 480 | to_addr, 481 | amount, 482 | from_addr, 483 | event_query_tx=event_query_tx, 484 | **kwargs, 485 | ) 486 | 487 | # to_validator_addr: crocncl1... , from_from_validator_addraddr: crocl1... 
488 | def redelegate_amount( 489 | self, 490 | to_validator_addr, 491 | from_validator_addr, 492 | amount, 493 | from_addr, 494 | i=0, 495 | event_query_tx=True, 496 | **kwargs, 497 | ): 498 | return self.cosmos_cli(i).redelegate_amount( 499 | to_validator_addr, 500 | from_validator_addr, 501 | amount, 502 | from_addr, 503 | event_query_tx=event_query_tx, 504 | **kwargs, 505 | ) 506 | 507 | def withdraw_all_rewards(self, from_delegator, i=0, event_query_tx=True, **kwargs): 508 | return self.cosmos_cli(i).withdraw_all_rewards( 509 | from_delegator, 510 | event_query_tx=event_query_tx, 511 | **kwargs, 512 | ) 513 | 514 | def make_multisig(self, name, signer1, signer2, i=0, **kwargs): 515 | return self.cosmos_cli(i).make_multisig(name, signer1, signer2, **kwargs) 516 | 517 | def sign_multisig_tx(self, tx_file, multi_addr, signer_name, i=0, **kwargs): 518 | return self.cosmos_cli(i).sign_multisig_tx( 519 | tx_file, multi_addr, signer_name, **kwargs 520 | ) 521 | 522 | def sign_batch_multisig_tx( 523 | self, tx_file, multi_addr, signer_name, account_num, sequence, i=0, **kwargs 524 | ): 525 | return self.cosmos_cli(i).sign_batch_multisig_tx( 526 | tx_file, multi_addr, signer_name, account_num, sequence, **kwargs 527 | ) 528 | 529 | def encode_signed_tx(self, signed_tx, i=0, **kwargs): 530 | return self.cosmos_cli(i).encode_signed_tx(signed_tx, **kwargs) 531 | 532 | def sign_single_tx(self, tx_file, signer_name, i=0, **kwargs): 533 | return self.cosmos_cli(i).sign_single_tx(tx_file, signer_name, **kwargs) 534 | 535 | def combine_multisig_tx( 536 | self, tx_file, multi_name, signer1_file, signer2_file, i=0, **kwargs 537 | ): 538 | return self.cosmos_cli(i).combine_multisig_tx( 539 | tx_file, 540 | multi_name, 541 | signer1_file, 542 | signer2_file, 543 | **kwargs, 544 | ) 545 | 546 | def combine_batch_multisig_tx( 547 | self, tx_file, multi_name, signer1_file, signer2_file, i=0, **kwargs 548 | ): 549 | return self.cosmos_cli(i).combine_batch_multisig_tx( 550 | tx_file, 
551 | multi_name, 552 | signer1_file, 553 | signer2_file, 554 | **kwargs, 555 | ) 556 | 557 | def broadcast_tx(self, tx_file, i=0, **kwargs): 558 | return self.cosmos_cli(i).broadcast_tx(tx_file, **kwargs) 559 | 560 | def unjail(self, addr, i=0, event_query_tx=True, **kwargs): 561 | return self.cosmos_cli(i).unjail(addr, event_query_tx=event_query_tx, **kwargs) 562 | 563 | def create_validator( 564 | self, 565 | amount, 566 | options, 567 | i, 568 | **kwargs, 569 | ): 570 | """MsgCreateValidator 571 | create the node with create_node before call this""" 572 | options.setdefault("moniker", self.config["validators"][i]["moniker"]) 573 | return self.cosmos_cli(i).create_validator(amount, options, **kwargs) 574 | 575 | def create_validator_legacy( 576 | self, 577 | amount, 578 | i, 579 | **kwargs, 580 | ): 581 | """MsgCreateValidator 582 | create the node with create_node before call this""" 583 | kwargs.setdefault("moniker", self.config["validators"][i]["moniker"]) 584 | return self.cosmos_cli(i).create_validator_legacy(amount, **kwargs) 585 | 586 | def edit_validator( 587 | self, 588 | i, 589 | commission_rate=None, 590 | moniker=None, 591 | identity=None, 592 | website=None, 593 | security_contact=None, 594 | details=None, 595 | event_query_tx=True, 596 | **kwargs, 597 | ): 598 | """MsgEditValidator""" 599 | return self.cosmos_cli(i).edit_validator( 600 | commission_rate, 601 | moniker, 602 | identity, 603 | website, 604 | security_contact, 605 | details, 606 | event_query_tx=event_query_tx, 607 | **kwargs, 608 | ) 609 | 610 | def gov_propose(self, proposer, kind, proposal, i=0, **kwargs): 611 | return self.cosmos_cli(i).gov_propose(proposer, kind, proposal, **kwargs) 612 | 613 | def gov_vote(self, voter, proposal_id, option, i=0, event_query_tx=True, **kwargs): 614 | return self.cosmos_cli(i).gov_vote( 615 | voter, 616 | proposal_id, 617 | option, 618 | event_query_tx=event_query_tx, 619 | **kwargs, 620 | ) 621 | 622 | def gov_deposit( 623 | self, depositor, 
proposal_id, amount, i=0, event_query_tx=True, **kwargs 624 | ): 625 | return self.cosmos_cli(i).gov_deposit( 626 | depositor, 627 | proposal_id, 628 | amount, 629 | event_query_tx=event_query_tx, 630 | **kwargs, 631 | ) 632 | 633 | def query_proposals(self, depositor=None, limit=None, status=None, voter=None, i=0): 634 | return self.cosmos_cli(i).query_proposals(depositor, limit, status, voter) 635 | 636 | def query_proposal(self, proposal_id, i=0): 637 | res = self.cosmos_cli(i).query_proposal(proposal_id) 638 | return res.get("proposal") or res 639 | 640 | def query_tally(self, proposal_id, i=0): 641 | return self.cosmos_cli(i).query_tally(proposal_id) 642 | 643 | def ibc_transfer( 644 | self, 645 | from_, 646 | to, 647 | amount, 648 | channel, # src channel 649 | target_version, # chain version number of target chain 650 | i=0, 651 | event_query_tx=True, 652 | **kwargs, 653 | ): 654 | return self.cosmos_cli(i).ibc_transfer( 655 | from_, 656 | to, 657 | amount, 658 | channel, 659 | target_version, 660 | event_query_tx=event_query_tx, 661 | **kwargs, 662 | ) 663 | 664 | def create_nft( 665 | self, 666 | from_addr, 667 | denomid="mydenomid", 668 | denomname="mydenomname", 669 | schema='{"title":"Asset Metadata","type":"object",' 670 | '"properties":{"name":{"type":"string",' 671 | '"description":"testidentity"},"description":' 672 | '{"type":"string","description":"testdescription"},' 673 | '"image":{"type":"string","description":"testdescription"}}}', 674 | fees=None, 675 | i=0, 676 | event_query_tx=True, 677 | **kwargs, 678 | ): 679 | return self.cosmos_cli(i).create_nft( 680 | from_addr, 681 | denomid, 682 | denomname, 683 | schema, 684 | fees, 685 | event_query_tx=event_query_tx, 686 | **kwargs, 687 | ) 688 | 689 | def query_nft(self, denomid="mydenomid", i=0): 690 | return self.cosmos_cli(i).query_nft(denomid) 691 | 692 | def query_denom_by_name(self, denomname="mydenomname", i=0): 693 | return self.cosmos_cli(i).query_denom_by_name(denomname) 694 | 695 | def 
create_nft_token( 696 | self, 697 | from_addr, 698 | to_addr, 699 | denomid="mydenomid", 700 | tokenid="mytokenid", 701 | uri="myuri", 702 | fees=None, 703 | i=0, 704 | event_query_tx=True, 705 | **kwargs, 706 | ): 707 | return self.cosmos_cli(i).create_nft_token( 708 | from_addr, 709 | to_addr, 710 | denomid, 711 | tokenid, 712 | uri, 713 | fees, 714 | event_query_tx=event_query_tx, 715 | **kwargs, 716 | ) 717 | 718 | def query_nft_token(self, denomid="mydenomid", tokenid="mytokenid", i=0): 719 | return self.cosmos_cli(i).query_nft_token(denomid, tokenid) 720 | 721 | def burn_nft_token( 722 | self, 723 | from_addr, 724 | denomid="mydenomid", 725 | tokenid="mytokenid", 726 | i=0, 727 | event_query_tx=True, 728 | **kwargs, 729 | ): 730 | return self.cosmos_cli(i).burn_nft_token( 731 | from_addr, 732 | denomid, 733 | tokenid, 734 | event_query_tx=event_query_tx, 735 | **kwargs, 736 | ) 737 | 738 | def edit_nft_token( 739 | self, 740 | from_addr, 741 | denomid="mydenomid", 742 | tokenid="mytokenid", 743 | newuri="newuri", 744 | newname="newname", 745 | i=0, 746 | event_query_tx=True, 747 | **kwargs, 748 | ): 749 | return self.cosmos_cli(i).edit_nft_token( 750 | from_addr, 751 | denomid, 752 | tokenid, 753 | newuri, 754 | newname, 755 | event_query_tx=event_query_tx, 756 | **kwargs, 757 | ) 758 | 759 | def transfer_nft_token( 760 | self, 761 | from_addr, 762 | to_addr, 763 | denomid="mydenomid", 764 | tokenid="mytokenid", 765 | i=0, 766 | event_query_tx=True, 767 | **kwargs, 768 | ): 769 | return self.cosmos_cli(i).transfer_nft_token( 770 | from_addr, 771 | to_addr, 772 | denomid, 773 | tokenid, 774 | event_query_tx=event_query_tx, 775 | **kwargs, 776 | ) 777 | 778 | def event_query_tx_for(self, hash, i=0): 779 | return self.cosmos_cli(i).event_query_tx_for(hash) 780 | 781 | def migrate_keystore(self, i=0): 782 | return self.cosmos_cli(i).migrate_keystore() 783 | 784 | def ibc_query_channels(self, connid, i=0, **kwargs): 785 | return 
self.cosmos_cli(i).ibc_query_channels(connid, **kwargs) 786 | 787 | def ibc_query_channel(self, port_id, channel_id, i=0, **kwargs): 788 | return self.cosmos_cli(i).ibc_query_channel(port_id, channel_id, **kwargs) 789 | 790 | def ica_register_account(self, connid, i=0, event_query_tx=True, **kwargs): 791 | return self.cosmos_cli(i).ica_register_account( 792 | connid, 793 | event_query_tx=event_query_tx, 794 | **kwargs, 795 | ) 796 | 797 | def ica_query_account(self, connid, owner, i=0, **kwargs): 798 | return self.cosmos_cli(i).ica_query_account(connid, owner, **kwargs) 799 | 800 | def ica_submit_tx( 801 | self, 802 | connid, 803 | tx, 804 | timeout_duration="1h", 805 | i=0, 806 | event_query_tx=True, 807 | **kwargs, 808 | ): 809 | return self.cosmos_cli(i).ica_submit_tx( 810 | connid, 811 | tx, 812 | timeout_duration, 813 | event_query_tx=event_query_tx, 814 | **kwargs, 815 | ) 816 | 817 | def ica_generate_packet_data(self, tx, memo=None, encoding="proto3", i=0, **kwargs): 818 | return self.cosmos_cli(i).ica_generate_packet_data(memo, encoding, **kwargs) 819 | 820 | def ibc_upgrade_channels(self, version, from_addr, i=0, **kwargs): 821 | return self.cosmos_cli(i).ibc_upgrade_channels(version, from_addr, **kwargs) 822 | 823 | def register_counterparty_payee( 824 | self, port_id, channel_id, relayer, counterparty_payee, i=0, **kwargs 825 | ): 826 | return self.cosmos_cli(i).register_counterparty_payee( 827 | port_id, channel_id, relayer, counterparty_payee, **kwargs 828 | ) 829 | 830 | def pay_packet_fee(self, port_id, channel_id, packet_seq, i=0, **kwargs): 831 | return self.cosmos_cli(i).pay_packet_fee( 832 | port_id, channel_id, packet_seq, **kwargs 833 | ) 834 | 835 | def ibc_denom_trace(self, path, node, i=0): 836 | return self.cosmos_cli(i).ibc_denom_trace(path, node) 837 | 838 | def ibc_denom(self, path, node, i=0): 839 | return self.cosmos_cli(i).ibc_denom(path, node) 840 | 841 | 842 | def start_cluster(data_dir): 843 | cmd = [ 844 | sys.executable, 845 | 
class TailLogsThread(threading.Thread):
    """Background thread that tails a set of log files and echoes new lines
    to stdout, prefixed with the path relative to ``base_dir``."""

    def __init__(self, base_dir, pats: List[str]):
        self.base_dir = base_dir
        # multitail2 follows all files matching the glob patterns at once
        self.tailer = multitail2.MultiTail([str(base_dir / pat) for pat in pats])
        self._stop_event = threading.Event()
        super().__init__()

    def run(self):
        while not self.stopped:
            for (path, _), s in self.tailer.poll():
                print(Path(path).relative_to(self.base_dir), s)

            # TODO Replace this with FAM/inotify for watching filesystem events.
            time.sleep(0.5)

    def stop(self):
        # cooperative shutdown; run() checks the event each poll cycle
        self._stop_event.set()

    @property
    def stopped(self):
        return self._stop_event.is_set()


def start_tail_logs_thread(data_dir):
    """Start (and return) a thread tailing node and relayer logs under data_dir."""
    t = TailLogsThread(data_dir, ["*/node*.log", "relayer-*.log"])
    t.start()
    return t


def process_config(config, base_port):
    """
    fill default values in config

    Each validator gets a moniker ``node{i}``, a base_port offset by 10 per
    node, and a localhost hostname, unless explicitly configured.
    """
    for i, val in enumerate(config["validators"]):
        if "moniker" not in val:
            val["moniker"] = f"node{i}"
        if "base_port" not in val:
            val["base_port"] = base_port + i * 10
        if "hostname" not in val:
            val["hostname"] = "127.0.0.1"


def init_devnet(
    data_dir,
    config,
    base_port,
    image=IMAGE,
    cmd=None,
    gen_compose_file=False,
):
    """
    init data directory

    Initializes every validator home, builds and patches the genesis file,
    creates validator/extra accounts, collects gentxs, writes per-node
    tendermint/app configs and the supervisord config.

    :param data_dir: directory for this chain's data (one subdir per node)
    :param config: single-chain config dict (mutated in place with defaults)
    :param base_port: first base port, see process_config
    :param image: docker image name, only used when gen_compose_file is set
    :param cmd: chain binary; falls back to config["cmd"] then CHAIN
    :param gen_compose_file: also emit a docker-compose.yml
    """

    def create_account(cli, account, use_ledger=False):
        # Create or import one account and register it in genesis,
        # optionally as a (delayed) vesting account.
        coin_type = account.get("coin-type")
        if use_ledger:
            acct = cli.create_account_ledger(account["name"], coin_type=coin_type)
        elif account.get("address"):
            # if address field exists, will use account with that address directly
            acct = {"name": account.get("name"), "address": account.get("address")}
        else:
            mnemonic = account.get("mnemonic")
            acct = cli.create_account(
                account["name"], mnemonic=mnemonic, coin_type=coin_type
            )
            if mnemonic:
                acct["mnemonic"] = mnemonic
        vesting = account.get("vesting")
        if not vesting:
            cli.add_genesis_account(acct["address"], account["coins"])
        else:
            # vesting end = genesis time + configured duration
            # (relies on `genesis` assigned in the enclosing scope before
            # create_account is first called)
            genesis_time = isoparse(genesis["genesis_time"])
            end_time = genesis_time + datetime.timedelta(
                seconds=durations.Duration(vesting).to_seconds()
            )
            vend = int(end_time.timestamp())
            # allow vest only some of coins allocated, where account["coins"] must larger than account["vesting_coins"] if any vesting_coins specified in config.yaml. # noqa 501
            vesting_amount = account.get("vesting_coins", account["coins"])
            cli.add_genesis_account(
                acct["address"],
                account["coins"],
                vesting_amount=vesting_amount,
                vesting_end_time=vend,
            )
        return acct

    process_config(config, base_port)

    (data_dir / "config.json").write_text(json.dumps(config))

    cmd = cmd or config.get("cmd") or CHAIN

    # init home directories
    for i, val in enumerate(config["validators"]):
        ChainCommand(cmd)(
            "init",
            val["moniker"],
            config.get("cmd-flags"),
            chain_id=config["chain_id"],
            home=home_dir(data_dir, i),
        )
        if "consensus_key" in val:
            # restore consensus private key
            with (home_dir(data_dir, i) / "config/priv_validator_key.json").open(
                "w"
            ) as fp:
                json.dump(
                    {
                        # tendermint validator address: first 20 bytes of
                        # sha256(pubkey), hex, upper-case
                        "address": hashlib.sha256(
                            base64.b64decode(val["consensus_key"]["pub"])
                        )
                        .hexdigest()[:40]
                        .upper(),
                        "pub_key": {
                            "type": "tendermint/PubKeyEd25519",
                            "value": val["consensus_key"]["pub"],
                        },
                        "priv_key": {
                            "type": "tendermint/PrivKeyEd25519",
                            "value": val["consensus_key"]["priv"],
                        },
                    },
                    fp,
                )
    # seed genesis: either a user-supplied template (with %(here)s expansion)
    # or node0's freshly generated one
    if "genesis_file" in config:
        with open(
            config["genesis_file"] % {"here": Path(config["path"]).parent}, "rb"
        ) as f:
            genesis_bytes = f.read()
    else:
        genesis_bytes = (data_dir / "node0/config/genesis.json").read_bytes()
    (data_dir / "genesis.json").write_bytes(genesis_bytes)
    (data_dir / "gentx").mkdir()
    # share one genesis.json and one gentx dir across all nodes via symlinks
    for i, val in enumerate(config["validators"]):
        src = data_dir / f"node{i}/config/genesis.json"
        src.unlink()
        src.symlink_to("../../genesis.json")
        (data_dir / f"node{i}/config/gentx").symlink_to("../../gentx")

        # write client config
        rpc_port = ports.rpc_port(val["base_port"])
        (data_dir / f"node{i}/config/client.toml").write_text(
            tomlkit.dumps(
                jsonmerge.merge(
                    {
                        "chain-id": config["chain_id"],
                        "keyring-backend": "test",
                        "output": "json",
                        "node": f"tcp://{val['hostname']}:{rpc_port}",
                        "broadcast-mode": "sync",
                    },
                    val.get("client_config", {}),
                )
            )
        )

    # now we can create ClusterCLI
    cli = ClusterCLI(data_dir.parent, chain_id=config["chain_id"], cmd=cmd)

    # patch the genesis file
    genesis = jsonmerge.merge(
        json.loads((data_dir / "genesis.json").read_text()),
        config.get("genesis", {}),
    )
    (data_dir / "genesis.json").write_text(json.dumps(genesis))

    # create accounts
    accounts = []
    for i, node in enumerate(config["validators"]):
        mnemonic = node.get("mnemonic")
        coin_type = node.get("coin-type")
        account = cli.create_account(
            "validator", i, mnemonic=mnemonic, coin_type=coin_type
        )
        if mnemonic:
            account["mnemonic"] = mnemonic
        accounts.append(account)
        if "coins" in node:
            cli.add_genesis_account(account["address"], node["coins"], i)
        if "staked" in node:
            optional_fields = [
                "commission_max_change_rate",
                "commission_max_rate",
                "commission_rate",
                "details",
                "security_contact",
                "gas_prices",
            ]
            extra_kwargs = {
                name: str(node[name]) for name in optional_fields if name in node
            }
            cli.gentx(
                "validator",
                node["staked"],
                config.get("cmd-flags"),
                i=i,
                min_self_delegation=node.get("min_self_delegation", 1),
                pubkey=node.get("pubkey"),
                **extra_kwargs,
            )

    # create accounts
    for account in config.get("accounts", []):
        account = create_account(cli, account)
        accounts.append(account)

    account_hw = config.get("hw_account")
    if account_hw:
        account = create_account(cli, account_hw, True)
        accounts.append(account)

    # output accounts
    (data_dir / "accounts.json").write_text(json.dumps(accounts))

    # collect-gentxs if directory not empty
    if next((data_dir / "gentx").iterdir(), None) is not None:
        cli.collect_gentxs(data_dir / "gentx", 0)

    # realise the symbolic links, so the node directories can be used independently
    genesis_bytes = (data_dir / "genesis.json").read_bytes()
    for i in range(len(config["validators"])):
        (data_dir / f"node{i}/config/gentx").unlink()
        tmp = data_dir / f"node{i}/config/genesis.json"
        tmp.unlink()
        tmp.write_bytes(genesis_bytes)

    # write tendermint config
    peers = config.get("peers") or ",".join(
        [
            "tcp://%s@%s:%d"
            % (cli.node_id(i), val["hostname"], ports.p2p_port(val["base_port"]))
            for i, val in enumerate(config["validators"])
        ]
    )
    for i, val in enumerate(config["validators"]):
        # a node should not list itself in persistent peers
        self_peer = "tcp://%s@%s:%d" % (
            cli.node_id(i),
            val["hostname"],
            ports.p2p_port(val["base_port"]),
        )
        clean_peers = try_remove_peer(peers, self_peer)
        edit_tm_cfg(
            data_dir / f"node{i}/config/config.toml",
            val["base_port"],
            clean_peers,
            jsonmerge.merge(config.get("config", {}), val.get("config", {})),
        )
        edit_app_cfg(
            data_dir / f"node{i}/config/app.toml",
            val["base_port"],
            jsonmerge.merge(config.get("app-config", {}), val.get("app-config", {})),
        )

    # if the first validator is using statesync mode, then don't validate genesis,
    # because the new binary may be a breaking one.
    doc = tomlkit.parse((data_dir / "node0/config/config.toml").read_text())
    if not doc["statesync"]["enable"]:
        # NOTE(review): other call sites pass config.get("cmd-flags") (default
        # None); the {} default here looks inconsistent -- confirm intended.
        cli.validate_genesis(config.get("cmd-flags", {}))

    # write supervisord config file
    start_flags = " ".join(
        [config.get("start-flags", ""), config.get("cmd-flags", "")]
    ).strip()
    with (data_dir / SUPERVISOR_CONFIG_FILE).open("w") as fp:
        write_ini(
            fp,
            supervisord_ini(
                cmd,
                config["validators"],
                config["chain_id"],
                start_flags=start_flags,
            ),
        )

    if gen_compose_file:
        # NOTE(review): the file handle passed to yaml.dump is never closed
        # explicitly; consider a `with` block.
        yaml.dump(
            docker_compose_yml(cmd, config["validators"], data_dir, image),
            (data_dir / "docker-compose.yml").open("w"),
        )


def get_relayer_chain_config(relayer_chains_config, chain_id):
    """Return the per-chain relayer override for chain_id, or {} if absent."""
    return next((i for i in relayer_chains_config if i["id"] == chain_id), {})
def relayer_chain_config_hermes(data_dir, chain, relayer_chains_config):
    """Build the hermes relayer config entry for one chain.

    Reads the chain's config.json to discover node0's ports, then merges
    any per-chain overrides from the cluster config on top of the defaults.
    """
    chain_id = chain["chain_id"]
    cfg = json.load((data_dir / chain_id / "config.json").open())
    base_port = cfg["validators"][0]["base_port"]
    rpc_port = ports.rpc_port(base_port)
    grpc_port = ports.grpc_port(base_port)
    config = {
        "key_name": "relayer",
        "id": chain_id,
        "rpc_addr": f"http://127.0.0.1:{rpc_port}",
        "grpc_addr": f"http://127.0.0.1:{grpc_port}",
        "rpc_timeout": "10s",
        "account_prefix": chain.get("account-prefix", "cro"),
        "store_prefix": "ibc",
        "max_gas": 300000,
        "gas_price": {"price": 0, "denom": "basecro"},
        "trusting_period": "336h",
    }
    # hermes < 1.6.0 uses `websocket_addr`; newer versions use `event_source`
    raw = subprocess.check_output(["hermes", "--version"]).decode("utf-8")
    version = raw.strip().split("+")[0].removeprefix("hermes ")
    is_legacy = tuple(map(int, version.split("."))) < (1, 6, 0)
    if is_legacy:
        config["websocket_addr"] = f"ws://localhost:{rpc_port}/websocket"
    else:
        config["event_source"] = {
            "mode": "push",
            "url": f"ws://127.0.0.1:{rpc_port}/websocket",
            "batch_delay": "200ms",
        }
    return jsonmerge.merge(
        config,
        get_relayer_chain_config(relayer_chains_config, chain_id),
    )


def relayer_chain_config_rly(data_dir, chain, relayer_chains_config):
    """Build the go-relayer (rly) config entry for one chain.

    Unlike the hermes variant, overrides are read field-by-field rather than
    merged wholesale, because rly uses different key names.
    """
    chain_id = chain["chain_id"]
    folder = data_dir / chain_id
    cfg = json.load((folder / "config.json").open())
    base_port = cfg["validators"][0]["base_port"]
    rpc_port = ports.rpc_port(base_port)
    json_rpc_addr = ports.evmrpc_port(base_port)
    chain_config = get_relayer_chain_config(relayer_chains_config, chain_id)
    address_type = chain_config.get("address_type", {})
    derivation = address_type.get("derivation")
    gas_price = chain_config.get("gas_price", {})
    price = gas_price.get("price", 0)
    denom = gas_price.get("denom", "basecro")
    prices = f"{price}{denom}"
    precompiled = chain_config.get("precompiled_contract_address", "")
    return {
        "type": "cosmos",
        "value": {
            "key-directory": f"{folder}/node0",
            "key": "relayer",
            "chain-id": chain_id,
            "rpc-addr": f"http://127.0.0.1:{rpc_port}",
            "json-rpc-addr": f"http://127.0.0.1:{json_rpc_addr}",
            "account-prefix": chain.get("account-prefix", "cro"),
            "keyring-backend": chain_config.get("keyring-backend", "test"),
            "gas-adjustment": chain_config.get("gas_multiplier", 1.2),
            "feegrants": chain_config.get("feegrants", None),
            "gas-prices": prices,
            "extension-options": chain_config.get("extension_options", []),
            "min-gas-amount": 0,
            "max-gas-amount": chain_config.get("max_gas", 300000),
            "debug": chain_config.get("debug", False),
            "timeout": chain_config.get("timeout", "20s"),
            "block-timeout": "",
            "output-format": "json",
            "sign-mode": "direct",
            "extra-codecs": [derivation] if derivation else [],
            "coin-type": chain.get("coin-type", 118),
            "precompiled-contract-address": precompiled,
            "signing-algorithm": "",
            "broadcast-mode": "batch",
            "min-loop-duration": "0s",
        },
    }


class Relayer(Enum):
    # supported IBC relayer implementations
    HERMES = "hermes"
    RLY = "rly"


class ChannelOrder(Enum):
    # IBC channel ordering values as the chain expects them
    ORDERED = "ORDER_ORDERED"
    UNORDERED = "ORDER_UNORDERED"


def init_cluster(
    data_dir,
    config_path,
    base_port,
    dotenv=None,
    image=IMAGE,
    cmd=None,
    gen_compose_file=False,
    relayer=Relayer.HERMES.value,
):
    """Initialize a full (possibly multi-chain) cluster under data_dir.

    Expands the yaml/jsonnet config, runs init_devnet for each chain, writes
    the top-level supervisord config, and -- when more than one chain is
    configured -- writes the relayer config and restores its key on every
    chain.

    :param config_path: path to a .yaml or .jsonnet cluster config
    :param dotenv: optional dotenv file used during config expansion
    :param relayer: Relayer.HERMES.value or Relayer.RLY.value
    """
    is_hermes = relayer == Relayer.HERMES.value
    extension = Path(config_path).suffix
    if extension == ".jsonnet":
        config = expand_jsonnet(config_path, dotenv)
    else:
        config = expand_yaml(config_path, dotenv)

    # top-level "relayer" section is not a chain
    relayer_config = config.pop("relayer", {})
    for chain_id, cfg in config.items():
        cfg["path"] = str(config_path)
        cfg["chain_id"] = chain_id

    chains = list(config.values())

    # for multiple chains, there can be multiple cmds splited by `,`
    if cmd is not None:
        cmds = cmd.split(",")
    else:
        cmds = [None] * len(chains)

    for chain, cmd in zip(chains, cmds):
        (data_dir / chain["chain_id"]).mkdir()
        init_devnet(
            data_dir / chain["chain_id"], chain, base_port, image, cmd, gen_compose_file
        )
    with (data_dir / SUPERVISOR_CONFIG_FILE).open("w") as fp:
        write_ini(
            fp,
            supervisord_ini_group(config.keys(), is_hermes),
        )
    if len(chains) > 1:
        cfg = relayer_config.pop("chains", {})
        if is_hermes:
            # write relayer config for hermes
            relayer_config_hermes = data_dir / "relayer.toml"
            relayer_config_hermes.write_text(
                tomlkit.dumps(
                    jsonmerge.merge(
                        {
                            "global": {
                                "log_level": "info",
                            },
                            "chains": [
                                relayer_chain_config_hermes(data_dir, c, cfg)
                                for c in chains
                            ],
                        },
                        relayer_config,
                    )
                )
            )
        else:
            # write relayer config folder for rly
            relayer_config_dir = data_dir / "relayer/config"
            relayer_config_dir.mkdir(parents=True, exist_ok=True)
            relayer_config_rly = relayer_config_dir / "config.yaml"
            log_level = relayer_config.get("global", {}).get("log_level", "")
            relayer_config_rly.write_text(
                yaml.dump(
                    {
                        "global": {
                            "api-listen-addr": ":5183",
                            "debug-listen-addr": ":5183",
                            "enable-debug-server": True,
                            "timeout": "10s",
                            "memo": "",
                            "light-cache-size": 20,
                            "log-level": log_level,
                        },
                        "chains": {
                            c["chain_id"]: relayer_chain_config_rly(data_dir, c, cfg)
                            for c in chains
                        },
                    }
                )
            )
        for chain in chains:
            # restore the relayer key on each chain from the mnemonic stored
            # in that chain's accounts.json
            key_name = chain.get("key_name", "relayer")
            mnemonic = find_account(data_dir, chain["chain_id"], key_name)["mnemonic"]
            mnemonic_path = Path(data_dir) / "relayer.env"
            mnemonic_path.write_text(mnemonic)
            if is_hermes:
                # restore the relayer account for hermes
                subprocess.run(
                    [
                        "hermes",
                        "--config",
                        relayer_config_hermes,
                        "keys",
                        "add",
                        "--chain",
                        chain["chain_id"],
                        "--mnemonic-file",
                        str(mnemonic_path),
                        "--overwrite",
                        "--hd-path",
                        "m/44'/" + str(chain.get("coin-type", 394)) + "'/0'/0/0",
                    ],
                    check=True,
                )
            else:
                # restore the relayer account for rly
                subprocess.run(
                    [
                        "rly",
                        "keys",
                        "restore",
                        chain["chain_id"],
                        "relayer",
                        mnemonic,
                        "--home",
                        str(data_dir / "relayer"),
                    ],
                    check=True,
                )


def find_account(data_dir, chain_id, name):
    """Look up an account by name in a chain's accounts.json.

    Raises StopIteration if no account with that name exists.
    """
    accounts = json.load((data_dir / chain_id / "accounts.json").open())
    return next(acct for acct in accounts if acct["name"] == name)


def supervisord_ini(cmd, validators, chain_id, start_flags=""):
    """Build the supervisord [program:...] sections for one chain's nodes.

    `%(here)s` is expanded by supervisord to the ini file's directory.
    """
    ini = {}
    for i, node in enumerate(validators):
        directory = f"%(here)s/node{i}"
        ini[f"program:{chain_id}-node{i}"] = dict(
            COMMON_PROG_OPTIONS,
            directory=directory,
            command=f"{cmd} start --home . {start_flags}",
            stdout_logfile=f"{directory}.log",
        )
    return ini
def supervisord_ini_group(chain_ids, is_hermes):
    """Build the top-level supervisord config for the whole cluster.

    Includes each chain's per-node tasks.ini and a (non-autostarted)
    relayer program for either hermes or rly.
    """
    directory = "%(here)s"
    cfg = {
        "include": {
            "files": " ".join(
                f"{directory}/{chain_id}/tasks.ini" for chain_id in chain_ids
            )
        },
        "supervisord": {
            "pidfile": f"{directory}/supervisord.pid",
            "nodaemon": "true",
            "logfile": "/dev/null",
            "logfile_maxbytes": "0",
            "strip_ansi": "true",
        },
        "rpcinterface:supervisor": {
            "supervisor.rpcinterface_factory": "supervisor.rpcinterface:"
            "make_main_rpcinterface",
        },
        "unix_http_server": {"file": f"{directory}/supervisor.sock"},
        "supervisorctl": {"serverurl": f"unix://{directory}/supervisor.sock"},
    }
    command = "hermes --config relayer.toml start"
    if not is_hermes:
        command = "rly start chainmain-cronos --home relayer"
    cfg["program:relayer-demo"] = dict(
        COMMON_PROG_OPTIONS,
        directory=directory,
        command=command,
        stdout_logfile=f"{directory}/relayer-demo.log",
        autostart="false",
    )
    return cfg


def docker_compose_yml(cmd, validators, data_dir, image):
    """Build a docker-compose config with one service per validator node.

    :param cmd: chain binary (currently unused, see NOTE below)
    """
    # NOTE(review): the command is hard-coded to "chaind start" and ignores
    # `cmd`; presumably the image's entrypoint binary -- confirm intended.
    return {
        "version": "3",
        "services": {
            f"node{i}": {
                "image": image,
                "command": "chaind start",
                "volumes": [f"{data_dir.absolute() / f'node{i}'}:/.chain-maind:Z"],
            }
            # only the index is needed; the validator dict itself is unused
            for i in range(len(validators))
        },
    }


def edit_tm_cfg(path, base_port, peers, config, *, custom_edit=None):
    "field name changed after tendermint 0.35, support both flavours."
    with open(path) as f:
        doc = tomlkit.parse(f.read())
    doc["mode"] = "validator"
    # tendermint is start in process, not needed
    # doc['proxy_app'] = 'tcp://127.0.0.1:%d' % abci_port(base_port)
    rpc = doc["rpc"]
    rpc["laddr"] = "tcp://127.0.0.1:%d" % ports.rpc_port(base_port)
    rpc["pprof_laddr"] = rpc["pprof-laddr"] = "127.0.0.1:%d" % (
        ports.pprof_port(base_port),
    )
    rpc["timeout_broadcast_tx_commit"] = rpc["timeout-broadcast-tx-commit"] = "30s"
    rpc["grpc_laddr"] = rpc["grpc-laddr"] = "tcp://127.0.0.1:%d" % (
        ports.grpc_port_tx_only(base_port),
    )
    p2p = doc["p2p"]
    # p2p["use-legacy"] = True
    p2p["laddr"] = "tcp://127.0.0.1:%d" % ports.p2p_port(base_port)
    p2p["persistent_peers"] = p2p["persistent-peers"] = peers
    p2p["addr_book_strict"] = p2p["addr-book-strict"] = False
    p2p["allow_duplicate_ip"] = p2p["allow-duplicate-ip"] = True
    doc["consensus"]["timeout_commit"] = doc["consensus"]["timeout-commit"] = "1s"
    # user-provided overrides take precedence over all the defaults above
    patch_toml_doc(doc, config)
    if custom_edit is not None:
        custom_edit(doc)
    with open(path, "w") as f:
        f.write(tomlkit.dumps(doc))


def patch_toml_doc(doc, patch):
    """Recursively apply `patch` onto `doc`, merging nested dicts in place."""
    for k, v in patch.items():
        if isinstance(v, dict):
            patch_toml_doc(doc.setdefault(k, {}), v)
        else:
            doc[k] = v


def edit_app_cfg(path, base_port, app_config):
    """Patch a node's app.toml in place with sensible devnet defaults plus
    user overrides.

    Placeholders {EVMRPC_PORT}/{EVMRPC_PORT_WS} in string values of
    `app_config` are substituted with the node's actual ports.
    """
    default_patch = {
        "api": {
            "enable": True,
            "swagger": True,
            "enable-unsafe-cors": True,
            "address": "tcp://127.0.0.1:%d" % ports.api_port(base_port),
        },
        "grpc": {
            "address": "127.0.0.1:%d" % ports.grpc_port(base_port),
        },
        "pruning": "nothing",
        "state-sync": {
            "snapshot-interval": 5,
            "snapshot-keep-recent": 10,
        },
        "minimum-gas-prices": "0basecro",
    }

    app_config = format_value(
        app_config,
        {
            "EVMRPC_PORT": ports.evmrpc_port(base_port),
            "EVMRPC_PORT_WS": ports.evmrpc_ws_port(base_port),
        },
    )
    with open(path) as f:
        doc = tomlkit.parse(f.read())
    doc["grpc-web"] = {}
    doc["grpc-web"]["address"] = "127.0.0.1:%d" % ports.grpc_web_port(base_port)
    patch_toml_doc(doc, jsonmerge.merge(default_patch, app_config))
    # fix: write via a context manager so the handle is always closed
    # (was: open(path, "w").write(...), which leaks the file object)
    with open(path, "w") as f:
        f.write(tomlkit.dumps(doc))


def format_value(v, ctx):
    """Recursively str.format strings in `v` with `ctx`; other types pass through."""
    if isinstance(v, str):
        return v.format(**ctx)
    elif isinstance(v, dict):
        return {k: format_value(vv, ctx) for k, vv in v.items()}
    else:
        return v


def try_remove_peer(peers, peer):
    "try remove peer from peers, do nothing if don't contains the peer."
    items = peers.split(",")
    try:
        items.remove(peer)
    except ValueError:
        return peers
    else:
        return ",".join(items)


if __name__ == "__main__":
    interact("rm -r data; mkdir data", ignore_error=True)
    data_dir = Path("data")
    init_cluster(data_dir, "config.yaml", 26650)
    supervisord = start_cluster(data_dir)
    t = start_tail_logs_thread(data_dir)
    supervisord.wait()
    t.stop()
    t.join()
class ModuleAccount(enum.Enum):
    """Well-known module account names used to derive module addresses."""

    FeeCollector = "fee_collector"
    Mint = "mint"
    Gov = "gov"
    Distribution = "distribution"
    BondedPool = "bonded_tokens_pool"
    NotBondedPool = "not_bonded_tokens_pool"
    IBCTransfer = "transfer"


@format_doc_string(
    options=",".join(v.value for v in ModuleAccount.__members__.values())
)
def module_address(name):
    """
    get address of module accounts

    :param name: name of module account, values: {options}
    """
    # address = first 20 bytes of sha256(module name), bech32-encoded with
    # the "cro" human-readable prefix
    data = hashlib.sha256(ModuleAccount(name).value.encode()).digest()[:20]
    return bech32.bech32_encode("cro", bech32.convertbits(data, 8, 5))


class ChainCommand:
    """Thin wrapper that runs the chain binary as a shell command."""

    def __init__(self, cmd=None):
        # fall back to the default chain binary configured in .app
        self.cmd = cmd or CHAIN

    def prob_genesis_subcommand(self):
        'test if the command has "genesis" subcommand, introduced in sdk 0.50'
        try:
            output = self("genesis")
        except AssertionError:
            # non-zero return code
            return False

        return "Available Commands" in output.decode()

    def prob_icaauth_subcommand(self):
        'test if the command has "icaauth" subcommand, removed after ibc 8.3'
        try:
            output = self("q", "icaauth")
        except AssertionError:
            # non-zero return code
            return False

        return "Available Commands" in output.decode()

    def prob_event_query_tx_for(self):
        'test if the command has "event-query-tx-for" subcommand'
        try:
            output = self("q", "event-query-tx-for")
        except AssertionError:
            # non-zero return code
            return False

        return "Available Commands" in output.decode()

    def __call__(self, cmd, *args, stdin=None, stderr=subprocess.STDOUT, **kwargs):
        "execute chain-maind"
        # positional args and keyword flags are flattened into CLI arguments
        args = " ".join(build_cli_args_safe(cmd, *args, **kwargs))
        return interact(f"{self.cmd} {args}", input=stdin, stderr=stderr)
__init__( 91 | self, 92 | data_dir, 93 | node_rpc, 94 | chain_id=None, 95 | cmd=None, 96 | zemu_address=ZEMU_HOST, 97 | zemu_button_port=ZEMU_BUTTON_PORT, 98 | ): 99 | self.data_dir = data_dir 100 | if chain_id is None: 101 | src = (self.data_dir / "config" / "genesis.json").read_text() 102 | self._genesis = json.loads(src) 103 | self.chain_id = self._genesis["chain_id"] 104 | else: 105 | self.chain_id = chain_id 106 | self.node_rpc = node_rpc 107 | self.raw = ChainCommand(cmd) 108 | self.leger_button = LedgerButton(zemu_address, zemu_button_port) 109 | self.output = None 110 | self.error = None 111 | self.has_genesis_subcommand = self.raw.prob_genesis_subcommand() 112 | self.has_icaauth_subcommand = self.raw.prob_icaauth_subcommand() 113 | self.has_event_query_tx_for = self.raw.prob_event_query_tx_for() 114 | 115 | def node_id(self): 116 | "get tendermint node id" 117 | output = self.raw("tendermint", "show-node-id", home=self.data_dir) 118 | return output.decode().strip() 119 | 120 | def delete_account(self, name): 121 | "delete wallet account in node's keyring" 122 | return self.raw( 123 | "keys", 124 | "delete", 125 | name, 126 | "-y", 127 | "--force", 128 | home=self.data_dir, 129 | output="json", 130 | keyring_backend="test", 131 | ) 132 | 133 | def create_account(self, name, mnemonic=None, **kwargs): 134 | "create new keypair in node's keyring" 135 | if mnemonic is None: 136 | output = self.raw( 137 | "keys", 138 | "add", 139 | name, 140 | home=self.data_dir, 141 | output="json", 142 | keyring_backend="test", 143 | **kwargs, 144 | ) 145 | else: 146 | output = self.raw( 147 | "keys", 148 | "add", 149 | name, 150 | "--recover", 151 | home=self.data_dir, 152 | output="json", 153 | keyring_backend="test", 154 | stdin=mnemonic.encode() + b"\n", 155 | **kwargs, 156 | ) 157 | return json.loads(output) 158 | 159 | def create_account_ledger(self, name, **kwargs): 160 | "create new ledger keypair" 161 | 162 | def send_request(): 163 | try: 164 | self.output = 
self.raw( 165 | "keys", 166 | "add", 167 | name, 168 | "--ledger", 169 | home=self.data_dir, 170 | output="json", 171 | keyring_backend="test", 172 | **kwargs, 173 | ) 174 | except Exception as e: 175 | self.error = e 176 | 177 | t = threading.Thread(target=send_request) 178 | t.start() 179 | time.sleep(3) 180 | for _ in range(0, 3): 181 | self.leger_button.press_right() 182 | time.sleep(0.2) 183 | self.leger_button.press_both() 184 | t.join() 185 | if self.error: 186 | raise self.error 187 | return json.loads(self.output) 188 | 189 | def init(self, moniker): 190 | "the node's config is already added" 191 | return self.raw( 192 | "init", 193 | moniker, 194 | chain_id=self.chain_id, 195 | home=self.data_dir, 196 | ) 197 | 198 | def genesis_subcommand(self, *args, **kwargs): 199 | if self.has_genesis_subcommand: 200 | return self.raw("genesis", *args, **kwargs) 201 | else: 202 | return self.raw(*args, **kwargs) 203 | 204 | def validate_genesis(self, *args): 205 | return self.genesis_subcommand("validate-genesis", *args, home=self.data_dir) 206 | 207 | def add_genesis_account(self, addr, coins, **kwargs): 208 | return self.genesis_subcommand( 209 | "add-genesis-account", 210 | addr, 211 | coins, 212 | home=self.data_dir, 213 | output="json", 214 | **kwargs, 215 | ) 216 | 217 | def gentx(self, name, coins, *args, min_self_delegation=1, pubkey=None, **kwargs): 218 | return self.genesis_subcommand( 219 | "gentx", 220 | name, 221 | coins, 222 | *args, 223 | min_self_delegation=str(min_self_delegation), 224 | home=self.data_dir, 225 | chain_id=self.chain_id, 226 | keyring_backend="test", 227 | pubkey=pubkey, 228 | **kwargs, 229 | ) 230 | 231 | def collect_gentxs(self, gentx_dir): 232 | return self.genesis_subcommand("collect-gentxs", gentx_dir, home=self.data_dir) 233 | 234 | def status(self): 235 | return json.loads(self.raw("status", node=self.node_rpc)) 236 | 237 | def block_height(self): 238 | return int(get_sync_info(self.status())["latest_block_height"]) 239 | 240 | 
def balance(self, addr, denom=None, height=0):
    """Return the integer balance of *addr* at *height* (0 = latest).

    With ``denom=None`` the first listed coin's amount is returned (0 when
    the account holds nothing); otherwise the amount of that denom, or 0
    if the account does not hold it.
    """
    coins = self.balances(addr, height=height)
    if denom is None:
        return int(coins[0]["amount"]) if coins else 0
    for coin in coins:
        if coin["denom"] == denom:
            return int(coin["amount"])
    return 0
def staking_pool(self, bonded=True):
    """Return the staking pool's bonded (default) or not-bonded token total.

    Newer SDKs nest the result under a "pool" key; older ones return it
    flat, so fall back to the top-level object when "pool" is absent.
    """
    payload = json.loads(
        self.raw("query", "staking", "pool", output="json", node=self.node_rpc)
    )
    pool = payload.get("pool") or payload
    key = "bonded_tokens" if bonded else "not_bonded_tokens"
    return int(pool[key])
def get_delegated_amount(self, which_addr):
    """Query all staking delegations made by *which_addr*, parsed from JSON."""
    query_args = ("query", "staking", "delegations", which_addr)
    query_flags = {
        "home": self.data_dir,
        "node": self.node_rpc,
        "output": "json",
    }
    return json.loads(self.raw(*query_args, **query_flags))
# to_addr is the validator operator address; from_addr is the delegator account
def unbond_amount(self, to_addr, amount, from_addr, event_query_tx=True, **kwargs):
    """Unbond *amount* from validator *to_addr* on behalf of *from_addr*.

    When the broadcast succeeds (code 0) and the binary supports it, the
    final tx result is fetched via event-query-tx-for.
    """
    rsp = json.loads(
        self.raw(
            "tx",
            "staking",
            "unbond",
            to_addr,
            amount,
            "-y",
            home=self.data_dir,
            from_=from_addr,
            keyring_backend="test",
            chain_id=self.chain_id,
            node=self.node_rpc,
            **kwargs,
        )
    )
    # guard clauses: only resolve the tx result on a successful broadcast
    if rsp["code"] != 0 or not event_query_tx or not self.has_event_query_tx_for:
        return rsp
    return self.event_query_tx_for(rsp["txhash"])
def redelegate_amount(
    self,
    to_validator_addr,
    from_validator_addr,
    amount,
    from_addr,
    event_query_tx=True,
    **kwargs,
):
    """Redelegate *amount* from *from_validator_addr* to *to_validator_addr*.

    Validator addresses are operator (valoper) addresses; *from_addr* is
    the delegator's account address. Note the CLI takes the source
    validator before the destination.
    """
    base_flags = {
        "home": self.data_dir,
        "from_": from_addr,
        "keyring_backend": "test",
        "chain_id": self.chain_id,
        "node": self.node_rpc,
    }
    rsp = json.loads(
        self.raw(
            "tx",
            "staking",
            "redelegate",
            from_validator_addr,
            to_validator_addr,
            amount,
            "-y",
            **base_flags,
            **kwargs,
        )
    )
    if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for:
        return self.event_query_tx_for(rsp["txhash"])
    return rsp
def broadcast_tx(self, tx_file, **kwargs):
    """Broadcast a signed transaction file in sync mode.

    Returns the raw CLI output decoded as UTF-8 text (not parsed as JSON,
    since callers may want the unstructured output).
    """
    return self.raw(
        "tx",
        "broadcast",
        tx_file,
        node=self.node_rpc,
        broadcast_mode="sync",
        **kwargs,
    ).decode("utf-8")
def create_validator(
    self,
    amount,
    options,
    event_query_tx=True,
    **kwargs,
):
    """MsgCreateValidator via the file-based CLI interface.

    Args:
        amount: self-delegation coin string (e.g. "1000000stake").
        options: dict of validator settings; merged over the defaults
            below, so any key in *options* overrides the default.
        event_query_tx: when True and the binary supports it, resolve the
            final tx result via event-query-tx-for after a code-0 broadcast.
        **kwargs: extra flags forwarded to the CLI call.

    Returns:
        The parsed tx response dict.

    NOTE(review): assumes a key named "validator" exists in the test
    keyring — confirm against cluster setup.
    """
    # caller-supplied options win over these defaults (rightmost in `|`)
    options = {
        "commission-max-change-rate": "0.01",
        "commission-rate": "0.1",
        "commission-max-rate": "0.2",
        "min-self-delegation": "1",
        "amount": amount,
    } | options

    if "pubkey" not in options:
        # derive the consensus pubkey from the node's own tendermint key
        pubkey = (
            self.raw(
                "tendermint",
                "show-validator",
                home=self.data_dir,
            )
            .strip()
            .decode()
        )
        options["pubkey"] = json.loads(pubkey)

    # the CLI takes the validator definition as a JSON file argument;
    # flush so the subprocess sees the full contents before the call
    with tempfile.NamedTemporaryFile("w") as fp:
        json.dump(options, fp)
        fp.flush()
        raw = self.raw(
            "tx",
            "staking",
            "create-validator",
            fp.name,
            "-y",
            from_=self.address("validator"),
            # basic
            home=self.data_dir,
            node=self.node_rpc,
            keyring_backend="test",
            chain_id=self.chain_id,
            **kwargs,
        )
    rsp = json.loads(raw)
    if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for:
        rsp = self.event_query_tx_for(rsp["txhash"])
    return rsp
"show-validator", 805 | home=self.data_dir, 806 | ) 807 | .strip() 808 | .decode() 809 | ) 810 | options = { 811 | "amount": amount, 812 | "min-self-delegation": min_self_delegation, 813 | "commission-rate": commission_rate, 814 | "commission-max-rate": commission_max_rate, 815 | "commission-max-change-rate": commission_max_change_rate, 816 | "moniker": moniker, 817 | } 818 | options["pubkey"] = "'" + pubkey + "'" 819 | raw = self.raw( 820 | "tx", 821 | "staking", 822 | "create-validator", 823 | "-y", 824 | from_=self.address("validator"), 825 | # basic 826 | home=self.data_dir, 827 | node=self.node_rpc, 828 | keyring_backend="test", 829 | chain_id=self.chain_id, 830 | **{k: v for k, v in options.items() if v is not None}, 831 | **kwargs, 832 | ) 833 | rsp = json.loads(raw) 834 | if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for: 835 | rsp = self.event_query_tx_for(rsp["txhash"]) 836 | return rsp 837 | 838 | def edit_validator( 839 | self, 840 | commission_rate=None, 841 | new_moniker=None, 842 | identity=None, 843 | website=None, 844 | security_contact=None, 845 | details=None, 846 | event_query_tx=True, 847 | **kwargs, 848 | ): 849 | """MsgEditValidator""" 850 | options = dict( 851 | commission_rate=commission_rate, 852 | # description 853 | new_moniker=new_moniker, 854 | identity=identity, 855 | website=website, 856 | security_contact=security_contact, 857 | details=details, 858 | ) 859 | rsp = json.loads( 860 | self.raw( 861 | "tx", 862 | "staking", 863 | "edit-validator", 864 | "-y", 865 | from_=self.address("validator"), 866 | home=self.data_dir, 867 | node=self.node_rpc, 868 | keyring_backend="test", 869 | chain_id=self.chain_id, 870 | **{k: v for k, v in options.items() if v is not None}, 871 | **kwargs, 872 | ) 873 | ) 874 | if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for: 875 | rsp = self.event_query_tx_for(rsp["txhash"]) 876 | return rsp 877 | 878 | def gov_propose(self, proposer, kind, proposal, **kwargs): 879 
def gov_vote(self, voter, proposal_id, option, event_query_tx=True, **kwargs):
    """Cast a governance vote (MsgVote) on *proposal_id*.

    Args:
        voter: key name or address casting the vote.
        proposal_id: id of the proposal being voted on.
        option: vote option string ("yes", "no", "abstain", ...).
        event_query_tx: when True and supported, resolve the final tx
            result via event-query-tx-for after a code-0 broadcast.
        **kwargs: extra flags forwarded to the CLI call.

    Returns:
        The parsed tx response dict.
    """
    # fix: removed leftover debug print() calls that polluted stdout
    rsp = json.loads(
        self.raw(
            "tx",
            "gov",
            "vote",
            proposal_id,
            option,
            "-y",
            from_=voter,
            home=self.data_dir,
            node=self.node_rpc,
            keyring_backend="test",
            chain_id=self.chain_id,
            stderr=subprocess.DEVNULL,
            **kwargs,
        )
    )
    if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for:
        rsp = self.event_query_tx_for(rsp["txhash"])
    return rsp
| "-y", 1056 | # FIXME https://github.com/cosmos/cosmos-sdk/issues/8059 1057 | "--absolute-timeouts", 1058 | from_=from_, 1059 | home=self.data_dir, 1060 | node=self.node_rpc, 1061 | keyring_backend="test", 1062 | chain_id=self.chain_id, 1063 | packet_timeout_height=f"{target_version}-10000000000", 1064 | packet_timeout_timestamp=0, 1065 | **kwargs, 1066 | ) 1067 | ) 1068 | if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for: 1069 | rsp = self.event_query_tx_for(rsp["txhash"]) 1070 | return rsp 1071 | 1072 | def export(self): 1073 | return self.raw("export", home=self.data_dir) 1074 | 1075 | def unsaferesetall(self): 1076 | return self.raw("unsafe-reset-all") 1077 | 1078 | def create_nft( 1079 | self, 1080 | from_addr, 1081 | denomid, 1082 | denomname, 1083 | schema, 1084 | fees, 1085 | event_query_tx=True, 1086 | **kwargs, 1087 | ): 1088 | rsp = json.loads( 1089 | self.raw( 1090 | "tx", 1091 | "nft", 1092 | "issue", 1093 | denomid, 1094 | "-y", 1095 | fees=fees, 1096 | name=denomname, 1097 | schema=schema, 1098 | home=self.data_dir, 1099 | from_=from_addr, 1100 | keyring_backend="test", 1101 | chain_id=self.chain_id, 1102 | node=self.node_rpc, 1103 | **kwargs, 1104 | ) 1105 | ) 1106 | if rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for: 1107 | rsp = self.event_query_tx_for(rsp["txhash"]) 1108 | return rsp 1109 | 1110 | def query_nft(self, denomid): 1111 | return json.loads( 1112 | self.raw( 1113 | "query", 1114 | "nft", 1115 | "denom", 1116 | denomid, 1117 | output="json", 1118 | home=self.data_dir, 1119 | node=self.node_rpc, 1120 | ) 1121 | ) 1122 | 1123 | def query_denom_by_name(self, denomname): 1124 | return json.loads( 1125 | self.raw( 1126 | "query", 1127 | "nft", 1128 | "denom-by-name", 1129 | denomname, 1130 | output="json", 1131 | home=self.data_dir, 1132 | node=self.node_rpc, 1133 | ) 1134 | ) 1135 | 1136 | def create_nft_token( 1137 | self, 1138 | from_addr, 1139 | to_addr, 1140 | denomid, 1141 | tokenid, 1142 | 
def burn_nft_token(self, from_addr, denomid, tokenid, event_query_tx=True, **kwargs):
    """Burn NFT *tokenid* in denom *denomid*, signed by *from_addr*."""
    rsp = json.loads(
        self.raw(
            "tx",
            "nft",
            "burn",
            denomid,
            tokenid,
            "-y",
            from_=from_addr,
            keyring_backend="test",
            home=self.data_dir,
            chain_id=self.chain_id,
            node=self.node_rpc,
            **kwargs,
        )
    )
    # only resolve the final tx result on a successful broadcast
    if rsp["code"] != 0 or not event_query_tx or not self.has_event_query_tx_for:
        return rsp
    return self.event_query_tx_for(rsp["txhash"])
def ibc_query_channels(self, connid, **kwargs):
    """List IBC channels attached to connection *connid*.

    Keyword arguments override the default node/output flags.
    """
    merged_flags = {"node": self.node_rpc, "output": "json", **kwargs}
    return json.loads(
        self.raw("q", "ibc", "channel", "connections", connid, **merged_flags)
    )
def ica_query_account(self, connid, owner, **kwargs):
    """Look up the interchain account address for (*connid*, *owner*).

    Uses the legacy ``icaauth`` query when the binary provides it,
    otherwise the upstream ``ica controller`` query (note the different
    argument order between the two).
    """
    if self.has_icaauth_subcommand:
        cmd = ["icaauth", "interchain-account-address", connid, owner]
    else:
        cmd = ["ica", "controller", "interchain-account", owner, connid]
    flags = {"node": self.node_rpc, "output": "json", **kwargs}
    return json.loads(self.raw("q", *cmd, **flags))
rsp["code"] == 0 and event_query_tx and self.has_event_query_tx_for: 1398 | rsp = self.event_query_tx_for(rsp["txhash"]) 1399 | return rsp 1400 | 1401 | def ica_generate_packet_data(self, tx, memo=None, encoding="proto3", **kwargs): 1402 | return json.loads( 1403 | self.raw( 1404 | "tx", 1405 | "interchain-accounts", 1406 | "host", 1407 | "generate-packet-data", 1408 | tx, 1409 | memo=memo, 1410 | encoding=encoding, 1411 | home=self.data_dir, 1412 | **kwargs, 1413 | ) 1414 | ) 1415 | 1416 | def ibc_upgrade_channels(self, version, from_addr, **kwargs): 1417 | return json.loads( 1418 | self.raw( 1419 | "tx", 1420 | "ibc", 1421 | "channel", 1422 | "upgrade-channels", 1423 | json.dumps(version), 1424 | "-y", 1425 | "--json", 1426 | from_=from_addr, 1427 | keyring_backend="test", 1428 | chain_id=self.chain_id, 1429 | home=self.data_dir, 1430 | stderr=subprocess.DEVNULL, 1431 | **kwargs, 1432 | ) 1433 | ) 1434 | 1435 | def register_counterparty_payee( 1436 | self, port_id, channel_id, relayer, counterparty_payee, **kwargs 1437 | ): 1438 | rsp = json.loads( 1439 | self.raw( 1440 | "tx", 1441 | "ibc-fee", 1442 | "register-counterparty-payee", 1443 | port_id, 1444 | channel_id, 1445 | relayer, 1446 | counterparty_payee, 1447 | "-y", 1448 | home=self.data_dir, 1449 | **kwargs, 1450 | ) 1451 | ) 1452 | if rsp["code"] == 0 and self.has_event_query_tx_for: 1453 | rsp = self.event_query_tx_for(rsp["txhash"]) 1454 | return rsp 1455 | 1456 | def pay_packet_fee(self, port_id, channel_id, packet_seq, **kwargs): 1457 | rsp = json.loads( 1458 | self.raw( 1459 | "tx", 1460 | "ibc-fee", 1461 | "pay-packet-fee", 1462 | port_id, 1463 | channel_id, 1464 | str(packet_seq), 1465 | "-y", 1466 | home=self.data_dir, 1467 | **kwargs, 1468 | ) 1469 | ) 1470 | if rsp["code"] == 0 and self.has_event_query_tx_for: 1471 | rsp = self.event_query_tx_for(rsp["txhash"]) 1472 | return rsp 1473 | 1474 | def ibc_denom_trace(self, path, node): 1475 | denom_hash = 
def ibc_denom(self, path, node):
    """Resolve an IBC denom record from its trace *path*.

    The on-chain denom key is the uppercase SHA-256 hex digest of the
    trace path (e.g. "transfer/channel-0/uatom").
    """
    digest = hashlib.sha256(path.encode()).hexdigest()
    rsp = json.loads(
        self.raw(
            "q",
            "ibc-transfer",
            "denom",
            digest.upper(),
            node=node,
            output="json",
        )
    )
    return rsp["denom"]
def expand(config, dotenv, path):
    """Interpolate POSIX-style ``$VAR`` references in *config*.

    Variable values come from the process environment, optionally overlaid
    with values from a dotenv file. Dotenv precedence: an explicit *dotenv*
    argument wins and any ``dotenv`` key inside the config is discarded;
    otherwise the config's own ``dotenv`` key (if present) is used.
    The dotenv path is resolved relative to *path*'s parent directory.

    Args:
        config: parsed config object (dict/list/str tree).
        dotenv: dotenv file name relative to the config file, or None.
        path: Path of the config file itself.

    Returns:
        The config with all string values expanded.

    Raises:
        ValueError: if the dotenv value is not a string, or the file
            does not exist.
    """
    config_vars = dict(os.environ)  # load system env

    if dotenv is not None:
        if "dotenv" in config:
            _ = config.pop("dotenv", {})  # remove dotenv field if exists
    elif "dotenv" in config:
        dotenv = config.pop("dotenv", {})  # pop dotenv field if exists

    if dotenv:
        if not isinstance(dotenv, str):
            raise ValueError(f"Invalid value passed to dotenv: {dotenv}")
        env_path = path.parent / dotenv
        if not env_path.is_file():
            raise ValueError(
                f"Dotenv specified in config but not found at path: {env_path}"
            )
        # overlay dotenv values on the env snapshot, and also export them
        # to os.environ for any downstream code that reads it directly
        config_vars.update(dotenv_values(dotenv_path=env_path))  # type: ignore
        load_dotenv(dotenv_path=env_path)

    return expand_posix_vars(config, config_vars)
def expand_jsonnet(config_path, dotenv):
    """Evaluate a jsonnet config file and expand POSIX variables in it.

    Args:
        config_path: path to the .jsonnet config file.
        dotenv: dotenv file overriding the config's own ``dotenv`` field,
            or None to use the config's field (if any).

    Returns:
        The expanded config mapping.
    """
    path = Path(config_path)
    evaluated = _jsonnet.evaluate_file(str(config_path))
    return expand(json.loads(evaluated), dotenv, path)
class LedgerButton:
    """Drives the Zemu hardware-wallet emulator buttons over a TCP socket.

    The connection is established lazily on first button press.
    """

    def __init__(self, zemu_address, zemu_button_port):
        self.zemu_address = zemu_address
        self.zemu_button_port = zemu_button_port
        self.connected = False
        # raw socket, connected on first access through `client`
        self._client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    @property
    def client(self):
        """Connected socket; connects lazily (after a short grace period)."""
        if not self.connected:
            # give the emulator time to come up before connecting
            time.sleep(5)
            self._client.connect((self.zemu_address, self.zemu_button_port))
            self.connected = True
        return self._client

    def _press(self, data):
        # send the raw button protocol string to the emulator
        self.client.send(data.encode())

    def press_left(self):
        """Press the left button."""
        self._press("Ll")

    def press_right(self):
        """Press the right button."""
        self._press("Rr")

    def press_both(self):
        """Press both buttons simultaneously."""
        self._press("LRlr")
# Each node's services listen at fixed offsets from its base port.
_EVMRPC_WS_OFFSET = 2
_GRPC_OFFSET = 3
_API_OFFSET = 4
_PPROF_OFFSET = 5
_GRPC_TX_ONLY_OFFSET = 6
_RPC_OFFSET = 7
_GRPC_WEB_OFFSET = 8


def evmrpc_ws_port(base_port):
    """EVM JSON-RPC websocket port for a node with the given base port."""
    return base_port + _EVMRPC_WS_OFFSET


def grpc_port(base_port):
    """gRPC query port for a node with the given base port."""
    return base_port + _GRPC_OFFSET


def api_port(base_port):
    """REST API port for a node with the given base port."""
    return base_port + _API_OFFSET


def pprof_port(base_port):
    """pprof profiling port for a node with the given base port."""
    return base_port + _PPROF_OFFSET


def grpc_port_tx_only(base_port):
    """Tx-broadcast-only gRPC port for a node with the given base port."""
    return base_port + _GRPC_TX_ONLY_OFFSET


def rpc_port(base_port):
    """Tendermint RPC port for a node with the given base port."""
    return base_port + _RPC_OFFSET


def grpc_web_port(base_port):
    """gRPC-web port for a node with the given base port."""
    return base_port + _GRPC_WEB_OFFSET
# for query only
class GrpcUtil:
    """Minimal read-only gRPC client for a cosmos node."""

    def __init__(self, ip_port):
        # "host:port" of the node's gRPC endpoint
        self.ip_port = ip_port

    def get_validators(self):
        """Query the staking module's validator set.

        Returns the raw QueryValidatorsResponse message.
        """
        stub = cosmos.staking.v1beta1.query_pb2_grpc.QueryStub(
            grpc.insecure_channel(self.ip_port)
        )
        request = cosmos.staking.v1beta1.query_pb2.QueryValidatorsRequest()
        return stub.Validators(request)
vital settle comic tribe project blame bulb armed flower region sausage mercy arrive release', 16 | }, { 17 | coins: '1000000000000000000stake,10000000000000000000000basetcro', 18 | staked: '1000000000000000000stake', 19 | mnemonic: 'direct travel shrug hand twice agent sail sell jump phone velvet pilot mango charge usual multiply orient garment bleak virtual action mention panda vast', 20 | }], 21 | accounts: [{ 22 | name: 'community', 23 | coins: '10000000000000000000000basetcro', 24 | mnemonic: 'notable error gospel wave pair ugly measure elite toddler cost various fly make eye ketchup despair slab throw tribe swarm word fruit into inmate', 25 | }, { 26 | name: 'signer1', 27 | coins: '20000000000000000000000basetcro', 28 | mnemonic: 'shed crumble dismiss loyal latin million oblige gesture shrug still oxygen custom remove ribbon disorder palace addict again blanket sad flock consider obey popular', 29 | }, { 30 | name: 'signer2', 31 | coins: '30000000000000000000000basetcro', 32 | mnemonic: 'night renew tonight dinner shaft scheme domain oppose echo summer broccoli agent face guitar surface belt veteran siren poem alcohol menu custom crunch index', 33 | }], 34 | genesis: { 35 | consensus_params: { 36 | block: { 37 | max_bytes: '1048576', 38 | max_gas: '81500000', 39 | }, 40 | }, 41 | app_state: { 42 | evm: { 43 | params: { 44 | evm_denom: 'basetcro', 45 | }, 46 | }, 47 | cronos: { 48 | params: { 49 | cronos_admin: 'crc12luku6uxehhak02py4rcz65zu0swh7wjsrw0pp', 50 | enable_auto_deployment: true, 51 | ibc_cro_denom: 'ibc/6411AE2ADA1E73DB59DB151A8988F9B7D5E7E233D8414DB6817F8F1A01611F86', 52 | }, 53 | }, 54 | gov: { 55 | voting_params: { 56 | voting_period: '10s', 57 | }, 58 | deposit_params: { 59 | max_deposit_period: '10s', 60 | min_deposit: [ 61 | { 62 | denom: 'basetcro', 63 | amount: '1', 64 | }, 65 | ], 66 | }, 67 | }, 68 | transfer: { 69 | params: { 70 | receive_enabled: true, 71 | send_enabled: true, 72 | }, 73 | }, 74 | }, 75 | }, 76 | }, 77 | } 78 | 
-------------------------------------------------------------------------------- /pystarport/tests/test_expansion/base.yaml: -------------------------------------------------------------------------------- 1 | cronos_777-1: 2 | cmd: cronosd 3 | start-flags: "--trace" 4 | app-config: 5 | minimum-gas-prices: 5000000000000basetcro 6 | json-rpc: 7 | address: "127.0.0.1:{EVMRPC_PORT}" 8 | ws-address: "127.0.0.1:{EVMRPC_PORT_WS}" 9 | validators: 10 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 11 | staked: 1000000000000000000stake 12 | mnemonic: visit craft resemble online window solution west chuckle music diesel vital settle comic tribe project blame bulb armed flower region sausage mercy arrive release 13 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 14 | staked: 1000000000000000000stake 15 | mnemonic: direct travel shrug hand twice agent sail sell jump phone velvet pilot mango charge usual multiply orient garment bleak virtual action mention panda vast 16 | accounts: 17 | - name: community 18 | coins: 10000000000000000000000basetcro 19 | mnemonic: "notable error gospel wave pair ugly measure elite toddler cost various fly make eye ketchup despair slab throw tribe swarm word fruit into inmate" 20 | - name: signer1 21 | coins: 20000000000000000000000basetcro 22 | mnemonic: shed crumble dismiss loyal latin million oblige gesture shrug still oxygen custom remove ribbon disorder palace addict again blanket sad flock consider obey popular 23 | - name: signer2 24 | coins: 30000000000000000000000basetcro 25 | mnemonic: night renew tonight dinner shaft scheme domain oppose echo summer broccoli agent face guitar surface belt veteran siren poem alcohol menu custom crunch index 26 | 27 | genesis: 28 | consensus_params: 29 | block: 30 | max_bytes: "1048576" 31 | max_gas: "81500000" 32 | app_state: 33 | evm: 34 | params: 35 | evm_denom: basetcro 36 | cronos: 37 | params: 38 | cronos_admin: crc12luku6uxehhak02py4rcz65zu0swh7wjsrw0pp 39 | 
enable_auto_deployment: true 40 | ibc_cro_denom: ibc/6411AE2ADA1E73DB59DB151A8988F9B7D5E7E233D8414DB6817F8F1A01611F86 41 | gov: 42 | voting_params: 43 | voting_period: "10s" 44 | deposit_params: 45 | max_deposit_period: "10s" 46 | min_deposit: 47 | - denom: "basetcro" 48 | amount: "1" 49 | transfer: 50 | params: 51 | receive_enabled: true 52 | send_enabled: true 53 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_has_dotenv.jsonnet: -------------------------------------------------------------------------------- 1 | local config = import './cronos_has_posix_no_dotenv.jsonnet'; 2 | 3 | config { 4 | dotenv+: 'dotenv', 5 | } 6 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_has_dotenv.yaml: -------------------------------------------------------------------------------- 1 | include: !include base.yaml 2 | dotenv: dotenv 3 | cronos_777-1: 4 | validators: 5 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 6 | staked: 1000000000000000000stake 7 | mnemonic: ${VALIDATOR1_MNEMONIC} 8 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 9 | staked: 1000000000000000000stake 10 | mnemonic: ${VALIDATOR2_MNEMONIC} 11 | accounts: 12 | - name: community 13 | coins: 10000000000000000000000basetcro 14 | mnemonic: ${COMMUNITY_MNEMONIC} 15 | - name: signer1 16 | coins: 20000000000000000000000basetcro 17 | mnemonic: ${SIGNER1_MNEMONIC} 18 | - name: signer2 19 | coins: 30000000000000000000000basetcro 20 | mnemonic: ${SIGNER2_MNEMONIC} 21 | 22 | genesis: 23 | app_state: 24 | cronos: 25 | params: 26 | cronos_admin: ${CRONOS_ADMIN} 27 | ibc_cro_denom: ${IBC_CRO_DENOM} 28 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_has_posix_no_dotenv.jsonnet: -------------------------------------------------------------------------------- 1 | local 
config = import './base.jsonnet'; 2 | 3 | config { 4 | 'cronos_777-1'+: { 5 | validators: [{ 6 | coins: '1000000000000000000stake,10000000000000000000000basetcro', 7 | staked: '1000000000000000000stake', 8 | mnemonic: '${VALIDATOR1_MNEMONIC}', 9 | }, { 10 | coins: '1000000000000000000stake,10000000000000000000000basetcro', 11 | staked: '1000000000000000000stake', 12 | mnemonic: '${VALIDATOR2_MNEMONIC}', 13 | }], 14 | accounts: [{ 15 | name: 'community', 16 | coins: '10000000000000000000000basetcro', 17 | mnemonic: '${COMMUNITY_MNEMONIC}', 18 | }, { 19 | name: 'signer1', 20 | coins: '20000000000000000000000basetcro', 21 | mnemonic: '${SIGNER1_MNEMONIC}', 22 | }, { 23 | name: 'signer2', 24 | coins: '30000000000000000000000basetcro', 25 | mnemonic: '${SIGNER2_MNEMONIC}', 26 | }], 27 | genesis+: { 28 | app_state+: { 29 | cronos: { 30 | params: { 31 | cronos_admin: '${CRONOS_ADMIN}', 32 | enable_auto_deployment: true, 33 | ibc_cro_denom: '${IBC_CRO_DENOM}', 34 | }, 35 | }, 36 | }, 37 | }, 38 | }, 39 | } 40 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_has_posix_no_dotenv.yaml: -------------------------------------------------------------------------------- 1 | include: !include base.yaml 2 | cronos_777-1: 3 | validators: 4 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 5 | staked: 1000000000000000000stake 6 | mnemonic: ${VALIDATOR1_MNEMONIC} 7 | - coins: 1000000000000000000stake,10000000000000000000000basetcro 8 | staked: 1000000000000000000stake 9 | mnemonic: ${VALIDATOR2_MNEMONIC} 10 | accounts: 11 | - name: community 12 | coins: 10000000000000000000000basetcro 13 | mnemonic: ${COMMUNITY_MNEMONIC} 14 | - name: signer1 15 | coins: 20000000000000000000000basetcro 16 | mnemonic: ${SIGNER1_MNEMONIC} 17 | - name: signer2 18 | coins: 30000000000000000000000basetcro 19 | mnemonic: ${SIGNER2_MNEMONIC} 20 | 21 | genesis: 22 | app_state: 23 | cronos: 24 | params: 25 | cronos_admin: 
${CRONOS_ADMIN} 26 | ibc_cro_denom: ${IBC_CRO_DENOM} 27 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_no_dotenv.jsonnet: -------------------------------------------------------------------------------- 1 | import './base.jsonnet' 2 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/cronos_no_dotenv.yaml: -------------------------------------------------------------------------------- 1 | include: !include base.yaml -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/dotenv: -------------------------------------------------------------------------------- 1 | export VALIDATOR_KEY='826E479F5385C8C32CD96B0C0ACCDB8CC4FA5CACCC1BE54C1E3AA4D676A6EFF5' 2 | export COMMUNITY_KEY='5D665FBD2FB40CB8E9849263B04457BA46D5F948972D0FE4C1F19B6B0F243574' 3 | export PASSWORD='123456' 4 | export VALIDATOR2_MNEMONIC="visit craft resemble online window solution west chuckle music diesel vital settle comic tribe project blame bulb armed flower region sausage mercy arrive release" 5 | export VALIDATOR1_MNEMONIC="direct travel shrug hand twice agent sail sell jump phone velvet pilot mango charge usual multiply orient garment bleak virtual action mention panda vast" 6 | export COMMUNITY_MNEMONIC="notable error gospel wave pair ugly measure elite toddler cost various fly make eye ketchup despair slab throw tribe swarm word fruit into inmate" 7 | export SIGNER1_MNEMONIC="shed crumble dismiss loyal latin million oblige gesture shrug still oxygen custom remove ribbon disorder palace addict again blanket sad flock consider obey popular" 8 | export SIGNER2_MNEMONIC="night renew tonight dinner shaft scheme domain oppose echo summer broccoli agent face guitar surface belt veteran siren poem alcohol menu custom crunch index" 9 | export CRONOS_ADMIN="crc12luku6uxehhak02py4rcz65zu0swh7wjsrw0pp" 10 | 
export IBC_CRO_DENOM="ibc/6411AE2ADA1E73DB59DB151A8988F9B7D5E7E233D8414DB6817F8F1A01611F86" 11 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/dotenv1: -------------------------------------------------------------------------------- 1 | export VALIDATOR_KEY='826E479F5385C8C32CD96B0C0ACCDB8CC4FA5CACCC1BE54C1E3AA4D676A6EFF5' 2 | export COMMUNITY_KEY='5D665FBD2FB40CB8E9849263B04457BA46D5F948972D0FE4C1F19B6B0F243574' 3 | export PASSWORD='123456' 4 | export VALIDATOR1_MNEMONIC="good" 5 | export VALIDATOR2_MNEMONIC="direct travel shrug hand twice agent sail sell jump phone velvet pilot mango charge usual multiply orient garment bleak virtual action mention panda vast" 6 | export COMMUNITY_MNEMONIC="notable error gospel wave pair ugly measure elite toddler cost various fly make eye ketchup despair slab throw tribe swarm word fruit into inmate" 7 | export SIGNER1_MNEMONIC="shed crumble dismiss loyal latin million oblige gesture shrug still oxygen custom remove ribbon disorder palace addict again blanket sad flock consider obey popular" 8 | export SIGNER2_MNEMONIC="night renew tonight dinner shaft scheme domain oppose echo summer broccoli agent face guitar surface belt veteran siren poem alcohol menu custom crunch index" 9 | export CRONOS_ADMIN="crc12luku6uxehhak02py4rcz65zu0swh7wjsrw0pp" 10 | export IBC_CRO_DENOM="ibc/6411AE2ADA1E73DB59DB151A8988F9B7D5E7E233D8414DB6817F8F1A01611F86" 11 | -------------------------------------------------------------------------------- /pystarport/tests/test_expansion/test_expansion.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pathlib import Path 3 | 4 | import pytest 5 | import yaml 6 | from deepdiff import DeepDiff 7 | 8 | from pystarport.expansion import expand_jsonnet, expand_yaml 9 | 10 | 11 | def _get_base_config(): 12 | with open(Path(__file__).parent / "base.yaml") as f: 13 | return 
@pytest.mark.parametrize(
    "type, func",
    [(".yaml", expand_yaml), (".jsonnet", expand_jsonnet)],
)
def test_expansion(type, func):
    """End-to-end checks of config expansion for yaml and jsonnet inputs."""
    parent = Path(__file__).parent
    cronos_has_dotenv = parent / ("cronos_has_dotenv" + type)
    cronos_no_dotenv = parent / ("cronos_no_dotenv" + type)
    # BUG FIX: this previously built the path from "cronos_no_dotenv", so the
    # final assertion re-checked the wrong fixture and the posix-no-dotenv
    # config was never actually exercised.
    cronos_has_posix_no_dotenv = parent / ("cronos_has_posix_no_dotenv" + type)
    base_config = _get_base_config()

    # `expand_yaml` is backward compatible, not expanded, and no diff
    config = func(cronos_no_dotenv, None)
    assert base_config == config

    # `expand_yaml` is expanded but no diff
    config = func(cronos_has_dotenv, None)
    assert not DeepDiff(
        base_config,
        config,
        ignore_order=True,
    )

    # the one value dotenv1 changes relative to the base config
    expected_diff = {
        "values_changed": {
            "root['cronos_777-1']['validators'][0]['mnemonic']": {
                "new_value": "good",
                "old_value": "visit craft resemble online window solution west chuckle "
                "music diesel vital settle comic tribe project blame bulb armed flower "
                "region sausage mercy arrive release",
            }
        }
    }

    # overriding dotenv with relative path is expanded and has diff
    config = func(cronos_has_dotenv, "dotenv1")
    assert (
        DeepDiff(
            base_config,
            config,
            ignore_order=True,
        )
        == expected_diff
    )

    # overriding dotenv with absolute path is expanded and has diff
    config = func(cronos_has_dotenv, os.path.abspath(parent / "dotenv1"))
    assert (
        DeepDiff(
            base_config,
            config,
            ignore_order=True,
        )
        == expected_diff
    )

    # overriding dotenv with absolute path is expanded and no diff
    config = func(cronos_has_posix_no_dotenv, os.path.abspath(parent / "dotenv"))
    assert not DeepDiff(
        base_config,
        config,
        ignore_order=True,
    )
def parse_amount(coin):
    """
    parse amount from coin representation, compatible with multiple sdk versions:
    - pre-sdk-50: {"denom": "uatom", "amount": "1000000.00"}
    - post-sdk-50: "1000000.00uatom"
    """
    if isinstance(coin, dict):
        return float(coin["amount"])
    # string form: the amount is the leading run of digits/dots before the denom
    numeric = []
    for ch in coin:
        if not is_float(ch):
            break
        numeric.append(ch)
    return float("".join(numeric))


def is_float(s):
    """Return True if *s* is a single character of a decimal number."""
    return s == "." or str.isdigit(s)
85 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [flake8] 2 | extend-ignore = E203 3 | max-line-length = 88 --------------------------------------------------------------------------------