├── .github └── workflows │ ├── main.yml │ └── test.yml ├── .gitignore ├── LICENSE ├── README.md ├── examples ├── README.md └── candid.py ├── ic ├── __init__.py ├── agent.py ├── candid.py ├── canister.py ├── certificate.py ├── client.py ├── common │ ├── __init__.py │ ├── cycles_wallet.py │ ├── governance.py │ ├── ledger.py │ └── management.py ├── constants.py ├── identity.py ├── parser │ ├── DIDEmitter.py │ ├── DIDLexer.py │ ├── DIDParser.py │ ├── DIDParserListener.py │ └── __init__.py ├── principal.py ├── system_state.py └── utils.py ├── parser ├── DIDLexer.g4 ├── DIDParser.g4 ├── antlr-4.9.3-complete.jar └── compile.sh ├── pics └── ic-py.png ├── pyproject.toml ├── setup.py ├── test_agent.py ├── test_canister.py ├── test_client.py ├── test_readstate.py ├── tests ├── test_agent.py ├── test_candid.py ├── test_identity.py └── test_principal.py └── tox.ini /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | 2 | name: Publish Python 🐍 distributions 📦 to PyPI and TestPyPI 3 | 4 | on: push 5 | 6 | jobs: 7 | build-n-publish: 8 | name: Build and publish Python 🐍 distributions 📦 to PyPI and TestPyPI 9 | runs-on: ubuntu-18.04 10 | 11 | steps: 12 | - uses: actions/checkout@master 13 | - name: Set up Python 3.6 14 | uses: actions/setup-python@v1 15 | with: 16 | python-version: 3.6 17 | 18 | - name: Install pypa/build 19 | run: >- 20 | python -m 21 | pip install 22 | build 23 | --user 24 | - name: Build a binary wheel and a source tarball 25 | run: >- 26 | python -m 27 | build 28 | --sdist 29 | --wheel 30 | --outdir dist/ 31 | . 32 | 33 | - name: Publish package 34 | if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') 35 | uses: pypa/gh-action-pypi-publish@release/v1 36 | with: 37 | user: __token__ 38 | password: ${{ secrets.PYPI_API_TOKEN }} 39 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: test 2 | on: 3 | # Trigger the workflow on push or pull request, 4 | # but only for the main branch 5 | push: 6 | branches: 7 | - main 8 | pull_request: 9 | branches: 10 | - main 11 | 12 | jobs: 13 | test: 14 | strategy: 15 | matrix: 16 | os: [ubuntu-latest, macos-latest] 17 | python-version: [3.6, 3.7, 3.8, 3.9] 18 | runs-on: ${{ matrix.os }} 19 | steps: 20 | - run: git config --global core.autocrlf input 21 | - uses: actions/checkout@v2 22 | with: 23 | fetch-depth: 0 24 | - uses: actions/setup-python@v2 25 | with: 26 | python-version: ${{ matrix.python-version }} 27 | - run: pip install --upgrade tox 28 | - run: tox -v 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__ 2 | *.swp 3 | .vscode 4 | .lh 5 | test.py 6 | 7 | build/ 8 | *.egg-info 9 | 10 | .tox/ 11 | .DS_Store 12 | 13 | .antlr 14 | dist/ -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Rocklabs 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 
| copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Python Agent Library for the Internet Computer 2 | 3 | ![ic-py](./pics/ic-py.png) 4 | 5 | `ic-py` provides basic modules to interact with canisters on the DFINITY Internet Computer. 6 | 7 | ### Install 8 | 9 | ``` 10 | pip3 install ic-py 11 | ``` 12 | 13 | ### Features 14 | 15 | 1. Candid type encoding & decoding 16 | 2. secp256k1 & ed25519 identity support, PEM file import 17 | 3. Canister DID file parsing 18 | 4. Canister class, initialized with a canister id and DID file 19 | 5. Common canister interfaces: ledger, management, NNS, cycles wallet 20 | 6. Async support 21 | 22 | ### Modules & Usage 23 | 24 | #### 1. Principal 25 | 26 | Create an instance: 27 | 28 | ```python 29 | from ic.principal import Principal 30 | p = Principal() # default is management canister id `aaaaa-aa` 31 | p1 = Principal(bytes=b'') # create an instance from bytes 32 | p2 = Principal.anonymous() # create an anonymous principal 33 | p3 = Principal.self_authenticating(pubkey) # create a principal from a public key 34 | p4 = Principal.from_str('aaaaa-aa') # create an instance from a string 35 | p5 = Principal.from_hex('xxx') # create an instance from hex 36 | ``` 37 | 38 | Attributes and methods: 39 | 40 | ```python 41 | p.bytes # principal bytes 42 | p.len # byte array length 43 | p.to_str() # convert to string 44 | ``` 45 | 46 | #### 2. Identity 47 | 48 | Create an instance: 49 | 50 | ```python 51 | from ic.identity import Identity 52 | i = Identity() # create an identity instance, key is randomly generated 53 | i1 = Identity(privkey = "833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42") # create an instance from a private key 54 | ``` 55 | 56 | Sign a message: 57 | 58 | ```python 59 | msg = b"ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f" 60 | sig = i.sign(msg) # sig = (der_encoded_pubkey, signature) 61 | ``` 62 | 63 | #### 3. Client 64 | 65 | Create an instance: 66 | 67 | ```python 68 | from ic.client import Client 69 | client = Client(url = "https://ic0.app") 70 | ``` 71 | 72 | #### 4. Candid 73 | 74 | Encode parameters: 75 | 76 | ```python 77 | from ic.candid import encode, decode, Types 78 | # params is an array, return value is encoded bytes 79 | params = [{'type': Types.Nat, 'value': 10}] 80 | data = encode(params) 81 | ``` 82 | 83 | Decode parameters: 84 | 85 | ```python 86 | # data is bytes, return value is a parameter array 87 | params = decode(data) 88 | ``` 89 | 90 | #### 5. 
Agent 91 | 92 | Create an instance: 93 | 94 | ```python 95 | from ic.client import Client 96 | from ic.identity import Identity 97 | from ic.agent import Agent 98 | # Identity and Client are dependencies of Agent 99 | iden = Identity() 100 | client = Client() 101 | agent = Agent(iden, client) 102 | ``` 103 | 104 | Query call: 105 | 106 | ```python 107 | # query the name of token canister `gvbup-jyaaa-aaaah-qcdwa-cai` 108 | name = agent.query_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "name", encode([])) 109 | ``` 110 | 111 | Update call: 112 | 113 | ```python 114 | # transfer 100 token to blackhole address `aaaaa-aa` 115 | params = [ 116 | {'type': Types.Principal, 'value': 'aaaaa-aa'}, 117 | {'type': Types.Nat, 'value': 10000000000} 118 | ] 119 | result = agent.update_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "transfer", encode(params)) 120 | ``` 121 | 122 | #### 6. Canister 123 | 124 | Create a canister instance with candid interface file and canister id, and call canister method with canister instance: 125 | 126 | ```python 127 | from ic.canister import Canister 128 | from ic.client import Client 129 | from ic.identity import Identity 130 | from ic.agent import Agent 131 | from ic.candid import Types 132 | 133 | iden = Identity() 134 | client = Client() 135 | agent = Agent(iden, client) 136 | # read governance candid from file 137 | governance_did = open("governance.did").read() 138 | # create a governance canister instance 139 | governance = Canister(agent=agent, canister_id="rrkah-fqaaa-aaaaa-aaaaq-cai", candid=governance_did) 140 | # call canister method with instance 141 | res = governance.list_proposals( 142 | { 143 | 'include_reward_status': [], 144 | 'before_proposal': [], 145 | 'limit': 100, 146 | 'exclude_topic': [], 147 | 'include_status': [1] 148 | } 149 | ) 150 | ``` 151 | 152 | ### 7. 
Async request 153 | 154 | ic-py also supports async requests: 155 | 156 | ```python 157 | import asyncio 158 | from ic.canister import Canister 159 | from ic.client import Client 160 | from ic.identity import Identity 161 | from ic.agent import Agent 162 | from ic.candid import Types 163 | 164 | iden = Identity() 165 | client = Client() 166 | agent = Agent(iden, client) 167 | # read governance candid from file 168 | governance_did = open("governance.did").read() 169 | # create a governance canister instance 170 | governance = Canister(agent=agent, canister_id="rrkah-fqaaa-aaaaa-aaaaq-cai", candid=governance_did) 171 | # async call 172 | async def async_test(): 173 | res = await governance.list_proposals_async( 174 | { 175 | 'include_reward_status': [], 176 | 'before_proposal': [], 177 | 'limit': 100, 178 | 'exclude_topic': [], 179 | 'include_status': [1] 180 | } 181 | ) 182 | print(res) 183 | asyncio.run(async_test()) 184 | ``` 185 | 186 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Candid type - Python type 2 | 3 | |Candid|Python|Example| 4 | |------|------|-------| 5 | |bool|bool|True, False| 6 | |int|int|-1| 7 | |int8|int [-128, 127]|-128| 8 | |int16|int [-32768, 32767]|-32768| 9 | |int32|int [-2147483648, 2147483647]|-2147483648| 10 | |int64|int [-9223372036854775808, 9223372036854775807]|-9223372036854775808| 11 | |nat|int >= 0|0| 12 | |nat8|int [0, 255]|255| 13 | |nat16|int [0, 65535]|65535| 14 | |nat32|int [0, 4294967295]|4294967295| 15 | |nat64|int [0, 18446744073709551615]|18446744073709551615| 16 | |float32|float|0.1| 17 | |float64|float|3.1415| 18 | |text|str|"hello world"| 19 | |opt|list with length <= 1|Null:[], Some:[1]| 20 | |principal|str, bytes or Principal|"aaaaa-aa"| 21 | |vec|list|[1,2,3]| 22 | |record|dict|{"key": "val"}| 23 | |variant|dict|{"ok": 1}| 24 | |null|None|None| 25 | 26 | # Encode parameters: 27 | 28 | ```python 29 | from ic.candid import encode, decode, Types 30 | # params is an array, return value is encoded bytes 31 | params = [{'type': Types.Nat, 'value': 10}] 32 | data = encode(params) 33 | ``` 34 | 35 | # Decode parameters: 36 | 37 | ```python 38 | # data is bytes, return value is a parameter array 39 | params = decode(data) 40 | ``` -------------------------------------------------------------------------------- /examples/candid.py: -------------------------------------------------------------------------------- 1 | ''' 2 | 3 | test candid encode/decode 4 | include: Null, Nat, Int, Text, Variant, Record, .... 5 | 6 | ''' 7 | 8 | from ic.candid import Types, encode, decode 9 | 10 | 11 | ''' 12 | @params: Required 13 | format for example: [{'type': Types.Nat, 'value': 0}, ...] 14 | @rawTypes: Optional 15 | if rawTypes is None, decode returns values based on the types found on the wire. 16 | However, if you specify return types, it will return what you want. 17 | rawTypes is associated with your did files. In the future, we will auto-parse 18 | return types once you provide the did file. 
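For example (mirroring the tests below in this file), encode a single Nat and decode it back with an explicit return type:
    params = [{'type': Types.Nat, 'value': 10}]
    test(params=params, rawTypes=Types.Nat)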
19 | ''' 20 | def test(params, rawTypes = None): 21 | print('------------------------------------------') 22 | print('input params:', params) 23 | res = encode(params) 24 | print('encode: ', res.hex()) 25 | if rawTypes: 26 | print('specific return type:', rawTypes) 27 | print(' decode:', decode(res, rawTypes)) 28 | else: 29 | print('There is no specific return type:') 30 | print(' decode:', decode(res)) 31 | 32 | # Empty Test 33 | types = Types.Empty 34 | val = None 35 | params = [ 36 | {'type': types, 'value': val} 37 | ] 38 | # TypeError: Invalid empty argument: None 39 | try: 40 | encode(params) 41 | except: 42 | print('Empty encode error: Invalid empty argument: None') 43 | 44 | # ValueError: Empty cannot appear as an output 45 | try: 46 | decode(bytes.fromhex('4449444c00016f')) 47 | except: 48 | print('Empty decode error: Empty cannot appear as an output') 49 | 50 | 51 | 52 | # Null Test 53 | types = Types.Null 54 | val = None 55 | params = [ 56 | {'type': types, 'value': val} 57 | ] 58 | # There is no specific return type 59 | test(params=params) 60 | # Sepecific return types 61 | test(params=params, rawTypes=types) 62 | 63 | 64 | # Bool Test 65 | types = Types.Bool 66 | val = True 67 | params = [ 68 | {'type': types, 'value': val} 69 | ] 70 | # There is no specific return type 71 | test(params=params) 72 | # Sepecific return types 73 | test(params=params, rawTypes=types) 74 | 75 | # Text Test 76 | types = Types.Text 77 | val1 = 'Rocklabs!' 78 | val2 = "icpy is a good SDK for ic developers" 79 | params = [ 80 | {'type': types, 'value': val1}, 81 | {'type': types, 'value': val2}, 82 | 83 | ] 84 | # There is no specific return type 85 | test(params=params) 86 | # Sepecific return types 87 | test(params=params, rawTypes=[types, types]) 88 | 89 | # Int Test 90 | types = Types.Int 91 | val1 = 12345 92 | val2 = -12345 93 | params = [ 94 | {'type': types, 'value': val1}, 95 | {'type': types, 'value': val2}, 96 | 97 | ] 98 | # There is no specific return type 99 | test(params=params) 100 | # Sepecific return types 101 | test(params=params, rawTypes=[types, types]) 102 | 103 | 104 | # Nat Test 105 | types = Types.Nat 106 | val1 = 12345 107 | val2 = 6789 108 | params = [ 109 | {'type': types, 'value': val1}, 110 | {'type': types, 'value': val2}, 111 | 112 | ] 113 | # There is no specific return type 114 | test(params=params) 115 | # Sepecific return types 116 | test(params=params, rawTypes=[types, types]) 117 | 118 | # Float32 and Float64 Test 119 | type1 = Types.Float32 120 | type2 = Types.Float64 121 | val1 = 12.34 122 | val2 = 56.789 123 | params = [ 124 | {'type': type1, 'value': val1}, 125 | {'type': type2, 'value': val2}, 126 | 127 | ] 128 | # There is no specific return type 129 | test(params=params) 130 | # Sepecific return types 131 | test(params=params, rawTypes=[type1, type2]) 132 | 133 | 134 | # Int8,16,32,64 and Nat8,16,32,64 Test 135 | type1 = Types.Int8 136 | type2 = Types.Int16 137 | type3 = Types.Int32 138 | type4 = Types.Int64 139 | 140 | type5 = Types.Nat8 141 | type6 = Types.Nat16 142 | type7 = Types.Nat32 143 | type8 = Types.Nat64 144 | 145 | val1 = -113 146 | val2 = -12455 147 | val3 = 13454 148 | val4 = 346745456 149 | 150 | val5 = 12 151 | val6 = 35654 152 | val7 = 456787656 153 | val8 = 56789876567654567 154 | params = [ 155 | {'type': type1, 'value': val1}, 156 | {'type': type2, 'value': val2}, 157 | {'type': type3, 'value': val3}, 158 | {'type': type4, 'value': val4}, 159 | {'type': type5, 'value': val5}, 160 | {'type': type6, 'value': val6}, 161 | {'type': type7, 
'value': val7}, 162 | {'type': type8, 'value': val8}, 163 | 164 | ] 165 | # There is no specific return type 166 | test(params=params) 167 | # Sepecific return types (part of returns) 168 | test(params=params, rawTypes=[type1, type2, type3]) 169 | 170 | 171 | # Tuple Test 172 | types = Types.Tuple(Types.Nat, Types.Text) 173 | vals = (123456, 'rocklabs') 174 | params = [ 175 | {'type': types, 'value': vals}, 176 | 177 | ] 178 | # There is no specific return type 179 | test(params=params) 180 | # Sepecific return types 181 | test(params=params, rawTypes=types) 182 | 183 | 184 | # Opt Test 185 | types = Types.Opt(Types.Text) 186 | val = ['rocklabs'] 187 | params = [ 188 | {'type': types, 'value': val}, 189 | 190 | ] 191 | # There is no specific return type 192 | test(params=params) 193 | # Sepecific return types 194 | test(params=params, rawTypes=types) 195 | 196 | 197 | # Vec Test 198 | types = Types.Vec(Types.Nat) 199 | vals = [1, 2, 3, 4] 200 | params = [ 201 | {'type': types, 'value': vals}, 202 | 203 | ] 204 | # There is no specific return type 205 | test(params=params) 206 | # Sepecific return types 207 | test(params=params, rawTypes=types) 208 | 209 | 210 | # Vec + Tuple Test 211 | types = Types.Vec(Types.Tuple(Types.Nat, Types.Text)) 212 | vals = [(123, 'rocklabs')] 213 | params = [ 214 | {'type': types, 'value': vals}, 215 | 216 | ] 217 | # There is no specific return type 218 | test(params=params) 219 | # Sepecific return types 220 | test(params=params, rawTypes=types) 221 | 222 | 223 | # Record Test 224 | types = Types.Record({'name':Types.Text, 'assets': Types.Int}) 225 | vals = {'name': 'rocklabs', 'assets': 888888888} 226 | params = [ 227 | {'type': types, 'value': vals}, 228 | 229 | ] 230 | # There is no specific return type 231 | test(params=params) 232 | # Sepecific return types 233 | test(params=params, rawTypes=types) 234 | 235 | 236 | # Tuple(Vec, Record) Test 237 | types = Types.Tuple(Types.Vec(Types.Text), Types.Record({'name':Types.Text, 'assets': Types.Int})) 238 | vals = (['rocklabs'], {'name': 'rocklabs', 'assets': 888888888}) 239 | params = [ 240 | {'type': types, 'value': vals}, 241 | 242 | ] 243 | # There is no specific return type 244 | test(params=params) 245 | # Sepecific return types 246 | test(params=params, rawTypes=types) 247 | 248 | 249 | # Variant Test 250 | types = Types.Variant({'ok': Types.Text, 'err': Types.Text}) 251 | val = {'ok': 'rocklabs!'} 252 | params = [ 253 | {'type': types, 'value': val}, 254 | 255 | ] 256 | # There is no specific return type 257 | test(params=params) 258 | # Sepecific return types 259 | test(params=params, rawTypes=types) 260 | 261 | 262 | # Tuple(Variant) Test 263 | types = Types.Tuple(Types.Variant({'ok': Types.Text, 'err': Types.Text})) 264 | val = ({'ok': 'rocklabs!'},) 265 | params = [ 266 | {'type': types, 'value': val}, 267 | 268 | ] 269 | # There is no specific return type 270 | test(params=params) 271 | # Sepecific return types 272 | test(params=params, rawTypes=types) 273 | 274 | 275 | # Principle Test 276 | types = Types.Principal 277 | val = 'expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae' 278 | params = [ 279 | {'type': types, 'value': val}, 280 | 281 | ] 282 | # There is no specific return type 283 | test(params=params) 284 | # Sepecific return types 285 | test(params=params, rawTypes=types) 286 | 287 | # Opt(Principle Test) 288 | types = Types.Opt(Types.Principal) 289 | val = ['expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae'] 290 | params = [ 291 | {'type': types, 'value': 
val}, 292 | 293 | ] 294 | # There is no specific return type 295 | test(params=params) 296 | # Sepecific return types 297 | test(params=params, rawTypes=types) 298 | 299 | #Func Text 300 | types = Types.Func([Types.Text], [Types.Nat], ['query']) 301 | val = ['expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae', 'rocklabs'] 302 | params = [ 303 | {'type': types, 'value': val}, 304 | 305 | ] 306 | # There is no specific return type 307 | test(params=params) 308 | # Sepecific return types 309 | test(params=params, rawTypes=types) 310 | 311 | 312 | #Service Text 313 | types = Types.Service({'rocklabs' : Types.Func([Types.Text], [Types.Nat], ['query'])}) 314 | val = 'expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae' 315 | params = [ 316 | {'type': types, 'value': val}, 317 | 318 | ] 319 | # There is no specific return type 320 | test(params=params) 321 | # Sepecific return types 322 | test(params=params, rawTypes=types) -------------------------------------------------------------------------------- /ic/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rocklabs-io/ic-py/79196d9149a7117b15f85077c4cf19cdf6808f3c/ic/__init__.py -------------------------------------------------------------------------------- /ic/agent.py: -------------------------------------------------------------------------------- 1 | import time 2 | import cbor2 3 | from waiter import wait 4 | from .candid import decode, Types 5 | from .identity import * 6 | from .constants import * 7 | from .utils import to_request_id 8 | from .certificate import lookup 9 | 10 | DEFAULT_POLL_TIMEOUT_SECS=60.0 11 | 12 | def sign_request(req, iden): 13 | req_id = to_request_id(req) 14 | msg = IC_REQUEST_DOMAIN_SEPARATOR + req_id 15 | sig = iden.sign(msg) 16 | envelop = { 17 | 'content': req, 18 | 'sender_pubkey': sig[0], 19 | 'sender_sig': sig[1] 20 | } 21 | if type(iden) == DelegateIdentity: 22 | envelop.update({ 23 | "sender_pubkey": iden.der_pubkey, 24 | "sender_delegation": iden.delegations 25 | }) 26 | return req_id, cbor2.dumps(envelop) 27 | 28 | class Agent: 29 | def __init__(self, identity, client, nonce_factory=None, ingress_expiry=300, root_key=IC_ROOT_KEY): 30 | self.identity = identity 31 | self.client = client 32 | self.ingress_expiry = ingress_expiry 33 | self.root_key = root_key 34 | self.nonce_factory = nonce_factory 35 | 36 | def get_principal(self): 37 | return self.identity.sender() 38 | 39 | def get_expiry_date(self): 40 | return int((time.time() + self.ingress_expiry) * 10**9) 41 | 42 | def query_endpoint(self, canister_id, data): 43 | ret = self.client.query(canister_id, data) 44 | return cbor2.loads(ret) 45 | 46 | async def query_endpoint_async(self, canister_id, data): 47 | ret = await self.client.query_async(canister_id, data) 48 | return cbor2.loads(ret) 49 | 50 | def call_endpoint(self, canister_id, request_id, data): 51 | self.client.call(canister_id, request_id, data) 52 | return request_id 53 | 54 | async def call_endpoint_async(self, canister_id, request_id, data): 55 | await self.client.call_async(canister_id, request_id, data) 56 | return request_id 57 | 58 | def read_state_endpoint(self, canister_id, data): 59 | result = self.client.read_state(canister_id, data) 60 | return result 61 | 62 | async def read_state_endpoint_async(self, canister_id, data): 63 | result = await self.client.read_state_async(canister_id, data) 64 | return result 65 | 66 | def query_raw(self, canister_id, method_name, arg, 
return_type = None, effective_canister_id = None): 67 | req = { 68 | 'request_type': "query", 69 | 'sender': self.identity.sender().bytes, 70 | 'canister_id': Principal.from_str(canister_id).bytes if isinstance(canister_id, str) else canister_id.bytes, 71 | 'method_name': method_name, 72 | 'arg': arg, 73 | 'ingress_expiry': self.get_expiry_date() 74 | } 75 | _, data = sign_request(req, self.identity) 76 | result = self.query_endpoint(canister_id if effective_canister_id is None else effective_canister_id, data) 77 | if type(result) != dict or "status" not in result: 78 | raise Exception("Malformed result: " + str(result)) 79 | if result['status'] == 'replied': 80 | arg = result['reply']['arg'] 81 | if (arg[:4] == b"DIDL"): 82 | return decode(arg, return_type) 83 | else: 84 | return arg 85 | elif result['status'] == 'rejected': 86 | raise Exception("Canister reject the call: " + result['reject_message']) 87 | 88 | async def query_raw_async(self, canister_id, method_name, arg, return_type = None, effective_canister_id = None): 89 | req = { 90 | 'request_type': "query", 91 | 'sender': self.identity.sender().bytes, 92 | 'canister_id': Principal.from_str(canister_id).bytes if isinstance(canister_id, str) else canister_id.bytes, 93 | 'method_name': method_name, 94 | 'arg': arg, 95 | 'ingress_expiry': self.get_expiry_date() 96 | } 97 | _, data = sign_request(req, self.identity) 98 | result = await self.query_endpoint_async(canister_id if effective_canister_id is None else effective_canister_id, data) 99 | if type(result) != dict or "status" not in result: 100 | raise Exception("Malformed result: " + str(result)) 101 | if result['status'] == 'replied': 102 | arg = result['reply']['arg'] 103 | if (arg[:4] == b"DIDL"): 104 | return decode(arg, return_type) 105 | else: 106 | return arg 107 | elif result['status'] == 'rejected': 108 | raise Exception("Canister reject the call: " + result['reject_message']) 109 | 110 | def update_raw(self, canister_id, method_name, arg, return_type = None, effective_canister_id = None, **kwargs): 111 | req = { 112 | 'request_type': "call", 113 | 'sender': self.identity.sender().bytes, 114 | 'canister_id': Principal.from_str(canister_id).bytes if isinstance(canister_id, str) else canister_id.bytes, 115 | 'method_name': method_name, 116 | 'arg': arg, 117 | 'ingress_expiry': self.get_expiry_date() 118 | } 119 | req_id, data = sign_request(req, self.identity) 120 | eid = canister_id if effective_canister_id is None else effective_canister_id 121 | _ = self.call_endpoint(eid, req_id, data) 122 | # print('update.req_id:', req_id.hex()) 123 | status, result = self.poll(eid, req_id, **kwargs) 124 | if status == 'rejected': 125 | raise Exception('Rejected: ' + result.decode()) 126 | elif status == 'replied': 127 | if result[:4] == b'DIDL': 128 | return decode(result, return_type) 129 | else: 130 | # Some canisters don't use DIDL (e.g. 
they might encode using json instead) 131 | return result 132 | else: 133 | raise Exception('Timeout to poll result, current status: ' + str(status)) 134 | 135 | async def update_raw_async(self, canister_id, method_name, arg, return_type = None, effective_canister_id = None, **kwargs): 136 | req = { 137 | 'request_type': "call", 138 | 'sender': self.identity.sender().bytes, 139 | 'canister_id': Principal.from_str(canister_id).bytes if isinstance(canister_id, str) else canister_id.bytes, 140 | 'method_name': method_name, 141 | 'arg': arg, 142 | 'ingress_expiry': self.get_expiry_date() 143 | } 144 | req_id, data = sign_request(req, self.identity) 145 | eid = canister_id if effective_canister_id is None else effective_canister_id 146 | _ = await self.call_endpoint_async(eid, req_id, data) 147 | # print('update.req_id:', req_id.hex()) 148 | status, result = await self.poll_async(eid, req_id, **kwargs) 149 | if status == 'rejected': 150 | raise Exception('Rejected: ' + result.decode()) 151 | elif status == 'replied': 152 | if (result[:4] == b"DIDL"): 153 | return decode(result, return_type) 154 | else: 155 | return result 156 | else: 157 | raise Exception('Timeout to poll result, current status: ' + str(status)) 158 | 159 | def read_state_raw(self, canister_id, paths): 160 | req = { 161 | 'request_type': 'read_state', 162 | 'sender': self.identity.sender().bytes, 163 | 'paths': paths, 164 | 'ingress_expiry': self.get_expiry_date(), 165 | } 166 | _, data = sign_request(req, self.identity) 167 | ret = self.read_state_endpoint(canister_id, data) 168 | if ret == b'Invalid path requested.': 169 | raise ValueError('Invalid path requested!') 170 | elif ret == b'Could not parse body as read request: invalid type: byte array, expected a sequence': 171 | raise ValueError('Could not parse body as read request: invalid type: byte array, expected a sequence') 172 | try: 173 | d = cbor2.loads(ret) 174 | except: 175 | raise ValueError("Unable to decode cbor value: " + ret.decode()) 176 | cert = cbor2.loads(d['certificate']) 177 | return cert 178 | 179 | async def read_state_raw_async(self, canister_id, paths): 180 | req = { 181 | 'request_type': 'read_state', 182 | 'sender': self.identity.sender().bytes, 183 | 'paths': paths, 184 | 'ingress_expiry': self.get_expiry_date(), 185 | } 186 | _, data = sign_request(req, self.identity) 187 | ret = await self.read_state_endpoint_async(canister_id, data) 188 | if ret == b'Invalid path requested.': 189 | raise ValueError('Invalid path requested!') 190 | elif ret == b'Could not parse body as read request: invalid type: byte array, expected a sequence': 191 | raise ValueError('Could not parse body as read request: invalid type: byte array, expected a sequence') 192 | d = cbor2.loads(ret) 193 | cert = cbor2.loads(d['certificate']) 194 | return cert 195 | 196 | def request_status_raw(self, canister_id, req_id): 197 | paths = [ 198 | ['request_status'.encode(), req_id], 199 | ] 200 | cert = self.read_state_raw(canister_id, paths) 201 | status = lookup(['request_status'.encode(), req_id, 'status'.encode()], cert) 202 | if (status == None): 203 | return status, cert 204 | else: 205 | return status.decode(), cert 206 | 207 | async def request_status_raw_async(self, canister_id, req_id): 208 | paths = [ 209 | ['request_status'.encode(), req_id], 210 | ] 211 | cert = await self.read_state_raw_async(canister_id, paths) 212 | status = lookup(['request_status'.encode(), req_id, 'status'.encode()], cert) 213 | if (status == None): 214 | return status, cert 215 | else: 216 | return 
status.decode(), cert 217 | 218 | def poll(self, canister_id, req_id, delay=1, timeout=DEFAULT_POLL_TIMEOUT_SECS): 219 | status = None 220 | for _ in wait(delay, timeout): 221 | status, cert = self.request_status_raw(canister_id, req_id) 222 | if status == 'replied' or status == 'done' or status == 'rejected': 223 | break 224 | 225 | if status == 'replied': 226 | path = ['request_status'.encode(), req_id, 'reply'.encode()] 227 | res = lookup(path, cert) 228 | return status, res 229 | elif status == 'rejected': 230 | path = ['request_status'.encode(), req_id, 'reject_message'.encode()] 231 | msg = lookup(path, cert) 232 | return status, msg 233 | else: 234 | return status, _ 235 | 236 | async def poll_async(self, canister_id, req_id, delay=1, timeout=DEFAULT_POLL_TIMEOUT_SECS): 237 | status = None 238 | for _ in wait(delay, timeout): 239 | status, cert = await self.request_status_raw_async(canister_id, req_id) 240 | if status == 'replied' or status == 'done' or status == 'rejected': 241 | break 242 | 243 | if status == 'replied': 244 | path = ['request_status'.encode(), req_id, 'reply'.encode()] 245 | res = lookup(path, cert) 246 | return status, res 247 | elif status == 'rejected': 248 | path = ['request_status'.encode(), req_id, 'reject_message'.encode()] 249 | msg = lookup(path, cert) 250 | return status, msg 251 | else: 252 | return status, _ 253 | -------------------------------------------------------------------------------- /ic/candid.py: -------------------------------------------------------------------------------- 1 | # candid.example.py shows how to use candid's en/decode 2 | 3 | import leb128 4 | from collections.abc import Iterable 5 | from struct import pack,unpack 6 | from abc import abstractclassmethod, ABCMeta 7 | from enum import Enum 8 | import math 9 | from .principal import Principal as P 10 | from .utils import labelHash 11 | 12 | class TypeIds(Enum): 13 | Null = -1 14 | Bool = -2 15 | Nat = -3 16 | Int = -4 17 | Nat8 = -5 18 | Nat16 = -6 19 | Nat32 = -7 20 | Nat64 = -8 21 | Int8 = -9 22 | Int16 = -10 23 | Int32 = -11 24 | Int64 = -12 25 | Float32 = -13 26 | Float64 = -14 27 | Text = -15 28 | Reserved = -16 29 | Empty = -17 30 | Opt = -18 31 | Vec = -19 32 | Record = -20 33 | Variant = -21 34 | Func = -22 35 | Service = -23 36 | Principal = -24 37 | 38 | prefix = "DIDL" 39 | 40 | class Pipe : 41 | def __init__(self, buffer = b'', length = 0): 42 | self._buffer = buffer 43 | self._view = buffer[0:len(buffer)] 44 | 45 | @property 46 | def buffer(self): 47 | return self._view 48 | 49 | @property 50 | def length(self): 51 | return len(self._view) 52 | 53 | @property 54 | def end(self) -> bool: 55 | return self.length == 0 56 | 57 | def read(self, num:int): 58 | if len(self._view) < num: 59 | raise ValueError("Wrong: out of bound") 60 | res = self._view[:num] 61 | self._view = self._view[num:] 62 | return res 63 | 64 | def readbyte(self): 65 | res = self._view[0] 66 | self._view = self._view[1:] 67 | return res 68 | 69 | 70 | class ConstructType: pass 71 | class TypeTable(): 72 | def __init__(self) -> None: 73 | self._typs = [] 74 | self._idx = {} 75 | 76 | def has(self, obj: ConstructType): 77 | return obj.name in self._idx 78 | 79 | def add(self, obj: ConstructType, buf): 80 | idx = len(self._typs) 81 | self._idx[obj.name] = idx 82 | self._typs.append(buf) 83 | 84 | def merge(self, obj: ConstructType, knot:str): 85 | idx = self._idx[obj.name] if self.has(obj) else None 86 | knotIdx = self._idx[knot] if knot in self._idx else None 87 | if idx == None: 88 | raise 
ValueError("Missing type index for " + obj.name) 89 | if knotIdx == None: 90 | raise ValueError("Missing type index for " + knot) 91 | self._typs[idx] = self._typs[knotIdx] 92 | 93 | #delete the type 94 | del self._typs[knotIdx] 95 | del self._idx[knot] 96 | 97 | def encode(self) : 98 | l = 0 99 | for t in self._typs: 100 | if len(t) != 0: 101 | l += 1 102 | length = leb128.u.encode(l) 103 | buf = b''.join(self._typs) 104 | return length + buf 105 | 106 | def indexOf(self, typeName:str) : 107 | if not typeName in self._idx: 108 | raise ValueError("Missing type index for " + typeName) 109 | return leb128.i.encode(self._idx[typeName] | 0) 110 | 111 | 112 | # Represents an IDL type. 113 | class Type(metaclass=ABCMeta): 114 | 115 | def display(self): 116 | return self.name 117 | 118 | def buildTypeTable(self, typeTable: TypeTable): 119 | if not typeTable.has(self): 120 | self._buildTypeTableImpl(typeTable) 121 | 122 | @abstractclassmethod 123 | def covariant(): pass 124 | 125 | @abstractclassmethod 126 | def decodeValue(): pass 127 | 128 | @abstractclassmethod 129 | def encodeType(): pass 130 | 131 | @abstractclassmethod 132 | def encodeValue(): pass 133 | 134 | @abstractclassmethod 135 | def checkType(): pass 136 | 137 | @abstractclassmethod 138 | def _buildTypeTableImpl(): pass 139 | 140 | class PrimitiveType(Type): 141 | def __init__(self) -> None: 142 | super().__init__() 143 | 144 | def checkType(self, t: Type): 145 | if self.name != t.name : 146 | raise ValueError("type mismatch: type on the wire {}, expect type {}".format(t.name, self.name)) 147 | return t 148 | 149 | def _buildTypeTableImpl(self, typeTable: TypeTable) : 150 | # No type table encoding for Primitive types. 151 | return 152 | 153 | class ConstructType(Type, metaclass=ABCMeta): 154 | def __init__(self) -> None: 155 | super().__init__() 156 | 157 | def checkType(self, t: Type) -> ConstructType : 158 | if isinstance(t, RecClass): 159 | ty = t.getType() 160 | if ty == None: 161 | raise ValueError("type mismatch with uninitialized type") 162 | return ty 163 | else: 164 | raise ValueError("type mismatch: type on the wire {}, expect type {}".format(type.name, self.name)) 165 | 166 | def encodeType(self, typeTable: TypeTable): 167 | return typeTable.indexOf(self.name) 168 | 169 | # Represents an IDL Empty, a type which has no inhabitants. 
170 | class EmptyClass(PrimitiveType): 171 | def __init__(self) -> None: 172 | super().__init__() 173 | 174 | def covariant(self, x): 175 | return False 176 | 177 | def encodeValue(self, val): 178 | raise ValueError("Empty cannot appear as a function argument") 179 | 180 | def encodeType(self, typeTable: TypeTable): 181 | return leb128.i.encode(TypeIds.Empty.value) 182 | 183 | def decodeValue(self, b: Pipe, t: Type): 184 | raise ValueError("Empty cannot appear as an output") 185 | 186 | @property 187 | def name(self) -> str: 188 | return 'empty' 189 | 190 | @property 191 | def id(self) -> int: 192 | return TypeIds.Empty.value 193 | 194 | # Represents an IDL Bool 195 | class BoolClass(PrimitiveType): 196 | def __init__(self) -> None: 197 | super().__init__() 198 | 199 | def covariant(self, x): 200 | return isinstance(x, bool) 201 | 202 | def encodeValue(self, val): 203 | return leb128.u.encode(1 if val else 0) 204 | 205 | def encodeType(self, typeTable: TypeTable): 206 | return leb128.i.encode(TypeIds.Bool.value) 207 | 208 | def decodeValue(self, b: Pipe, t: Type): 209 | self.checkType(t) 210 | byte = safeReadByte(b) 211 | if leb128.u.decode(byte) == 1: 212 | return True 213 | elif leb128.u.decode(byte) == 0: 214 | return False 215 | else: 216 | raise ValueError("Boolean value out of range") 217 | 218 | @property 219 | def name(self) -> str: 220 | return 'bool' 221 | 222 | @property 223 | def id(self) -> int: 224 | return TypeIds.Bool.value 225 | 226 | # Represents an IDL Null 227 | # check None == Null ? 228 | class NullClass(PrimitiveType): 229 | def __init__(self) -> None: 230 | super().__init__() 231 | 232 | def covariant(self, x): 233 | return x == None 234 | 235 | def encodeValue(self, val): 236 | return b'' 237 | 238 | def encodeType(self, typeTable: TypeTable): 239 | return leb128.i.encode(TypeIds.Null.value) 240 | 241 | def decodeValue(self, b: Pipe, t: Type): 242 | self.checkType(t) 243 | return None 244 | 245 | @property 246 | def name(self) -> str: 247 | return 'null' 248 | 249 | @property 250 | def id(self) -> int: 251 | return TypeIds.Null.value 252 | 253 | # Represents an IDL Reserved 254 | class ReservedClass(PrimitiveType): 255 | def __init__(self) -> None: 256 | super().__init__() 257 | 258 | def covariant(self, x): 259 | return True 260 | 261 | def encodeValue(self): 262 | return b'' 263 | 264 | def encodeType(self, typeTable: TypeTable): 265 | return leb128.i.encode(TypeIds.Reserved.value) 266 | 267 | def decodeValue(self, b: Pipe, t: Type): 268 | if self.name != t.name: 269 | t.decodeValue(b, t) 270 | return None 271 | 272 | @property 273 | def name(self) -> str: 274 | return 'reserved' 275 | 276 | @property 277 | def id(self) -> int: 278 | return TypeIds.Reserved.value 279 | 280 | # Represents an IDL Text 281 | class TextClass(PrimitiveType): 282 | def __init__(self) -> None: 283 | super().__init__() 284 | 285 | def covariant(self, x): 286 | return isinstance(x, str) 287 | 288 | def encodeValue(self, val: str): 289 | buf = val.encode() 290 | length = leb128.u.encode(len(buf)) 291 | return length + buf 292 | 293 | def encodeType(self, typeTable: TypeTable): 294 | return leb128.i.encode(TypeIds.Text.value) 295 | 296 | def decodeValue(self, b, t: Type): 297 | self.checkType(t) 298 | length = leb128uDecode(b) 299 | buf = safeRead(b, length) 300 | return buf.decode() 301 | 302 | @property 303 | def name(self) -> str: 304 | return 'text' 305 | 306 | @property 307 | def id(self) -> int: 308 | return TypeIds.Text.value 309 | 310 | # Represents an IDL Int 311 | class 
IntClass(PrimitiveType): 312 | def __init__(self) -> None: 313 | super().__init__() 314 | 315 | def covariant(self, x): 316 | return isinstance(x, int) 317 | 318 | def encodeValue(self, val): 319 | return leb128.i.encode(val) 320 | 321 | def encodeType(self, typeTable: TypeTable): 322 | return leb128.i.encode(TypeIds.Int.value) 323 | 324 | def decodeValue(self, b: Pipe, t: Type): 325 | self.checkType(t) 326 | return leb128iDecode(b) 327 | 328 | @property 329 | def name(self) -> str: 330 | return 'int' 331 | 332 | @property 333 | def id(self) -> int: 334 | return TypeIds.Int.value 335 | 336 | # Represents an IDL Nat 337 | class NatClass(PrimitiveType): 338 | def __init__(self) -> None: 339 | super().__init__() 340 | 341 | def covariant(self, x): 342 | return isinstance(x, int) and x >= 0 343 | 344 | def encodeValue(self, val): 345 | return leb128.u.encode(val) 346 | 347 | def encodeType(self, typeTable: TypeTable): 348 | return leb128.i.encode(TypeIds.Nat.value) 349 | 350 | def decodeValue(self, b: Pipe, t: Type): 351 | self.checkType(t) 352 | return leb128uDecode(b) 353 | 354 | @property 355 | def name(self) -> str: 356 | return 'nat' 357 | 358 | @property 359 | def id(self) -> int: 360 | return TypeIds.Nat.value 361 | 362 | # Represents an IDL Float 363 | class FloatClass(PrimitiveType): 364 | def __init__(self, _bits): 365 | super().__init__() 366 | self._bits = _bits 367 | if _bits != 32 and _bits != 64: 368 | raise ValueError("not a valid float type") 369 | 370 | def covariant(self, x): 371 | return isinstance(x, float) 372 | 373 | def encodeValue(self, val): 374 | if self._bits == 32: 375 | buf = pack('f', val) 376 | elif self._bits == 64: 377 | buf = pack('d', val) 378 | else: 379 | raise ValueError("The length of float have to be 32 bits or 64 bits ") 380 | return buf 381 | 382 | def encodeType(self, typeTable: TypeTable): 383 | opcode = TypeIds.Float32.value if self._bits == 32 else TypeIds.Float64.value 384 | return leb128.i.encode(opcode) 385 | 386 | def decodeValue(self, b: Pipe, t: Type) -> float: 387 | self.checkType(t) 388 | by = safeRead(b, self._bits // 8) 389 | if self._bits == 32: 390 | return unpack('f', by)[0] 391 | elif self._bits == 64: 392 | return unpack('d', by)[0] 393 | else: 394 | raise ValueError("The length of float have to be 32 bits or 64 bits ") 395 | 396 | @property 397 | def name(self) -> str: 398 | return 'float' + str(self._bits) 399 | 400 | @property 401 | def id(self) -> int: 402 | return TypeIds.Float32.value if self._bits == 32 else TypeIds.Float64.value 403 | 404 | # Represents an IDL fixed-width Int(n) 405 | class FixedIntClass(PrimitiveType): 406 | def __init__(self, _bits): 407 | super().__init__() 408 | self._bits = _bits 409 | if _bits != 8 and _bits != 16 and \ 410 | _bits != 32 and _bits != 64 : 411 | raise ValueError("bits only support 8, 16, 32, 64") 412 | 413 | def covariant(self, x): 414 | minVal = -1 * 2 ** (self._bits - 1) 415 | maxVal = -1 + 2 ** (self._bits - 1) 416 | if x >= minVal and x <= maxVal: 417 | return True 418 | else: 419 | return False 420 | 421 | def encodeValue(self, val): 422 | if self._bits == 8: 423 | buf = pack('b', val) # signed char -> Int8 424 | elif self._bits == 16: 425 | buf = pack('h', val) # short -> Int16 426 | elif self._bits == 32: 427 | buf = pack('i', val) # int -> Int32 428 | elif self._bits == 64: 429 | buf = pack('q', val) # long long -> Int64 430 | else: 431 | raise ValueError("bits only support 8, 16, 32, 64") 432 | return buf 433 | 434 | def encodeType(self, typeTable: TypeTable): 435 | offset = 
int(math.log2(self._bits) - 3) 436 | return leb128.i.encode(-9 - offset) 437 | 438 | def decodeValue(self, b: Pipe, t: Type): 439 | self.checkType(t) 440 | by = safeRead(b, self._bits // 8) 441 | if self._bits == 8: 442 | return unpack('b', by)[0] # signed char -> Int8 443 | elif self._bits == 16: 444 | return unpack('h', by)[0] # short -> Int16 445 | elif self._bits == 32: 446 | return unpack('i', by)[0] # int -> Int32 447 | elif self._bits == 64: 448 | return unpack('q', by)[0] # long long -> Int64 449 | else: 450 | raise ValueError("bits only support 8, 16, 32, 64") 451 | 452 | @property 453 | def name(self) -> str: 454 | return 'int' + str(self._bits) 455 | 456 | @property 457 | def id(self) -> int: 458 | if self._bits == 8: 459 | return TypeIds.Int8.value 460 | if self._bits == 16: 461 | return TypeIds.Int16.value 462 | if self._bits == 32: 463 | return TypeIds.Int32.value 464 | if self._bits == 64: 465 | return TypeIds.Int64.value 466 | 467 | # Represents an IDL fixed-width Nat(n) 468 | class FixedNatClass(PrimitiveType): 469 | def __init__(self, _bits): 470 | super().__init__() 471 | self._bits = _bits 472 | if _bits != 8 and _bits != 16 and \ 473 | _bits != 32 and _bits != 64 : 474 | raise ValueError("bits only support 8, 16, 32, 64") 475 | 476 | def covariant(self, x): 477 | maxVal = -1 + 2 ** self._bits 478 | if x >= 0 and x <= maxVal: 479 | return True 480 | else: 481 | return False 482 | 483 | def encodeValue(self, val): 484 | if self._bits == 8: 485 | buf = pack('B', val) # unsigned char -> Nat8 486 | elif self._bits == 16: 487 | buf = pack('H', val) # unsigned short -> Nat16 488 | elif self._bits == 32: 489 | buf = pack('I', val) # unsigned int -> Nat32 490 | elif self._bits == 64: 491 | buf = pack('Q', val) # unsigned long long -> Nat64 492 | else: 493 | raise ValueError("bits only support 8, 16, 32, 64") 494 | return buf 495 | 496 | def encodeType(self, typeTable: TypeTable): 497 | offset = int(math.log2(self._bits) - 3) 498 | return leb128.i.encode(-5 - offset) 499 | 500 | def decodeValue(self, b: Pipe, t: Type): 501 | self.checkType(t) 502 | by = safeRead(b, self._bits // 8) 503 | if self._bits == 8: 504 | return unpack('B', by)[0] # unsigned char -> Nat8 505 | elif self._bits == 16: 506 | return unpack('H', by)[0] # unsigned short -> Nat16 507 | elif self._bits == 32: 508 | return unpack('I', by)[0] # unsigned int -> Nat32 509 | elif self._bits == 64: 510 | return unpack('Q', by)[0] # unsigned long long -> Nat64 511 | else: 512 | raise ValueError("bits only support 8, 16, 32, 64") 513 | 514 | @property 515 | def name(self) -> str: 516 | return 'nat' + str(self._bits) 517 | 518 | @property 519 | def id(self) -> int: 520 | if self._bits == 8: 521 | return TypeIds.Nat8.value 522 | if self._bits == 16: 523 | return TypeIds.Nat16.value 524 | if self._bits == 32: 525 | return TypeIds.Nat32.value 526 | if self._bits == 64: 527 | return TypeIds.Nat64.value 528 | 529 | # Represents an IDL Array 530 | class VecClass(ConstructType): 531 | def __init__(self, _type: Type): 532 | super().__init__() 533 | self._type = _type 534 | 535 | def covariant(self, x): 536 | return isinstance(x, Iterable) and not False in list(map(self._type.covariant, x)) 537 | 538 | def encodeValue(self, val): 539 | length = leb128.u.encode(len(val)) 540 | vec = list(map(self._type.encodeValue, val)) 541 | return length + b''.join(vec) 542 | 543 | def _buildTypeTableImpl(self, typeTable: TypeTable): 544 | self._type.buildTypeTable(typeTable) 545 | opCode = leb128.i.encode(TypeIds.Vec.value) 546 | buffer = 
self._type.encodeType(typeTable) 547 | typeTable.add(self, opCode + buffer) 548 | 549 | def decodeValue(self, b: Pipe, t: Type): 550 | vec = self.checkType(t) 551 | if not isinstance(vec, VecClass): 552 | raise ValueError("Not a vector type") 553 | length = leb128uDecode(b) 554 | rets = [] 555 | for _ in range(length): 556 | rets.append(self._type.decodeValue(b, vec._type)) 557 | return rets 558 | 559 | @property 560 | def name(self) -> str: 561 | return 'vec ({})'.format(str(self._type.name)) 562 | 563 | @property 564 | def id(self) -> int: 565 | return TypeIds.Vec.value 566 | 567 | def display(self): 568 | return 'vec {}'.format(self._type.display()) 569 | 570 | # Represents an IDL Option 571 | class OptClass(ConstructType): 572 | def __init__(self, _type: Type): 573 | super().__init__() 574 | self._type = _type 575 | 576 | def covariant(self, x): 577 | return type(x) == list and (len(x) == 0 | (len(x) == 1 and self._type.covariant(x[0]))) 578 | 579 | def encodeValue(self, val): 580 | if len(val) == 0: 581 | return b'\x00' 582 | else: 583 | return b'\x01' + self._type.encodeValue(val[0]) 584 | 585 | def _buildTypeTableImpl(self, typeTable: TypeTable): 586 | self._type.buildTypeTable(typeTable) 587 | opCode = leb128.i.encode(TypeIds.Opt.value) 588 | buffer = self._type.encodeType(typeTable) 589 | typeTable.add(self, opCode + buffer) 590 | 591 | def decodeValue(self, b: Pipe, t: Type): 592 | opt = self.checkType(t) 593 | if not isinstance(opt, OptClass): 594 | raise ValueError("Not an option type") 595 | flag = safeReadByte(b) 596 | if flag == b'\x00': 597 | return [] 598 | elif flag == b'\x01': 599 | return [self._type.decodeValue(b, opt._type)] 600 | else: 601 | raise ValueError("Not an option value") 602 | 603 | @property 604 | def name(self) -> str: 605 | return 'opt ({})'.format(str(self._type.name)) 606 | 607 | @property 608 | def id(self) -> int: 609 | return TypeIds.Opt.value 610 | 611 | def display(self): 612 | return 'opt ({})'.format(self._type.display()) 613 | 614 | # Represents an IDL Record 615 | class RecordClass(ConstructType): 616 | def __init__(self, field: dict): 617 | super().__init__() 618 | self._fields = dict(sorted(field.items(), key=lambda kv: labelHash(kv[0]))) # check 619 | 620 | def tryAsTuple(self): 621 | res = [] 622 | idx = 0 623 | for k, v in self._fields.items(): 624 | if k != '_' + str(idx) + '_': 625 | return None 626 | res.append(v) 627 | idx += 1 628 | return res 629 | 630 | def covariant(self, x: dict): 631 | if type(x) != dict: 632 | raise ValueError("Expected dict type input.") 633 | for k, v in self._fields.items(): 634 | if not k in x: 635 | raise ValueError("Record is missing key {}".format(k)) 636 | if v.covariant(x[k]): 637 | continue 638 | else: 639 | return False 640 | return True 641 | 642 | def encodeValue(self, val): 643 | bufs = [] 644 | for k,v in self._fields.items(): 645 | bufs.append(v.encodeValue(val[k])) 646 | return b''.join(bufs) 647 | 648 | 649 | def _buildTypeTableImpl(self, typeTable: TypeTable): 650 | for _, v in self._fields.items(): 651 | v.buildTypeTable(typeTable) 652 | opCode = leb128.i.encode(TypeIds.Record.value) 653 | length = leb128.u.encode(len(self._fields)) 654 | fields = b'' 655 | for k, v in self._fields.items(): 656 | fields += (leb128.u.encode(labelHash(k)) + v.encodeType(typeTable)) 657 | typeTable.add(self, opCode + length + fields) 658 | 659 | 660 | def decodeValue(self, b: Pipe, t: Type): 661 | record = self.checkType(t) 662 | if not isinstance(record, RecordClass): 663 | raise ValueError("Not a record 
type") 664 | 665 | x = {} 666 | idx = 0 667 | keys = list(self._fields.keys()) 668 | for k, v in record._fields.items() : 669 | if idx >= len(self._fields) or ( labelHash(keys[idx]) != labelHash(k) ): 670 | # skip field 671 | v.decodeValue(b, v) 672 | continue 673 | expectKey = keys[idx] 674 | exceptValue = self._fields[expectKey] 675 | x[expectKey] = exceptValue.decodeValue(b, v) 676 | idx += 1 677 | if idx < len(self._fields): 678 | raise ValueError("Cannot find field {}".format(keys[idx])) 679 | return x 680 | 681 | @property 682 | def name(self) -> str: 683 | fields = ";".join(map(lambda kv: str(kv[0]) + ":" + kv[1].name, self._fields.items())) 684 | return "record {{{}}}".format(fields) 685 | 686 | @property 687 | def id(self) -> int: 688 | return TypeIds.Record.value 689 | 690 | def display(self): 691 | d = {} 692 | for k, v in self._fields.items(): 693 | d[v] = v.display() 694 | return "record {}".format(d) 695 | 696 | # Represents Tuple, a syntactic sugar for Record. 697 | class TupleClass(RecordClass): 698 | def __init__(self, *_components): 699 | x = {} 700 | for i, v in enumerate(_components): 701 | x['_' + str(i) + '_'] = v 702 | super().__init__(x) 703 | self._components = _components 704 | 705 | 706 | def covariant(self, x): 707 | if type(x) != tuple: 708 | raise ValueError("Expected tuple type input.") 709 | for idx, v in enumerate(self._components): 710 | if v.covariant(x[idx]): 711 | continue 712 | else: 713 | return False 714 | if len(x) < len(self._fields): 715 | return False 716 | return True 717 | 718 | def encodeValue(self, val:list): 719 | bufs = b'' 720 | for i in range(len(self._components)): 721 | bufs += self._components[i].encodeValue(val[i]) 722 | return bufs 723 | 724 | 725 | def decodeValue(self, b: Pipe, t: Type): 726 | tup = self.checkType(t) 727 | if not isinstance(tup, TupleClass): 728 | raise ValueError("not a tuple type") 729 | if len(tup._components) != len(self._components): 730 | raise ValueError("tuple mismatch") 731 | res = [] 732 | for i, wireType in enumerate(tup._components): 733 | if i >= len(self._components): 734 | wireType.decodeValue(b, wireType) 735 | else: 736 | res.append(self._components[i].decodeValue(b, wireType)) 737 | return res 738 | 739 | @property 740 | def id(self) -> int: 741 | return TypeIds.Tuple.value 742 | 743 | def display(self): 744 | d = [] 745 | for item in self._components: 746 | d.append(item.display()) 747 | return "record {" + '{}'.format(';'.join(d)) + '}' 748 | 749 | # Represents an IDL Variant 750 | class VariantClass(ConstructType): 751 | def __init__(self, field): 752 | super().__init__() 753 | self._fields = dict(sorted(field.items(), key=lambda kv: labelHash(kv[0]))) # check 754 | 755 | 756 | def covariant(self, x): 757 | if len(x) != 1: 758 | return False 759 | for k, v in self._fields.items(): 760 | if not k in x or v.covariant(x[k]): 761 | continue 762 | else: 763 | return False 764 | return True 765 | 766 | def encodeValue(self, val): 767 | idx = 0 768 | for name, ty in self._fields.items(): 769 | if name in val: 770 | count = leb128.i.encode(idx) 771 | buf = ty.encodeValue(val[name]) 772 | return count + buf 773 | idx += 1 774 | raise ValueError("Variant has no data: {}".format(val)) 775 | 776 | def _buildTypeTableImpl(self, typeTable: TypeTable): 777 | for _, v in self._fields.items(): 778 | v.buildTypeTable(typeTable) 779 | opCode = leb128.i.encode(TypeIds.Variant.value) 780 | length = leb128.u.encode(len(self._fields)) 781 | fields = b'' 782 | for k, v in self._fields.items(): 783 | fields += 
leb128.u.encode(labelHash(k)) + v.encodeType(typeTable) 784 | typeTable.add(self, opCode + length + fields) 785 | 786 | 787 | def decodeValue(self, b: Pipe, t: Type): 788 | variant = self.checkType(t) 789 | if not isinstance(variant, VariantClass): 790 | raise ValueError("Not a variant type") 791 | idx = leb128uDecode(b) 792 | if idx >= len(variant._fields): 793 | raise ValueError("Invalid variant index: {}".format(idx)) 794 | keys = list(variant._fields.keys()) 795 | wireHash = keys[idx] 796 | wireType = variant._fields[wireHash] 797 | 798 | for key, expectType in self._fields.items(): 799 | if labelHash(wireHash) == labelHash(key): 800 | ret = {} 801 | if expectType: 802 | value = expectType.decodeValue(b, wireType) 803 | else: 804 | value = None 805 | ret[key] = value 806 | return ret 807 | raise ValueError("Cannot find field hash {}".format(wireHash)) 808 | 809 | 810 | @property 811 | def name(self) -> str: 812 | fields = ";".join(map(lambda kv: str(kv[0]) + ":" + kv[1].name, self._fields.items())) 813 | return 'variant {{{}}}'.format(fields) 814 | 815 | @property 816 | def id(self) -> int: 817 | return TypeIds.Variant.value 818 | 819 | def display(self): 820 | d = {} 821 | for k, v in self._fields.items(): 822 | d[k] = '' if v.name == None else v.name 823 | return 'variant {}'.format(d) 824 | 825 | # Represents a reference to an IDL type, used for defining recursive data types. 826 | class RecClass(ConstructType): 827 | _counter = 0 828 | def __init__(self): 829 | super().__init__() 830 | self._id = RecClass._counter 831 | RecClass._counter += 1 832 | self._type = None 833 | 834 | def fill(self, t: ConstructType): 835 | self._type = t 836 | 837 | def getType(self): 838 | if isinstance(self._type, RecClass): 839 | return self._type.getType() 840 | return self._type 841 | 842 | def covariant(self, x): 843 | return False if self._type == None else self._type.covariant(x) 844 | 845 | def encodeValue(self, val): 846 | if self._type == None: 847 | raise ValueError("Recursive type uninitialized") 848 | else: 849 | return self._type.encodeValue(val) 850 | 851 | def encodeType(self, typeTable): 852 | if isinstance(self._type, PrimitiveType): 853 | return self._type.encodeType(typeTable) 854 | else: 855 | return super().encodeType(typeTable) 856 | 857 | def _buildTypeTableImpl(self, typeTable: TypeTable): 858 | if self._type == None: 859 | raise ValueError("Recursive type uninitialized") 860 | else: 861 | if not isinstance(self.getType(), PrimitiveType): 862 | typeTable.add(self, b'') 863 | self._type.buildTypeTable(typeTable) 864 | typeTable.merge(self, self._type.name) 865 | 866 | 867 | def decodeValue(self, b: Pipe, t: Type): 868 | if self._type == None: 869 | raise ValueError("Recursive type uninitialized") 870 | else: 871 | return self._type.decodeValue(b, t) 872 | 873 | @property 874 | def name(self) -> str: 875 | return 'rec_{}'.format(self._id) 876 | 877 | 878 | def display(self): 879 | if self._type == None: 880 | raise ValueError("Recursive type uninitialized") 881 | else: 882 | return '{}.{}'.format(self.name, self._type.name) 883 | 884 | # Represents an IDL principal reference 885 | class PrincipalClass(PrimitiveType): 886 | def __init__(self) -> None: 887 | super().__init__() 888 | 889 | def covariant(self, x): 890 | if isinstance(x,str): 891 | p = P.from_str(x) 892 | elif isinstance(x, bytes): 893 | p = P.from_hex(x.hex()) 894 | else: 895 | raise ValueError("only support string or bytes format") 896 | return p.isPrincipal 897 | 898 | 899 | def encodeValue(self, val): 900 | tag = 
int.to_bytes(1, 1, byteorder='big') 901 | if isinstance(val, str): 902 | buf = P.from_str(val).bytes 903 | elif isinstance(val, bytes): 904 | buf = val 905 | else: 906 | raise ValueError("Principal should be string or bytes.") 907 | l = leb128.u.encode(len(buf)) 908 | return tag + l + buf 909 | 910 | def encodeType(self,typeTable: TypeTable): 911 | return leb128.i.encode(TypeIds.Principal.value) 912 | 913 | def decodeValue(self, b: Pipe, t: Type): 914 | self.checkType(t) 915 | res = safeReadByte(b) 916 | if leb128.u.decode(res) != 1: 917 | raise ValueError("Cannot decode principal") 918 | length = leb128uDecode(b) 919 | return P.from_hex(safeRead(b, length).hex()) 920 | 921 | @property 922 | def name(self) -> str: 923 | return 'principal' 924 | 925 | @property 926 | def id(self) -> int: 927 | return TypeIds.Principal.value 928 | 929 | #Represents an IDL Func reference 930 | class FuncClass(ConstructType): 931 | def __init__(self, argTypes: list, retTypes: list, annotations: list): 932 | super().__init__() 933 | self.argTypes = argTypes 934 | self.retTypes = retTypes 935 | self.annotations = annotations 936 | 937 | def covariant(self, x): 938 | return type(x) == list and len(x) == 2 and x[0] and \ 939 | (P.from_str(x[0]) if type(x[0]) == str else P.from_hex(x[0].hex())).isPrincipal \ 940 | and type(x[1]) == str 941 | 942 | def encodeValue(self, vals): 943 | principal = vals[0] 944 | methodName = vals[1] 945 | tag = int.to_bytes(1, 1, byteorder='big') 946 | if isinstance(principal, str): 947 | buf = P.from_str(principal).bytes 948 | elif isinstance(principal, bytes): 949 | buf = principal 950 | else: 951 | raise ValueError("Principal should be string or bytes.") 952 | l = leb128.u.encode(len(buf)) 953 | canister = tag + l + buf 954 | 955 | method = methodName.encode() 956 | methodLen = leb128.u.encode(len(method)) 957 | return tag + canister + methodLen + method 958 | 959 | def _buildTypeTableImpl(self, typeTable: TypeTable): 960 | for arg in self.argTypes: 961 | arg.buildTypeTable(typeTable) 962 | for ret in self.retTypes: 963 | ret.buildTypeTable(typeTable) 964 | 965 | opCode = leb128.i.encode(TypeIds.Func.value) 966 | argLen = leb128.u.encode(len(self.argTypes)) 967 | args = b'' 968 | for arg in self.argTypes: 969 | args += arg.encodeType(typeTable) 970 | retLen = leb128.u.encode(len(self.retTypes)) 971 | rets = b'' 972 | for ret in self.retTypes: 973 | rets += ret.encodeType(typeTable) 974 | annLen = leb128.u.encode(len(self.annotations)) 975 | anns = b'' 976 | for a in self.annotations: 977 | anns += self._encodeAnnotation(a) 978 | typeTable.add(self, opCode + argLen + args + retLen + rets + annLen + anns) 979 | 980 | def decodeValue(self, b: Pipe, t: Type): 981 | x = safeReadByte(b) 982 | if leb128.u.decode(x) != 1: 983 | raise ValueError('Cannot decode function reference') 984 | res = safeReadByte(b) 985 | if leb128.u.decode(res) != 1: 986 | raise ValueError("Cannot decode principal") 987 | length = leb128uDecode(b) 988 | canister = P.from_hex(safeRead(b, length).hex()) 989 | mLen = leb128uDecode(b) 990 | buf = safeRead(b, mLen) 991 | method = buf.decode('utf-8') 992 | 993 | return [canister, method] 994 | 995 | @property 996 | def name(self) -> str: 997 | args = ', '.join(arg.name for arg in self.argTypes) 998 | rets = ', '.join(ret.name for ret in self.retTypes) 999 | anns = ' '.join(self.annotations) 1000 | return '({}) → ({}) {}'.format(args, rets, anns) 1001 | 1002 | @property 1003 | def id(self) -> int: 1004 | return TypeIds.Func.value 1005 | 1006 | def display(self): 1007 | 
args = ', '.join(arg.display() for arg in self.argTypes) 1008 | rets = ', '.join(ret.display() for ret in self.retTypes) 1009 | anns = ' '.join(self.annotations) 1010 | return '({}) → ({}) {}'.format(args, rets, anns) 1011 | 1012 | def _encodeAnnotation(self, ann: str): 1013 | if ann == 'query': 1014 | return int.to_bytes(1, 1, byteorder='big') 1015 | elif ann == 'oneway': 1016 | return int.to_bytes(2, 1, byteorder='big') 1017 | else: 1018 | raise ValueError('Illeagal function annotation') 1019 | 1020 | # Represents an IDL Service reference 1021 | class ServiceClass(ConstructType): 1022 | def __init__(self, field): 1023 | super().__init__() 1024 | self._fields = dict(sorted(field.items(), key=lambda kv: labelHash(kv[0]))) # check 1025 | 1026 | def covariant(self, x): 1027 | if isinstance(x,str): 1028 | p = P.from_str(x) 1029 | elif isinstance(x, bytes): 1030 | p = P.from_hex(x.hex()) 1031 | else: 1032 | raise ValueError("only support string or bytes format") 1033 | return p.isPrincipal 1034 | 1035 | 1036 | def encodeValue(self, val): 1037 | tag = int.to_bytes(1, 1, byteorder='big') 1038 | if isinstance(val, str): 1039 | buf = P.from_str(val).bytes 1040 | elif isinstance(val, bytes): 1041 | buf = val 1042 | else: 1043 | raise ValueError("Principal should be string or bytes.") 1044 | l = leb128.u.encode(len(buf)) 1045 | return tag + l + buf 1046 | 1047 | def _buildTypeTableImpl(self, typeTable: TypeTable): 1048 | for _, v in self._fields.items(): 1049 | v.buildTypeTable(typeTable) 1050 | opCode = leb128.i.encode(TypeIds.Service.value) 1051 | length = leb128.u.encode(len(self._fields)) 1052 | fields = b'' 1053 | for k, v in self._fields.items(): 1054 | fields += leb128.u.encode(len(k.encode())) + k.encode() + v.encodeType(typeTable) 1055 | typeTable.add(self, opCode + length + fields) 1056 | 1057 | def decodeValue(self, b: Pipe, t: Type): 1058 | res = safeReadByte(b) 1059 | if leb128.u.decode(res) != 1: 1060 | raise ValueError("Cannot decode principal") 1061 | length = leb128uDecode(b) 1062 | return P.from_hex(safeRead(b, length).hex()) 1063 | 1064 | @property 1065 | def name(self) -> str: 1066 | fields = '' 1067 | for k, v in self._fields.items(): 1068 | fields += k + ' : ' + v.name 1069 | return 'service {}'.format(fields) 1070 | 1071 | @property 1072 | def id(self) -> int: 1073 | return TypeIds.Service.value 1074 | 1075 | # through Pipe to decode bytes 1076 | def leb128uDecode(pipe: Pipe): 1077 | res = b'' 1078 | while True: 1079 | byte = safeReadByte(pipe) 1080 | res += byte 1081 | if byte < b'\x80' or pipe.length == 0: 1082 | break 1083 | return leb128.u.decode(res) 1084 | 1085 | def leb128iDecode(pipe: Pipe): 1086 | length = len(pipe._view) 1087 | for i in range(length): 1088 | if pipe._view[i:i+1] < b'\x80': 1089 | if pipe._view[i:i+1] < b'\x40': 1090 | return leb128uDecode(pipe) 1091 | break 1092 | res = safeRead(pipe, i + 1) 1093 | return leb128.i.decode(res) 1094 | 1095 | def safeRead(pipe: Pipe, num:int): 1096 | if pipe.length < num: 1097 | raise ValueError("unexpected end of buffer") 1098 | return pipe.read(num) 1099 | 1100 | def safeReadByte(pipe: Pipe): 1101 | if pipe.length < 1: 1102 | raise ValueError("unexpected end of buffer") 1103 | return pipe.read(1) 1104 | 1105 | def readTypeTable(pipe): 1106 | #types length 1107 | typeTable = [] 1108 | typeTable_len = leb128uDecode(pipe) 1109 | for _ in range(typeTable_len): 1110 | ty = leb128iDecode(pipe) 1111 | if ty == TypeIds.Opt.value or ty == TypeIds.Vec.value: 1112 | t = leb128iDecode(pipe) 1113 | typeTable.append([ty, t]) 1114 | 
elif ty == TypeIds.Record.value or ty == TypeIds.Variant.value: 1115 | fields = [] 1116 | objLength = leb128uDecode(pipe) 1117 | prevHash = -1 1118 | for _ in range(objLength): 1119 | hash = leb128uDecode(pipe) 1120 | if hash >= math.pow(2, 32): 1121 | raise ValueError("field id out of 32-bit range") 1122 | if type(prevHash) == int and prevHash >= hash: 1123 | raise ValueError("field id collision or not sorted") 1124 | prevHash = hash 1125 | t = leb128iDecode(pipe) 1126 | fields.append([hash, t]) 1127 | typeTable.append([ty, fields]) 1128 | elif ty == TypeIds.Func.value: 1129 | for _ in range(2): 1130 | funLen = leb128uDecode(pipe) 1131 | for _ in range(funLen): leb128iDecode(pipe) 1132 | annLen = leb128uDecode(pipe) 1133 | safeRead(pipe, annLen) 1134 | typeTable.append([ty, None]) 1135 | elif ty == TypeIds.Service.value: 1136 | servLen = leb128uDecode(pipe) 1137 | for _ in range(servLen): 1138 | l = leb128uDecode(pipe) 1139 | safeRead(pipe, l) 1140 | leb128iDecode(pipe) 1141 | typeTable.append([ty, None]) 1142 | 1143 | else: 1144 | raise ValueError("Illegal op_code: {}".format(ty)) 1145 | 1146 | rawList = [] 1147 | types_len = leb128uDecode(pipe) 1148 | for _ in range(types_len): 1149 | rawList.append(leb128iDecode(pipe)) 1150 | return typeTable, rawList 1151 | 1152 | def getType(rawTable, table, t:int) -> Type : 1153 | idl = Types() 1154 | if t < -24: 1155 | raise ValueError("not supported type") 1156 | if t < 0: 1157 | if t == -1: 1158 | return idl.Null 1159 | elif t == -2: 1160 | return idl.Bool 1161 | elif t == -3: 1162 | return idl.Nat 1163 | elif t == -4: 1164 | return idl.Int 1165 | elif t == -5: 1166 | return idl.Nat8 1167 | elif t == -6: 1168 | return idl.Nat16 1169 | elif t == -7: 1170 | return idl.Nat32 1171 | elif t == -8: 1172 | return idl.Nat64 1173 | elif t == -9: 1174 | return idl.Int8 1175 | elif t == -10: 1176 | return idl.Int16 1177 | elif t == -11: 1178 | return idl.Int32 1179 | elif t == -12: 1180 | return idl.Int64 1181 | elif t == -13: 1182 | return idl.Float32 1183 | elif t == -14: 1184 | return idl.Float64 1185 | elif t == -15: 1186 | return idl.Text 1187 | elif t == -16: 1188 | return idl.Reserved 1189 | elif t == -17: 1190 | return idl.Empty 1191 | elif t == -24: 1192 | return idl.Principal 1193 | else: 1194 | raise ValueError("Illegal op_code:{}".format(t)) 1195 | if t >= len(rawTable): 1196 | raise ValueError("type index out of range" ) 1197 | return table[t] 1198 | 1199 | 1200 | def buildType(rawTable, table, entry): 1201 | ty = entry[0] 1202 | if ty == TypeIds.Vec.value: 1203 | if ty >= len(rawTable): 1204 | raise ValueError("type index out of range") 1205 | t = getType(rawTable, table, entry[1]) 1206 | if t == None: 1207 | t = table[t] 1208 | return Types.Vec(t) 1209 | elif ty == TypeIds.Opt.value: 1210 | if ty >= len(rawTable): 1211 | raise ValueError("type index out of range") 1212 | t = getType(rawTable, table, entry[1]) 1213 | if t == None: 1214 | t = table[t] 1215 | return Types.Opt(t) 1216 | elif ty == TypeIds.Record.value: 1217 | fields = {} 1218 | for hash , t in entry[1]: 1219 | name = '_' + str(hash) + '_' 1220 | if t >= len(rawTable): 1221 | raise ValueError("type index out of range") 1222 | temp = getType(rawTable, table, t) 1223 | fields[name] = temp 1224 | record = Types.Record(fields) 1225 | tup = record.tryAsTuple() 1226 | if type(tup) == list: 1227 | return Types.Tuple(*tup) 1228 | else: 1229 | return record 1230 | elif ty == TypeIds.Variant.value: 1231 | fields = {} 1232 | for hash , t in entry[1]: 1233 | name = '_' + str(hash) + '_' 1234 
| if t >= len(rawTable): 1235 | raise ValueError("type index out of range") 1236 | temp = getType(rawTable, table, t) 1237 | fields[name] = temp 1238 | return Types.Variant(fields) 1239 | elif ty == TypeIds.Func.value: 1240 | return Types.Func([], [], []) 1241 | elif ty == TypeIds.Service.value: 1242 | return Types.Service({}) 1243 | else: 1244 | raise ValueError("Illegal op_code: {}".format(ty)) 1245 | 1246 | 1247 | 1248 | # params = [{type, value}] 1249 | # data = b'DIDL' + len(params) + encoded types + encoded values 1250 | def encode(params): 1251 | argTypes = [] 1252 | args = [] 1253 | for p in params: 1254 | argTypes.append(p['type']) 1255 | args.append(p['value']) 1256 | # argTypes: List, args: List 1257 | if len(argTypes) != len(args): 1258 | raise ValueError("Wrong number of message arguments") 1259 | typetable = TypeTable() 1260 | for item in argTypes: 1261 | item.buildTypeTable(typetable) 1262 | 1263 | pre = prefix.encode() 1264 | table = typetable.encode() 1265 | length = leb128.u.encode(len(args)) 1266 | 1267 | typs = b'' 1268 | for t in argTypes: 1269 | typs += t.encodeType(typetable) 1270 | vals = b'' 1271 | for i in range(len(args)): 1272 | t = argTypes[i] 1273 | if not t.covariant(args[i]): 1274 | raise TypeError("Invalid {} argument: {}".format(t.display(), str(args[i]))) 1275 | vals += t.encodeValue(args[i]) 1276 | return pre + table + length + typs + vals 1277 | 1278 | # decode a bytes value 1279 | # def decode(retTypes, data): 1280 | def decode(data, retTypes=None): 1281 | b = Pipe(data) 1282 | if len(data) < len(prefix): 1283 | raise ValueError("Message length smaller than prefix number") 1284 | prefix_buffer = safeRead(b, len(prefix)).decode() 1285 | if prefix_buffer != prefix: 1286 | raise ValueError("Wrong prefix:" + prefix_buffer + 'expected prefix: DIDL') 1287 | rawTable, rawTypes = readTypeTable(b) 1288 | if retTypes: 1289 | if type(retTypes) != list: 1290 | retTypes = [retTypes] 1291 | if len(rawTypes) < len(retTypes): 1292 | raise ValueError("Wrong number of return value") 1293 | 1294 | table = [] 1295 | for _ in range(len(rawTable)): 1296 | table.append(Types.Rec()) 1297 | 1298 | for i, entry in enumerate(rawTable): 1299 | t = buildType(rawTable, table, entry) 1300 | table[i].fill(t) 1301 | 1302 | types = [] 1303 | for t in rawTypes: 1304 | types.append(getType(rawTable, table, t)) 1305 | outputs = [] 1306 | for i, t in enumerate(types if retTypes == None else retTypes): 1307 | outputs.append({ 1308 | 'type': t.name, 1309 | 'value': t.decodeValue(b, types[i]) 1310 | }) 1311 | 1312 | return outputs 1313 | 1314 | class Types(): 1315 | Null = NullClass() 1316 | Empty = EmptyClass() 1317 | Bool = BoolClass() 1318 | Int = IntClass() 1319 | Reserved = ReservedClass() 1320 | Nat = NatClass() 1321 | Text = TextClass() 1322 | Principal = PrincipalClass() 1323 | Float32 = FloatClass(32) 1324 | Float64 = FloatClass(64) 1325 | Int8 = FixedIntClass(8) 1326 | Int16 = FixedIntClass(16) 1327 | Int32 = FixedIntClass(32) 1328 | Int64 = FixedIntClass(64) 1329 | Nat8 = FixedNatClass(8) 1330 | Nat16 = FixedNatClass(16) 1331 | Nat32 = FixedNatClass(32) 1332 | Nat64 = FixedNatClass(64) 1333 | 1334 | def Tuple(*types): 1335 | return TupleClass(*types) 1336 | 1337 | def Vec(t): 1338 | return VecClass(t) 1339 | 1340 | def Opt(t): 1341 | return OptClass(t) 1342 | 1343 | def Record(t): 1344 | return RecordClass(t) 1345 | 1346 | def Variant(fields): 1347 | return VariantClass(fields) 1348 | 1349 | def Rec(): 1350 | return RecClass() 1351 | 1352 | def Func(args, ret, annotations): 1353 | 
return FuncClass(args, ret, annotations) 1354 | 1355 | def Service(t): 1356 | return ServiceClass(t) 1357 | -------------------------------------------------------------------------------- /ic/canister.py: -------------------------------------------------------------------------------- 1 | from .parser.DIDEmitter import *; 2 | from antlr4 import * 3 | from antlr4.InputStream import InputStream 4 | from .candid import encode, FuncClass 5 | 6 | class Canister: 7 | def __init__(self, agent, canister_id, candid=None): 8 | self.agent = agent 9 | self.canister_id = canister_id 10 | if candid: 11 | self.candid = candid 12 | else: 13 | candid = agent.query_raw(canister_id, "__get_candid_interface_tmp_hack", encode([])) 14 | self.candid = candid[0]['value'] 15 | if 'has no query method' in candid: 16 | print(candid) 17 | print("Please provide candid description") 18 | raise BaseException("canister " + str(canister_id) + " has no __get_candid_interface_tmp_hack method.") 19 | 20 | input_stream = InputStream(self.candid) 21 | lexer = DIDLexer(input_stream) 22 | token_stream = CommonTokenStream(lexer) 23 | parser = DIDParser(token_stream) 24 | tree = parser.program() 25 | 26 | emitter = DIDEmitter() 27 | walker = ParseTreeWalker() 28 | walker.walk(emitter, tree) 29 | 30 | self.actor = emitter.getActor() 31 | 32 | for name, method in self.actor["methods"].items(): 33 | assert type(method) == FuncClass 34 | anno = None if len(method.annotations) == 0 else method.annotations[0] 35 | setattr(self, name, CaniterMethod(agent, canister_id, name, method.argTypes, method.retTypes, anno)) 36 | setattr(self, name + '_async', CaniterMethodAsync(agent, canister_id, name, method.argTypes, method.retTypes, anno)) 37 | 38 | class CaniterMethod: 39 | def __init__(self, agent, canister_id, name, args, rets, anno = None): 40 | self.agent = agent 41 | self.canister_id = canister_id 42 | self.name = name 43 | self.args = args 44 | self.rets = rets 45 | 46 | self.anno = anno 47 | 48 | def __call__(self, *args, **kwargs): 49 | if len(args) != len(self.args): 50 | raise ValueError("Arguments length not match") 51 | arguments = [] 52 | for i, arg in enumerate(args): 53 | arguments.append({"type": self.args[i], "value": arg}) 54 | 55 | effective_cansiter_id = args[0]['canister_id'] if self.canister_id == 'aaaaa-aa' and len(args) > 0 and type(args[0]) == dict and 'canister_id' in args[0] else self.canister_id 56 | if self.anno == 'query': 57 | res = self.agent.query_raw( 58 | self.canister_id, 59 | self.name, 60 | encode(arguments), 61 | self.rets, 62 | effective_cansiter_id 63 | ) 64 | else: 65 | res = self.agent.update_raw( 66 | self.canister_id, 67 | self.name, 68 | encode(arguments), 69 | self.rets, 70 | effective_cansiter_id 71 | ) 72 | 73 | if type(res) is not list: 74 | return res 75 | 76 | return list(map(lambda item: item["value"], res)) 77 | 78 | class CaniterMethodAsync: 79 | def __init__(self, agent, canister_id, name, args, rets, anno = None): 80 | self.agent = agent 81 | self.canister_id = canister_id 82 | self.name = name 83 | self.args = args 84 | self.rets = rets 85 | 86 | self.anno = anno 87 | 88 | async def __call__(self, *args, **kwargs): 89 | if len(args) != len(self.args): 90 | raise ValueError("Arguments length not match") 91 | arguments = [] 92 | for i, arg in enumerate(args): 93 | arguments.append({"type": self.args[i], "value": arg}) 94 | 95 | effective_cansiter_id = args[0]['canister_id'] if self.canister_id == 'aaaaa-aa' and len(args) > 0 and type(args[0]) == dict and 'canister_id' in args[0] else 
self.canister_id 96 | if self.anno == 'query': 97 | res = await self.agent.query_raw_async( 98 | self.canister_id, 99 | self.name, 100 | encode(arguments), 101 | self.rets, 102 | effective_cansiter_id 103 | ) 104 | else: 105 | res = await self.agent.update_raw_async( 106 | self.canister_id, 107 | self.name, 108 | encode(arguments), 109 | self.rets, 110 | effective_cansiter_id 111 | ) 112 | 113 | if type(res) is not list: 114 | return res 115 | 116 | return list(map(lambda item: item["value"], res)) -------------------------------------------------------------------------------- /ic/certificate.py: -------------------------------------------------------------------------------- 1 | # reference: https://smartcontracts.org/docs/interface-spec/index.html#certification 2 | 3 | ''' 4 | A certificate consists of: 5 | - a tree 6 | - a signature on the tree root hash valid under some public key 7 | - an optional delegation that links that public key to root public key. 8 | 9 | Certificate = { 10 | tree : HashTree 11 | signature : Signature 12 | delegation : NoDelegation | Delegation 13 | } 14 | HashTree 15 | = Empty 16 | | Fork HashTree HashTree 17 | | Labeled Label HashTree 18 | | Leaf blob 19 | | Pruned Hash 20 | Label = Blob 21 | Hash = Blob 22 | Signature = Blob 23 | 24 | Lookup: 25 | 26 | lookup(path, cert) = lookup_path(path, cert.tree) 27 | 28 | lookup_path([], Empty) = Absent 29 | lookup_path([], Leaf v) = v 30 | lookup_path([], Pruned _) = Unknown 31 | lookup_path([], Labeled _ _) = Error 32 | lookup_path([], Fork _ _) = Error 33 | 34 | lookup_path(l::ls, tree) = 35 | match find_label(l, flatten_forks(tree)) with 36 | | Absent -> Absent 37 | | Unknown -> Unknown 38 | | Error -> Error 39 | | Found subtree -> lookup_path ls subtree 40 | 41 | flatten_forks(Empty) = [] 42 | flatten_forks(Fork t1 t2) = flatten_forks(t1) · flatten_forks(t2) 43 | flatten_forks(t) = [t] 44 | 45 | find_label(l, _ · Labeled l1 t · _) | l == l1 = Found t 46 | find_label(l, _ · Labeled l1 _ · Labeled l2 _ · _) | l1 < l < l2 = Absent 47 | find_label(l, Labeled l2 _ · _) | l < l2 = Absent 48 | find_label(l, _ · Labeled l1 _ ) | l1 < l = Absent 49 | find_label(l, []) = Absent 50 | find_label(l, _) = Unknown 51 | ''' 52 | from enum import Enum 53 | 54 | class NodeId(Enum): 55 | Empty = 0 56 | Fork = 1 57 | Labeled = 2 58 | Leaf = 3 59 | Pruned = 4 60 | 61 | def lookup(path, cert): 62 | return lookup_path(path, cert['tree']) 63 | 64 | def lookup_path(path, tree): 65 | offset = 0 66 | if len(path) == 0: 67 | if tree[0] == NodeId.Leaf.value: 68 | return tree[1] 69 | else: 70 | return None 71 | label = path[0].encode() if type(path[0]) == str else path[0] 72 | t = find_label(label, flatten_forks(tree)) 73 | if t: 74 | offset +=1 75 | return lookup_path(path[offset:], t) 76 | 77 | 78 | def flatten_forks(t): 79 | if t[0] == NodeId.Empty.value: 80 | return [] 81 | elif t[0] == NodeId.Fork.value: 82 | val1 = flatten_forks(t[1]) 83 | val2 = flatten_forks(t[2]) 84 | val1.extend(val2) 85 | return val1 86 | else: 87 | return [t] 88 | 89 | def find_label(l, trees): 90 | if len(trees) == 0: 91 | return None 92 | for t in trees: 93 | if t[0] == NodeId.Labeled.value: 94 | p = t[1] 95 | if l == p : 96 | return t[2] 97 | 98 | if __name__=='__main__': 99 | tree = [1, [4, b'W\xb4\x1b\x00\xc9x\xc0\xcb\\\xf4\xb6\xa1\xbbE\\\x9fr\xe2\x1a8\xd2bE\x14\x11\xab:\xb5\x1b`\x98\x9d'], [1, [4, b'\xac>_\x80\xeb.$\x9c\x00\xbc\x12\xce&!^\xa8,i\x08\xaeH\x8e\x9ce9\x87\xbahGPo\xe6'], [2, b'time', [3, b'\xd2\xac\xd3\x8a\xfc\xa0\xd0\xe0\x16']]]] 100 | tree2 = 
[1, [4, b'5J\xe2\x98A\x8d5\xc8\xe6\x94V\xc9\x90\x87\x00\xc9:\xe1\xb3i\x91fS\xc0udD\x19mQ\x1c\x85'], [1, [4, b'\xac>_\x80\xeb.$\x9c\x00\xbc\x12\xce&!^\xa8,i\x08\xaeH\x8e\x9ce9\x87\xbahGPo\xe6'], [2, b'time', [3, b'\xe7\xfc\xcf\x90\x87\x85\xd0\xe0\x16']]]] 101 | path = b'time' 102 | print(lookup_path([path], tree)) -------------------------------------------------------------------------------- /ic/client.py: -------------------------------------------------------------------------------- 1 | # http client 2 | 3 | import httpx 4 | 5 | DEFAULT_TIMEOUT = 120.0 6 | DEFAULT_TIMEOUT_QUERY = 30.0 7 | 8 | class Client: 9 | def __init__(self, url = "https://ic0.app"): 10 | self.url = url 11 | 12 | def query(self, canister_id, data, *, timeout = DEFAULT_TIMEOUT_QUERY): 13 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/query' 14 | headers = {'Content-Type': 'application/cbor'} 15 | ret = httpx.post(endpoint, data = data, headers=headers, timeout=timeout) 16 | return ret.content 17 | 18 | def call(self, canister_id, req_id, data, *, timeout = DEFAULT_TIMEOUT): 19 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/call' 20 | headers = {'Content-Type': 'application/cbor'} 21 | ret = httpx.post(endpoint, data = data, headers=headers, timeout=timeout) 22 | return req_id 23 | 24 | def read_state(self, canister_id, data, *, timeout = DEFAULT_TIMEOUT_QUERY): 25 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/read_state' 26 | headers = {'Content-Type': 'application/cbor'} 27 | ret = httpx.post(endpoint, data = data, headers=headers, timeout=timeout) 28 | return ret.content 29 | 30 | def status(self, *, timeout = DEFAULT_TIMEOUT_QUERY): 31 | endpoint = self.url + '/api/v2/status' 32 | ret = httpx.get(endpoint, timeout=timeout) 33 | print('client.status:', ret.text) 34 | return ret.content 35 | 36 | async def query_async(self, canister_id, data, *, timeout = DEFAULT_TIMEOUT_QUERY): 37 | async with httpx.AsyncClient(timeout=timeout) as client: 38 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/query' 39 | headers = {'Content-Type': 'application/cbor'} 40 | ret = await client.post(endpoint, data = data, headers=headers) 41 | return ret.content 42 | 43 | async def call_async(self, canister_id, req_id, data, *, timeout = DEFAULT_TIMEOUT): 44 | async with httpx.AsyncClient(timeout=timeout) as client: 45 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/call' 46 | headers = {'Content-Type': 'application/cbor'} 47 | await client.post(endpoint, data = data, headers=headers) 48 | return req_id 49 | 50 | async def read_state_async(self, canister_id, data, *, timeout = DEFAULT_TIMEOUT_QUERY): 51 | async with httpx.AsyncClient(timeout=timeout) as client: 52 | endpoint = self.url + '/api/v2/canister/' + canister_id + '/read_state' 53 | headers = {'Content-Type': 'application/cbor'} 54 | ret = await client.post(endpoint, data = data, headers=headers) 55 | return ret.content 56 | 57 | async def status_async(self, *, timeout = DEFAULT_TIMEOUT_QUERY): 58 | async with httpx.AsyncClient(timeout=timeout) as client: 59 | endpoint = self.url + '/api/v2/status' 60 | ret = await client.get(endpoint) 61 | print('client.status:', ret.text) 62 | return ret.content 63 | -------------------------------------------------------------------------------- /ic/common/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rocklabs-io/ic-py/79196d9149a7117b15f85077c4cf19cdf6808f3c/ic/common/__init__.py 
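Two short usage sketches tie together the modules above before the bundled canister wrappers in ic/common/. Both are minimal sketches, not part of the library itself.

Candid round trip (ic/candid.py): encode takes a list of {'type', 'value'} pairs and produces a DIDL blob; decode reads the embedded type table back, so no return types need to be supplied:

    from ic.candid import Types, encode, decode

    params = [
        {'type': Types.Nat,  'value': 10},
        {'type': Types.Text, 'value': 'hello'},
    ]
    blob = encode(params)    # b'DIDL' prefix, then the type table, then the encoded values
    print(decode(blob))      # expected shape: [{'type': 'nat', 'value': 10}, {'type': 'text', 'value': 'hello'}]

Raw HTTP client (ic/client.py): Client is a thin httpx wrapper that returns undecoded CBOR bytes and leaves decoding to the caller. A minimal status check, assuming a CBOR codec such as cbor2 is installed (not shown in this excerpt):

    import cbor2
    from ic.client import Client

    client = Client()        # defaults to the public boundary node https://ic0.app
    raw = client.status()    # GET /api/v2/status, returns raw CBOR bytes
    status = cbor2.loads(raw)
    if isinstance(status, cbor2.CBORTag):   # replies may carry the self-describing CBOR tag
        status = status.value
    print(list(status.keys()))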
-------------------------------------------------------------------------------- /ic/common/cycles_wallet.py: -------------------------------------------------------------------------------- 1 | from ic.canister import Canister 2 | 3 | wallet_did = """ 4 | type EventKind = variant { 5 | CyclesSent: record { 6 | to: principal; 7 | amount: nat64; 8 | refund: nat64; 9 | }; 10 | CyclesReceived: record { 11 | from: principal; 12 | amount: nat64; 13 | memo: opt text; 14 | }; 15 | AddressAdded: record { 16 | id: principal; 17 | name: opt text; 18 | role: Role; 19 | }; 20 | AddressRemoved: record { 21 | id: principal; 22 | }; 23 | CanisterCreated: record { 24 | canister: principal; 25 | cycles: nat64; 26 | }; 27 | CanisterCalled: record { 28 | canister: principal; 29 | method_name: text; 30 | cycles: nat64; 31 | }; 32 | WalletDeployed: record { 33 | canister: principal; 34 | } 35 | }; 36 | 37 | type EventKind128 = variant { 38 | CyclesSent: record { 39 | to: principal; 40 | amount: nat; 41 | refund: nat; 42 | }; 43 | CyclesReceived: record { 44 | from: principal; 45 | amount: nat; 46 | memo: opt text; 47 | }; 48 | AddressAdded: record { 49 | id: principal; 50 | name: opt text; 51 | role: Role; 52 | }; 53 | AddressRemoved: record { 54 | id: principal; 55 | }; 56 | CanisterCreated: record { 57 | canister: principal; 58 | cycles: nat; 59 | }; 60 | CanisterCalled: record { 61 | canister: principal; 62 | method_name: text; 63 | cycles: nat; 64 | }; 65 | WalletDeployed: record { 66 | canister: principal; 67 | }; 68 | }; 69 | 70 | type Event = record { 71 | id: nat32; 72 | timestamp: nat64; 73 | kind: EventKind; 74 | }; 75 | 76 | type Event128 = record { 77 | id: nat32; 78 | timestamp: nat64; 79 | kind: EventKind128; 80 | }; 81 | 82 | type Role = variant { 83 | Contact; 84 | Custodian; 85 | Controller; 86 | }; 87 | 88 | type Kind = variant { 89 | Unknown; 90 | User; 91 | Canister; 92 | }; 93 | 94 | // An entry in the address book. It must have an ID and a role. 
95 | type AddressEntry = record { 96 | id: principal; 97 | name: opt text; 98 | kind: Kind; 99 | role: Role; 100 | }; 101 | 102 | type ManagedCanisterInfo = record { 103 | id: principal; 104 | name: opt text; 105 | created_at: nat64; 106 | }; 107 | 108 | type ManagedCanisterEventKind = variant { 109 | CyclesSent: record { 110 | amount: nat64; 111 | refund: nat64; 112 | }; 113 | Called: record { 114 | method_name: text; 115 | cycles: nat64; 116 | }; 117 | Created: record { 118 | cycles: nat64; 119 | }; 120 | }; 121 | 122 | type ManagedCanisterEventKind128 = variant { 123 | CyclesSent: record { 124 | amount: nat; 125 | refund: nat; 126 | }; 127 | Called: record { 128 | method_name: text; 129 | cycles: nat; 130 | }; 131 | Created: record { 132 | cycles: nat; 133 | }; 134 | }; 135 | 136 | type ManagedCanisterEvent = record { 137 | id: nat32; 138 | timestamp: nat64; 139 | kind: ManagedCanisterEventKind; 140 | }; 141 | 142 | type ManagedCanisterEvent128 = record { 143 | id: nat32; 144 | timestamp: nat64; 145 | kind: ManagedCanisterEventKind128; 146 | }; 147 | 148 | type ReceiveOptions = record { 149 | memo: opt text; 150 | }; 151 | 152 | type WalletResultCreate = variant { 153 | Ok : record { canister_id: principal }; 154 | Err: text; 155 | }; 156 | 157 | type WalletResult = variant { 158 | Ok : null; 159 | Err : text; 160 | }; 161 | 162 | type WalletResultCall = variant { 163 | Ok : record { return: blob }; 164 | Err : text; 165 | }; 166 | 167 | type CanisterSettings = record { 168 | controller: opt principal; 169 | controllers: opt vec principal; 170 | compute_allocation: opt nat; 171 | memory_allocation: opt nat; 172 | freezing_threshold: opt nat; 173 | }; 174 | 175 | type CreateCanisterArgs = record { 176 | cycles: nat64; 177 | settings: CanisterSettings; 178 | }; 179 | 180 | type CreateCanisterArgs128 = record { 181 | cycles: nat; 182 | settings: CanisterSettings; 183 | }; 184 | 185 | // Assets 186 | type HeaderField = record { text; text; }; 187 | 188 | type HttpRequest = record { 189 | method: text; 190 | url: text; 191 | headers: vec HeaderField; 192 | body: blob; 193 | }; 194 | 195 | type HttpResponse = record { 196 | status_code: nat16; 197 | headers: vec HeaderField; 198 | body: blob; 199 | streaming_strategy: opt StreamingStrategy; 200 | }; 201 | 202 | type StreamingCallbackHttpResponse = record { 203 | body: blob; 204 | token: opt Token; 205 | }; 206 | 207 | type Token = record {}; 208 | 209 | type StreamingStrategy = variant { 210 | Callback: record { 211 | callback: func (Token) -> (StreamingCallbackHttpResponse) query; 212 | token: Token; 213 | }; 214 | }; 215 | 216 | service : { 217 | wallet_api_version: () -> (text) query; 218 | 219 | // Wallet Name 220 | name: () -> (opt text) query; 221 | set_name: (text) -> (); 222 | 223 | // Controller Management 224 | get_controllers: () -> (vec principal) query; 225 | add_controller: (principal) -> (); 226 | remove_controller: (principal) -> (WalletResult); 227 | 228 | // Custodian Management 229 | get_custodians: () -> (vec principal) query; 230 | authorize: (principal) -> (); 231 | deauthorize: (principal) -> (WalletResult); 232 | 233 | // Cycle Management 234 | wallet_balance: () -> (record { amount: nat64 }) query; 235 | wallet_balance128: () -> (record { amount: nat }) query; 236 | wallet_send: (record { canister: principal; amount: nat64 }) -> (WalletResult); 237 | wallet_send128: (record { canister: principal; amount: nat }) -> (WalletResult); 238 | wallet_receive: (opt ReceiveOptions) -> (); // Endpoint for receiving cycles. 
239 | 240 | // Managing canister 241 | wallet_create_canister: (CreateCanisterArgs) -> (WalletResultCreate); 242 | wallet_create_canister128: (CreateCanisterArgs128) -> (WalletResultCreate); 243 | 244 | wallet_create_wallet: (CreateCanisterArgs) -> (WalletResultCreate); 245 | wallet_create_wallet128: (CreateCanisterArgs128) -> (WalletResultCreate); 246 | 247 | wallet_store_wallet_wasm: (record { 248 | wasm_module: blob; 249 | }) -> (); 250 | 251 | // Call Forwarding 252 | wallet_call: (record { 253 | canister: principal; 254 | method_name: text; 255 | args: blob; 256 | cycles: nat64; 257 | }) -> (WalletResultCall); 258 | wallet_call128: (record { 259 | canister: principal; 260 | method_name: text; 261 | args: blob; 262 | cycles: nat; 263 | }) -> (WalletResultCall); 264 | 265 | // Address book 266 | add_address: (address: AddressEntry) -> (); 267 | list_addresses: () -> (vec AddressEntry) query; 268 | remove_address: (address: principal) -> (WalletResult); 269 | 270 | // Events 271 | // If `from` is not specified, it will start 20 from the end; if `to` is not specified, it will stop at the end 272 | get_events: (opt record { from: opt nat32; to: opt nat32; }) -> (vec Event) query; 273 | get_events128: (opt record { from: opt nat32; to: opt nat32; }) -> (vec Event128) query; 274 | get_chart: (opt record { count: opt nat32; precision: opt nat64; } ) -> (vec record { nat64; nat64; }) query; 275 | 276 | // Managed canisters 277 | list_managed_canisters: (record { from: opt nat32; to: opt nat32; }) -> (vec ManagedCanisterInfo, nat32) query; 278 | // If `from` is not specified, it will start 20 from the end; if `to` is not specified, it will stop at the end 279 | get_managed_canister_events: (record { canister: principal; from: opt nat32; to: opt nat32; }) -> (opt vec ManagedCanisterEvent) query; 280 | get_managed_canister_events128: (record { canister: principal; from: opt nat32; to: opt nat32; }) -> (opt vec ManagedCanisterEvent128) query; 281 | set_short_name: (principal, opt text) -> (opt ManagedCanisterInfo); 282 | 283 | // Assets 284 | http_request: (request: HttpRequest) -> (HttpResponse) query; 285 | } 286 | """ 287 | 288 | class CyclesWallet(Canister): 289 | def __init__(self, agent, wallet_id): 290 | super().__init__(agent, wallet_id, wallet_did) -------------------------------------------------------------------------------- /ic/common/governance.py: -------------------------------------------------------------------------------- 1 | from ic.canister import Canister 2 | 3 | governance_did = """ 4 | type AccountIdentifier = record { hash : vec nat8 }; 5 | type Action = variant { 6 | RegisterKnownNeuron : KnownNeuron; 7 | ManageNeuron : ManageNeuron; 8 | ExecuteNnsFunction : ExecuteNnsFunction; 9 | RewardNodeProvider : RewardNodeProvider; 10 | SetDefaultFollowees : SetDefaultFollowees; 11 | RewardNodeProviders : RewardNodeProviders; 12 | ManageNetworkEconomics : NetworkEconomics; 13 | ApproveGenesisKyc : ApproveGenesisKyc; 14 | AddOrRemoveNodeProvider : AddOrRemoveNodeProvider; 15 | Motion : Motion; 16 | }; 17 | type AddHotKey = record { new_hot_key : opt principal }; 18 | type AddOrRemoveNodeProvider = record { change : opt Change }; 19 | type Amount = record { e8s : nat64 }; 20 | type ApproveGenesisKyc = record { principals : vec principal }; 21 | type Ballot = record { vote : int32; voting_power : nat64 }; 22 | type BallotInfo = record { vote : int32; proposal_id : opt NeuronId }; 23 | type By = variant { 24 | NeuronIdOrSubaccount : record {}; 25 | MemoAndController : 
ClaimOrRefreshNeuronFromAccount; 26 | Memo : nat64; 27 | }; 28 | type Change = variant { ToRemove : NodeProvider; ToAdd : NodeProvider }; 29 | type ClaimOrRefresh = record { by : opt By }; 30 | type ClaimOrRefreshNeuronFromAccount = record { 31 | controller : opt principal; 32 | memo : nat64; 33 | }; 34 | type ClaimOrRefreshNeuronFromAccountResponse = record { result : opt Result_1 }; 35 | type ClaimOrRefreshResponse = record { refreshed_neuron_id : opt NeuronId }; 36 | type Command = variant { 37 | Spawn : Spawn; 38 | Split : Split; 39 | Follow : Follow; 40 | ClaimOrRefresh : ClaimOrRefresh; 41 | Configure : Configure; 42 | RegisterVote : RegisterVote; 43 | Merge : Merge; 44 | DisburseToNeuron : DisburseToNeuron; 45 | MakeProposal : Proposal; 46 | MergeMaturity : MergeMaturity; 47 | Disburse : Disburse; 48 | }; 49 | type Command_1 = variant { 50 | Error : GovernanceError; 51 | Spawn : SpawnResponse; 52 | Split : SpawnResponse; 53 | Follow : record {}; 54 | ClaimOrRefresh : ClaimOrRefreshResponse; 55 | Configure : record {}; 56 | RegisterVote : record {}; 57 | Merge : record {}; 58 | DisburseToNeuron : SpawnResponse; 59 | MakeProposal : MakeProposalResponse; 60 | MergeMaturity : MergeMaturityResponse; 61 | Disburse : DisburseResponse; 62 | }; 63 | type Command_2 = variant { 64 | Spawn : Spawn; 65 | Split : Split; 66 | Configure : Configure; 67 | Merge : Merge; 68 | DisburseToNeuron : DisburseToNeuron; 69 | ClaimOrRefreshNeuron : ClaimOrRefresh; 70 | MergeMaturity : MergeMaturity; 71 | Disburse : Disburse; 72 | }; 73 | type Configure = record { operation : opt Operation }; 74 | type Disburse = record { 75 | to_account : opt AccountIdentifier; 76 | amount : opt Amount; 77 | }; 78 | type DisburseResponse = record { transfer_block_height : nat64 }; 79 | type DisburseToNeuron = record { 80 | dissolve_delay_seconds : nat64; 81 | kyc_verified : bool; 82 | amount_e8s : nat64; 83 | new_controller : opt principal; 84 | nonce : nat64; 85 | }; 86 | type DissolveState = variant { 87 | DissolveDelaySeconds : nat64; 88 | WhenDissolvedTimestampSeconds : nat64; 89 | }; 90 | type ExecuteNnsFunction = record { nns_function : int32; payload : vec nat8 }; 91 | type Follow = record { topic : int32; followees : vec NeuronId }; 92 | type Followees = record { followees : vec NeuronId }; 93 | type Governance = record { 94 | default_followees : vec record { int32; Followees }; 95 | wait_for_quiet_threshold_seconds : nat64; 96 | metrics : opt GovernanceCachedMetrics; 97 | node_providers : vec NodeProvider; 98 | economics : opt NetworkEconomics; 99 | latest_reward_event : opt RewardEvent; 100 | to_claim_transfers : vec NeuronStakeTransfer; 101 | short_voting_period_seconds : nat64; 102 | proposals : vec record { nat64; ProposalData }; 103 | in_flight_commands : vec record { nat64; NeuronInFlightCommand }; 104 | neurons : vec record { nat64; Neuron }; 105 | genesis_timestamp_seconds : nat64; 106 | }; 107 | type GovernanceCachedMetrics = record { 108 | not_dissolving_neurons_e8s_buckets : vec record { nat64; float64 }; 109 | garbage_collectable_neurons_count : nat64; 110 | neurons_with_invalid_stake_count : nat64; 111 | not_dissolving_neurons_count_buckets : vec record { nat64; nat64 }; 112 | total_supply_icp : nat64; 113 | neurons_with_less_than_6_months_dissolve_delay_count : nat64; 114 | dissolved_neurons_count : nat64; 115 | total_staked_e8s : nat64; 116 | not_dissolving_neurons_count : nat64; 117 | dissolved_neurons_e8s : nat64; 118 | neurons_with_less_than_6_months_dissolve_delay_e8s : nat64; 119 | 
dissolving_neurons_count_buckets : vec record { nat64; nat64 }; 120 | dissolving_neurons_count : nat64; 121 | dissolving_neurons_e8s_buckets : vec record { nat64; float64 }; 122 | community_fund_total_staked_e8s : nat64; 123 | timestamp_seconds : nat64; 124 | }; 125 | type GovernanceError = record { error_message : text; error_type : int32 }; 126 | type IncreaseDissolveDelay = record { 127 | additional_dissolve_delay_seconds : nat32; 128 | }; 129 | type KnownNeuron = record { 130 | id : opt NeuronId; 131 | known_neuron_data : opt KnownNeuronData; 132 | }; 133 | type KnownNeuronData = record { name : text; description : opt text }; 134 | type ListKnownNeuronsResponse = record { known_neurons : vec KnownNeuron }; 135 | type ListNeurons = record { 136 | neuron_ids : vec nat64; 137 | include_neurons_readable_by_caller : bool; 138 | }; 139 | type ListNeuronsResponse = record { 140 | neuron_infos : vec record { nat64; NeuronInfo }; 141 | full_neurons : vec Neuron; 142 | }; 143 | type ListNodeProvidersResponse = record { node_providers : vec NodeProvider }; 144 | type ListProposalInfo = record { 145 | include_reward_status : vec int32; 146 | before_proposal : opt NeuronId; 147 | limit : nat32; 148 | exclude_topic : vec int32; 149 | include_status : vec int32; 150 | }; 151 | type ListProposalInfoResponse = record { proposal_info : vec ProposalInfo }; 152 | type MakeProposalResponse = record { proposal_id : opt NeuronId }; 153 | type ManageNeuron = record { 154 | id : opt NeuronId; 155 | command : opt Command; 156 | neuron_id_or_subaccount : opt NeuronIdOrSubaccount; 157 | }; 158 | type ManageNeuronResponse = record { command : opt Command_1 }; 159 | type Merge = record { source_neuron_id : opt NeuronId }; 160 | type MergeMaturity = record { percentage_to_merge : nat32 }; 161 | type MergeMaturityResponse = record { 162 | merged_maturity_e8s : nat64; 163 | new_stake_e8s : nat64; 164 | }; 165 | type Motion = record { motion_text : text }; 166 | type NetworkEconomics = record { 167 | neuron_minimum_stake_e8s : nat64; 168 | max_proposals_to_keep_per_topic : nat32; 169 | neuron_management_fee_per_proposal_e8s : nat64; 170 | reject_cost_e8s : nat64; 171 | transaction_fee_e8s : nat64; 172 | neuron_spawn_dissolve_delay_seconds : nat64; 173 | minimum_icp_xdr_rate : nat64; 174 | maximum_node_provider_rewards_e8s : nat64; 175 | }; 176 | type Neuron = record { 177 | id : opt NeuronId; 178 | controller : opt principal; 179 | recent_ballots : vec BallotInfo; 180 | kyc_verified : bool; 181 | not_for_profit : bool; 182 | maturity_e8s_equivalent : nat64; 183 | cached_neuron_stake_e8s : nat64; 184 | created_timestamp_seconds : nat64; 185 | aging_since_timestamp_seconds : nat64; 186 | hot_keys : vec principal; 187 | account : vec nat8; 188 | joined_community_fund_timestamp_seconds : opt nat64; 189 | dissolve_state : opt DissolveState; 190 | followees : vec record { int32; Followees }; 191 | neuron_fees_e8s : nat64; 192 | transfer : opt NeuronStakeTransfer; 193 | known_neuron_data : opt KnownNeuronData; 194 | }; 195 | type NeuronId = record { id : nat64 }; 196 | type NeuronIdOrSubaccount = variant { 197 | Subaccount : vec nat8; 198 | NeuronId : NeuronId; 199 | }; 200 | type NeuronInFlightCommand = record { 201 | command : opt Command_2; 202 | timestamp : nat64; 203 | }; 204 | type NeuronInfo = record { 205 | dissolve_delay_seconds : nat64; 206 | recent_ballots : vec BallotInfo; 207 | created_timestamp_seconds : nat64; 208 | state : int32; 209 | stake_e8s : nat64; 210 | joined_community_fund_timestamp_seconds : opt 
nat64; 211 | retrieved_at_timestamp_seconds : nat64; 212 | known_neuron_data : opt KnownNeuronData; 213 | voting_power : nat64; 214 | age_seconds : nat64; 215 | }; 216 | type NeuronStakeTransfer = record { 217 | to_subaccount : vec nat8; 218 | neuron_stake_e8s : nat64; 219 | from : opt principal; 220 | memo : nat64; 221 | from_subaccount : vec nat8; 222 | transfer_timestamp : nat64; 223 | block_height : nat64; 224 | }; 225 | type NodeProvider = record { 226 | id : opt principal; 227 | reward_account : opt AccountIdentifier; 228 | }; 229 | type Operation = variant { 230 | RemoveHotKey : RemoveHotKey; 231 | AddHotKey : AddHotKey; 232 | StopDissolving : record {}; 233 | StartDissolving : record {}; 234 | IncreaseDissolveDelay : IncreaseDissolveDelay; 235 | JoinCommunityFund : record {}; 236 | SetDissolveTimestamp : SetDissolveTimestamp; 237 | }; 238 | type Proposal = record { 239 | url : text; 240 | title : opt text; 241 | action : opt Action; 242 | summary : text; 243 | }; 244 | type ProposalData = record { 245 | id : opt NeuronId; 246 | failure_reason : opt GovernanceError; 247 | ballots : vec record { nat64; Ballot }; 248 | proposal_timestamp_seconds : nat64; 249 | reward_event_round : nat64; 250 | failed_timestamp_seconds : nat64; 251 | reject_cost_e8s : nat64; 252 | latest_tally : opt Tally; 253 | decided_timestamp_seconds : nat64; 254 | proposal : opt Proposal; 255 | proposer : opt NeuronId; 256 | wait_for_quiet_state : opt WaitForQuietState; 257 | executed_timestamp_seconds : nat64; 258 | }; 259 | type ProposalInfo = record { 260 | id : opt NeuronId; 261 | status : int32; 262 | topic : int32; 263 | failure_reason : opt GovernanceError; 264 | ballots : vec record { nat64; Ballot }; 265 | proposal_timestamp_seconds : nat64; 266 | reward_event_round : nat64; 267 | deadline_timestamp_seconds : opt nat64; 268 | failed_timestamp_seconds : nat64; 269 | reject_cost_e8s : nat64; 270 | latest_tally : opt Tally; 271 | reward_status : int32; 272 | decided_timestamp_seconds : nat64; 273 | proposal : opt Proposal; 274 | proposer : opt NeuronId; 275 | executed_timestamp_seconds : nat64; 276 | }; 277 | type RegisterVote = record { vote : int32; proposal : opt NeuronId }; 278 | type RemoveHotKey = record { hot_key_to_remove : opt principal }; 279 | type Result = variant { Ok; Err : GovernanceError }; 280 | type Result_1 = variant { Error : GovernanceError; NeuronId : NeuronId }; 281 | type Result_2 = variant { Ok : Neuron; Err : GovernanceError }; 282 | type Result_3 = variant { Ok : RewardNodeProviders; Err : GovernanceError }; 283 | type Result_4 = variant { Ok : NeuronInfo; Err : GovernanceError }; 284 | type Result_5 = variant { Ok : NodeProvider; Err : GovernanceError }; 285 | type RewardEvent = record { 286 | day_after_genesis : nat64; 287 | actual_timestamp_seconds : nat64; 288 | distributed_e8s_equivalent : nat64; 289 | settled_proposals : vec NeuronId; 290 | }; 291 | type RewardMode = variant { 292 | RewardToNeuron : RewardToNeuron; 293 | RewardToAccount : RewardToAccount; 294 | }; 295 | type RewardNodeProvider = record { 296 | node_provider : opt NodeProvider; 297 | reward_mode : opt RewardMode; 298 | amount_e8s : nat64; 299 | }; 300 | type RewardNodeProviders = record { 301 | use_registry_derived_rewards : opt bool; 302 | rewards : vec RewardNodeProvider; 303 | }; 304 | type RewardToAccount = record { to_account : opt AccountIdentifier }; 305 | type RewardToNeuron = record { dissolve_delay_seconds : nat64 }; 306 | type SetDefaultFollowees = record { 307 | default_followees : vec record { 
int32; Followees }; 308 | }; 309 | type SetDissolveTimestamp = record { dissolve_timestamp_seconds : nat64 }; 310 | type Spawn = record { 311 | percentage_to_spawn : opt nat32; 312 | new_controller : opt principal; 313 | nonce : opt nat64; 314 | }; 315 | type SpawnResponse = record { created_neuron_id : opt NeuronId }; 316 | type Split = record { amount_e8s : nat64 }; 317 | type Tally = record { 318 | no : nat64; 319 | yes : nat64; 320 | total : nat64; 321 | timestamp_seconds : nat64; 322 | }; 323 | type UpdateNodeProvider = record { reward_account : opt AccountIdentifier }; 324 | type WaitForQuietState = record { current_deadline_timestamp_seconds : nat64 }; 325 | service : (Governance) -> { 326 | claim_gtc_neurons : (principal, vec NeuronId) -> (Result); 327 | claim_or_refresh_neuron_from_account : (ClaimOrRefreshNeuronFromAccount) -> ( 328 | ClaimOrRefreshNeuronFromAccountResponse, 329 | ); 330 | get_build_metadata : () -> (text) query; 331 | get_full_neuron : (nat64) -> (Result_2) query; 332 | get_full_neuron_by_id_or_subaccount : (NeuronIdOrSubaccount) -> ( 333 | Result_2, 334 | ) query; 335 | get_monthly_node_provider_rewards : () -> (Result_3); 336 | get_neuron_ids : () -> (vec nat64) query; 337 | get_neuron_info : (nat64) -> (Result_4) query; 338 | get_neuron_info_by_id_or_subaccount : (NeuronIdOrSubaccount) -> ( 339 | Result_4, 340 | ) query; 341 | get_node_provider_by_caller : (null) -> (Result_5) query; 342 | get_pending_proposals : () -> (vec ProposalInfo) query; 343 | get_proposal_info : (nat64) -> (opt ProposalInfo) query; 344 | list_known_neurons : () -> (ListKnownNeuronsResponse) query; 345 | list_neurons : (ListNeurons) -> (ListNeuronsResponse) query; 346 | list_node_providers : () -> (ListNodeProvidersResponse) query; 347 | list_proposals : (ListProposalInfo) -> (ListProposalInfoResponse) query; 348 | manage_neuron : (ManageNeuron) -> (ManageNeuronResponse); 349 | transfer_gtc_neuron : (NeuronId, NeuronId) -> (Result); 350 | update_node_provider : (UpdateNodeProvider) -> (Result); 351 | } 352 | """ 353 | 354 | class Governance(Canister): 355 | def __init__(self, agent): 356 | super().__init__(agent, "rrkah-fqaaa-aaaaa-aaaaq-cai", governance_did) -------------------------------------------------------------------------------- /ic/common/ledger.py: -------------------------------------------------------------------------------- 1 | from ic.canister import Canister 2 | 3 | ledger_did = """ 4 | // This is the official Ledger interface that is guaranteed to be backward compatible. 5 | 6 | // Amount of tokens, measured in 10^-8 of a token. 7 | type Tokens = record { 8 | e8s : nat64; 9 | }; 10 | 11 | // Number of nanoseconds from the UNIX epoch in UTC timezone. 12 | type TimeStamp = record { 13 | timestamp_nanos: nat64; 14 | }; 15 | 16 | // AccountIdentifier is a 32-byte array. 17 | // The first 4 bytes is big-endian encoding of a CRC32 checksum of the last 28 bytes. 18 | type AccountIdentifier = blob; 19 | 20 | // Subaccount is an arbitrary 32-byte byte array. 21 | // Ledger uses subaccounts to compute the source address, which enables one 22 | // principal to control multiple ledger accounts. 23 | type SubAccount = blob; 24 | 25 | // Sequence number of a block produced by the ledger. 26 | type BlockIndex = nat64; 27 | 28 | // An arbitrary number associated with a transaction. 29 | // The caller can set it in a `transfer` call as a correlation identifier. 30 | type Memo = nat64; 31 | 32 | // Arguments for the `transfer` call. 
33 | type TransferArgs = record { 34 | // Transaction memo. 35 | // See comments for the `Memo` type. 36 | memo: Memo; 37 | // The amount that the caller wants to transfer to the destination address. 38 | amount: Tokens; 39 | // The amount that the caller pays for the transaction. 40 | // Must be 10000 e8s. 41 | fee: Tokens; 42 | // The subaccount from which the caller wants to transfer funds. 43 | // If null, the ledger uses the default (all zeros) subaccount to compute the source address. 44 | // See comments for the `SubAccount` type. 45 | from_subaccount: opt SubAccount; 46 | // The destination account. 47 | // If the transfer is successful, the balance of this address increases by `amount`. 48 | to: AccountIdentifier; 49 | // The point in time when the caller created this request. 50 | // If null, the ledger uses current IC time as the timestamp. 51 | created_at_time: opt TimeStamp; 52 | }; 53 | 54 | type TransferError = variant { 55 | // The fee that the caller specified in the transfer request was not the one that ledger expects. 56 | // The caller can change the transfer fee to the `expected_fee` and retry the request. 57 | BadFee : record { expected_fee : Tokens; }; 58 | // The account specified by the caller doesn't have enough funds. 59 | InsufficientFunds : record { balance: Tokens; }; 60 | // The request is too old. 61 | // The ledger only accepts requests created within 24 hours window. 62 | // This is a non-recoverable error. 63 | TxTooOld : record { allowed_window_nanos: nat64 }; 64 | // The caller specified `created_at_time` that is too far in future. 65 | // The caller can retry the request later. 66 | TxCreatedInFuture : null; 67 | // The ledger has already executed the request. 68 | // `duplicate_of` field is equal to the index of the block containing the original transaction. 69 | TxDuplicate : record { duplicate_of: BlockIndex; } 70 | }; 71 | 72 | type TransferResult = variant { 73 | Ok : BlockIndex; 74 | Err : TransferError; 75 | }; 76 | 77 | // Arguments for the `account_balance` call. 78 | type AccountBalanceArgs = record { 79 | account: AccountIdentifier; 80 | }; 81 | 82 | type TransferFeeArg = record {}; 83 | 84 | type TransferFee = record { 85 | // The fee to pay to perform a transfer 86 | transfer_fee: Tokens; 87 | }; 88 | 89 | type GetBlocksArgs = record { 90 | // The index of the first block to fetch. 91 | start : BlockIndex; 92 | // Max number of blocks to fetch. 93 | length : nat64; 94 | }; 95 | 96 | type Operation = variant { 97 | Mint : record { 98 | to : AccountIdentifier; 99 | amount : Tokens; 100 | }; 101 | Burn : record { 102 | from : AccountIdentifier; 103 | amount : Tokens; 104 | }; 105 | Transfer : record { 106 | from : AccountIdentifier; 107 | to : AccountIdentifier; 108 | amount : Tokens; 109 | fee : Tokens; 110 | }; 111 | }; 112 | 113 | type Transaction = record { 114 | memo : Memo; 115 | operation : opt Operation; 116 | created_at_time : TimeStamp; 117 | }; 118 | 119 | type Block = record { 120 | parent_hash : opt blob; 121 | transaction : Transaction; 122 | timestamp : TimeStamp; 123 | }; 124 | 125 | // A prefix of the block range specified in the [GetBlocksArgs] request. 126 | type BlockRange = record { 127 | // A prefix of the requested block range. 128 | // The index of the first block is equal to [GetBlocksArgs.from]. 129 | // 130 | // Note that the number of blocks might be less than the requested 131 | // [GetBlocksArgs.len] for various reasons, for example: 132 | // 133 | // 1. 
The query might have hit the replica with an outdated state 134 | // that doesn't have the full block range yet. 135 | // 2. The requested range is too large to fit into a single reply. 136 | // 137 | // NOTE: the list of blocks can be empty if: 138 | // 1. [GetBlocksArgs.len] was zero. 139 | // 2. [GetBlocksArgs.from] was larger than the last block known to the canister. 140 | blocks : vec Block; 141 | }; 142 | 143 | // An error indicating that the arguments passed to [QueryArchiveFn] were invalid. 144 | type QueryArchiveError = variant { 145 | // [GetBlocksArgs.from] argument was smaller than the first block 146 | // served by the canister that received the request. 147 | BadFirstBlockIndex : record { 148 | requested_index : BlockIndex; 149 | first_valid_index : BlockIndex; 150 | }; 151 | 152 | // Reserved for future use. 153 | Other : record { 154 | error_code : nat64; 155 | error_message : text; 156 | }; 157 | }; 158 | 159 | type QueryArchiveResult = variant { 160 | // Successfully fetched zero or more blocks. 161 | Ok : BlockRange; 162 | // The [GetBlocksArgs] request was invalid. 163 | Err : QueryArchiveError; 164 | }; 165 | 166 | // A function that is used for fetching archived ledger blocks. 167 | type QueryArchiveFn = func (GetBlocksArgs) -> (QueryArchiveResult) query; 168 | 169 | // The result of a "query_blocks" call. 170 | // 171 | // The structure of the result is somewhat complicated because the main ledger canister might 172 | // not have all the blocks that the caller requested: One or more "archive" canisters might 173 | // store some of the requested blocks. 174 | // 175 | // Note: as of Q4 2021 when this interface is authored, the IC doesn't support making nested 176 | // query calls within a query call. 177 | type QueryBlocksResponse = record { 178 | // The total number of blocks in the chain. 179 | // If the chain length is positive, the index of the last block is `chain_len - 1`. 180 | chain_length : nat64; 181 | 182 | // System certificate for the hash of the latest block in the chain. 183 | // Only present if `query_blocks` is called in a non-replicated query context. 184 | certificate : opt blob; 185 | 186 | // List of blocks that were available in the ledger when it processed the call. 187 | // 188 | // The blocks form a contiguous range, with the first block having index 189 | // [first_block_index] (see below), and the last block having index 190 | // [first_block_index] + len(blocks) - 1. 191 | // 192 | // The block range can be an arbitrary sub-range of the originally requested range. 193 | blocks : vec Block; 194 | 195 | // The index of the first block in "blocks". 196 | // If the blocks vector is empty, the exact value of this field is not specified. 197 | first_block_index : BlockIndex; 198 | 199 | // Encoding of instructions for fetching archived blocks whose indices fall into the 200 | // requested range. 201 | // 202 | // For each entry `e` in [archived_blocks], `[e.from, e.from + len)` is a sub-range 203 | // of the originally requested block range. 204 | archived_blocks : vec record { 205 | // The index of the first archived block that can be fetched using the callback. 206 | start : BlockIndex; 207 | 208 | // The number of blocks that can be fetch using the callback. 209 | length : nat64; 210 | 211 | // The function that should be called to fetch the archived blocks. 212 | // The range of the blocks accessible using this function is given by [from] 213 | // and [len] fields above. 
214 | callback : QueryArchiveFn; 215 | }; 216 | }; 217 | 218 | type Archive = record { 219 | canister_id: principal; 220 | }; 221 | 222 | type Archives = record { 223 | archives: vec Archive; 224 | }; 225 | 226 | service : { 227 | // Transfers tokens from a subaccount of the caller to the destination address. 228 | // The source address is computed from the principal of the caller and the specified subaccount. 229 | // When successful, returns the index of the block containing the transaction. 230 | transfer : (TransferArgs) -> (TransferResult); 231 | 232 | // Returns the amount of Tokens on the specified account. 233 | account_balance : (AccountBalanceArgs) -> (Tokens) query; 234 | 235 | // Returns the current transfer_fee. 236 | transfer_fee : (TransferFeeArg) -> (TransferFee) query; 237 | 238 | // Queries blocks in the specified range. 239 | query_blocks : (GetBlocksArgs) -> (QueryBlocksResponse) query; 240 | 241 | // Returns token symbol. 242 | symbol : () -> (record { symbol: text }) query; 243 | 244 | // Returns token name. 245 | name : () -> (record { name: text }) query; 246 | 247 | // Returns token decimals. 248 | decimals : () -> (record { decimals: nat32 }) query; 249 | 250 | // Returns the existing archive canisters information. 251 | archives : () -> (Archives) query; 252 | } 253 | """ 254 | 255 | class Ledger(Canister): 256 | def __init__(self, agent): 257 | super().__init__(agent, "ryjl3-tyaaa-aaaaa-aaaba-cai", ledger_did) 258 | -------------------------------------------------------------------------------- /ic/common/management.py: -------------------------------------------------------------------------------- 1 | 2 | # management canister interface `aaaaa-aa` 3 | # wrap basic interfaces provided by the management canister: 4 | # create_canister, install_code, canister_status, etc. 
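# A minimal usage sketch. Assumptions not shown in this file: the Agent wrapper
# (ic/agent.py) is constructed as Agent(identity, client), and the calling identity
# is a controller of the target canister (canister_status and most other management
# methods are restricted to controllers).
#
#   from ic.agent import Agent
#   from ic.client import Client
#   from ic.identity import Identity
#   from ic.common.management import Management
#
#   agent = Agent(Identity(), Client())
#   mgmt = Management(agent)
#   # Because this wrapper targets the virtual canister 'aaaaa-aa', ic/canister.py
#   # takes the effective canister id from the 'canister_id' field of the first
#   # record argument when routing the request.
#   status = mgmt.canister_status({'canister_id': '<controlled canister principal>'})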
5 | 6 | from ic.canister import Canister 7 | 8 | 9 | management_did = """ 10 | type canister_id = principal; 11 | type user_id = principal; 12 | type wasm_module = blob; 13 | 14 | type canister_settings = record { 15 | controllers : opt vec principal; 16 | compute_allocation : opt nat; 17 | memory_allocation : opt nat; 18 | freezing_threshold : opt nat; 19 | }; 20 | 21 | type definite_canister_settings = record { 22 | controllers : vec principal; 23 | compute_allocation : nat; 24 | memory_allocation : nat; 25 | freezing_threshold : nat; 26 | }; 27 | 28 | service ic : { 29 | create_canister : (record { 30 | settings : opt canister_settings 31 | }) -> (record {canister_id : canister_id}); 32 | update_settings : (record { 33 | canister_id : principal; 34 | settings : canister_settings 35 | }) -> (); 36 | install_code : (record { 37 | mode : variant {install; reinstall; upgrade}; 38 | canister_id : canister_id; 39 | wasm_module : wasm_module; 40 | arg : blob; 41 | }) -> (); 42 | uninstall_code : (record {canister_id : canister_id}) -> (); 43 | start_canister : (record {canister_id : canister_id}) -> (); 44 | stop_canister : (record {canister_id : canister_id}) -> (); 45 | canister_status : (record {canister_id : canister_id}) -> (record { 46 | status : variant { running; stopping; stopped }; 47 | settings: definite_canister_settings; 48 | module_hash: opt blob; 49 | memory_size: nat; 50 | cycles: nat; 51 | }); 52 | delete_canister : (record {canister_id : canister_id}) -> (); 53 | deposit_cycles : (record {canister_id : canister_id}) -> (); 54 | 55 | // provisional interfaces for the pre-ledger world 56 | provisional_create_canister_with_cycles : (record { 57 | amount: opt nat; 58 | settings : opt canister_settings 59 | }) -> (record {canister_id : canister_id}); 60 | provisional_top_up_canister : 61 | (record { canister_id: canister_id; amount: nat }) -> (); 62 | } 63 | """ 64 | 65 | class Management(Canister): 66 | def __init__(self, agent): 67 | super().__init__(agent, "aaaaa-aa", management_did) 68 | -------------------------------------------------------------------------------- /ic/constants.py: -------------------------------------------------------------------------------- 1 | IC_REQUEST_DOMAIN_SEPARATOR = b"\x0Aic-request" 2 | IC_STATE_ROOT_DOMAIN_SEPARATOR = b"\x0Dic-state-root" 3 | IC_DELEGATION_DOMIAN_SEPARATOR = b"\x1Aic-request-auth-delegation" 4 | 5 | IC_ROOT_KEY = b"\x30\x81\x82\x30\x1d\x06\x0d\x2b\x06\x01\x04\x01\x82\xdc\x7c\x05\x03\x01\x02\x01\x06\x0c\x2b\x06\x01\x04\x01\x82\xdc\x7c\x05\x03\x02\x01\x03\x61\x00\x81\x4c\x0e\x6e\xc7\x1f\xab\x58\x3b\x08\xbd\x81\x37\x3c\x25\x5c\x3c\x37\x1b\x2e\x84\x86\x3c\x98\xa4\xf1\xe0\x8b\x74\x23\x5d\x14\xfb\x5d\x9c\x0c\xd5\x46\xd9\x68\x5f\x91\x3a\x0c\x0b\x2c\xc5\x34\x15\x83\xbf\x4b\x43\x92\xe4\x67\xdb\x96\xd6\x5b\x9b\xb4\xcb\x71\x71\x12\xf8\x47\x2e\x0d\x5a\x4d\x14\x50\x5f\xfd\x74\x84\xb0\x12\x91\x09\x1c\x5f\x87\xb9\x88\x83\x46\x3f\x98\x09\x1a\x0b\xaa\xae" 6 | -------------------------------------------------------------------------------- /ic/identity.py: -------------------------------------------------------------------------------- 1 | import hashlib 2 | import json 3 | 4 | from ecdsa.curves import Ed25519, SECP256k1 5 | from .principal import Principal 6 | import ecdsa 7 | from mnemonic import Mnemonic 8 | 9 | class Identity: 10 | def __init__(self, privkey = "", type = "ed25519", anonymous = False): 11 | privkey = bytes(bytearray.fromhex(privkey)) 12 | self.anonymous = anonymous 13 | if anonymous: 14 | return 15 | self.key_type = type 16 
| if type == 'secp256k1': 17 | if len(privkey) > 0: 18 | self.sk = ecdsa.SigningKey.from_string(privkey, curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256) 19 | else: 20 | self.sk = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1, hashfunc=hashlib.sha256) 21 | self._privkey = self.sk.to_string().hex() 22 | self.vk = self.sk.get_verifying_key() 23 | self._pubkey = self.vk.to_string().hex() 24 | self._der_pubkey = self.vk.to_der() 25 | elif type == 'ed25519': 26 | if len(privkey) > 0: 27 | self.sk = ecdsa.SigningKey.from_string(privkey, curve=ecdsa.Ed25519) 28 | else: 29 | self.sk = ecdsa.SigningKey.generate(curve=ecdsa.Ed25519) 30 | self._privkey = self.sk.to_string().hex() 31 | self.vk = self.sk.get_verifying_key() 32 | self._pubkey = self.vk.to_string().hex() 33 | self._der_pubkey = self.vk.to_der() 34 | else: 35 | raise 'unsupported identity type' 36 | 37 | @staticmethod 38 | def from_seed(mnemonic: str): 39 | mnemo = Mnemonic('english') 40 | seed = mnemo.to_seed(mnemonic).hex() 41 | privkey = seed[:64] 42 | type = "ed25519" 43 | return Identity(privkey=privkey, type=type) 44 | 45 | @staticmethod 46 | def from_pem(pem: str): 47 | key = ecdsa.SigningKey.from_pem(pem) 48 | privkey = key.to_string().hex() 49 | type = "unknown" 50 | if key.curve == Ed25519: 51 | type = 'ed25519' 52 | elif key.curve == SECP256k1: 53 | type = 'secp256k1' 54 | return Identity(privkey=privkey, type=type) 55 | 56 | def to_pem(self): 57 | pem = self.sk.to_pem(format="pkcs8") 58 | return pem 59 | 60 | def sender(self): 61 | if self.anonymous: 62 | return Principal.anonymous() 63 | return Principal.self_authenticating(self._der_pubkey) 64 | 65 | def sign(self, msg: bytes): 66 | if self.anonymous: 67 | return (None, None) 68 | if self.key_type == 'ed25519': 69 | sig = self.sk.sign(msg) 70 | return (self._der_pubkey, sig) 71 | elif self.key_type == 'secp256k1': 72 | sig = self.sk.sign(msg) 73 | return (self._der_pubkey, sig) 74 | 75 | def verify(self, msg, sig: bytes) -> bool : 76 | if type(msg) == str: 77 | msg = bytes.fromhex(msg) 78 | if self.anonymous: 79 | return False 80 | return self.vk.verify(sig, msg) 81 | 82 | @property 83 | def privkey(self): 84 | return self._privkey 85 | 86 | @property 87 | def pubkey(self): 88 | return self._pubkey 89 | 90 | @property 91 | def der_pubkey(self): 92 | return self._der_pubkey 93 | 94 | def __repr__(self): 95 | return "Identity(" + self.key_type + ', ' + self._privkey + ", " + self._pubkey + ")" 96 | 97 | def __str__(self): 98 | return "(" + self.key_type + ', ' + self._privkey + ", " + self._pubkey + ")" 99 | 100 | def _map_delegation(delegation: dict): 101 | return { 102 | "delegation": { 103 | "expiration": int(delegation["delegation"]["expiration"], 16), 104 | "pubkey": bytes.fromhex(delegation["delegation"]["pubkey"]) 105 | }, 106 | "signature": bytes.fromhex(delegation["signature"]) 107 | } 108 | 109 | class DelegateIdentity: 110 | def __init__(self, identity: Identity, delegation): 111 | self.identity = identity 112 | self._delegations = [_map_delegation(d) for d in delegation['delegations']] 113 | self._der_pubkey = bytes.fromhex(delegation["publicKey"]) 114 | 115 | def sign(self, msg: bytes): 116 | return self.identity.sign(msg) 117 | 118 | def sender(self): 119 | return Principal.self_authenticating(self._der_pubkey) 120 | 121 | @property 122 | def delegations(self): 123 | return self._delegations 124 | 125 | @property 126 | def der_pubkey(self): 127 | return self._der_pubkey 128 | 129 | @staticmethod 130 | def from_json(ic_identity: str, ic_delegation: str): 131 | 
parsed_ic_identity = json.loads(ic_identity) 132 | parsed_ic_delegation = json.loads(ic_delegation) 133 | 134 | return DelegateIdentity( 135 | Identity(parsed_ic_identity[1][:64]), 136 | parsed_ic_delegation 137 | ) 138 | 139 | def __repr__(self): 140 | return "DelegationIdentity(" + self.identity + ',\n' + self.delegation + ")" 141 | 142 | def __str__(self): 143 | return "(" + self.identity + ',\n' + self.delegation + ")" -------------------------------------------------------------------------------- /ic/parser/DIDEmitter.py: -------------------------------------------------------------------------------- 1 | from ic.candid import Types 2 | from .DIDLexer import DIDLexer 3 | from .DIDParser import DIDParser 4 | from .DIDParserListener import DIDParserListener 5 | 6 | class DIDEmitter(DIDParserListener): 7 | def __init__(self): 8 | self.data = { 9 | "nat": Types.Nat, 10 | "nat8": Types.Nat8, 11 | 'nat16': Types.Nat16, 12 | 'nat32': Types.Nat32, 13 | 'nat64': Types.Nat64, 14 | 'int': Types.Int, 15 | 'int8': Types.Int8, 16 | 'int16': Types.Int16, 17 | 'int32': Types.Int32, 18 | 'int64': Types.Int64, 19 | 'float32': Types.Float32, 20 | 'float64': Types.Float64, 21 | 'bool': Types.Bool, 22 | 'text': Types.Text, 23 | 'null': Types.Null, 24 | 'reserved': Types.Reserved, 25 | 'empty': Types.Empty, 26 | 'principal': Types.Principal, 27 | 'blob': Types.Vec(Types.Nat8) 28 | } 29 | self.rec = {} 30 | self.datatype = None 31 | self.datalist = [] 32 | self.cache = {} 33 | self.argmode = False 34 | 35 | def getParsedData(self, name: str): 36 | return self.data[name] 37 | 38 | def getDataType(self): 39 | return self.datatype 40 | 41 | def getActor(self): 42 | try: 43 | return self.data['actor'] 44 | except: 45 | raise KeyError("Actor not exist") 46 | 47 | # Exit a parse tree produced by DIDParser#program. 48 | def exitProgram(self, ctx:DIDParser.ProgramContext): 49 | self.cache.clear() 50 | if len(self.rec) != 0: 51 | raise ValueError("Some type undefined:" + str(self.rec)) 52 | 53 | 54 | # Exit a parse tree produced by DIDParser#defination. 55 | def exitDefination(self, ctx:DIDParser.DefinationContext): 56 | typename = ctx.Name().getText() 57 | if typename in self.data: 58 | raise ValueError("Duplicated defination " + typename) 59 | if typename in self.rec: 60 | ref = self.rec[typename] 61 | ref.fill(self.datatype) 62 | self.data[typename] = ref 63 | del self.rec[typename] 64 | else: 65 | self.data[typename] = self.datatype 66 | 67 | 68 | # Exit a parse tree produced by DIDParser#actor. 69 | def exitActor(self, ctx:DIDParser.ActorContext): 70 | if ctx.tuptype() != None: 71 | args = self.cache[ctx.tuptype()] 72 | else: 73 | args = [] 74 | if ctx.actortype() != None: 75 | actor = self.cache[ctx.actortype()] 76 | else: 77 | num = len(ctx.Name()) 78 | name = ctx.Name(num - 1) 79 | actor = self.data[name.getText()] 80 | self.datatype = { 81 | "arguments": args, 82 | "methods": actor 83 | } 84 | self.data["actor"] = self.datatype 85 | 86 | 87 | # Exit a parse tree produced by DIDParser#actortype. 88 | def exitActortype(self, ctx:DIDParser.ActortypeContext): 89 | actor = {} 90 | for method in ctx.methodtype(): 91 | m = self.cache[method] 92 | actor[m[0]] = m[1] 93 | self.datatype = actor 94 | self.cache[ctx] = self.datatype 95 | 96 | 97 | # Exit a parse tree produced by DIDParser#Name. 
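# A minimal sketch of how this emitter can be driven, using the standard ANTLR
# walking pattern and the modules defined in this package (illustrative only, not
# a verbatim copy of how the rest of the library wires it up):
#
#   from antlr4 import InputStream, CommonTokenStream, ParseTreeWalker
#   from ic.parser.DIDLexer import DIDLexer
#   from ic.parser.DIDParser import DIDParser
#   from ic.parser.DIDEmitter import DIDEmitter
#
#   def parse_did(did_text: str):
#       lexer = DIDLexer(InputStream(did_text))
#       parser = DIDParser(CommonTokenStream(lexer))
#       tree = parser.program()                  # 'program' is the grammar entry rule
#       emitter = DIDEmitter()
#       ParseTreeWalker().walk(emitter, tree)
#       return emitter.getActor()                # {'arguments': [...], 'methods': {...}}
#
# getActor() raises KeyError when the .did source contains no service definition.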
98 | def exitName(self, ctx:DIDParser.NameContext): 99 | typename = ctx.Name().getText() 100 | if typename in self.data: 101 | # already in defined list 102 | self.datatype = self.data[typename] 103 | elif typename in self.rec: 104 | # already in rec list 105 | self.datatype = self.rec[typename] 106 | else: 107 | # new recursive type 108 | self.rec[typename] = Types.Rec() 109 | self.datatype = self.rec[typename] 110 | 111 | if self.argmode: 112 | self.cache[ctx] = self.datatype 113 | 114 | 115 | # Exit a parse tree produced by DIDParser#Primitive. 116 | def exitPrimitive(self, ctx:DIDParser.PrimitiveContext): 117 | prim = ctx.PrimType().getText() 118 | self.datatype = self.data[prim] 119 | 120 | if self.argmode: 121 | self.cache[ctx] = self.datatype 122 | 123 | # Exit a parse tree produced by DIDParser#Component. 124 | def exitComponent(self, ctx:DIDParser.ComponentContext): 125 | if self.argmode: 126 | self.cache[ctx] = self.datatype 127 | 128 | 129 | # Exit a parse tree produced by DIDParser#Option. 130 | def exitOption(self, ctx:DIDParser.OptionContext): 131 | self.datatype = Types.Opt(self.datatype) 132 | 133 | 134 | # Exit a parse tree produced by DIDParser#Vector. 135 | def exitVector(self, ctx:DIDParser.VectorContext): 136 | self.datatype = Types.Vec(self.datatype) 137 | 138 | 139 | # Exit a parse tree produced by DIDParser#EmptyRecord. 140 | def exitEmptyRecord(self, ctx:DIDParser.EmptyRecordContext): 141 | self.datatype = Types.Record({}) 142 | 143 | 144 | # Exit a parse tree produced by DIDParser#Record. 145 | def exitRecord(self, ctx:DIDParser.RecordContext): 146 | isTuple = False 147 | isObject = False 148 | k = 0 149 | record = {} 150 | for field in ctx.recordfield(): 151 | val = self.cache[field] 152 | if val[0] == None: 153 | key = "_" + str(k) + "_" 154 | k += 1 155 | isTuple = True 156 | else: 157 | key = val[0] 158 | isObject = True 159 | record[key] = val[1] 160 | if isTuple and isObject: 161 | raise ValueError("Anonymous record field not support") 162 | if isTuple: 163 | self.datatype = Types.Tuple(*record.values()) 164 | else: 165 | self.datatype = Types.Record(record) 166 | 167 | # Exit a parse tree produced by DIDParser#EmptyVariant. 168 | def exitEmptyVariant(self, ctx:DIDParser.EmptyVariantContext): 169 | self.datatype = Types.Variant({}) 170 | 171 | 172 | # Exit a parse tree produced by DIDParser#Variant. 173 | def exitVariant(self, ctx:DIDParser.VariantContext): 174 | variant = {} 175 | for field in ctx.variantfield(): 176 | val = self.cache[field] 177 | variant[val[0]] = val[1] 178 | self.datatype = Types.Variant(variant) 179 | 180 | 181 | # Exit a parse tree produced by DIDParser#RecordKV. 182 | def exitRecordKV(self, ctx:DIDParser.RecordKVContext): 183 | key = ctx.Name().getText() 184 | key = key.strip('"') 185 | self.cache[ctx] = (key, self.datatype) 186 | 187 | 188 | # Exit a parse tree produced by DIDParser#RecordData. 189 | def exitRecordData(self, ctx:DIDParser.RecordDataContext): 190 | self.cache[ctx] = (None, self.datatype) 191 | 192 | 193 | # Exit a parse tree produced by DIDParser#VariantKV. 194 | def exitVariantKV(self, ctx:DIDParser.VariantKVContext): 195 | key = ctx.Name().getText() 196 | key = key.strip('"') 197 | self.cache[ctx] = (key, self.datatype) 198 | 199 | 200 | # Exit a parse tree produced by DIDParser#VariantName. 
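# Illustrative mappings produced by the record/variant callbacks above (a sketch,
# not exhaustive): named fields become Types.Record, anonymous fields become
# Types.Tuple, and mixing the two raises ValueError.
#
#   record { name : text; age : nat8 }   ->  Types.Record({'name': Types.Text, 'age': Types.Nat8})
#   record { text; nat8 }                ->  Types.Tuple(Types.Text, Types.Nat8)
#   variant { Ok : nat; Err : text }     ->  Types.Variant({'Ok': Types.Nat, 'Err': Types.Text})
#   opt vec nat8                         ->  Types.Opt(Types.Vec(Types.Nat8))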
201 | def exitVariantName(self, ctx:DIDParser.VariantNameContext): 202 | key = ctx.Name().getText() 203 | key = key.strip('"') 204 | self.cache[ctx] = (key, Types.Null) 205 | 206 | 207 | # Exit a parse tree produced by DIDParser#functype. 208 | def exitFunctype(self, ctx:DIDParser.FunctypeContext): 209 | argCtx = ctx.getChild(0, ttype=DIDParser.TuptypeContext) 210 | args = self.cache[argCtx] 211 | retCtx = ctx.getChild(1, ttype=DIDParser.TuptypeContext) 212 | rets = self.cache[retCtx] 213 | if ctx.funcann() == None: 214 | anno = [] 215 | else: 216 | anno = [ctx.funcann().getText()] 217 | self.datatype = Types.Func(args, rets, anno) 218 | 219 | # Exit a parse tree produced by DIDParser#EmptyTuple. 220 | def exitEmptyTuple(self, ctx:DIDParser.EmptyTupleContext): 221 | self.datatype = [] 222 | self.cache[ctx] = self.datatype 223 | 224 | 225 | # Enter a parse tree produced by DIDParser#Tuple. 226 | def enterTuple(self, ctx:DIDParser.TupleContext): 227 | self.datalist = [] 228 | 229 | # Exit a parse tree produced by DIDParser#Tuple. 230 | def exitTuple(self, ctx:DIDParser.TupleContext): 231 | self.datatype = self.datalist 232 | self.cache[ctx] = self.datatype 233 | 234 | 235 | def enterArgtypes(self, ctx: DIDParser.ArgtypesContext): 236 | self.argmode = True 237 | 238 | # Exit a parse tree produced by DIDParser#argtypes. 239 | def exitArgtypes(self, ctx:DIDParser.ArgtypesContext): 240 | for arg in ctx.datatype(): 241 | self.datalist.append(self.cache[arg]) 242 | self.argmode = False 243 | 244 | 245 | # Exit a parse tree produced by DIDParser#methodtype. 246 | def exitMethodtype(self, ctx:DIDParser.MethodtypeContext): 247 | name = ctx.Name().getText() 248 | self.datatype = (name, self.datatype) 249 | self.cache[ctx] = self.datatype -------------------------------------------------------------------------------- /ic/parser/DIDLexer.py: -------------------------------------------------------------------------------- 1 | # Generated from DIDLexer.g4 by ANTLR 4.9.3 2 | from antlr4 import * 3 | from io import StringIO 4 | import sys 5 | if sys.version_info[1] > 5: 6 | from typing import TextIO 7 | else: 8 | from typing.io import TextIO 9 | 10 | 11 | 12 | def serializedATN(): 13 | with StringIO() as buf: 14 | buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\32") 15 | buf.write("\u011a\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") 16 | buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") 17 | buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") 18 | buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") 19 | buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\3\2") 20 | buf.write("\3\2\3\2\3\2\7\2@\n\2\f\2\16\2C\13\2\3\2\3\2\3\2\3\2\3") 21 | buf.write("\2\3\3\3\3\3\3\3\3\7\3N\n\3\f\3\16\3Q\13\3\3\3\3\3\3\4") 22 | buf.write("\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3") 23 | buf.write("\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b") 24 | buf.write("\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3") 25 | buf.write("\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b") 26 | buf.write("\3\b\3\b\5\b\u008e\n\b\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t") 27 | buf.write("\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3") 28 | buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t") 29 | buf.write("\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3") 30 | buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\t\u00ca\n\t\3") 31 | buf.write("\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f") 32 | 
buf.write("\3\f\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16") 33 | buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\17\3\20") 34 | buf.write("\3\20\3\21\3\21\3\22\3\22\3\22\5\22\u00f7\n\22\3\23\3") 35 | buf.write("\23\3\24\5\24\u00fc\n\24\3\24\3\24\7\24\u0100\n\24\f\24") 36 | buf.write("\16\24\u0103\13\24\3\24\5\24\u0106\n\24\3\25\3\25\3\26") 37 | buf.write("\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\31\3\32\3\32\3\33") 38 | buf.write("\3\33\3\34\3\34\3\35\3\35\3A\2\36\3\3\5\4\7\5\t\6\13\7") 39 | buf.write("\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\2") 40 | buf.write("!\2#\2%\2\'\21)\22+\23-\24/\25\61\26\63\27\65\30\67\31") 41 | buf.write("9\32\3\2\7\4\2\f\f\17\17\5\2\13\f\17\17\"\"\4\2C\\c|\3") 42 | buf.write("\2\62;\5\2C\\aac|\2\u012d\2\3\3\2\2\2\2\5\3\2\2\2\2\7") 43 | buf.write("\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2") 44 | buf.write("\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2") 45 | buf.write("\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\'\3\2\2\2") 46 | buf.write("\2)\3\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2") 47 | buf.write("\2\2\2\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2") 48 | buf.write("\3;\3\2\2\2\5I\3\2\2\2\7T\3\2\2\2\tX\3\2\2\2\13]\3\2\2") 49 | buf.write("\2\rc\3\2\2\2\17\u008d\3\2\2\2\21\u00c9\3\2\2\2\23\u00cb") 50 | buf.write("\3\2\2\2\25\u00cf\3\2\2\2\27\u00d3\3\2\2\2\31\u00da\3") 51 | buf.write("\2\2\2\33\u00e2\3\2\2\2\35\u00ea\3\2\2\2\37\u00ef\3\2") 52 | buf.write("\2\2!\u00f1\3\2\2\2#\u00f6\3\2\2\2%\u00f8\3\2\2\2\'\u00fb") 53 | buf.write("\3\2\2\2)\u0107\3\2\2\2+\u0109\3\2\2\2-\u010b\3\2\2\2") 54 | buf.write("/\u010d\3\2\2\2\61\u010f\3\2\2\2\63\u0112\3\2\2\2\65\u0114") 55 | buf.write("\3\2\2\2\67\u0116\3\2\2\29\u0118\3\2\2\2;<\7\61\2\2<=") 56 | buf.write("\7,\2\2=A\3\2\2\2>@\13\2\2\2?>\3\2\2\2@C\3\2\2\2AB\3\2") 57 | buf.write("\2\2A?\3\2\2\2BD\3\2\2\2CA\3\2\2\2DE\7,\2\2EF\7\61\2\2") 58 | buf.write("FG\3\2\2\2GH\b\2\2\2H\4\3\2\2\2IJ\7\61\2\2JK\7\61\2\2") 59 | buf.write("KO\3\2\2\2LN\n\2\2\2ML\3\2\2\2NQ\3\2\2\2OM\3\2\2\2OP\3") 60 | buf.write("\2\2\2PR\3\2\2\2QO\3\2\2\2RS\b\3\2\2S\6\3\2\2\2TU\t\3") 61 | buf.write("\2\2UV\3\2\2\2VW\b\4\2\2W\b\3\2\2\2XY\7v\2\2YZ\7{\2\2") 62 | buf.write("Z[\7r\2\2[\\\7g\2\2\\\n\3\2\2\2]^\7s\2\2^_\7w\2\2_`\7") 63 | buf.write("g\2\2`a\7t\2\2ab\7{\2\2b\f\3\2\2\2cd\7q\2\2de\7p\2\2e") 64 | buf.write("f\7g\2\2fg\7y\2\2gh\7c\2\2hi\7{\2\2i\16\3\2\2\2j\u008e") 65 | buf.write("\5\21\t\2kl\7d\2\2lm\7q\2\2mn\7q\2\2n\u008e\7n\2\2op\7") 66 | buf.write("v\2\2pq\7g\2\2qr\7z\2\2r\u008e\7v\2\2st\7p\2\2tu\7w\2") 67 | buf.write("\2uv\7n\2\2v\u008e\7n\2\2wx\7t\2\2xy\7g\2\2yz\7u\2\2z") 68 | buf.write("{\7g\2\2{|\7t\2\2|}\7x\2\2}~\7g\2\2~\u008e\7f\2\2\177") 69 | buf.write("\u0080\7g\2\2\u0080\u0081\7o\2\2\u0081\u0082\7r\2\2\u0082") 70 | buf.write("\u0083\7v\2\2\u0083\u008e\7{\2\2\u0084\u0085\7r\2\2\u0085") 71 | buf.write("\u0086\7t\2\2\u0086\u0087\7k\2\2\u0087\u0088\7p\2\2\u0088") 72 | buf.write("\u0089\7e\2\2\u0089\u008a\7k\2\2\u008a\u008b\7r\2\2\u008b") 73 | buf.write("\u008c\7c\2\2\u008c\u008e\7n\2\2\u008dj\3\2\2\2\u008d") 74 | buf.write("k\3\2\2\2\u008do\3\2\2\2\u008ds\3\2\2\2\u008dw\3\2\2\2") 75 | buf.write("\u008d\177\3\2\2\2\u008d\u0084\3\2\2\2\u008e\20\3\2\2") 76 | buf.write("\2\u008f\u0090\7p\2\2\u0090\u0091\7c\2\2\u0091\u00ca\7") 77 | buf.write("v\2\2\u0092\u0093\7p\2\2\u0093\u0094\7c\2\2\u0094\u0095") 78 | buf.write("\7v\2\2\u0095\u00ca\7:\2\2\u0096\u0097\7p\2\2\u0097\u0098") 79 | buf.write("\7c\2\2\u0098\u0099\7v\2\2\u0099\u009a\7\63\2\2\u009a") 80 | 
buf.write("\u00ca\78\2\2\u009b\u009c\7p\2\2\u009c\u009d\7c\2\2\u009d") 81 | buf.write("\u009e\7v\2\2\u009e\u009f\7\65\2\2\u009f\u00ca\7\64\2") 82 | buf.write("\2\u00a0\u00a1\7p\2\2\u00a1\u00a2\7c\2\2\u00a2\u00a3\7") 83 | buf.write("v\2\2\u00a3\u00a4\78\2\2\u00a4\u00ca\7\66\2\2\u00a5\u00a6") 84 | buf.write("\7k\2\2\u00a6\u00a7\7p\2\2\u00a7\u00ca\7v\2\2\u00a8\u00a9") 85 | buf.write("\7k\2\2\u00a9\u00aa\7p\2\2\u00aa\u00ab\7v\2\2\u00ab\u00ca") 86 | buf.write("\7:\2\2\u00ac\u00ad\7k\2\2\u00ad\u00ae\7p\2\2\u00ae\u00af") 87 | buf.write("\7v\2\2\u00af\u00b0\7\63\2\2\u00b0\u00ca\78\2\2\u00b1") 88 | buf.write("\u00b2\7k\2\2\u00b2\u00b3\7p\2\2\u00b3\u00b4\7v\2\2\u00b4") 89 | buf.write("\u00b5\7\65\2\2\u00b5\u00ca\7\64\2\2\u00b6\u00b7\7k\2") 90 | buf.write("\2\u00b7\u00b8\7p\2\2\u00b8\u00b9\7v\2\2\u00b9\u00ba\7") 91 | buf.write("8\2\2\u00ba\u00ca\7\66\2\2\u00bb\u00bc\7h\2\2\u00bc\u00bd") 92 | buf.write("\7n\2\2\u00bd\u00be\7q\2\2\u00be\u00bf\7c\2\2\u00bf\u00c0") 93 | buf.write("\7v\2\2\u00c0\u00c1\7\65\2\2\u00c1\u00ca\7\64\2\2\u00c2") 94 | buf.write("\u00c3\7h\2\2\u00c3\u00c4\7n\2\2\u00c4\u00c5\7q\2\2\u00c5") 95 | buf.write("\u00c6\7c\2\2\u00c6\u00c7\7v\2\2\u00c7\u00c8\78\2\2\u00c8") 96 | buf.write("\u00ca\7\66\2\2\u00c9\u008f\3\2\2\2\u00c9\u0092\3\2\2") 97 | buf.write("\2\u00c9\u0096\3\2\2\2\u00c9\u009b\3\2\2\2\u00c9\u00a0") 98 | buf.write("\3\2\2\2\u00c9\u00a5\3\2\2\2\u00c9\u00a8\3\2\2\2\u00c9") 99 | buf.write("\u00ac\3\2\2\2\u00c9\u00b1\3\2\2\2\u00c9\u00b6\3\2\2\2") 100 | buf.write("\u00c9\u00bb\3\2\2\2\u00c9\u00c2\3\2\2\2\u00ca\22\3\2") 101 | buf.write("\2\2\u00cb\u00cc\7q\2\2\u00cc\u00cd\7r\2\2\u00cd\u00ce") 102 | buf.write("\7v\2\2\u00ce\24\3\2\2\2\u00cf\u00d0\7x\2\2\u00d0\u00d1") 103 | buf.write("\7g\2\2\u00d1\u00d2\7e\2\2\u00d2\26\3\2\2\2\u00d3\u00d4") 104 | buf.write("\7t\2\2\u00d4\u00d5\7g\2\2\u00d5\u00d6\7e\2\2\u00d6\u00d7") 105 | buf.write("\7q\2\2\u00d7\u00d8\7t\2\2\u00d8\u00d9\7f\2\2\u00d9\30") 106 | buf.write("\3\2\2\2\u00da\u00db\7x\2\2\u00db\u00dc\7c\2\2\u00dc\u00dd") 107 | buf.write("\7t\2\2\u00dd\u00de\7k\2\2\u00de\u00df\7c\2\2\u00df\u00e0") 108 | buf.write("\7p\2\2\u00e0\u00e1\7v\2\2\u00e1\32\3\2\2\2\u00e2\u00e3") 109 | buf.write("\7u\2\2\u00e3\u00e4\7g\2\2\u00e4\u00e5\7t\2\2\u00e5\u00e6") 110 | buf.write("\7x\2\2\u00e6\u00e7\7k\2\2\u00e7\u00e8\7e\2\2\u00e8\u00e9") 111 | buf.write("\7g\2\2\u00e9\34\3\2\2\2\u00ea\u00eb\7h\2\2\u00eb\u00ec") 112 | buf.write("\7w\2\2\u00ec\u00ed\7p\2\2\u00ed\u00ee\7e\2\2\u00ee\36") 113 | buf.write("\3\2\2\2\u00ef\u00f0\t\4\2\2\u00f0 \3\2\2\2\u00f1\u00f2") 114 | buf.write("\t\5\2\2\u00f2\"\3\2\2\2\u00f3\u00f7\5%\23\2\u00f4\u00f7") 115 | buf.write("\7a\2\2\u00f5\u00f7\5!\21\2\u00f6\u00f3\3\2\2\2\u00f6") 116 | buf.write("\u00f4\3\2\2\2\u00f6\u00f5\3\2\2\2\u00f7$\3\2\2\2\u00f8") 117 | buf.write("\u00f9\t\6\2\2\u00f9&\3\2\2\2\u00fa\u00fc\7$\2\2\u00fb") 118 | buf.write("\u00fa\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00fd\3\2\2\2") 119 | buf.write("\u00fd\u0101\5%\23\2\u00fe\u0100\5#\22\2\u00ff\u00fe\3") 120 | buf.write("\2\2\2\u0100\u0103\3\2\2\2\u0101\u00ff\3\2\2\2\u0101\u0102") 121 | buf.write("\3\2\2\2\u0102\u0105\3\2\2\2\u0103\u0101\3\2\2\2\u0104") 122 | buf.write("\u0106\7$\2\2\u0105\u0104\3\2\2\2\u0105\u0106\3\2\2\2") 123 | buf.write("\u0106(\3\2\2\2\u0107\u0108\7*\2\2\u0108*\3\2\2\2\u0109") 124 | buf.write("\u010a\7+\2\2\u010a,\3\2\2\2\u010b\u010c\7}\2\2\u010c") 125 | buf.write(".\3\2\2\2\u010d\u010e\7\177\2\2\u010e\60\3\2\2\2\u010f") 126 | buf.write("\u0110\7/\2\2\u0110\u0111\7@\2\2\u0111\62\3\2\2\2\u0112") 127 | 
buf.write("\u0113\7<\2\2\u0113\64\3\2\2\2\u0114\u0115\7=\2\2\u0115") 128 | buf.write("\66\3\2\2\2\u0116\u0117\7?\2\2\u01178\3\2\2\2\u0118\u0119") 129 | buf.write("\7.\2\2\u0119:\3\2\2\2\13\2AO\u008d\u00c9\u00f6\u00fb") 130 | buf.write("\u0101\u0105\3\b\2\2") 131 | return buf.getvalue() 132 | 133 | 134 | class DIDLexer(Lexer): 135 | 136 | atn = ATNDeserializer().deserialize(serializedATN()) 137 | 138 | decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] 139 | 140 | BlockComment = 1 141 | LineComment = 2 142 | S = 3 143 | Type = 4 144 | Query = 5 145 | Oneway = 6 146 | PrimType = 7 147 | NumType = 8 148 | OPT = 9 149 | VEC = 10 150 | RECORD = 11 151 | VARIANT = 12 152 | Service = 13 153 | FUNC = 14 154 | Name = 15 155 | LeftP = 16 156 | RightP = 17 157 | LeftB = 18 158 | RightB = 19 159 | Arrow = 20 160 | Colon = 21 161 | Semicolon = 22 162 | Eq = 23 163 | Comma = 24 164 | 165 | channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] 166 | 167 | modeNames = [ "DEFAULT_MODE" ] 168 | 169 | literalNames = [ "", 170 | "'type'", "'query'", "'oneway'", "'opt'", "'vec'", "'record'", 171 | "'variant'", "'service'", "'func'", "'('", "')'", "'{'", "'}'", 172 | "'->'", "':'", "';'", "'='", "','" ] 173 | 174 | symbolicNames = [ "", 175 | "BlockComment", "LineComment", "S", "Type", "Query", "Oneway", 176 | "PrimType", "NumType", "OPT", "VEC", "RECORD", "VARIANT", "Service", 177 | "FUNC", "Name", "LeftP", "RightP", "LeftB", "RightB", "Arrow", 178 | "Colon", "Semicolon", "Eq", "Comma" ] 179 | 180 | ruleNames = [ "BlockComment", "LineComment", "S", "Type", "Query", "Oneway", 181 | "PrimType", "NumType", "OPT", "VEC", "RECORD", "VARIANT", 182 | "Service", "FUNC", "Letter", "DIGIT", "NameChar", "NameStartChar", 183 | "Name", "LeftP", "RightP", "LeftB", "RightB", "Arrow", 184 | "Colon", "Semicolon", "Eq", "Comma" ] 185 | 186 | grammarFileName = "DIDLexer.g4" 187 | 188 | def __init__(self, input=None, output:TextIO = sys.stdout): 189 | super().__init__(input, output) 190 | self.checkVersion("4.9.3") 191 | self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) 192 | self._actions = None 193 | self._predicates = None 194 | 195 | 196 | -------------------------------------------------------------------------------- /ic/parser/DIDParserListener.py: -------------------------------------------------------------------------------- 1 | # Generated from DIDParser.g4 by ANTLR 4.9.3 2 | from antlr4 import * 3 | if __name__ is not None and "." in __name__: 4 | from .DIDParser import DIDParser 5 | else: 6 | from DIDParser import DIDParser 7 | 8 | # This class defines a complete listener for a parse tree produced by DIDParser. 9 | class DIDParserListener(ParseTreeListener): 10 | 11 | # Enter a parse tree produced by DIDParser#program. 12 | def enterProgram(self, ctx:DIDParser.ProgramContext): 13 | pass 14 | 15 | # Exit a parse tree produced by DIDParser#program. 16 | def exitProgram(self, ctx:DIDParser.ProgramContext): 17 | pass 18 | 19 | 20 | # Enter a parse tree produced by DIDParser#defination. 21 | def enterDefination(self, ctx:DIDParser.DefinationContext): 22 | pass 23 | 24 | # Exit a parse tree produced by DIDParser#defination. 25 | def exitDefination(self, ctx:DIDParser.DefinationContext): 26 | pass 27 | 28 | 29 | # Enter a parse tree produced by DIDParser#actor. 30 | def enterActor(self, ctx:DIDParser.ActorContext): 31 | pass 32 | 33 | # Exit a parse tree produced by DIDParser#actor. 
34 | def exitActor(self, ctx:DIDParser.ActorContext): 35 | pass 36 | 37 | 38 | # Enter a parse tree produced by DIDParser#actortype. 39 | def enterActortype(self, ctx:DIDParser.ActortypeContext): 40 | pass 41 | 42 | # Exit a parse tree produced by DIDParser#actortype. 43 | def exitActortype(self, ctx:DIDParser.ActortypeContext): 44 | pass 45 | 46 | 47 | # Enter a parse tree produced by DIDParser#Name. 48 | def enterName(self, ctx:DIDParser.NameContext): 49 | pass 50 | 51 | # Exit a parse tree produced by DIDParser#Name. 52 | def exitName(self, ctx:DIDParser.NameContext): 53 | pass 54 | 55 | 56 | # Enter a parse tree produced by DIDParser#Primitive. 57 | def enterPrimitive(self, ctx:DIDParser.PrimitiveContext): 58 | pass 59 | 60 | # Exit a parse tree produced by DIDParser#Primitive. 61 | def exitPrimitive(self, ctx:DIDParser.PrimitiveContext): 62 | pass 63 | 64 | 65 | # Enter a parse tree produced by DIDParser#Component. 66 | def enterComponent(self, ctx:DIDParser.ComponentContext): 67 | pass 68 | 69 | # Exit a parse tree produced by DIDParser#Component. 70 | def exitComponent(self, ctx:DIDParser.ComponentContext): 71 | pass 72 | 73 | 74 | # Enter a parse tree produced by DIDParser#comptype. 75 | def enterComptype(self, ctx:DIDParser.ComptypeContext): 76 | pass 77 | 78 | # Exit a parse tree produced by DIDParser#comptype. 79 | def exitComptype(self, ctx:DIDParser.ComptypeContext): 80 | pass 81 | 82 | 83 | # Enter a parse tree produced by DIDParser#Option. 84 | def enterOption(self, ctx:DIDParser.OptionContext): 85 | pass 86 | 87 | # Exit a parse tree produced by DIDParser#Option. 88 | def exitOption(self, ctx:DIDParser.OptionContext): 89 | pass 90 | 91 | 92 | # Enter a parse tree produced by DIDParser#Vector. 93 | def enterVector(self, ctx:DIDParser.VectorContext): 94 | pass 95 | 96 | # Exit a parse tree produced by DIDParser#Vector. 97 | def exitVector(self, ctx:DIDParser.VectorContext): 98 | pass 99 | 100 | 101 | # Enter a parse tree produced by DIDParser#EmptyRecord. 102 | def enterEmptyRecord(self, ctx:DIDParser.EmptyRecordContext): 103 | pass 104 | 105 | # Exit a parse tree produced by DIDParser#EmptyRecord. 106 | def exitEmptyRecord(self, ctx:DIDParser.EmptyRecordContext): 107 | pass 108 | 109 | 110 | # Enter a parse tree produced by DIDParser#Record. 111 | def enterRecord(self, ctx:DIDParser.RecordContext): 112 | pass 113 | 114 | # Exit a parse tree produced by DIDParser#Record. 115 | def exitRecord(self, ctx:DIDParser.RecordContext): 116 | pass 117 | 118 | 119 | # Enter a parse tree produced by DIDParser#EmptyVariant. 120 | def enterEmptyVariant(self, ctx:DIDParser.EmptyVariantContext): 121 | pass 122 | 123 | # Exit a parse tree produced by DIDParser#EmptyVariant. 124 | def exitEmptyVariant(self, ctx:DIDParser.EmptyVariantContext): 125 | pass 126 | 127 | 128 | # Enter a parse tree produced by DIDParser#Variant. 129 | def enterVariant(self, ctx:DIDParser.VariantContext): 130 | pass 131 | 132 | # Exit a parse tree produced by DIDParser#Variant. 133 | def exitVariant(self, ctx:DIDParser.VariantContext): 134 | pass 135 | 136 | 137 | # Enter a parse tree produced by DIDParser#RecordKV. 138 | def enterRecordKV(self, ctx:DIDParser.RecordKVContext): 139 | pass 140 | 141 | # Exit a parse tree produced by DIDParser#RecordKV. 142 | def exitRecordKV(self, ctx:DIDParser.RecordKVContext): 143 | pass 144 | 145 | 146 | # Enter a parse tree produced by DIDParser#RecordData. 
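# For a field list such as  record { owner : principal; amount : nat }  the tree
# walker fires these callbacks in document order, children before the parent's
# exit (trace shown as a sketch):
#
#   enterRecord
#     enterRecordKV -> enterPrimitive/exitPrimitive ('principal') -> exitRecordKV   # owner
#     enterRecordKV -> enterPrimitive/exitPrimitive ('nat')       -> exitRecordKV   # amount
#   exitRecord
#
# This ordering is what lets DIDEmitter cache each field's (name, type) pair in
# exitRecordKV before exitRecord assembles the enclosing Types.Record.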
147 | def enterRecordData(self, ctx:DIDParser.RecordDataContext): 148 | pass 149 | 150 | # Exit a parse tree produced by DIDParser#RecordData. 151 | def exitRecordData(self, ctx:DIDParser.RecordDataContext): 152 | pass 153 | 154 | 155 | # Enter a parse tree produced by DIDParser#VariantKV. 156 | def enterVariantKV(self, ctx:DIDParser.VariantKVContext): 157 | pass 158 | 159 | # Exit a parse tree produced by DIDParser#VariantKV. 160 | def exitVariantKV(self, ctx:DIDParser.VariantKVContext): 161 | pass 162 | 163 | 164 | # Enter a parse tree produced by DIDParser#VariantName. 165 | def enterVariantName(self, ctx:DIDParser.VariantNameContext): 166 | pass 167 | 168 | # Exit a parse tree produced by DIDParser#VariantName. 169 | def exitVariantName(self, ctx:DIDParser.VariantNameContext): 170 | pass 171 | 172 | 173 | # Enter a parse tree produced by DIDParser#reftype. 174 | def enterReftype(self, ctx:DIDParser.ReftypeContext): 175 | pass 176 | 177 | # Exit a parse tree produced by DIDParser#reftype. 178 | def exitReftype(self, ctx:DIDParser.ReftypeContext): 179 | pass 180 | 181 | 182 | # Enter a parse tree produced by DIDParser#functype. 183 | def enterFunctype(self, ctx:DIDParser.FunctypeContext): 184 | pass 185 | 186 | # Exit a parse tree produced by DIDParser#functype. 187 | def exitFunctype(self, ctx:DIDParser.FunctypeContext): 188 | pass 189 | 190 | 191 | # Enter a parse tree produced by DIDParser#EmptyTuple. 192 | def enterEmptyTuple(self, ctx:DIDParser.EmptyTupleContext): 193 | pass 194 | 195 | # Exit a parse tree produced by DIDParser#EmptyTuple. 196 | def exitEmptyTuple(self, ctx:DIDParser.EmptyTupleContext): 197 | pass 198 | 199 | 200 | # Enter a parse tree produced by DIDParser#Tuple. 201 | def enterTuple(self, ctx:DIDParser.TupleContext): 202 | pass 203 | 204 | # Exit a parse tree produced by DIDParser#Tuple. 205 | def exitTuple(self, ctx:DIDParser.TupleContext): 206 | pass 207 | 208 | 209 | # Enter a parse tree produced by DIDParser#argtypes. 210 | def enterArgtypes(self, ctx:DIDParser.ArgtypesContext): 211 | pass 212 | 213 | # Exit a parse tree produced by DIDParser#argtypes. 214 | def exitArgtypes(self, ctx:DIDParser.ArgtypesContext): 215 | pass 216 | 217 | 218 | # Enter a parse tree produced by DIDParser#Query. 219 | def enterQuery(self, ctx:DIDParser.QueryContext): 220 | pass 221 | 222 | # Exit a parse tree produced by DIDParser#Query. 223 | def exitQuery(self, ctx:DIDParser.QueryContext): 224 | pass 225 | 226 | 227 | # Enter a parse tree produced by DIDParser#Oneway. 228 | def enterOneway(self, ctx:DIDParser.OnewayContext): 229 | pass 230 | 231 | # Exit a parse tree produced by DIDParser#Oneway. 232 | def exitOneway(self, ctx:DIDParser.OnewayContext): 233 | pass 234 | 235 | 236 | # Enter a parse tree produced by DIDParser#methodtype. 237 | def enterMethodtype(self, ctx:DIDParser.MethodtypeContext): 238 | pass 239 | 240 | # Exit a parse tree produced by DIDParser#methodtype. 
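# Combined with DIDEmitter.exitFunctype earlier in this package, a service method
# such as
#
#   balanceOf : (principal) -> (nat) query
#
# is emitted as Types.Func([Types.Principal], [Types.Nat], ['query']), while an
# update method with no annotation, e.g.
#
#   transfer : (principal, nat) -> (nat)
#
# becomes Types.Func([Types.Principal, Types.Nat], [Types.Nat], []).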
241 | def exitMethodtype(self, ctx:DIDParser.MethodtypeContext): 242 | pass 243 | 244 | 245 | 246 | del DIDParser -------------------------------------------------------------------------------- /ic/parser/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rocklabs-io/ic-py/79196d9149a7117b15f85077c4cf19cdf6808f3c/ic/parser/__init__.py -------------------------------------------------------------------------------- /ic/principal.py: -------------------------------------------------------------------------------- 1 | 2 | # principal type: https://github.com/dfinity/ic-types/blob/main/src/principal.rs 3 | 4 | import zlib 5 | import math 6 | import base64 7 | import hashlib 8 | from enum import Enum 9 | 10 | CRC_LENGTH_IN_BYTES = 4 11 | HASH_LENGTH_IN_BYTES = 28 12 | MAX_LENGTH_IN_BYTES = 29 13 | 14 | class PrincipalClass(Enum): 15 | OpaqueId = 1 16 | SelfAuthenticating = 2 17 | DerivedId = 3 18 | Anonymous = 4 19 | # Unassigned 20 | 21 | class Principal: 22 | def __init__(self, bytes = b''): 23 | self._len = len(bytes) 24 | self._bytes = bytes 25 | self.hex = str(self._bytes.hex()).upper() 26 | self._isPrincipal = True 27 | 28 | @staticmethod 29 | def management_canister(): 30 | return Principal() 31 | 32 | @staticmethod 33 | def self_authenticating(pubkey): 34 | if isinstance(pubkey, str): 35 | pubkey = bytes.fromhex(pubkey) 36 | hash_ = hashlib.sha224(pubkey).digest() 37 | hash_ += bytes([PrincipalClass.SelfAuthenticating.value]) 38 | return Principal(bytes = hash_) 39 | 40 | @staticmethod 41 | def anonymous(): 42 | return Principal(bytes = b'\x04') 43 | 44 | @property 45 | def len(self): 46 | return self._len 47 | 48 | @property 49 | def bytes(self): 50 | return self._bytes 51 | 52 | @property 53 | def isPrincipal(self): 54 | return self._isPrincipal 55 | 56 | @staticmethod 57 | def from_str(s): 58 | s1 = s.replace('-', '') 59 | pad_len = math.ceil(len(s1) / 8) * 8 - len(s1) 60 | b = base64.b32decode(s1.upper().encode() + b'=' * pad_len) 61 | if len(b) < CRC_LENGTH_IN_BYTES: 62 | raise "principal length error" 63 | p = Principal(bytes = b[CRC_LENGTH_IN_BYTES:]) 64 | if not p.to_str() == s: 65 | raise "principal format error" 66 | return p 67 | 68 | @staticmethod 69 | def from_hex(s): 70 | return Principal(bytes.fromhex(s.lower())) 71 | 72 | def to_str(self): 73 | checksum = zlib.crc32(self._bytes) & 0xFFFFFFFF 74 | b = b'' 75 | b += checksum.to_bytes(CRC_LENGTH_IN_BYTES, byteorder='big') 76 | b += self.bytes 77 | s = base64.b32encode(b).decode('utf-8').lower().replace('=', '') 78 | ret = '' 79 | while len(s) > 5: 80 | ret += s[:5] 81 | ret += '-' 82 | s = s[5:] 83 | ret += s 84 | return ret 85 | 86 | def to_account_id(self, sub_account: int = 0): 87 | return AccountIdentifier.new(self, sub_account) 88 | 89 | def __repr__(self): 90 | return "Principal(" + self.to_str() + ")" 91 | 92 | def __str__(self): 93 | return self.to_str() 94 | 95 | def __eq__(self, other: object) -> bool: 96 | if type(other) is type(self): 97 | return self.len == other.len and self.bytes == other.bytes 98 | else: 99 | return False 100 | 101 | def __hash__(self) -> int: 102 | return hash(self.bytes) 103 | 104 | class AccountIdentifier: 105 | def __init__(self, hash: bytes) -> None: 106 | assert len(hash) == 32 107 | self._hash = hash 108 | 109 | def to_str(self): 110 | return '0x' + self._hash.hex() 111 | 112 | def __repr__(self): 113 | return "Account(" + self.to_str() + ")" 114 | 115 | def __str__(self): 116 | return self.to_str() 117 | 
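# A few worked examples of the textual encoding implemented above (well-known
# principals; the account hash is truncated here for illustration):
#
#   >>> Principal.management_canister().to_str()
#   'aaaaa-aa'
#   >>> Principal.anonymous().to_str()
#   '2vxsx-fae'
#   >>> Principal.from_str('aaaaa-aa').bytes
#   b''
#   >>> Principal.management_canister().to_account_id(0)   # sha224 of a domain separator, the principal and a 32-byte sub-account; see AccountIdentifier.new below
#   Account(0x...)
#
# from_str() strips the dashes, base32-decodes, drops the 4-byte CRC32 prefix, then
# re-encodes and rejects the input if the round trip does not reproduce the string.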
118 | @property 119 | def bytes(self) -> bytes: 120 | return self._hash 121 | 122 | @staticmethod 123 | def new(principal: Principal, sub_account: int = 0): 124 | sha224 = hashlib.sha224() 125 | sha224.update(b'\x0Aaccount-id') 126 | sha224.update(principal.bytes) 127 | 128 | sub_account = sub_account.to_bytes(32, byteorder='big') 129 | sha224.update(sub_account) 130 | hash = sha224.digest() 131 | checksum = zlib.crc32(hash) & 0xFFFFFFFF 132 | account = checksum.to_bytes(CRC_LENGTH_IN_BYTES, byteorder='big') + hash 133 | return AccountIdentifier(account) -------------------------------------------------------------------------------- /ic/system_state.py: -------------------------------------------------------------------------------- 1 | from .agent import Agent 2 | from .certificate import lookup 3 | from .principal import Principal 4 | import leb128 5 | import cbor2 6 | 7 | def time(agent: Agent, canister_id: str) -> int: 8 | cert = agent.read_state_raw(canister_id, [["time".encode()]]) 9 | timestamp = lookup(["time".encode()], cert) 10 | return leb128.u.decode(timestamp) 11 | 12 | def subnet_public_key(agent: Agent, canister_id: str, subnet_id: str) -> str: 13 | path = ["subnet".encode(), Principal.from_str(subnet_id).bytes, "public_key".encode()] 14 | cert = agent.read_state_raw(canister_id, [path]) 15 | pubkey = lookup(path, cert) 16 | return pubkey.hex() 17 | 18 | def subnet_canister_ranges(agent: Agent, canister_id: str, subnet_id: str) -> list[list[Principal]]: 19 | path = ["subnet".encode(), Principal.from_str(subnet_id).bytes, "canister_ranges".encode()] 20 | cert = agent.read_state_raw(canister_id, [path]) 21 | ranges = lookup(path, cert) 22 | return list( 23 | map(lambda range: 24 | list(map(lambda item: Principal(bytes=item), range)), 25 | cbor2.loads(ranges)) 26 | ) 27 | 28 | def canister_module_hash(agent: Agent, canister_id: str) -> str: 29 | path = ["canister".encode(), Principal.from_str(canister_id).bytes, "module_hash".encode()] 30 | cert = agent.read_state_raw(canister_id, [path]) 31 | module_hash = lookup(path, cert) 32 | return module_hash.hex() 33 | 34 | def canister_controllers(agent: Agent, canister_id: str) -> list[Principal]: 35 | path = ["canister".encode(), Principal.from_str(canister_id).bytes, "controllers".encode()] 36 | cert = agent.read_state_raw(canister_id, [path]) 37 | controllers = lookup(path, cert) 38 | return list(map(lambda item: Principal(bytes=item), cbor2.loads(controllers))) -------------------------------------------------------------------------------- /ic/utils.py: -------------------------------------------------------------------------------- 1 | import re 2 | import leb128 3 | import hashlib 4 | 5 | def encode_list(l): 6 | ret = b'' 7 | for item in l: 8 | v = item 9 | if isinstance(item, list): 10 | v = encode_list(item) 11 | if isinstance(item, int): 12 | v = bytes(leb128.u.encode(v)) 13 | if isinstance(item, str): 14 | v = item.encode() 15 | ret += hashlib.sha256(v).digest() 16 | return ret 17 | 18 | # used for sort record by key 19 | def labelHash(s:str) -> int: 20 | if re.match("(^_\d+_$)|(^_0x[0-9a-fA-F]+_$)", s): 21 | num = s[1:-1] 22 | try: 23 | if num.startswith("0x"): 24 | num = int(num, 16) 25 | else: 26 | num = int(num) 27 | except: 28 | # fallback 29 | pass 30 | if type(num) == int and num >= 0 and num < 2**32: 31 | return num 32 | return idlHash(s) 33 | 34 | def idlHash(s:str) -> int: 35 | h = 0 36 | for c in s.encode(): 37 | h = (h * 223 + c) % 2 ** 32 38 | return h 39 | 40 | def to_request_id(d): 41 | if not isinstance(d, 
dict): 42 | print(d) 43 | pass 44 | vec = [] 45 | for k, v in d.items(): 46 | if isinstance(v, list): 47 | v = encode_list(v) 48 | if isinstance(v, int): 49 | v = bytes(leb128.u.encode(v)) 50 | if not isinstance(k, bytes): 51 | k = k.encode() 52 | if not isinstance(v, bytes): 53 | v = v.encode() 54 | h_k = hashlib.sha256(k).digest() 55 | h_v = hashlib.sha256(v).digest() 56 | vec.append(h_k + h_v) 57 | s = b''.join(sorted(vec)) 58 | return hashlib.sha256(s).digest() 59 | -------------------------------------------------------------------------------- /parser/DIDLexer.g4: -------------------------------------------------------------------------------- 1 | lexer grammar DIDLexer; 2 | 3 | BlockComment: '/*' .*? '*/' -> skip; 4 | 5 | LineComment: '//' ~[\r\n]* -> skip; 6 | 7 | S: [ \t\r\n] -> skip; 8 | 9 | Type: 'type'; 10 | 11 | Query: 'query'; 12 | 13 | Oneway: 'oneway'; 14 | 15 | PrimType: 16 | NumType 17 | | 'bool' 18 | | 'text' 19 | | 'null' 20 | | 'reserved' 21 | | 'empty' 22 | | 'principal'; 23 | 24 | NumType: 25 | 'nat' 26 | | 'nat8' 27 | | 'nat16' 28 | | 'nat32' 29 | | 'nat64' 30 | | 'int' 31 | | 'int8' 32 | | 'int16' 33 | | 'int32' 34 | | 'int64' 35 | | 'float32' 36 | | 'float64'; 37 | 38 | OPT: 'opt'; 39 | 40 | VEC: 'vec'; 41 | 42 | RECORD: 'record'; 43 | 44 | VARIANT: 'variant'; 45 | 46 | Service: 'service'; 47 | 48 | FUNC: 'func'; 49 | 50 | fragment Letter: [A-Za-z]; 51 | 52 | fragment DIGIT: [0-9]; 53 | 54 | fragment NameChar: NameStartChar | '_' | DIGIT; 55 | 56 | fragment NameStartChar: [_a-zA-Z]; 57 | 58 | Name: '"'? NameStartChar NameChar* '"'?; 59 | 60 | LeftP: '('; 61 | 62 | RightP: ')'; 63 | 64 | LeftB: '{'; 65 | 66 | RightB: '}'; 67 | 68 | Arrow: '->'; 69 | 70 | Colon: ':'; 71 | 72 | Semicolon: ';'; 73 | 74 | Eq: '='; 75 | 76 | Comma: ','; 77 | -------------------------------------------------------------------------------- /parser/DIDParser.g4: -------------------------------------------------------------------------------- 1 | parser grammar DIDParser; 2 | 3 | options { 4 | tokenVocab = DIDLexer; 5 | } 6 | 7 | program: defination* actor?; 8 | 9 | defination: Type Name Eq datatype Semicolon; 10 | 11 | actor: Service Name? Colon (tuptype Arrow)? (actortype | Name); 12 | 13 | actortype: LeftB (methodtype Semicolon)* RightB; 14 | 15 | datatype: 16 | Name # Name 17 | | PrimType # Primitive 18 | | comptype # Component; 19 | 20 | comptype: constype | reftype; 21 | 22 | constype: 23 | OPT datatype # Option 24 | | VEC datatype # Vector 25 | | RECORD LeftB RightB # EmptyRecord 26 | | RECORD LeftB recordfield (Semicolon recordfield)* Semicolon? RightB # Record 27 | | VARIANT LeftB RightB # EmptyVariant 28 | | VARIANT LeftB variantfield (Semicolon variantfield)* Semicolon? RightB # Variant; 29 | 30 | recordfield: 31 | Name Colon datatype # RecordKV 32 | | datatype # RecordData; 33 | 34 | variantfield: 35 | Name Colon datatype # VariantKV 36 | | Name # VariantName; 37 | 38 | reftype: FUNC functype | Service actortype; 39 | 40 | functype: tuptype Arrow tuptype funcann?; 41 | 42 | tuptype: 43 | LeftP RightP # EmptyTuple 44 | | LeftP argtypes RightP # Tuple; 45 | 46 | argtypes: 47 | datatype (Comma datatype)* Comma? 
48 | | Name Colon datatype; 49 | 50 | funcann: Query # Query | Oneway # Oneway; 51 | 52 | methodtype: Name Colon functype; -------------------------------------------------------------------------------- /parser/antlr-4.9.3-complete.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rocklabs-io/ic-py/79196d9149a7117b15f85077c4cf19cdf6808f3c/parser/antlr-4.9.3-complete.jar -------------------------------------------------------------------------------- /parser/compile.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | 3 | ROOT_DIR=$(cd $(dirname $0); pwd) 4 | 5 | java -jar antlr-4.9.3-complete.jar -Dlanguage=Python3 DIDLexer.g4 DIDParser.g4 -o dist -------------------------------------------------------------------------------- /pics/ic-py.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rocklabs-io/ic-py/79196d9149a7117b15f85077c4cf19cdf6808f3c/pics/ic-py.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools", 4 | "wheel", 5 | ] 6 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import setuptools 3 | from setuptools import setup 4 | from os import path 5 | this_directory = path.abspath(path.dirname(__file__)) 6 | with open(path.join(this_directory, 'README.md'), 'r') as f: 7 | long_description = f.read() 8 | 9 | setup( 10 | name = 'ic-py', 11 | version = '1.0.1', 12 | description = 'Python Agent Library for the Internet Computer', 13 | long_description = long_description, 14 | long_description_content_type = "text/markdown", 15 | url = 'https://github.com/rocklabs-io/ic-py', 16 | author = 'Rocklabs', 17 | author_email = 'hello@rocklabs.io', 18 | keywords = 'dfinity ic agent', 19 | install_requires = [ 20 | 'httpx>=0.22.0', 21 | 'ecdsa>=0.18.0b2', 22 | 'cbor2>=5.4.2', 23 | 'leb128>=1.0.4', 24 | 'waiter>=1.2', 25 | 'antlr4-python3-runtime==4.9.3', 26 | 'mnemonic==0.20' 27 | ], 28 | py_modules = ['ic'], 29 | package_dir = { 'ic': "ic" }, 30 | packages = setuptools.find_packages(where='./'), 31 | include_package_data = True 32 | ) 33 | -------------------------------------------------------------------------------- /test_agent.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from ic.agent import * 3 | from ic.identity import * 4 | from ic.client import * 5 | from ic.candid import Types, encode 6 | 7 | client = Client() 8 | iden = Identity(privkey="833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42") 9 | print('principal:', Principal.self_authenticating(iden.der_pubkey)) 10 | ag = Agent(iden, client) 11 | 12 | start = time.time() 13 | # query token totalSupply 14 | ret = ag.query_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "totalSupply", encode([])) 15 | print('totalSupply:', ret) 16 | 17 | # query token name 18 | ret = ag.query_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "name", encode([])) 19 | print('name:', ret) 20 | 21 | # query token balance of user 22 | ret = ag.query_raw( 23 | "gvbup-jyaaa-aaaah-qcdwa-cai", 24 | "balanceOf", 25 | encode([ 26 | {'type': Types.Principal, 
'value': iden.sender().bytes} 27 | ]) 28 | ) 29 | print('balanceOf:', ret) 30 | 31 | # transfer 100 tokens to blackhole 32 | ret = ag.update_raw( 33 | "gvbup-jyaaa-aaaah-qcdwa-cai", 34 | "transfer", 35 | encode([ 36 | {'type': Types.Principal, 'value': 'aaaaa-aa'}, 37 | {'type': Types.Nat, 'value': 10000000000} 38 | ]) 39 | ) 40 | print('result: ', ret) 41 | 42 | t = time.time() 43 | print("sync call elapsed: ", t - start) 44 | 45 | async def test_async(): 46 | ret = await ag.query_raw_async("gvbup-jyaaa-aaaah-qcdwa-cai", "totalSupply", encode([])) 47 | print('totalSupply:', ret) 48 | 49 | # query token name 50 | ret = await ag.query_raw_async("gvbup-jyaaa-aaaah-qcdwa-cai", "name", encode([])) 51 | print('name:', ret) 52 | 53 | # query token balance of user 54 | ret = await ag.query_raw_async( 55 | "gvbup-jyaaa-aaaah-qcdwa-cai", 56 | "balanceOf", 57 | encode([ 58 | {'type': Types.Principal, 'value': iden.sender().bytes} 59 | ]) 60 | ) 61 | print('balanceOf:', ret) 62 | 63 | # transfer 100 tokens to blackhole 64 | ret = await ag.update_raw_async( 65 | "gvbup-jyaaa-aaaah-qcdwa-cai", 66 | "transfer", 67 | encode([ 68 | {'type': Types.Principal, 'value': 'aaaaa-aa'}, 69 | {'type': Types.Nat, 'value': 10000000000} 70 | ]) 71 | ) 72 | print('result: ', ret) 73 | 74 | asyncio.run(test_async()) 75 | print("sync call elapsed: ", time.time() - t) -------------------------------------------------------------------------------- /test_canister.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | from ic.canister import Canister 3 | from ic.client import Client 4 | from ic.identity import Identity 5 | from ic.agent import Agent 6 | from ic.candid import Types 7 | 8 | iden = Identity() 9 | client = Client() 10 | agent = Agent(iden, client) 11 | 12 | 13 | # wicp_candid=""" 14 | # type Metadata = record { 15 | # fee : nat; 16 | # decimals : nat8; 17 | # owner : principal; 18 | # logo : text; 19 | # name : text; 20 | # totalSupply : nat; 21 | # symbol : text; 22 | # }; 23 | # type Result = variant { Ok : nat; Err : TxError }; 24 | # type TokenInfo = record { 25 | # holderNumber : nat64; 26 | # deployTime : nat64; 27 | # metadata : Metadata; 28 | # historySize : nat64; 29 | # cycles : nat64; 30 | # feeTo : principal; 31 | # }; 32 | # type TxError = variant { 33 | # InsufficientAllowance; 34 | # InsufficientBalance; 35 | # ErrorOperationStyle; 36 | # Unauthorized; 37 | # LedgerTrap; 38 | # ErrorTo; 39 | # Other; 40 | # BlockUsed; 41 | # AmountTooSmall; 42 | # }; 43 | # service : ( 44 | # text, 45 | # text, 46 | # text, 47 | # nat8, 48 | # nat, 49 | # principal, 50 | # nat, 51 | # principal, 52 | # principal, 53 | # ) -> { 54 | # allowance : (principal, principal) -> (nat) query; 55 | # approve : (principal, nat) -> (Result); 56 | # balanceOf : (principal) -> (nat) query; 57 | # decimals : () -> (nat8) query; 58 | # getAllowanceSize : () -> (nat64) query; 59 | # getBlockUsed : () -> (vec nat64) query; 60 | # getHolders : (nat64, nat64) -> (vec record { principal; nat }) query; 61 | # getMetadata : () -> (Metadata) query; 62 | # getTokenInfo : () -> (TokenInfo) query; 63 | # getUserApprovals : (principal) -> (vec record { principal; nat }) query; 64 | # historySize : () -> (nat64) query; 65 | # isBlockUsed : (nat64) -> (bool) query; 66 | # logo : () -> (text) query; 67 | # mint : (opt vec nat8, nat64) -> (Result); 68 | # mintFor : (opt vec nat8, nat64, principal) -> (Result); 69 | # name : () -> (text) query; 70 | # owner : () -> (principal) query; 71 | # 
setFee : (nat) -> (); 72 | # setFeeTo : (principal) -> (); 73 | # setGenesis : () -> (Result); 74 | # setLogo : (text) -> (); 75 | # setName : (text) -> (); 76 | # setOwner : (principal) -> (); 77 | # symbol : () -> (text) query; 78 | # totalSupply : () -> (nat) query; 79 | # transfer : (principal, nat) -> (Result); 80 | # transferFrom : (principal, principal, nat) -> (Result); 81 | # withdraw : (nat64, text) -> (Result); 82 | # } 83 | # """ 84 | # wicp = Canister(agent=agent, canister_id="wjsrf-myaaa-aaaam-qaayq-cai", candid=wicp_candid) 85 | # res = wicp.getTokenInfo() 86 | # print(res) 87 | # res = wicp.totalSupply() 88 | # print(res) 89 | 90 | 91 | governance_did=""" 92 | type AccountIdentifier = record { hash : vec nat8 }; 93 | type Action = variant { 94 | RegisterKnownNeuron : KnownNeuron; 95 | ManageNeuron : ManageNeuron; 96 | ExecuteNnsFunction : ExecuteNnsFunction; 97 | RewardNodeProvider : RewardNodeProvider; 98 | SetDefaultFollowees : SetDefaultFollowees; 99 | RewardNodeProviders : RewardNodeProviders; 100 | ManageNetworkEconomics : NetworkEconomics; 101 | ApproveGenesisKyc : ApproveGenesisKyc; 102 | AddOrRemoveNodeProvider : AddOrRemoveNodeProvider; 103 | Motion : Motion; 104 | }; 105 | type AddHotKey = record { new_hot_key : opt principal }; 106 | type AddOrRemoveNodeProvider = record { change : opt Change }; 107 | type Amount = record { e8s : nat64 }; 108 | type ApproveGenesisKyc = record { principals : vec principal }; 109 | type Ballot = record { vote : int32; voting_power : nat64 }; 110 | type BallotInfo = record { vote : int32; proposal_id : opt NeuronId }; 111 | type By = variant { 112 | NeuronIdOrSubaccount : record {}; 113 | MemoAndController : ClaimOrRefreshNeuronFromAccount; 114 | Memo : nat64; 115 | }; 116 | type Change = variant { ToRemove : NodeProvider; ToAdd : NodeProvider }; 117 | type ClaimOrRefresh = record { by : opt By }; 118 | type ClaimOrRefreshNeuronFromAccount = record { 119 | controller : opt principal; 120 | memo : nat64; 121 | }; 122 | type ClaimOrRefreshNeuronFromAccountResponse = record { result : opt Result_1 }; 123 | type ClaimOrRefreshResponse = record { refreshed_neuron_id : opt NeuronId }; 124 | type Command = variant { 125 | Spawn : Spawn; 126 | Split : Split; 127 | Follow : Follow; 128 | ClaimOrRefresh : ClaimOrRefresh; 129 | Configure : Configure; 130 | RegisterVote : RegisterVote; 131 | Merge : Merge; 132 | DisburseToNeuron : DisburseToNeuron; 133 | MakeProposal : Proposal; 134 | MergeMaturity : MergeMaturity; 135 | Disburse : Disburse; 136 | }; 137 | type Command_1 = variant { 138 | Error : GovernanceError; 139 | Spawn : SpawnResponse; 140 | Split : SpawnResponse; 141 | Follow : record {}; 142 | ClaimOrRefresh : ClaimOrRefreshResponse; 143 | Configure : record {}; 144 | RegisterVote : record {}; 145 | Merge : record {}; 146 | DisburseToNeuron : SpawnResponse; 147 | MakeProposal : MakeProposalResponse; 148 | MergeMaturity : MergeMaturityResponse; 149 | Disburse : DisburseResponse; 150 | }; 151 | type Command_2 = variant { 152 | Spawn : Spawn; 153 | Split : Split; 154 | Configure : Configure; 155 | Merge : Merge; 156 | DisburseToNeuron : DisburseToNeuron; 157 | ClaimOrRefreshNeuron : ClaimOrRefresh; 158 | MergeMaturity : MergeMaturity; 159 | Disburse : Disburse; 160 | }; 161 | type Configure = record { operation : opt Operation }; 162 | type Disburse = record { 163 | to_account : opt AccountIdentifier; 164 | amount : opt Amount; 165 | }; 166 | type DisburseResponse = record { transfer_block_height : nat64 }; 167 | type DisburseToNeuron = 
record { 168 | dissolve_delay_seconds : nat64; 169 | kyc_verified : bool; 170 | amount_e8s : nat64; 171 | new_controller : opt principal; 172 | nonce : nat64; 173 | }; 174 | type DissolveState = variant { 175 | DissolveDelaySeconds : nat64; 176 | WhenDissolvedTimestampSeconds : nat64; 177 | }; 178 | type ExecuteNnsFunction = record { nns_function : int32; payload : vec nat8 }; 179 | type Follow = record { topic : int32; followees : vec NeuronId }; 180 | type Followees = record { followees : vec NeuronId }; 181 | type Governance = record { 182 | default_followees : vec record { int32; Followees }; 183 | wait_for_quiet_threshold_seconds : nat64; 184 | metrics : opt GovernanceCachedMetrics; 185 | node_providers : vec NodeProvider; 186 | economics : opt NetworkEconomics; 187 | latest_reward_event : opt RewardEvent; 188 | to_claim_transfers : vec NeuronStakeTransfer; 189 | short_voting_period_seconds : nat64; 190 | proposals : vec record { nat64; ProposalData }; 191 | in_flight_commands : vec record { nat64; NeuronInFlightCommand }; 192 | neurons : vec record { nat64; Neuron }; 193 | genesis_timestamp_seconds : nat64; 194 | }; 195 | type GovernanceCachedMetrics = record { 196 | not_dissolving_neurons_e8s_buckets : vec record { nat64; float64 }; 197 | garbage_collectable_neurons_count : nat64; 198 | neurons_with_invalid_stake_count : nat64; 199 | not_dissolving_neurons_count_buckets : vec record { nat64; nat64 }; 200 | total_supply_icp : nat64; 201 | neurons_with_less_than_6_months_dissolve_delay_count : nat64; 202 | dissolved_neurons_count : nat64; 203 | total_staked_e8s : nat64; 204 | not_dissolving_neurons_count : nat64; 205 | dissolved_neurons_e8s : nat64; 206 | neurons_with_less_than_6_months_dissolve_delay_e8s : nat64; 207 | dissolving_neurons_count_buckets : vec record { nat64; nat64 }; 208 | dissolving_neurons_count : nat64; 209 | dissolving_neurons_e8s_buckets : vec record { nat64; float64 }; 210 | community_fund_total_staked_e8s : nat64; 211 | timestamp_seconds : nat64; 212 | }; 213 | type GovernanceError = record { error_message : text; error_type : int32 }; 214 | type IncreaseDissolveDelay = record { 215 | additional_dissolve_delay_seconds : nat32; 216 | }; 217 | type KnownNeuron = record { 218 | id : opt NeuronId; 219 | known_neuron_data : opt KnownNeuronData; 220 | }; 221 | type KnownNeuronData = record { name : text; description : opt text }; 222 | type ListKnownNeuronsResponse = record { known_neurons : vec KnownNeuron }; 223 | type ListNeurons = record { 224 | neuron_ids : vec nat64; 225 | include_neurons_readable_by_caller : bool; 226 | }; 227 | type ListNeuronsResponse = record { 228 | neuron_infos : vec record { nat64; NeuronInfo }; 229 | full_neurons : vec Neuron; 230 | }; 231 | type ListProposalInfo = record { 232 | include_reward_status : vec int32; 233 | before_proposal : opt NeuronId; 234 | limit : nat32; 235 | exclude_topic : vec int32; 236 | include_status : vec int32; 237 | }; 238 | type ListProposalInfoResponse = record { proposal_info : vec ProposalInfo }; 239 | type MakeProposalResponse = record { proposal_id : opt NeuronId }; 240 | type ManageNeuron = record { 241 | id : opt NeuronId; 242 | command : opt Command; 243 | neuron_id_or_subaccount : opt NeuronIdOrSubaccount; 244 | }; 245 | type ManageNeuronResponse = record { command : opt Command_1 }; 246 | type Merge = record { source_neuron_id : opt NeuronId }; 247 | type MergeMaturity = record { percentage_to_merge : nat32 }; 248 | type MergeMaturityResponse = record { 249 | merged_maturity_e8s : nat64; 250 | 
new_stake_e8s : nat64;
251 | };
252 | type Motion = record { motion_text : text };
253 | type NetworkEconomics = record {
254 |   neuron_minimum_stake_e8s : nat64;
255 |   max_proposals_to_keep_per_topic : nat32;
256 |   neuron_management_fee_per_proposal_e8s : nat64;
257 |   reject_cost_e8s : nat64;
258 |   transaction_fee_e8s : nat64;
259 |   neuron_spawn_dissolve_delay_seconds : nat64;
260 |   minimum_icp_xdr_rate : nat64;
261 |   maximum_node_provider_rewards_e8s : nat64;
262 | };
263 | type Neuron = record {
264 |   id : opt NeuronId;
265 |   controller : opt principal;
266 |   recent_ballots : vec BallotInfo;
267 |   kyc_verified : bool;
268 |   not_for_profit : bool;
269 |   maturity_e8s_equivalent : nat64;
270 |   cached_neuron_stake_e8s : nat64;
271 |   created_timestamp_seconds : nat64;
272 |   aging_since_timestamp_seconds : nat64;
273 |   hot_keys : vec principal;
274 |   account : vec nat8;
275 |   joined_community_fund_timestamp_seconds : opt nat64;
276 |   dissolve_state : opt DissolveState;
277 |   followees : vec record { int32; Followees };
278 |   neuron_fees_e8s : nat64;
279 |   transfer : opt NeuronStakeTransfer;
280 |   known_neuron_data : opt KnownNeuronData;
281 | };
282 | type NeuronId = record { id : nat64 };
283 | type NeuronIdOrSubaccount = variant {
284 |   Subaccount : vec nat8;
285 |   NeuronId : NeuronId;
286 | };
287 | type NeuronInFlightCommand = record {
288 |   command : opt Command_2;
289 |   timestamp : nat64;
290 | };
291 | type NeuronInfo = record {
292 |   dissolve_delay_seconds : nat64;
293 |   recent_ballots : vec BallotInfo;
294 |   created_timestamp_seconds : nat64;
295 |   state : int32;
296 |   stake_e8s : nat64;
297 |   joined_community_fund_timestamp_seconds : opt nat64;
298 |   retrieved_at_timestamp_seconds : nat64;
299 |   known_neuron_data : opt KnownNeuronData;
300 |   voting_power : nat64;
301 |   age_seconds : nat64;
302 | };
303 | type NeuronStakeTransfer = record {
304 |   to_subaccount : vec nat8;
305 |   neuron_stake_e8s : nat64;
306 |   from : opt principal;
307 |   memo : nat64;
308 |   from_subaccount : vec nat8;
309 |   transfer_timestamp : nat64;
310 |   block_height : nat64;
311 | };
312 | type NodeProvider = record {
313 |   id : opt principal;
314 |   reward_account : opt AccountIdentifier;
315 | };
316 | type Operation = variant {
317 |   RemoveHotKey : RemoveHotKey;
318 |   AddHotKey : AddHotKey;
319 |   StopDissolving : record {};
320 |   StartDissolving : record {};
321 |   IncreaseDissolveDelay : IncreaseDissolveDelay;
322 |   JoinCommunityFund : record {};
323 |   SetDissolveTimestamp : SetDissolveTimestamp;
324 | };
325 | type Proposal = record {
326 |   url : text;
327 |   title : opt text;
328 |   action : opt Action;
329 |   summary : text;
330 | };
331 | type ProposalData = record {
332 |   id : opt NeuronId;
333 |   failure_reason : opt GovernanceError;
334 |   ballots : vec record { nat64; Ballot };
335 |   proposal_timestamp_seconds : nat64;
336 |   reward_event_round : nat64;
337 |   failed_timestamp_seconds : nat64;
338 |   reject_cost_e8s : nat64;
339 |   latest_tally : opt Tally;
340 |   decided_timestamp_seconds : nat64;
341 |   proposal : opt Proposal;
342 |   proposer : opt NeuronId;
343 |   wait_for_quiet_state : opt WaitForQuietState;
344 |   executed_timestamp_seconds : nat64;
345 | };
346 | type ProposalInfo = record {
347 |   id : opt NeuronId;
348 |   status : int32;
349 |   topic : int32;
350 |   failure_reason : opt GovernanceError;
351 |   ballots : vec record { nat64; Ballot };
352 |   proposal_timestamp_seconds : nat64;
353 |   reward_event_round : nat64;
354 |   deadline_timestamp_seconds : opt nat64;
355 |   failed_timestamp_seconds : nat64;
356 |   reject_cost_e8s : nat64;
357 |   latest_tally : opt Tally;
358 |   reward_status : int32;
359 |   decided_timestamp_seconds : nat64;
360 |   proposal : opt Proposal;
361 |   proposer : opt NeuronId;
362 |   executed_timestamp_seconds : nat64;
363 | };
364 | type RegisterVote = record { vote : int32; proposal : opt NeuronId };
365 | type RemoveHotKey = record { hot_key_to_remove : opt principal };
366 | type Result = variant { Ok; Err : GovernanceError };
367 | type Result_1 = variant { Error : GovernanceError; NeuronId : NeuronId };
368 | type Result_2 = variant { Ok : Neuron; Err : GovernanceError };
369 | type Result_3 = variant { Ok : RewardNodeProviders; Err : GovernanceError };
370 | type Result_4 = variant { Ok : NeuronInfo; Err : GovernanceError };
371 | type RewardEvent = record {
372 |   day_after_genesis : nat64;
373 |   actual_timestamp_seconds : nat64;
374 |   distributed_e8s_equivalent : nat64;
375 |   settled_proposals : vec NeuronId;
376 | };
377 | type RewardMode = variant {
378 |   RewardToNeuron : RewardToNeuron;
379 |   RewardToAccount : RewardToAccount;
380 | };
381 | type RewardNodeProvider = record {
382 |   node_provider : opt NodeProvider;
383 |   reward_mode : opt RewardMode;
384 |   amount_e8s : nat64;
385 | };
386 | type RewardNodeProviders = record { rewards : vec RewardNodeProvider };
387 | type RewardToAccount = record { to_account : opt AccountIdentifier };
388 | type RewardToNeuron = record { dissolve_delay_seconds : nat64 };
389 | type SetDefaultFollowees = record {
390 |   default_followees : vec record { int32; Followees };
391 | };
392 | type SetDissolveTimestamp = record { dissolve_timestamp_seconds : nat64 };
393 | type Spawn = record { new_controller : opt principal; nonce : opt nat64 };
394 | type SpawnResponse = record { created_neuron_id : opt NeuronId };
395 | type Split = record { amount_e8s : nat64 };
396 | type Tally = record {
397 |   no : nat64;
398 |   yes : nat64;
399 |   total : nat64;
400 |   timestamp_seconds : nat64;
401 | };
402 | type UpdateNodeProvider = record { reward_account : opt AccountIdentifier };
403 | type WaitForQuietState = record { current_deadline_timestamp_seconds : nat64 };
404 | service : (Governance) -> {
405 |   claim_gtc_neurons : (principal, vec NeuronId) -> (Result);
406 |   claim_or_refresh_neuron_from_account : (ClaimOrRefreshNeuronFromAccount) -> (
407 |       ClaimOrRefreshNeuronFromAccountResponse,
408 |     );
409 |   get_full_neuron : (nat64) -> (Result_2) query;
410 |   get_full_neuron_by_id_or_subaccount : (NeuronIdOrSubaccount) -> (
411 |       Result_2,
412 |     ) query;
413 |   get_monthly_node_provider_rewards : () -> (Result_3);
414 |   get_neuron_ids : () -> (vec nat64) query;
415 |   get_neuron_info : (nat64) -> (Result_4) query;
416 |   get_neuron_info_by_id_or_subaccount : (NeuronIdOrSubaccount) -> (
417 |       Result_4,
418 |     ) query;
419 |   get_pending_proposals : () -> (vec ProposalInfo) query;
420 |   get_proposal_info : (nat64) -> (opt ProposalInfo) query;
421 |   list_known_neurons : () -> (ListKnownNeuronsResponse) query;
422 |   list_neurons : (ListNeurons) -> (ListNeuronsResponse) query;
423 |   list_proposals : (ListProposalInfo) -> (ListProposalInfoResponse) query;
424 |   manage_neuron : (ManageNeuron) -> (ManageNeuronResponse);
425 |   transfer_gtc_neuron : (NeuronId, NeuronId) -> (Result);
426 |   update_node_provider : (UpdateNodeProvider) -> (Result) query;
427 | }
428 | """
429 | governance = Canister(agent=agent, canister_id="rrkah-fqaaa-aaaaa-aaaaq-cai", candid=governance_did)
430 | res = governance.list_proposals(
431 |     {
432 |         'include_reward_status': [],
433 |         'before_proposal': [],
434 |         'limit': 100,
435 |         'exclude_topic': [],
436 |         'include_status': [1]
437 |     }
438 | )
439 | print(res)
440 |
441 | async def async_test():
442 |     res = await governance.list_proposals_async(
443 |         {
444 |             'include_reward_status': [],
445 |             'before_proposal': [],
446 |             'limit': 100,
447 |             'exclude_topic': [],
448 |             'include_status': [1]
449 |         }
450 |     )
451 |     print(res)
452 |
453 | asyncio.run(async_test())
--------------------------------------------------------------------------------
/test_client.py:
--------------------------------------------------------------------------------
1 | from ic.agent import *
2 | from ic.identity import *
3 | from ic.client import *
4 | from ic.candid import Types, encode
5 |
6 | client = Client()
7 | canister_id = "gvbup-jyaaa-aaaah-qcdwa-cai"
8 | reqid = b'\xb3\nU\\F\xf9\x9d\x06\xfa\x15\x1aI\x93S\xa9\x8b\xb2\x8a\x03\xd6BJ},\xa5r=\xa9C\x10v\x98'
9 |
10 | data = b'\xa3gcontent\xa6lrequest_typedcallfsenderX\x1d\x819\xde\x9e\xc8\x1dP\xd8b\xa9V\xdd\x95\xe8\xd7\x05\xe4b\xf9\xe8\xdf o\xf4\xfeI\x879\x02\
11 | kcanister_idJ\x00\x00\x00\x00\x00\xf0\x10\xec\x01\x01\
12 | kmethod_namehtransfer\
13 | cargODIDL\x00\x02h}\x01\x00\x80\xc8\xaf\xa0%\
14 | ningress_expiry\x1b\x16\xc0Qh\xd1\xba\xf6\x00\
15 | msender_pubkeyX,0*0\x05\x06\x03+ep\x03!\x00\xec\x17+\x93\xad^V;\xf4\x93,p\xe1$P4\xc3Tg\xef.\xfdMd\xeb\xf8\x19h4g\xe2\xbf\
16 | jsender_sigX@\xf1K2\x17*\x87\x10UTDu\x12\x98\xa5\xc4\xab\xe7\xc0\x9a\x1a~\x16\xda\x1d\xdcl\x01\xbc\xe0Bi\xde}^\x9c\xcb\x07 \xd89Z\x97A22V\x0b\x0e\xb5\x7f\xe2\x1bcLJt\xea\x1b\xc4\xac\x00\x96\xb4\x02'
17 |
18 | ret = client.call(canister_id, reqid, data)
19 |
20 | '''
21 | {
22 |     'content':
23 |     {'request_type': 'call',
24 |      'sender': b'\x819\xde\x9e\xc8\x1dP\xd8b\xa9V\xdd\x95\xe8\xd7\x05\xe4b\xf9\xe8\xdf o\xf4\xfeI\x879\x02',
25 |      'canister_id': b'\x00\x00\x00\x00\x00\xf0\x10\xec\x01\x01',
26 |      'method_name': 'transfer',
27 |      'arg': b'DIDL\x00\x02h}\x01\x00\x80\xc8\xaf\xa0%',
28 |      'ingress_expiry': 163929373000000000000
29 |     },
30 |     'sender_pubkey': b'0*0\x05\x06\x03+ep\x03!\x00\xec\x17+\x93\xad^V;...Md\xeb\xf8\x19h4g\xe2\xbf',
31 |     'sender_sig': b'\x95\xb0\xde\x98\r9o\x19\xb8+\xc1\xf5\xdb\x0b\x85\xafSoLD...uu\xfa%\x7f`OP\xdd\x03'
32 | }
33 | '''
34 |
--------------------------------------------------------------------------------
/test_readstate.py:
--------------------------------------------------------------------------------
1 | from ic.agent import *
2 | from ic.identity import *
3 | from ic.client import *
4 | from ic.system_state import *
5 |
6 | client = Client()
7 | iden = Identity(privkey="833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42")
8 | print('principal:', Principal.self_authenticating(iden.der_pubkey))
9 | ag = Agent(iden, client)
10 |
11 | ret = time(ag, "gvbup-jyaaa-aaaah-qcdwa-cai")
12 | print(ret)
13 |
14 | ret = subnet_public_key(ag, "gvbup-jyaaa-aaaah-qcdwa-cai", "pjljw-kztyl-46ud4-ofrj6-nzkhm-3n4nt-wi3jt-ypmav-ijqkt-gjf66-uae")
15 | print(ret)
16 |
17 | ret = subnet_canister_ranges(ag, "gvbup-jyaaa-aaaah-qcdwa-cai", "pjljw-kztyl-46ud4-ofrj6-nzkhm-3n4nt-wi3jt-ypmav-ijqkt-gjf66-uae")
18 | print(ret)
19 |
20 | ret = canister_module_hash(ag, "gvbup-jyaaa-aaaah-qcdwa-cai")
21 | print(ret)
22 |
23 | ret = canister_controllers(ag, "sxhuu-qqaaa-aaaai-qbbcq-cai")
24 | print(ret)
--------------------------------------------------------------------------------
/tests/test_agent.py:
--------------------------------------------------------------------------------
1 | from ic.agent import *
2 | from ic.identity import *
3 | from ic.client import *
4 | from ic.candid import Types, encode
5 |
6 | class TestAgent:
7 |
8 |     def setup_class(self):
9 |         client = Client()
10 |         iden = Identity(privkey="833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42")
11 |         self.agent = Agent(iden, client)
12 |
13 |     def test_query(self):
14 |         # query token totalSupply
15 |         ret = self.agent.query_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "totalSupply", encode([]))
16 |         assert ret[0]['value'] == 10000000000000000
17 |
18 |         # query token name
19 |         ret = self.agent.query_raw("gvbup-jyaaa-aaaah-qcdwa-cai", "name", encode([]))
20 |         assert ret[0]['value'] == 'XTC Test'
21 |
22 |     # def test_update(self):
23 |     #     ret = self.agent.update_raw(
24 |     #         "gvbup-jyaaa-aaaah-qcdwa-cai",
25 |     #         "transfer",
26 |     #         encode([
27 |     #             {'type': Types.Principal, 'value': 'aaaaa-aa'},
28 |     #             {'type': Types.Nat, 'value': 10000000000}
29 |     #         ])
30 |     #     )
31 |     #     assert ret != None
32 |
--------------------------------------------------------------------------------
/tests/test_candid.py:
--------------------------------------------------------------------------------
1 | from ic.candid import *
2 |
3 | # TODO
4 | class TestCandid:
5 |
6 |     def test_nat_encode(self):
7 |         nat = NatClass()
8 |         res = encode([{'type':nat, 'value':10000000000}])
9 |         assert res.hex() == "4449444c00017d80c8afa025"
10 |
11 |     def test_nat_decode(self):
12 |         data = bytes.fromhex("4449444c00017d80c8afa025")
13 |         res = decode(data)
14 |         assert len(res) == 1
15 |         assert res[0]["type"] == 'nat'
16 |         assert res[0]["value"] == 10000000000
17 |
18 |     def test_principal_encode(self):
19 |         principal = PrincipalClass()
20 |         res = encode([{'type': principal, 'value':'aaaaa-aa'}])
21 |         assert res.hex() == "4449444c0001680100"
22 |
23 |     def test_principal_decode(self):
24 |         data = bytes.fromhex("4449444c0001680100")
25 |         res = decode(data)
26 |         assert len(res) == 1
27 |         assert res[0]["type"] == 'principal'
28 |         assert res[0]["value"].to_str() == 'aaaaa-aa'
29 |
30 |     # data = b'DIDL\x00\x01q\x08XTC Test'
31 |     # print('decode data: {}'.format(data))
32 |     # out = decode(data)
33 |     # print(out)
34 |
35 |     # data = b'DIDL\x00\x01}\xe2\x82\xac\xe2\x82\xac\xe2\x80'
36 |     # print('decode data: {}'.format(data))
37 |     # out = decode(data)
38 |     # print(out)
39 |
40 |     def test_record_encode(self):
41 |         record = Types.Record({'foo':Types.Text, 'bar': Types.Int})
42 |         res = encode([{'type': record, 'value':{'foo': '💩', 'bar': 42}}])
43 |         assert res.hex() == '4449444c016c02d3e3aa027c868eb7027101002a04f09f92a9'
44 |
45 |     def test_record_decode(self):
46 |         data = bytes.fromhex('4449444c016c02d3e3aa027c868eb7027101002a04f09f92a9')
47 |         res = decode(data)
48 |         assert len(res) == 1
49 |         assert res[0]['value'] == {'_4895187': 42, '_5097222': '💩'}
50 |
51 |     # def test_tuple_encode(self):
52 |     #     tup = Types.Tuple(Types.Int, Types.Text)
53 |     #     res = encode([{'type': tup, 'value': [42, '💩']}])
54 |     #     assert res.hex() == '4449444c016c02007c017101002a04f09f92a9'
55 |
56 |
57 |     # # variant
58 |     # tup = Types.Variant({'ok': Types.Text, 'err': Types.Text})
59 |     # res = encode([{'type': tup, 'value': {'ok': 'good'} }])
60 |     # print('expected:', '4449444c016b03017e9cc20171e58eb4027101000104676f6f64')
61 |     # print('current:', res.hex())
62 |     # print(decode(res, tup))
63 |
64 |     # # tuple(variant)
65 |     # tup = Types.Tuple(Types.Variant({'ok': Types.Text, 'err': Types.Text}))
66 |     # res = encode([{'type': tup, 'value': [{'ok': 'good'}] }])
67 |     # print('expected:', '4449444c026b029cc20171e58eb402716c01000001010004676f6f64')
68 |     # print('current:', res.hex())
69 |     # print(decode(res, tup))
70 |
71 |     # # Vec
72 |     # vec = Types.Vec(Types.Nat64)
73 |     # param = [0, 1, 2, 3]
74 |     # res = encode([{'type': vec, 'value': param}])
75 |     # print('expected:', '4449444c016d7c01000400010203')
76 |     # print('current :', res.hex())
77 |     # print('decode Vec:', decode(res, vec))
78 |
79 |     # # Principle
80 |     # Prin = Types.Principal
81 |     # param = 'expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae'
82 |     # res = encode([{'type': Prin, 'value': param}])
83 |     # print('current :', res.hex())
84 |     # print('decode Principal:', decode(res))
85 |
86 |     # # Opt principal
87 |     # Prin = Types.Opt(Types.Principal)
88 |     # param = ['expmt-gtxsw-inftj-ttabj-qhp5s-nozup-n3bbo-k7zvn-dg4he-knac3-lae']
89 |     # res = encode([{'type': Prin, 'value': param}])
90 |     # print('current :', res.hex())
91 |     # print('decode Principal:', decode(res, Prin))
92 |
93 |     # # NULL
94 |     # Prin = Types.Null
95 |     # param = None
96 |     # res = encode([{'type': Prin, 'value': param}])
97 |     # print('current :', res.hex())
98 |     # print('decode Null:', decode(res, Prin))
99 |
--------------------------------------------------------------------------------
/tests/test_identity.py:
--------------------------------------------------------------------------------
1 | from ic.identity import *
2 |
3 | class TestIdentity:
4 |
5 |     def test_ed25519_privatekey(self):
6 |         iden = Identity(privkey="833fe62409237b9d62ec77587520911e9a759cec1d19755b7da901b96dca3d42")
7 |         assert iden.key_type == 'ed25519'
8 |         assert iden.pubkey == 'ec172b93ad5e563bf4932c70e1245034c35467ef2efd4d64ebf819683467e2bf'
9 |
10 |     def test_secp256k1_privatekey(self):
11 |         pass
12 |
13 |     def test_ed25519_frompem(self):
14 |         pem = """
15 | -----BEGIN PRIVATE KEY-----
16 | MFMCAQEwBQYDK2VwBCIEIGQqNAZlORmn1k4QrYz1FvO4fOQowS3GXQMqRKDzmx9P
17 | oSMDIQCrO5iGM5hnLWrHavywoXekAoXPpYRuB0Dr6DjZF6FZkg==
18 | -----END PRIVATE KEY-----"""
19 |         iden = Identity.from_pem(pem)
20 |         assert iden.key_type == 'ed25519'
21 |         assert iden.privkey == '642a3406653919a7d64e10ad8cf516f3b87ce428c12dc65d032a44a0f39b1f4f'
22 |         assert iden.pubkey == 'ab3b98863398672d6ac76afcb0a177a40285cfa5846e0740ebe838d917a15992'
23 |
24 |     def test_secp256k1_frompem(self):
25 |         pass
26 |
27 |     def test_ed25519_from_seed(self):
28 |         mnemonic = 'fence dragon soft spoon embrace bronze regular hawk more remind detect slam'
29 |         iden = Identity.from_seed(mnemonic)
30 |         assert iden.key_type == 'ed25519'
31 |         assert iden.privkey == '97cc884647e7e0ef58c36b57448269ba6a123521a7f234fa5fdc5816d824ef50'
--------------------------------------------------------------------------------
/tests/test_principal.py:
--------------------------------------------------------------------------------
1 | from ic.principal import Principal
2 |
3 | class TestPrincipal:
4 |
5 |     def test_default(self):
6 |         p = Principal()
7 |         assert p.to_str() == 'aaaaa-aa'
8 |
9 |     def test_anonymous(self):
10 |         p = Principal.anonymous()
11 |         assert p.to_str() == '2vxsx-fae'
12 |
13 |     def test_pubkey(self):
14 |         p = Principal.self_authenticating("ec172b93ad5e563bf4932c70e1245034c35467ef2efd4d64ebf819683467e2bf") # create a principal from public key
15 |         assert p.to_str() == 'zbtml-m23rk-szoaa-5x6p5-co5in-yylx6-463xu-zbjp4-4oizn-cqaij-tae'
16 |
17 |     def test_fromstr(self):
18 |         p = Principal.from_str("zbtml-m23rk-szoaa-5x6p5-co5in-yylx6-463xu-zbjp4-4oizn-cqaij-tae")
19 |         assert p.to_str() == 'zbtml-m23rk-szoaa-5x6p5-co5in-yylx6-463xu-zbjp4-4oizn-cqaij-tae'
20 |
21 |     def test_eq(self):
22 |         p0 = Principal.anonymous()
23 |         p1 = Principal.management_canister()
24 |         assert p0 != p1
25 |
26 |         p2 = Principal.from_str("aaaaa-aa")
27 |         assert p1 == p2
28 |
29 |     def test_hash(self):
30 |         p = Principal.management_canister()
31 |         m = {}
32 |         m[p] = 1
33 |         assert m[p] == 1
34 |         m[p] = 2
35 |         assert m[p] == 2
36 |         assert len(m) == 1
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py
3 | isolated_build = true
4 |
5 | [testenv]
6 | changedir = tests
7 | deps = pytest
8 | commands = pytest {posargs}
9 |
--------------------------------------------------------------------------------