├── .dockerignore
├── .github
│   └── workflows
│       └── build-wheel.yml
├── .gitignore
├── LICENSE
├── README.md
├── deployment
│   ├── dev-requirements.txt
│   └── jupyter.Dockerfile
├── docker-compose.jupyter.yaml
├── examples
│   └── pytonlib-example.ipynb
├── pytonlib
│   ├── __init__.py
│   ├── client.py
│   ├── distlib
│   │   ├── darwin
│   │   │   ├── libtonlibjson.arm64.dylib
│   │   │   └── libtonlibjson.x86_64.dylib
│   │   ├── freebsd
│   │   │   └── libtonlibjson.amd64.so
│   │   ├── linux
│   │   │   ├── libtonlibjson.aarch64.so
│   │   │   └── libtonlibjson.x86_64.so
│   │   └── windows
│   │       └── tonlibjson.amd64.dll
│   ├── tonlibjson.py
│   └── utils
│       ├── __init__.py
│       ├── address.py
│       ├── common.py
│       ├── tlb.py
│       ├── tokens.py
│       └── wallet.py
├── requirements.txt
├── setup.py
└── tests
    ├── __init__.py
    └── pytonlib
        └── test_tonlibclient.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | #PyCharm
107 | .idea/$CACHE_FILE$
108 | .idea/.gitignore
109 | .idea/encodings.xml
110 | .idea/inspectionProfiles/
111 | .idea/misc.xml
112 | .idea/modules.xml
113 | .idea/ton_client.iml
114 | .idea/vcs.xml
115 |
116 | ## tonlib chaotically created temp blkstate files
117 | *.blkstate
118 |
119 | ## logs
120 | /logs
121 |
122 | .vscode
123 | .DS_Store
124 |
125 | /sandbox
126 |
127 | ## custom
128 | /.vscode
129 | .idea/
130 | /docs
131 | /logs
132 | /sandbox
133 | /venv
134 | /notebooks
135 |
--------------------------------------------------------------------------------
/.github/workflows/build-wheel.yml:
--------------------------------------------------------------------------------
1 | name: Build package
2 |
3 | on: [push]
4 |
5 | jobs:
6 | set_version:
7 | runs-on: ubuntu-22.04
8 | steps:
9 | - name: Checkout
10 | uses: actions/checkout@v2
11 | with:
12 | fetch-depth: 0
13 | - name: Test version
14 | uses: paulhatch/semantic-version@v4.0.2
15 | id: versioning
16 | with:
17 | tag_prefix: "v"
18 | major_pattern: "[MAJOR]"
19 | minor_pattern: "[MINOR]"
20 | format: "${major}.${minor}.${patch}a${increment}"
21 | bump_each_commit: false
22 | outputs:
23 | version: ${{ github.repository == 'toncenter/pytonlib' && github.event_name == 'push' && github.ref == 'refs/heads/main' && steps.versioning.outputs.version_tag || steps.versioning.outputs.version}}
24 | build_wheel:
25 | runs-on: ubuntu-22.04
26 | needs: [ set_version ]
27 | steps:
28 | - name: Checkout source
29 | uses: actions/checkout@v2
30 | - name: Set up Python 3.10
31 | uses: actions/setup-python@v1
32 | with:
33 | python-version: 3.10.17
34 | - name: Install build dependencies
35 | run: |
36 | python -m pip install build wheel setuptools
37 | python -m pip install -r requirements.txt
38 | - name: Build distributions
39 | shell: bash -l {0}
40 | run: PYTONLIB_VERSION=${{ needs.set_version.outputs.version }} python setup.py sdist bdist_wheel
41 | - name: Check distributions
42 | shell: bash -l {0}
43 | run: |
44 | echo $(pwd)
45 | ls $(pwd)/
46 | - name: Run tests
47 | shell: bash -l {0}
48 | run: |
49 | python -m pip install ./*.whl
50 | PYTHONPATH=./ pytest --asyncio-mode=strict tests/ || echo "Warning! Some tests failed"
51 | - name: Upload artifacts
52 | uses: actions/upload-artifact@v4
53 | with:
54 | name: pytonlib-package
55 | path: |
56 | dist/*.whl
57 | dist/*.tar.gz
58 | upload-pypi:
59 | runs-on: ubuntu-22.04
60 | needs: [ set_version, build_wheel ]
61 | steps:
62 | - name: Download artifacts
63 | uses: actions/download-artifact@v4
64 | with:
65 | name: pytonlib-package
66 | path: dist
67 | - name: Check distributions
68 | shell: bash -l {0}
69 | run: ls -la dist/
70 | - name: Create release tag
71 | if: ${{ github.repository == 'toncenter/pytonlib' && github.event_name == 'push' && github.ref == 'refs/heads/main' }}
72 | uses: actions/github-script@v4
73 | with:
74 | github-token: ${{ github.token }}
75 | script: |
76 | github.git.createRef({
77 | owner: context.repo.owner,
78 | repo: context.repo.repo,
79 | ref: 'refs/tags/${{ needs.set_version.outputs.version }}',
80 | sha: context.sha
81 | })
82 | - name: Publish package to test PyPI
83 | if: ${{ github.repository == 'toncenter/pytonlib' && github.event_name == 'push' }}
84 | uses: pypa/gh-action-pypi-publish@master
85 | with:
86 | user: __token__
87 | password: ${{ secrets.TEST_PYPI_PASSWORD }}
88 | repository_url: https://test.pypi.org/legacy/
89 | - name: Publish package to PyPI
90 | if: ${{ github.repository == 'toncenter/pytonlib' && github.event_name == 'push' && github.ref == 'refs/heads/main' }}
91 | uses: pypa/gh-action-pypi-publish@master
92 | with:
93 | user: __token__
94 | password: ${{ secrets.PYPI_PASSWORD }}
95 | repository_url: https://upload.pypi.org/legacy/
96 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | #PyCharm
107 | .idea/$CACHE_FILE$
108 | .idea/.gitignore
109 | .idea/encodings.xml
110 | .idea/inspectionProfiles/
111 | .idea/misc.xml
112 | .idea/modules.xml
113 | .idea/ton_client.iml
114 | .idea/vcs.xml
115 |
116 | ## tonlib chaotically created temp blkstate files
117 | *.blkstate
118 |
119 | ## logs
120 | /logs
121 |
122 | .vscode/
123 | .idea/
124 | /private
125 | .DS_Store
126 | /RELEASE.md
127 | /sandbox
128 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 | Copyright (C) <year> <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | <program> Copyright (C) <year> <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
676 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PyTONLib
2 |
3 | [](https://pypi.org/project/pytonlib/)
4 | [](https://pypi.org/project/pytonlib/)
5 | 
6 |
7 |
8 | This is a standalone Python library based on `libtonlibjson`. Its functionality is similar to [ton-http-api](https://github.com/toncenter/ton-http-api),
9 | with the following restrictions:
10 |
11 | * the client can connect to only one LiteServer;
12 | * the client is asynchronous;
13 | * requests are not cached.
14 |
15 | ## Installation
16 |
17 | ### From PyPi
18 | Currently, the library works on Windows, Mac and Linux, on Intel CPUs only:
19 |
20 | * (Windows) Install OpenSSL v1.1.1 for Win64 from [here](https://slproweb.com/products/Win32OpenSSL.html).
21 | * Install the Python 3 package: `pip3 install pytonlib` (a quick import check is shown below).
22 |
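A quick way to confirm the installation is to import the client class (a minimal sanity check; it does not connect to the network):
```python
import pytonlib

# TonlibClient is re-exported from the package root (see pytonlib/__init__.py)
print(pytonlib.TonlibClient)
```
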
23 | ### Docker
24 |
25 | This repo provides a Compose file to deploy an example service with *pytonlib*:
26 | ```bash
27 | docker-compose -f docker-compose.jupyter.yaml build
28 | docker-compose -f docker-compose.jupyter.yaml up -d
29 | ```
30 |
31 | Jupyter Notebook will be available on port 3100 (http://localhost:3100).
32 |
33 | ## Examples
34 |
35 | We recommend using IPython or Jupyter Notebook for prototyping, since they allow running `async` code interactively. An example of running `async` code from a script can be found at the end of this section.
36 |
37 | * Connecting to the first LiteServer in the mainnet config:
38 | ```python
39 | import requests
40 | import asyncio
41 | from pathlib import Path
42 |
43 | from pytonlib import TonlibClient
44 |
45 |
46 | # downloading mainnet config
47 | ton_config = requests.get('https://ton.org/global.config.json').json()
48 |
49 | # create keystore directory for tonlib
50 | keystore_dir = '/tmp/ton_keystore'
51 | Path(keystore_dir).mkdir(parents=True, exist_ok=True)
52 |
53 | # init TonlibClient
54 | client = TonlibClient(ls_index=0, # choose LiteServer index to connect
55 | config=ton_config,
56 | keystore=keystore_dir)
57 |
58 | # init tonlibjson
59 | await client.init()
60 | ```
61 |
62 | * Reading blocks info:
63 | ```python
64 | masterchain_info = await client.get_masterchain_info()
65 | block_header = await client.get_block_header(**masterchain_info['last'])
66 | shards = await client.get_shards(master_seqno=masterchain_info['last']['seqno'])
67 | ```
68 |
69 | * Reading block transactions for a masterchain block:
70 | ```python
71 | masterchain_info = await client.get_masterchain_info()
72 | txs = await client.get_block_transactions(**masterchain_info['last'], count=10)
73 | ```
74 |
75 | * Running async code from a script:
76 | ```python
77 | import requests
78 | import asyncio
79 | from pathlib import Path
80 |
81 | from pytonlib import TonlibClient
82 |
83 |
84 | async def main():
85 | loop = asyncio.get_running_loop()
86 | ton_config = requests.get('https://ton.org/global.config.json').json()
87 |
88 | # create keystore directory for tonlib
89 | keystore_dir = '/tmp/ton_keystore'
90 | Path(keystore_dir).mkdir(parents=True, exist_ok=True)
91 |
92 | # init TonlibClient
93 | client = TonlibClient(ls_index=0, # choose LiteServer index to connect
94 | config=ton_config,
95 | keystore=keystore_dir,
96 | loop=loop)
97 |
98 | # init tonlibjson
99 | await client.init()
100 |
101 | # reading masterchain info
102 | masterchain_info = await client.get_masterchain_info()
103 |
104 | # closing session
105 | await client.close()
106 |
107 |
108 | if __name__ == '__main__':
109 | asyncio.run(main())
110 | ```
111 |
112 | ## Running tests
113 |
114 | To run tests in *asyncio* mode, use the following command:
115 | ```bash
116 | PYTHONPATH=./ pytest --asyncio-mode=strict tests/
117 | ```
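For reference, a minimal *pytest-asyncio* test in strict mode looks roughly like the sketch below. This is a hypothetical example, not the repository's `tests/pytonlib/test_tonlibclient.py`, and it assumes a fixture named `client` that yields an initialized `TonlibClient`:
```python
import pytest

@pytest.mark.asyncio  # an explicit marker is required in strict mode
async def test_get_masterchain_info(client):
    masterchain_info = await client.get_masterchain_info()
    assert 'last' in masterchain_info
```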
118 |
--------------------------------------------------------------------------------
/deployment/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | pytest-asyncio==0.18.3
2 |
--------------------------------------------------------------------------------
/deployment/jupyter.Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:20.04
2 |
3 | RUN apt-get update
4 | RUN DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get -y install tzdata
5 | RUN apt-get install -y git cmake wget python3 python3-pip
6 |
7 | RUN python3 -m pip install jupyter notebook
8 |
9 | # requirements
10 | ADD requirements.txt /tmp/requirements.txt
11 | RUN python3 -m pip install -r /tmp/requirements.txt
12 |
13 | # dev requirements
14 | ADD deployment/dev-requirements.txt /tmp/requirements.txt
15 | RUN python3 -m pip install -r /tmp/requirements.txt
16 |
17 | # app
18 | WORKDIR /app
19 |
20 | # entrypoint
21 | ENTRYPOINT [ "/bin/python3" ]
22 |
--------------------------------------------------------------------------------
/docker-compose.jupyter.yaml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | jupyter:
4 | build:
5 | context: .
6 | dockerfile: deployment/jupyter.Dockerfile
7 | ports:
8 | - "3100:3100"
9 | volumes:
10 | - .:/app
11 | command: "-m jupyter notebook . --ip=0.0.0.0 --port=3100 --allow-root --NotebookApp.token='' --NotebookApp.password=''"
12 |
--------------------------------------------------------------------------------
/examples/pytonlib-example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "id": "21a02167",
6 | "metadata": {},
7 | "source": [
8 | "# TonlibClient example"
9 | ]
10 | },
11 | {
12 | "cell_type": "code",
13 | "execution_count": null,
14 | "id": "ed3cdccf",
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "import sys\n",
19 | "import os\n",
20 | "os.makedirs('/tmp/ton_keystore', exist_ok=True)\n",
21 | "sys.path.insert(0, '/app')\n",
22 | "\n",
23 | "import logging\n",
24 | "import requests\n",
25 | "import asyncio\n",
26 | "\n",
27 | "from pytonlib import TonlibClient\n",
28 | "\n",
29 | "\n",
30 | "logging.basicConfig(format='%(asctime)s %(module)-15s %(message)s',\n",
31 | " level=logging.INFO)"
32 | ]
33 | },
34 | {
35 | "cell_type": "markdown",
36 | "id": "3ccbf5a4",
37 | "metadata": {},
38 | "source": [
39 | "Reading latest config:"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": null,
45 | "id": "7e898620",
46 | "metadata": {},
47 | "outputs": [],
48 | "source": [
49 | "ton_config_url = 'https://ton.org/global.config.json'\n",
50 | "ton_config = requests.get(ton_config_url).json()"
51 | ]
52 | },
53 | {
54 | "cell_type": "markdown",
55 | "id": "ee2e0029",
56 | "metadata": {},
57 | "source": [
58 | "Creating a TonlibClient with a single LiteServer. You can select the LiteServer by its index in the config file:"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": null,
64 | "id": "22e67d8d",
65 | "metadata": {},
66 | "outputs": [],
67 | "source": [
68 | "loop = asyncio.get_running_loop()\n",
69 | "client = TonlibClient(ls_index=1, # choose LiteServer to connect\n",
70 | " config=ton_config,\n",
71 | " keystore='/tmp/ton_keystore',\n",
72 | " loop=loop)\n",
73 | "\n",
74 | "await client.init()"
75 | ]
76 | },
77 | {
78 | "cell_type": "markdown",
79 | "id": "8eb6d564",
80 | "metadata": {},
81 | "source": [
82 | "Reading masterchain info:"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "id": "a0c3eaf0",
89 | "metadata": {},
90 | "outputs": [],
91 | "source": [
92 | "masterchain_info = await client.get_masterchain_info()\n",
93 | "masterchain_info"
94 | ]
95 | },
96 | {
97 | "cell_type": "markdown",
98 | "id": "649abfa7",
99 | "metadata": {},
100 | "source": [
101 | "Block header for the last masterchain block:"
102 | ]
103 | },
104 | {
105 | "cell_type": "code",
106 | "execution_count": null,
107 | "id": "3da9bd64",
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "block_header = await client.get_block_header(**masterchain_info['last'])\n",
112 | "block_header"
113 | ]
114 | },
115 | {
116 | "cell_type": "markdown",
117 | "id": "4d62baff",
118 | "metadata": {},
119 | "source": [
120 | "Shards of the masterchain block:"
121 | ]
122 | },
123 | {
124 | "cell_type": "code",
125 | "execution_count": null,
126 | "id": "e5470d37",
127 | "metadata": {},
128 | "outputs": [],
129 | "source": [
130 | "shards = await client.get_shards(master_seqno=masterchain_info['last']['seqno'])\n",
131 | "shards"
132 | ]
133 | },
134 | {
135 | "cell_type": "markdown",
136 | "id": "86225dcd",
137 | "metadata": {},
138 | "source": [
139 | "Read transactions from the last masterchain block:"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "id": "d09bc06b",
146 | "metadata": {},
147 | "outputs": [],
148 | "source": [
149 | "txs = await client.get_block_transactions(**masterchain_info['last'], count=10)\n",
150 | "\n",
151 | "print('Is incomplete:', txs['incomplete'])\n",
152 | "print('Num txs:', len(txs['transactions']))"
153 | ]
154 | },
155 | {
156 | "cell_type": "markdown",
157 | "id": "0d4549b1",
158 | "metadata": {},
159 | "source": [
160 | "Read transaction details:"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "e8397a5b",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "tx = txs['transactions'][0]\n",
171 | "tx"
172 | ]
173 | },
174 | {
175 | "cell_type": "code",
176 | "execution_count": null,
177 | "id": "b65b4586",
178 | "metadata": {},
179 | "outputs": [],
180 | "source": [
181 | "await client.get_transactions(**tx, limit=1)"
182 | ]
183 | },
184 | {
185 | "cell_type": "markdown",
186 | "id": "68dc43a6",
187 | "metadata": {},
188 | "source": [
189 | "Check extra currencies:"
190 | ]
191 | },
192 | {
193 | "cell_type": "code",
194 | "execution_count": null,
195 | "id": "03b9b897",
196 | "metadata": {},
197 | "outputs": [],
198 | "source": [
199 | "address = '0:C4CAC12F5BC7EEF4CF5EC84EE68CCF860921A06CA0395EC558E53E37B13C3B08'\n",
200 | "await client.raw_get_account_state(address)"
201 | ]
202 | },
203 | {
204 | "cell_type": "code",
205 | "execution_count": null,
206 | "id": "df38f87a",
207 | "metadata": {},
208 | "outputs": [],
209 | "source": []
210 | }
211 | ],
212 | "metadata": {
213 | "kernelspec": {
214 | "display_name": "venv@3.12",
215 | "language": "python",
216 | "name": "python3"
217 | },
218 | "language_info": {
219 | "codemirror_mode": {
220 | "name": "ipython",
221 | "version": 3
222 | },
223 | "file_extension": ".py",
224 | "mimetype": "text/x-python",
225 | "name": "python",
226 | "nbconvert_exporter": "python",
227 | "pygments_lexer": "ipython3",
228 | "version": "3.12.9"
229 | }
230 | },
231 | "nbformat": 4,
232 | "nbformat_minor": 5
233 | }
234 |
--------------------------------------------------------------------------------
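For readers who prefer a plain script to the notebook, the cells above boil down to the following minimal sketch (same flow: fetch the global config, initialize a TonlibClient against one liteserver, read the masterchain state, then close; the keystore directory must exist beforehand, since the constructor checks for it):

    import asyncio
    import os

    import requests

    from pytonlib import TonlibClient

    async def main():
        ton_config = requests.get('https://ton.org/global.config.json').json()
        os.makedirs('/tmp/ton_keystore', exist_ok=True)  # keystore must exist with rwx permissions

        client = TonlibClient(ls_index=0,
                              config=ton_config,
                              keystore='/tmp/ton_keystore',
                              loop=asyncio.get_running_loop())
        await client.init()
        try:
            masterchain_info = await client.get_masterchain_info()
            print('last masterchain seqno:', masterchain_info['last']['seqno'])
        finally:
            await client.close()

    asyncio.run(main())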
/pytonlib/__init__.py:
--------------------------------------------------------------------------------
1 | from pytonlib.client import TonlibClient
2 | from pytonlib.tonlibjson import TonlibException, TonlibNoResponse, TonlibError, LiteServerTimeout, BlockNotFound, ExternalMessageNotAccepted, BlockDeleted
3 |
--------------------------------------------------------------------------------
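The package root re-exports the client together with the tonlibjson exception hierarchy, so callers can react to specific failure modes. A small sketch, assuming an already initialized `client`:

    from pytonlib import BlockNotFound, LiteServerTimeout, TonlibNoResponse

    async def get_header_or_none(client, workchain, shard, seqno):
        try:
            return await client.get_block_header(workchain, shard, seqno)
        except BlockNotFound:
            return None  # block is not in this liteserver's database (e.g. non-archival node)
        except (LiteServerTimeout, TonlibNoResponse):
            return None  # transient liteserver/tonlib problem; the caller may retry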
/pytonlib/client.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | import codecs
4 | import struct
5 | import logging
6 | import os
7 |
8 | from pytonlib.tonlibjson import TonLib
9 | from pytonlib.utils.address import prepare_address, detect_address
10 | from pytonlib.utils.common import b64str_to_hex, hex_to_b64str, hash_to_hex
11 | from pytonlib.utils.tokens import (parse_jetton_master_data, parse_jetton_wallet_data,
12 | parse_nft_collection_data, parse_nft_item_data, parse_nft_content, parse_dns_content,
13 | parse_jetton_wallet_address_data, parse_nft_item_address_data)
14 | from pytonlib.utils.tlb import MsgAddressInt
15 | from bitarray import bitarray
16 |
17 | from tvm_valuetypes import serialize_tvm_stack, render_tvm_stack, deserialize_boc, Cell
18 |
19 | from pathlib import Path
20 | from datetime import datetime
21 | from copy import deepcopy
22 |
23 | logger = logging.getLogger(__name__)
24 |
25 |
26 | class TonlibClient:
27 | def __init__(self,
28 | ls_index,
29 | config,
30 | keystore,
31 | loop=None,
32 | cdll_path=None,
33 | verbosity_level=0,
34 | tonlib_timeout=10):
35 | if not os.access(keystore, os.F_OK):
36 | raise FileNotFoundError(f'Keystore directory {keystore} does not exist')
37 | if not os.access(keystore, os.W_OK | os.R_OK | os.X_OK):
38 | raise PermissionError(f'Keystore directory {keystore} does not have required permissions (rwx)')
39 |
40 | self.ls_index = ls_index
41 | self.config = config
42 | self.keystore = keystore
43 | self.cdll_path = cdll_path
44 | self.loop = loop
45 | self.verbosity_level = verbosity_level
46 | self.tonlib_wrapper = None
47 | self.tonlib_timeout = tonlib_timeout
48 |
49 | @property
50 | def local_config(self):
51 | local = deepcopy(self.config)
52 | local['liteservers'] = [local['liteservers'][self.ls_index]]
53 | return local
54 |
55 | async def init(self):
56 | """
57 | TL Spec
58 | init options:options = options.Info;
59 | options config:config keystore_type:KeyStoreType = Options;
60 |
61 | keyStoreTypeDirectory directory:string = KeyStoreType;
62 | config config:string blockchain_name:string use_callbacks_for_network:Bool ignore_cache:Bool = Config;
63 |
64 |         The liteserver address, port and public key are taken from the liteserver
65 |         entry selected by ls_index in the config passed to the constructor, so this
66 |         method itself takes no arguments.
67 |         :return: None
68 | """
69 | if self.tonlib_wrapper is None:
70 | event_loop = self.loop or asyncio.get_running_loop()
71 | wrapper = TonLib(event_loop, self.ls_index, self.cdll_path, self.verbosity_level)
72 | keystore_obj = {
73 | '@type': 'keyStoreTypeDirectory',
74 | 'directory': self.keystore
75 | }
76 |
77 | request = {
78 | '@type': 'init',
79 | 'options': {
80 | '@type': 'options',
81 | 'config': {
82 | '@type': 'config',
83 | 'config': json.dumps(self.local_config),
84 | 'use_callbacks_for_network': False,
85 | 'blockchain_name': '',
86 | 'ignore_cache': False
87 | },
88 | 'keystore_type': keystore_obj
89 | }
90 | }
91 | self.tonlib_wrapper = wrapper
92 |
93 |             # set config
94 | await self.tonlib_wrapper.execute(request)
95 |
96 |             logger.info(f"TonLib #{self.ls_index:03d} initialized successfully")
97 | else:
98 |             logger.warning('init is already done')
99 |
100 | async def close(self):
101 | if self.tonlib_wrapper is not None:
102 | await self.tonlib_wrapper.close()
103 | self.tonlib_wrapper = None
104 |
105 | async def __aenter__(self):
106 | await self.init()
107 | return self
108 |
109 | async def __aexit__(self, *args):
110 | await self.close()
111 |
112 | def __await__(self):
113 | return self.init().__await__()
114 |
115 | async def sync_tonlib(self):
116 | request = {
117 | '@type': 'sync'
118 | }
119 | return await self.tonlib_wrapper.execute(request, timeout=30)
120 |
121 | # tonlib methods
122 | async def raw_get_transactions(self, account_address: str, from_transaction_lt: str, from_transaction_hash: str, *args, **kwargs):
123 | """
124 | TL Spec:
125 | raw.getTransactions account_address:accountAddress from_transaction_id:internal.transactionId = raw.Transactions;
126 | accountAddress account_address:string = AccountAddress;
127 | internal.transactionId lt:int64 hash:bytes = internal.TransactionId;
128 | :param account_address: str with raw or user friendly address
129 | :param from_transaction_lt: from transaction lt
130 | :param from_transaction_hash: from transaction hash in HEX representation
131 | :return: dict as
132 | {
133 | '@type': 'raw.transactions',
134 | 'transactions': list[dict as {
135 | '@type': 'raw.transaction',
136 | 'utime': int,
137 | 'data': str,
138 | 'transaction_id': internal.transactionId,
139 | 'fee': str,
140 | 'in_msg': dict as {
141 | '@type': 'raw.message',
142 | 'source': str,
143 | 'destination': str,
144 | 'value': str,
145 | 'message': str
146 | },
147 | 'out_msgs': list[dict as raw.message]
148 | }],
149 | 'previous_transaction_id': internal.transactionId
150 | }
151 | """
152 | account_address = prepare_address(account_address)
153 | from_transaction_hash = hex_to_b64str(from_transaction_hash)
154 |
155 | request = {
156 | '@type': 'raw.getTransactions',
157 | 'account_address': {
158 | 'account_address': account_address,
159 | },
160 | 'from_transaction_id': {
161 | '@type': 'internal.transactionId',
162 | 'lt': from_transaction_lt,
163 | 'hash': from_transaction_hash
164 | }
165 | }
166 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
167 |
168 | async def raw_get_account_state(self, address: str, seqno = None, *args, **kwargs):
169 | """
170 | TL Spec:
171 | raw.getAccountState account_address:accountAddress = raw.AccountState;
172 | accountAddress account_address:string = AccountAddress;
173 | :param address: str with raw or user friendly address
174 | :return: dict as
175 | {
176 | '@type': 'raw.accountState',
177 | 'balance': str,
178 | 'code': str,
179 | 'data': str,
180 | 'last_transaction_id': internal.transactionId,
181 | 'sync_utime': int
182 | }
183 | """
184 | account_address = prepare_address(
185 | address) # TODO: understand why this is not used
186 | request = {
187 | '@type': 'raw.getAccountState',
188 | 'account_address': {
189 | 'account_address': address
190 | }
191 | }
192 | if seqno is not None:
193 | wc, shard = -1, -9223372036854775808
194 | block_id = await self.lookup_block(wc, shard, seqno)
195 | request = {
196 | '@type': 'withBlock',
197 | 'id': block_id,
198 | 'function' : request
199 | }
200 |
201 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
202 |
203 | async def get_shard_account_cell(self, address: str, seqno = None, *args, **kwargs):
204 | request = {
205 | '@type': 'getShardAccountCell',
206 | 'account_address': {
207 | 'account_address': address
208 | }
209 | }
210 | if seqno is not None:
211 | wc, shard = -1, -9223372036854775808
212 | block_id = await self.lookup_block(wc, shard, seqno)
213 | request = {
214 | '@type': 'withBlock',
215 | 'id': block_id,
216 | 'function' : request
217 | }
218 |
219 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
220 |
221 | async def generic_get_account_state(self, address: str, seqno = None, *args, **kwargs):
222 | # TODO: understand why this is not used
223 | account_address = prepare_address(address)
224 | request = {
225 | '@type': 'getAccountState',
226 | 'account_address': {
227 | 'account_address': address
228 | }
229 | }
230 | if seqno is not None:
231 | wc, shard = -1, -9223372036854775808
232 | block_id = await self.lookup_block(wc, shard, seqno)
233 | request = {
234 | '@type': 'withBlock',
235 | 'id': block_id,
236 | 'function' : request
237 | }
238 |
239 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
240 |
241 | async def _load_contract(self, address, seqno = None, *args, **kwargs):
242 | # TODO: understand why this is not used
243 | # account_address = prepare_address(address)
244 | request = {
245 | '@type': 'smc.load',
246 | 'account_address': {
247 | 'account_address': address
248 | }
249 | }
250 | if seqno is not None:
251 | wc, shard = -1, -9223372036854775808
252 | block_id = await self.lookup_block(wc, shard, seqno)
253 | request = {
254 | '@type': 'withBlock',
255 | 'id': block_id,
256 | 'function' : request
257 | }
258 | result = await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
259 | return result["id"]
260 |
261 | async def raw_run_method(self, address, method, stack_data, seqno = None, *args, **kwargs):
262 | """
263 | For numeric data only
264 | TL Spec:
265 | smc.runGetMethod id:int53 method:smc.MethodId stack:vector = smc.RunResult;
266 |
267 | smc.methodIdNumber number:int32 = smc.MethodId;
268 | smc.methodIdName name:string = smc.MethodId;
269 |
270 | tvm.slice bytes:string = tvm.Slice;
271 | tvm.cell bytes:string = tvm.Cell;
272 | tvm.numberDecimal number:string = tvm.Number;
273 | tvm.tuple elements:vector = tvm.Tuple;
274 | tvm.list elements:vector = tvm.List;
275 |
276 | tvm.stackEntrySlice slice:tvm.slice = tvm.StackEntry;
277 | tvm.stackEntryCell cell:tvm.cell = tvm.StackEntry;
278 | tvm.stackEntryNumber number:tvm.Number = tvm.StackEntry;
279 | tvm.stackEntryTuple tuple:tvm.Tuple = tvm.StackEntry;
280 | tvm.stackEntryList list:tvm.List = tvm.StackEntry;
281 | tvm.stackEntryUnsupported = tvm.StackEntry;
282 |
283 | smc.runResult gas_used:int53 stack:vector exit_code:int32 = smc.RunResult;
284 | """
285 | stack_data = render_tvm_stack(stack_data)
286 | if isinstance(method, int):
287 | method = {'@type': 'smc.methodIdNumber', 'number': method}
288 | else:
289 | method = {'@type': 'smc.methodIdName', 'name': str(method)}
290 | contract_id = await self._load_contract(address, seqno)
291 | request = {
292 | '@type': 'smc.runGetMethod',
293 | 'id': contract_id,
294 | 'method': method,
295 | 'stack': stack_data
296 | }
297 | r = await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
298 | if 'stack' in r:
299 | r['stack'] = serialize_tvm_stack(r['stack'])
300 | request = {
301 | '@type': 'smc.getRawFullAccountState',
302 | 'id': contract_id
303 | }
304 | raw_full_account_state = await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
305 | r['block_id'] = raw_full_account_state['block_id']
306 | r['last_transaction_id'] = raw_full_account_state['last_transaction_id']
307 | return r
308 |
309 | async def raw_send_message(self, serialized_boc, *args, **kwargs):
310 | """
311 | raw.sendMessage body:bytes = Ok;
312 |
313 | :param serialized_boc: bytes, serialized bag of cell
314 | """
315 | serialized_boc = codecs.decode(codecs.encode(serialized_boc, "base64"), 'utf-8').replace("\n", '')
316 | request = {
317 | '@type': 'raw.sendMessage',
318 | 'body': serialized_boc
319 | }
320 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
321 |
322 | async def raw_send_message_return_hash(self, serialized_boc, *args, **kwargs):
323 | serialized_boc = codecs.decode(codecs.encode(serialized_boc, "base64"), 'utf-8').replace("\n", '')
324 | request = {
325 | '@type': 'raw.sendMessageReturnHash',
326 | 'body': serialized_boc
327 | }
328 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
329 |
330 | async def _raw_create_query(self, destination, body, init_code=b'', init_data=b'', *args, **kwargs):
331 | """
332 | raw.createQuery destination:accountAddress init_code:bytes init_data:bytes body:bytes = query.Info;
333 |
334 | query.info id:int53 valid_until:int53 body_hash:bytes = query.Info;
335 |
336 | """
337 | init_code = codecs.decode(codecs.encode(
338 | init_code, "base64"), 'utf-8').replace("\n", '')
339 | init_data = codecs.decode(codecs.encode(
340 | init_data, "base64"), 'utf-8').replace("\n", '')
341 | body = codecs.decode(codecs.encode(body, "base64"),
342 | 'utf-8').replace("\n", '')
343 | destination = prepare_address(destination)
344 | request = {
345 | '@type': 'raw.createQuery',
346 | 'body': body,
347 | 'init_code': init_code,
348 | 'init_data': init_data,
349 | 'destination': {
350 | 'account_address': destination
351 | }
352 | }
353 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
354 |
355 | async def _raw_send_query(self, query_info, *args, **kwargs):
356 | """
357 | query.send id:int53 = Ok;
358 | """
359 | request = {
360 | '@type': 'query.send',
361 | 'id': query_info['id']
362 | }
363 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
364 |
365 | async def raw_create_and_send_query(self, destination, body, init_code=b'', init_data=b'', *args, **kwargs):
366 | query_info = await self._raw_create_query(destination, body, init_code, init_data)
367 | return await self._raw_send_query(query_info)
368 |
369 | async def raw_create_and_send_message(self, destination, body, initial_account_state=b'', *args, **kwargs):
370 | # Very close to raw_create_and_send_query, but StateInit should be generated outside
371 | """
372 | raw.createAndSendMessage destination:accountAddress initial_account_state:bytes data:bytes = Ok;
373 |
374 | """
375 | initial_account_state = codecs.decode(codecs.encode(
376 | initial_account_state, "base64"), 'utf-8').replace("\n", '')
377 | body = codecs.decode(codecs.encode(body, "base64"),
378 | 'utf-8').replace("\n", '')
379 | destination = prepare_address(destination)
380 | request = {
381 | '@type': 'raw.createAndSendMessage',
382 | 'destination': {
383 | 'account_address': destination
384 | },
385 | 'initial_account_state': initial_account_state,
386 | 'data': body
387 | }
388 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
389 |
390 | async def raw_estimate_fees(self, destination, body, init_code=b'', init_data=b'', ignore_chksig=True, *args, **kwargs):
391 | query_info = await self._raw_create_query(destination, body, init_code, init_data)
392 | request = {
393 | '@type': 'query.estimateFees',
394 | 'id': query_info['id'],
395 | 'ignore_chksig': ignore_chksig
396 | }
397 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
398 |
399 | async def raw_get_block_transactions(self, fullblock, count, after_tx, *args, **kwargs):
400 | request = {
401 | '@type': 'blocks.getTransactions',
402 | 'id': fullblock,
403 | 'mode': 7 if not after_tx else 7+128,
404 | 'count': count,
405 | 'after': after_tx
406 | }
407 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
408 |
409 | async def raw_get_block_transactions_ext(self, fullblock, count, after_tx, *args, **kwargs):
410 | request = {
411 | '@type': 'blocks.getTransactionsExt',
412 | 'id': fullblock,
413 | 'mode': 7 if not after_tx else 7+128,
414 | 'count': count,
415 | 'after': after_tx
416 | }
417 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
418 |
419 | async def get_transactions(self, account,
420 | from_transaction_lt=None,
421 | from_transaction_hash=None,
422 | to_transaction_lt=0,
423 | limit=10,
424 | decode_messages=True,
425 | *args, **kwargs):
426 | """
427 |         Return all transactions between from_transaction_lt and to_transaction_lt.
428 |         If to_transaction_lt is not defined, all transactions are returned (capped by limit).
429 |         If from_transaction_lt and from_transaction_hash are not defined, the scan starts from the account's last transaction.
430 | """
431 | if from_transaction_hash:
432 | from_transaction_hash = hash_to_hex(from_transaction_hash)
433 |         if (from_transaction_lt is None) or (from_transaction_hash is None):
434 | addr = await self.raw_get_account_state(account)
435 | from_transaction_lt, from_transaction_hash = int(
436 | addr["last_transaction_id"]["lt"]), b64str_to_hex(addr["last_transaction_id"]["hash"])
437 | reach_lt = False
438 | all_transactions = []
439 |         current_lt, current_hash = from_transaction_lt, from_transaction_hash
440 | while (not reach_lt) and (len(all_transactions) < limit):
441 |             raw_transactions = await self.raw_get_transactions(account, current_lt, current_hash)
442 | transactions, next = raw_transactions['transactions'], raw_transactions.get("previous_transaction_id")
443 | for t in transactions:
444 | tlt = int(t['transaction_id']['lt'])
445 | if tlt <= to_transaction_lt:
446 | reach_lt = True
447 | break
448 | all_transactions.append(t)
449 | if next:
450 |                 current_lt, current_hash = int(
451 | next["lt"]), b64str_to_hex(next["hash"])
452 | else:
453 | break
454 | if current_lt == 0:
455 | break
456 |
457 | all_transactions = all_transactions[:limit]
458 | for t in all_transactions:
459 | try:
460 | if "in_msg" in t:
461 | if "source" in t["in_msg"]:
462 | t["in_msg"]["source"] = t["in_msg"]["source"]["account_address"]
463 | if "destination" in t["in_msg"]:
464 | t["in_msg"]["destination"] = t["in_msg"]["destination"]["account_address"]
465 | if decode_messages:
466 | try:
467 | if "msg_data" in t["in_msg"]:
468 | dcd = ""
469 | if t["in_msg"]["msg_data"]["@type"] == "msg.dataRaw":
470 | msg_cell_boc = codecs.decode(codecs.encode(
471 | t["in_msg"]["msg_data"]["body"], 'utf8'), 'base64')
472 | message_cell = deserialize_boc(msg_cell_boc)
473 | dcd = message_cell.data.data.tobytes()
474 | t["in_msg"]["message"] = codecs.decode(
475 | codecs.encode(dcd, 'base64'), "utf8")
476 | elif t["in_msg"]["msg_data"]["@type"] == "msg.dataText":
477 | dcd = codecs.encode(
478 | t["in_msg"]["msg_data"]["text"], 'utf8')
479 | t["in_msg"]["message"] = codecs.decode(
480 | codecs.decode(dcd, 'base64'), "utf8")
481 | except Exception as e:
482 | t["in_msg"]["message"] = ""
483 | logger.warning(
484 | f"in_msg message decoding exception: {e}")
485 | if "out_msgs" in t:
486 | for o in t["out_msgs"]:
487 | if "source" in o:
488 | o["source"] = o["source"]["account_address"]
489 | if "destination" in o:
490 | o["destination"] = o["destination"]["account_address"]
491 | if decode_messages:
492 | try:
493 | if "msg_data" in o:
494 | dcd = ""
495 | if o["msg_data"]["@type"] == "msg.dataRaw":
496 | msg_cell_boc = codecs.decode(codecs.encode(
497 | o["msg_data"]["body"], 'utf8'), 'base64')
498 | message_cell = deserialize_boc(
499 | msg_cell_boc)
500 | dcd = message_cell.data.data.tobytes()
501 | o["message"] = codecs.decode(
502 | codecs.encode(dcd, 'base64'), "utf8")
503 | elif o["msg_data"]["@type"] == "msg.dataText":
504 | dcd = codecs.encode(
505 | o["msg_data"]["text"], 'utf8')
506 | o["message"] = codecs.decode(
507 | codecs.decode(dcd, 'base64'), "utf8")
508 | except Exception as e:
509 | o["message"] = ""
510 | logger.warning(
511 | f"out_msg message decoding exception: {e}")
512 | except Exception as e:
513 | logger.error(f"getTransaction exception: {e}")
514 | return all_transactions
515 |
516 | async def get_masterchain_info(self, *args, **kwargs):
517 | request = {
518 | '@type': 'blocks.getMasterchainInfo'
519 | }
520 | result = await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
521 | return result
522 |
523 | async def get_masterchain_block_signatures(self, seqno: int, *args, **kwargs):
524 | request = {
525 | '@type': 'blocks.getMasterchainBlockSignatures',
526 | 'seqno': seqno
527 | }
528 | return await self.tonlib_wrapper.execute(request)
529 |
530 | async def get_shard_block_proof(self, workchain: int, shard: int, seqno: int, from_seqno = None, *args, **kwargs):
531 | block_id = await self.lookup_block(workchain, shard, seqno)
532 | mode = 0
533 | if from_seqno is not None:
534 | mode = 1
535 | wc, shard = -1, -9223372036854775808
536 | from_block_id = await self.lookup_block(wc, shard, from_seqno)
537 |
538 | request = {
539 | '@type': 'blocks.getShardBlockProof',
540 | 'mode': mode,
541 | 'id': block_id
542 | }
543 | if mode == 1:
544 | request['from'] = from_block_id
545 |
546 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
547 |
548 | async def get_out_msg_queue_sizes(self, *args, **kwargs):
549 | request = {
550 | '@type': 'blocks.getOutMsgQueueSizes',
551 | 'mode': 0
552 | }
553 |
554 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
555 |
556 | async def lookup_block(self, workchain, shard, seqno = None, lt=None, unixtime=None, *args, **kwargs):
557 | assert (seqno is not None) or (lt is not None) or (unixtime is not None), "Seqno, LT or unixtime should be defined"
558 | mode = 0
559 | if seqno is not None:
560 | mode += 1
561 | if lt is not None:
562 | mode += 2
563 | if unixtime is not None:
564 | mode += 4
565 | request = {
566 | '@type': 'blocks.lookupBlock',
567 | 'mode': mode,
568 | 'id': {
569 | '@type': 'ton.blockId',
570 | 'workchain': workchain,
571 | 'shard': shard,
572 | 'seqno': seqno
573 | },
574 | 'lt': lt,
575 | 'utime': unixtime
576 | }
577 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
578 |
579 | async def get_shards(self, master_seqno = None, lt=None, unixtime=None, *args, **kwargs):
580 | assert (master_seqno is not None) or (lt is not None) or (unixtime is not None), "Seqno, LT or unixtime should be defined"
581 | wc, shard = -1, -9223372036854775808
582 | fullblock = await self.lookup_block(wc, shard, master_seqno, lt, unixtime)
583 | request = {
584 | '@type': 'blocks.getShards',
585 | 'id': fullblock
586 | }
587 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
588 |
589 | async def get_block_transactions(self, workchain, shard, seqno, count, root_hash=None, file_hash=None, after_lt=None, after_hash=None, *args, **kwargs):
590 | if root_hash and file_hash:
591 | fullblock = {
592 | '@type': 'ton.blockIdExt',
593 | 'workchain': workchain,
594 | 'shard': shard,
595 | 'seqno': seqno,
596 | 'root_hash': root_hash,
597 | 'file_hash': file_hash
598 | }
599 | else:
600 | fullblock = await self.lookup_block(workchain, shard, seqno)
601 | if fullblock.get('@type', 'error') == 'error':
602 | return fullblock
603 | after_tx = {
604 | '@type': 'blocks.accountTransactionId',
605 | 'account': after_hash if after_hash else 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=',
606 | 'lt': after_lt if after_lt else 0
607 | }
608 | total_result = {}
609 | incomplete = True
610 | max_chunk_count = 256
611 | left_count = count
612 |
613 | while incomplete and left_count > 0:
614 | chunk_count = min(max_chunk_count, left_count)
615 | result = await self.raw_get_block_transactions(fullblock, chunk_count, after_tx)
616 | if not total_result:
617 | total_result = result
618 | else:
619 | total_result["transactions"] += result["transactions"]
620 | incomplete = result["incomplete"]
621 | left_count -= len(result["transactions"])
622 | if incomplete:
623 | after_tx['account'] = result["transactions"][-1]["account"]
624 | after_tx['lt'] = result["transactions"][-1]["lt"]
625 |
626 | total_result["incomplete"] = incomplete
627 | total_result["req_count"] = count
628 |
629 | for tx in total_result["transactions"]:
630 | try:
631 | tx["account"] = "%d:%s" % (result["id"]["workchain"], b64str_to_hex(tx["account"]))
632 | except:
633 | pass
634 | return total_result
635 |
636 | async def get_block_transactions_ext(self,
637 | workchain,
638 | shard,
639 | seqno,
640 | count,
641 | root_hash=None,
642 | file_hash=None,
643 | after_lt=None,
644 | after_hash=None,
645 | *args, **kwargs):
646 | if root_hash and file_hash:
647 | fullblock = {
648 | '@type': 'ton.blockIdExt',
649 | 'workchain': workchain,
650 | 'shard': shard,
651 | 'seqno': seqno,
652 | 'root_hash': root_hash,
653 | 'file_hash': file_hash
654 | }
655 | else:
656 | fullblock = await self.lookup_block(workchain, shard, seqno)
657 | if fullblock.get('@type', 'error') == 'error':
658 | return fullblock
659 | after_tx = {
660 | '@type': 'blocks.accountTransactionId',
661 | 'account': after_hash if after_hash else 'AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=',
662 | 'lt': after_lt if after_lt else 0
663 | }
664 | total_result = {}
665 | incomplete = True
666 | max_chunk_count = 256
667 | left_count = count
668 |
669 | while incomplete and left_count > 0:
670 | chunk_count = min(max_chunk_count, left_count)
671 | result = await self.raw_get_block_transactions_ext(fullblock, chunk_count, after_tx)
672 | if not total_result:
673 | total_result = result
674 | else:
675 | total_result["transactions"] += result["transactions"]
676 | total_result["incomplete"] = result["incomplete"]
677 | incomplete = result["incomplete"]
678 | left_count -= len(result["transactions"])
679 | if incomplete:
680 | account_friendly = result["transactions"][-1]["address"]["account_address"]
681 | hex_without_workchain = detect_address(account_friendly)['raw_form'].split(':')[1]
682 | after = hex_to_b64str(hex_without_workchain)
683 | after_tx['account'] = after
684 | after_tx['lt'] = result["transactions"][-1]["transaction_id"]["lt"]
685 |
686 | for tx in total_result["transactions"]:
687 | try:
688 | account_friendly = tx["address"]["account_address"]
689 | hex_without_workchain = detect_address(account_friendly)['raw_form'].split(':')[1]
690 | tx["account"] = "%d:%s" % (result["id"]["workchain"], hex_without_workchain)
691 | except:
692 | pass
693 | return total_result
694 |
695 | async def get_block_header(self, workchain, shard, seqno, root_hash=None, file_hash=None, *args, **kwargs):
696 | if root_hash and file_hash:
697 | fullblock = {
698 | '@type': 'ton.blockIdExt',
699 | 'workchain': workchain,
700 | 'shard': shard,
701 | 'seqno': seqno,
702 | 'root_hash': root_hash,
703 | 'file_hash': file_hash
704 | }
705 | else:
706 | fullblock = await self.lookup_block(workchain, shard, seqno)
707 | if fullblock.get('@type', 'error') == 'error':
708 | return fullblock
709 | request = {
710 | '@type': 'blocks.getBlockHeader',
711 | 'id': fullblock
712 | }
713 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
714 |
715 | async def get_config_param(self, config_id: int, seqno = None, *args, **kwargs):
716 | request = {
717 | '@type': 'getConfigParam',
718 | 'param': config_id,
719 | 'mode': 0
720 | }
721 |
722 | if seqno is not None:
723 | wc, shard = -1, -9223372036854775808
724 | block_id = await self.lookup_block(wc, shard, seqno)
725 | request = {
726 | '@type': 'withBlock',
727 | 'id': block_id,
728 | 'function' : request
729 | }
730 |
731 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
732 |
733 | async def get_config_all(self, seqno = None, *args, **kwargs):
734 | request = {
735 | '@type': 'getConfigAll',
736 | 'mode': 0
737 | }
738 |
739 | if seqno is not None:
740 | wc, shard = -1, -9223372036854775808
741 | block_id = await self.lookup_block(wc, shard, seqno)
742 | request = {
743 | '@type': 'withBlock',
744 | 'id': block_id,
745 | 'function' : request
746 | }
747 |
748 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
749 |
750 | async def try_locate_tx_by_incoming_message(self, source, destination, creation_lt, *args, **kwargs):
751 | src = detect_address(source)
752 | dest = detect_address(destination)
753 | workchain = dest["raw_form"].split(":")[0]
754 | shards = await self.get_shards(lt=int(creation_lt))
755 |
756 | for shard_data in shards['shards']:
757 | shardchain = shard_data['shard']
758 | for b in range(3):
759 | block = await self.lookup_block(workchain, shardchain, lt=int(creation_lt) + b * 1000000)
760 | txs = await self.get_block_transactions(workchain,
761 | shardchain,
762 | block["seqno"],
763 | count=40,
764 | root_hash=block["root_hash"],
765 | file_hash=block["file_hash"])
766 | candidate = tuple()
767 | count = 0
768 | for tx in txs["transactions"]:
769 | if tx["account"] == dest["raw_form"]:
770 | count += 1
771 | if not candidate or candidate[1] < int(tx["lt"]):
772 | candidate = tx["hash"], int(tx["lt"])
773 | if candidate:
774 | txses = await self.get_transactions(destination,
775 | from_transaction_lt=candidate[1],
776 | from_transaction_hash=b64str_to_hex(candidate[0]),
777 | limit=max(count, 10))
778 | for tx in txses:
779 | try:
780 | in_msg = tx["in_msg"]
781 | tx_source = in_msg["source"]
782 | if len(tx_source) and detect_address(tx_source)["raw_form"] == src["raw_form"]:
783 | if int(in_msg["created_lt"]) == int(creation_lt):
784 | return tx
785 | except KeyError:
786 | pass
787 | raise Exception("Tx not found")
788 |
789 | async def try_locate_tx_by_outcoming_message(self, source, destination, creation_lt, *args, **kwargs):
790 | src = detect_address(source)
791 | dest = detect_address(destination)
792 | workchain = src["raw_form"].split(":")[0]
793 | shards = await self.get_shards(lt=int(creation_lt))
794 |
795 | for shard_data in shards['shards']:
796 | shardchain = shard_data['shard']
797 | block = await self.lookup_block(workchain, shardchain, lt=int(creation_lt))
798 | txses = await self.get_block_transactions(workchain,
799 | shardchain,
800 | block["seqno"],
801 | count=40,
802 | root_hash=block["root_hash"],
803 | file_hash=block["file_hash"])
804 | candidate = tuple()
805 | count = 0
806 | for tx in txses["transactions"]:
807 | if tx["account"] == src["raw_form"]:
808 | count += 1
809 | if not candidate or candidate[1] < int(tx["lt"]):
810 | candidate = tx["hash"], int(tx["lt"])
811 | if candidate:
812 | txses = await self.get_transactions(source,
813 | from_transaction_lt=candidate[1],
814 | from_transaction_hash=b64str_to_hex(candidate[0]),
815 | limit=max(count, 10))
816 | for tx in txses:
817 | try:
818 | for msg in tx["out_msgs"]:
819 | if detect_address(msg["destination"])["raw_form"] == dest["raw_form"]:
820 | if int(msg["created_lt"]) == int(creation_lt):
821 | return tx
822 | except KeyError:
823 | pass
824 | raise Exception("Tx not found")
825 |
826 |
827 | async def get_jetton_wallet_address(self, owner_address, jetton_address):
828 | def address_to_MsgAddress_boc(workchain, address_hex):
829 | workchain = int.to_bytes(workchain, 1, 'big')
830 | address = bytes.fromhex(address_hex)
831 | workchain_ba = bitarray()
832 | workchain_ba.frombytes(workchain)
833 | address_ba = bitarray()
834 | address_ba.frombytes(address)
835 | addr_ba = bitarray('100') + workchain_ba + address_ba
836 | c = Cell()
837 | c.data.data = addr_ba
838 | boc_bytes = c.serialize_boc()
839 | return codecs.decode(codecs.encode(boc_bytes, "base64"), 'utf-8').replace("\n", '')
840 |
841 | owner_raw = detect_address(owner_address)['raw_form']
842 | addr_boc = address_to_MsgAddress_boc(int(owner_raw.split(':')[0]), owner_raw.split(':')[1])
843 | stack = [['tvm.Slice', addr_boc]]
844 | result = await self.raw_run_method(jetton_address, 'get_wallet_address', stack)
845 | if result['exit_code'] != 0 or len(result['stack']) != 1:
846 | raise Exception("get_wallet_address failed")
847 | return parse_jetton_wallet_address_data(result['stack'])
848 |
849 | async def get_nft_item_address(self, collection_address, item_index):
850 | stack = [['int', item_index]]
851 | result = await self.raw_run_method(collection_address, 'get_nft_address_by_index', stack)
852 | if result['exit_code'] != 0 or len(result['stack']) != 1:
853 | raise Exception("get_nft_address_by_index failed")
854 | return parse_nft_item_address_data(result['stack'])
855 |
856 | async def get_token_data(self, address: str, skip_verification=False):
857 | address = prepare_address(address)
858 |
859 | types_methods = {
860 | 'jetton_master': 'get_jetton_data',
861 | 'jetton_wallet': 'get_wallet_data',
862 | 'nft_collection': 'get_collection_data',
863 | 'nft_item': 'get_nft_data'
864 | }
865 | get_method_results = await asyncio.gather(*[self.raw_run_method(address, t, []) for t in types_methods.values()])
866 |
867 | contract_type = None
868 | get_method_result_stack = None
869 | for i, type in enumerate(types_methods.keys()):
870 | if get_method_results[i]['exit_code'] == 0:
871 | contract_type = type
872 | get_method_result_stack = get_method_results[i]['stack']
873 |
874 | if contract_type is None or get_method_result_stack is None:
875 | raise Exception("Smart contract is not Jetton or NFT")
876 |
877 | result = None
878 | if contract_type == 'jetton_master':
879 | result = parse_jetton_master_data(get_method_result_stack)
880 | elif contract_type == 'jetton_wallet':
881 | result = parse_jetton_wallet_data(get_method_result_stack)
882 |
883 | if not skip_verification and await self.get_jetton_wallet_address(result['owner'], result['jetton']) != address:
884 | raise Exception("Verification with Jetton master failed")
885 | elif contract_type == 'nft_collection':
886 | result = parse_nft_collection_data(get_method_result_stack)
887 | elif contract_type == 'nft_item':
888 | result = parse_nft_item_data(get_method_result_stack)
889 | if result['collection_address'] is not None:
890 | if not skip_verification and await self.get_nft_item_address(result['collection_address'], result['index']) != address:
891 | raise Exception("Verification with NFT collection failed")
892 |
893 | individual_content = result.pop('individual_content')
894 | get_nft_content_request_stack = [['num', result['index']], ['tvm.Cell', individual_content]]
895 | content_raw = await self.raw_run_method(prepare_address(result['collection_address']), 'get_nft_content', get_nft_content_request_stack)
896 |
897 | # TON DNS collection
898 | if prepare_address(result['collection_address']) == 'EQC3dNlesgVD8YbAazcauIrXBPfiVhMMr5YYk2in0Mtsz0Bz':
899 | content = parse_dns_content(content_raw['stack'])
900 | get_domain_res = await self.raw_run_method(address, 'get_domain', [])
901 | if get_domain_res['exit_code'] == 0:
902 | domain_bytes = get_domain_res['stack'][0][1]['bytes']
903 | domain_boc = codecs.decode(codecs.encode(domain_bytes, 'utf-8'), 'base64')
904 | domain_cell = deserialize_boc(domain_boc)
905 | content['domain'] = domain_cell.data.data.tobytes().decode('ascii') + '.ton'
906 | else:
907 | content = parse_nft_content(content_raw['stack'])
908 | else:
909 | content = result.pop('individual_content')
910 | result['content'] = content
911 | result['contract_type'] = contract_type
912 | return result
913 |
914 | async def get_libraries(self, library_list: list):
915 | """
916 | :param library_list: list of base64-encoded libraries hashes
917 | """
918 |
919 | request = {
920 | '@type': 'smc.getLibraries',
921 | 'library_list': library_list
922 | }
923 | return await self.tonlib_wrapper.execute(request, timeout=self.tonlib_timeout)
924 |
--------------------------------------------------------------------------------
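Two usage patterns from the client above are worth calling out: running a contract get-method via raw_run_method, and pinning reads to a specific masterchain block by passing seqno (which wraps the request in 'withBlock'). A hedged sketch, assuming an initialized `client`, a placeholder `wallet_address` and a placeholder `masterchain_seqno`:

    # raw_run_method loads the contract (smc.load) and runs a get-method; the reply
    # carries 'exit_code', a serialized TVM 'stack', 'gas_used', plus the 'block_id'
    # and 'last_transaction_id' of the state it was executed against.
    result = await client.raw_run_method(wallet_address, 'seqno', [])
    if result['exit_code'] == 0:
        print(result['stack'])

    # Passing seqno= pins state/config reads to that masterchain block via 'withBlock'.
    state = await client.raw_get_account_state(wallet_address, seqno=masterchain_seqno)
    config_p4 = await client.get_config_param(4, seqno=masterchain_seqno)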
/pytonlib/distlib/darwin/libtonlibjson.arm64.dylib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/darwin/libtonlibjson.arm64.dylib
--------------------------------------------------------------------------------
/pytonlib/distlib/darwin/libtonlibjson.x86_64.dylib:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/darwin/libtonlibjson.x86_64.dylib
--------------------------------------------------------------------------------
/pytonlib/distlib/freebsd/libtonlibjson.amd64.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/freebsd/libtonlibjson.amd64.so
--------------------------------------------------------------------------------
/pytonlib/distlib/linux/libtonlibjson.aarch64.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/linux/libtonlibjson.aarch64.so
--------------------------------------------------------------------------------
/pytonlib/distlib/linux/libtonlibjson.x86_64.so:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/linux/libtonlibjson.x86_64.so
--------------------------------------------------------------------------------
/pytonlib/distlib/windows/tonlibjson.amd64.dll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/distlib/windows/tonlibjson.amd64.dll
--------------------------------------------------------------------------------
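The distlib entries above are the prebuilt tonlibjson binaries shipped with the package; at runtime the wrapper picks one based on platform.system() and platform.machine(). A custom build can be supplied instead via cdll_path (sketch with a hypothetical path, reusing ton_config and the keystore directory from the notebook example):

    client = TonlibClient(ls_index=0,
                          config=ton_config,
                          keystore='/tmp/ton_keystore',
                          cdll_path='/opt/ton/libtonlibjson.so')  # hypothetical custom build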
/pytonlib/tonlibjson.py:
--------------------------------------------------------------------------------
1 | import json
2 | import platform
3 | import traceback
4 |
5 | import pkg_resources
6 | import random
7 | import asyncio
8 | import time
9 | import functools
10 | import logging
11 |
12 | from copy import deepcopy
13 | from ctypes import *
14 |
15 | logger = logging.getLogger(__name__)
16 |
17 |
18 | class TonlibException(Exception):
19 | pass
20 |
21 | class TonlibNoResponse(TonlibException):
22 | def __str__(self):
23 | return 'tonlibjson did not respond'
24 |
25 | class TonlibError(TonlibException):
26 | def __init__(self, result):
27 | self.result = result
28 |
29 | @property
30 | def code(self):
31 | return self.result.get('code')
32 |
33 | def __str__(self):
34 | return self.result.get('message')
35 |
36 | class LiteServerTimeout(TonlibError):
37 | pass
38 |
39 | class BlockNotFound(TonlibError):
40 | pass
41 |
42 | class BlockDeleted(TonlibError):
43 | pass
44 |
45 | class ExternalMessageNotAccepted(TonlibError):
46 | pass
47 |
48 | def parse_tonlib_error(result):
49 | if result.get('@type') == 'error':
50 | message = result.get('message')
51 | if 'not in db' in message:
52 | return BlockNotFound(result)
53 | if "state already gc'd" in message:
54 | return BlockDeleted(result)
55 | if 'cannot apply external message to current state' in message:
56 | return ExternalMessageNotAccepted(result)
57 | if 'adnl query timeout' in message:
58 | return LiteServerTimeout(result)
59 | return TonlibError(result)
60 | return None
61 |
62 | def get_tonlib_path():
63 | arch_name = platform.system().lower()
64 | machine = platform.machine().lower()
65 | if arch_name == 'linux':
66 | lib_name = f'libtonlibjson.{machine}.so'
67 | elif arch_name == 'darwin':
68 | lib_name = f'libtonlibjson.{machine}.dylib'
69 | elif arch_name == 'freebsd':
70 | lib_name = f'libtonlibjson.{machine}.so'
71 | elif arch_name == 'windows':
72 | lib_name = f'tonlibjson.{machine}.dll'
73 | else:
74 | raise RuntimeError(f"Platform '{arch_name}({machine})' is not compatible yet")
75 | return pkg_resources.resource_filename('pytonlib', f'distlib/{arch_name}/{lib_name}')
76 |
77 | # class TonLib for single liteserver
78 | class TonLib:
79 | def __init__(self, loop, ls_index, cdll_path=None, verbosity_level=0):
80 | cdll_path = get_tonlib_path() if not cdll_path else cdll_path
81 | tonlib = CDLL(cdll_path)
82 |
83 | tonlib_client_set_verbosity_level = tonlib.tonlib_client_set_verbosity_level
84 | tonlib_client_set_verbosity_level.restype = None
85 | tonlib_client_set_verbosity_level.argtypes = [c_int]
86 |
87 | try:
88 | tonlib_client_set_verbosity_level(verbosity_level)
89 | except Exception as ee:
90 | raise RuntimeError(f"Failed to set verbosity level: {ee}")
91 |
92 | tonlib_json_client_create = tonlib.tonlib_client_json_create
93 | tonlib_json_client_create.restype = c_void_p
94 | tonlib_json_client_create.argtypes = []
95 | try:
96 | self._client = tonlib_json_client_create()
97 | except Exception as ee:
98 | raise RuntimeError(f"Failed to create tonlibjson client: {ee}")
99 |
100 | tonlib_json_client_receive = tonlib.tonlib_client_json_receive
101 | tonlib_json_client_receive.restype = c_char_p
102 | tonlib_json_client_receive.argtypes = [c_void_p, c_double]
103 | self._tonlib_json_client_receive = tonlib_json_client_receive
104 |
105 | tonlib_json_client_send = tonlib.tonlib_client_json_send
106 | tonlib_json_client_send.restype = None
107 | tonlib_json_client_send.argtypes = [c_void_p, c_char_p]
108 | self._tonlib_json_client_send = tonlib_json_client_send
109 |
110 | tonlib_json_client_execute = tonlib.tonlib_client_json_execute
111 | tonlib_json_client_execute.restype = c_char_p
112 | tonlib_json_client_execute.argtypes = [c_void_p, c_char_p]
113 | self._tonlib_json_client_execute = tonlib_json_client_execute
114 |
115 | tonlib_json_client_destroy = tonlib.tonlib_client_json_destroy
116 | tonlib_json_client_destroy.restype = None
117 | tonlib_json_client_destroy.argtypes = [c_void_p]
118 | self._tonlib_json_client_destroy = tonlib_json_client_destroy
119 |
120 | self.futures = {}
121 | self.loop = loop
122 | self.ls_index = ls_index
123 | self._state = None # None, "finished", "crashed", "stuck"
124 |
125 | self.is_dead = False
126 |
127 | # creating tasks
128 | self.read_results_task = self.loop.create_task(self.read_results())
129 | self.del_expired_futures_task = self.loop.create_task(self.del_expired_futures_loop())
130 |
131 | def __del__(self):
132 | try:
133 | self._tonlib_json_client_destroy(self._client)
134 | except Exception as ee:
135 | logger.error(f"Exception in tonlibjson.__del__: {traceback.format_exc()}")
136 | raise RuntimeError(f'Error in tonlibjson.__del__: {ee}')
137 |
138 | def send(self, query):
139 | if not self._is_working:
140 | raise RuntimeError(f"TonLib failed with state: {self._state}")
141 |
142 | query = json.dumps(query).encode('utf-8')
143 | try:
144 | self._tonlib_json_client_send(self._client, query)
145 | except Exception as ee:
146 | logger.error(f"Exception in tonlibjson.send: {traceback.format_exc()}")
147 | raise RuntimeError(f'Error in tonlibjson.send: {ee}')
148 |
149 | def receive(self, timeout=10):
150 | result = None
151 | try:
152 | result = self._tonlib_json_client_receive(self._client, timeout) # time.sleep # asyncio.sleep
153 | except Exception as ee:
154 | logger.error(f"Exception in tonlibjson.receive: {traceback.format_exc()}")
155 | raise RuntimeError(f'Error in tonlibjson.receive: {ee}')
156 | if result:
157 | result = json.loads(result.decode('utf-8'))
158 | return result
159 |
160 | def execute(self, query, timeout=10):
161 | if not self._is_working:
162 | raise RuntimeError(f"TonLib failed with state: {self._state}")
163 |
164 | extra_id = "%s:%s:%s" % (time.time() + timeout, self.ls_index, random.random())
165 | query["@extra"] = extra_id
166 |
167 | future_result = self.loop.create_future()
168 | self.futures[extra_id] = future_result
169 |
170 | self.loop.run_in_executor(None, lambda: self.send(query))
171 | return future_result
172 |
173 | @property
174 | def _is_working(self):
175 | return self._state not in ('crashed', 'stuck', 'finished')
176 |
177 | async def close(self):
178 | try:
179 | self._state = 'finished'
180 | await self.read_results_task
181 | await self.del_expired_futures_task
182 | except Exception as ee:
183 | logger.error(f"Exception in tonlibjson.close: {traceback.format_exc()}")
184 | raise RuntimeError(f'Error in tonlibjson.close: {ee}')
185 |
186 | def cancel_futures(self, cancel_all=False):
187 | now = time.time()
188 | to_del = []
189 | for i in self.futures:
190 | if float(i.split(":")[0]) <= now or cancel_all:
191 | to_del.append(i)
192 | logger.debug(f'Pruning {len(to_del)} tasks')
193 | for i in to_del:
194 | self.futures[i].set_exception(TonlibNoResponse())
195 | self.futures.pop(i)
196 |
197 | # tasks
198 | async def read_results(self):
199 | timeout = 1
200 | delta = 5
201 | receive_func = functools.partial(self.receive, timeout)
202 | try:
203 | while self._is_working:
204 | # return reading result
205 | result = None
206 | try:
207 | result = await asyncio.wait_for(self.loop.run_in_executor(None, receive_func), timeout=timeout + delta)
208 | except asyncio.TimeoutError:
209 | logger.critical(f"Tonlib #{self.ls_index:03d} stuck (timeout error)")
210 | self._state = "stuck"
211 | except:
212 | logger.critical(f"Tonlib #{self.ls_index:03d} crashed: {traceback.format_exc()}")
213 | self._state = "crashed"
214 |
215 | if isinstance(result, dict) and ("@extra" in result) and (result["@extra"] in self.futures):
216 | try:
217 | if not self.futures[result["@extra"]].done():
218 | tonlib_error = parse_tonlib_error(result)
219 | if tonlib_error is not None:
220 | self.futures[result["@extra"]].set_exception(tonlib_error)
221 | else:
222 | self.futures[result["@extra"]].set_result(result)
223 | self.futures.pop(result["@extra"])
224 | except Exception as e:
225 | logger.error(f'Tonlib #{self.ls_index:03d} receiving result exception: {e}')
226 | except Exception as ee:
227 | logger.critical(f'Task read_results failed: {ee}')
228 |
229 | async def del_expired_futures_loop(self):
230 | try:
231 | while self._is_working:
232 | self.cancel_futures()
233 | await asyncio.sleep(1)
234 |
235 | self.cancel_futures(cancel_all=True)
236 | except Exception as ee:
237 | logger.critical(f'Task del_expired_futures_loop failed: {ee}')
238 |
--------------------------------------------------------------------------------
/pytonlib/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/pytonlib/utils/__init__.py
--------------------------------------------------------------------------------
/pytonlib/utils/address.py:
--------------------------------------------------------------------------------
1 | import base64
2 |
3 |
4 | bounceable_tag, non_bounceable_tag = b'\x11', b'\x51'
5 | b64_abc = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890+/')
6 | b64_abc_urlsafe = set('abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_-')
7 |
8 |
9 | def is_int(x):
10 | try:
11 | int(x)
12 | return True
13 | except:
14 | return False
15 |
16 |
17 | def is_hex(x):
18 | try:
19 | int(x, 16)
20 | return True
21 | except:
22 | return False
23 |
24 |
25 | def calcCRC(message):
26 | poly = 0x1021
27 | reg = 0
28 | message += b'\x00\x00'
29 | for byte in message:
30 | mask = 0x80
31 | while(mask > 0):
32 | reg <<= 1
33 | if byte & mask:
34 | reg += 1
35 | mask >>= 1
36 | if reg > 0xffff:
37 | reg &= 0xffff
38 | reg ^= poly
39 | return reg.to_bytes(2, "big")
40 |
41 |
42 | def account_forms(raw_form, test_only=False):
43 | workchain, address = raw_form.split(":")
44 | workchain, address = int(workchain), int(address, 16)
45 | address = address.to_bytes(32, "big")
46 | workchain_tag = b'\xff' if workchain == -1 else workchain.to_bytes(1, "big")
47 | btag = bounceable_tag
48 | nbtag = non_bounceable_tag
49 | if test_only:
50 | btag = (btag[0] | 0x80).to_bytes(1,'big')
51 | nbtag = (nbtag[0] | 0x80).to_bytes(1,'big')
52 | preaddr_b = btag + workchain_tag + address
53 | preaddr_u = nbtag + workchain_tag + address
54 | b64_b = base64.b64encode(preaddr_b+calcCRC(preaddr_b)).decode('utf8')
55 | b64_u = base64.b64encode(preaddr_u+calcCRC(preaddr_u)).decode('utf8')
56 | b64_b_us = base64.urlsafe_b64encode(preaddr_b+calcCRC(preaddr_b)).decode('utf8')
57 | b64_u_us = base64.urlsafe_b64encode(preaddr_u+calcCRC(preaddr_u)).decode('utf8')
58 | return {'raw_form': raw_form,
59 | 'bounceable': {'b64': b64_b, 'b64url': b64_b_us},
60 | 'non_bounceable': {'b64': b64_u, 'b64url': b64_u_us},
61 | 'given_type': 'raw_form',
62 | 'test_only': test_only}
63 |
64 |
65 | def read_friendly_address(address):
66 | urlsafe = False
67 | if set(address).issubset(b64_abc):
68 | address_bytes = base64.b64decode(address.encode('utf8'))
69 | elif set(address).issubset(b64_abc_urlsafe):
70 | urlsafe = True
71 | address_bytes = base64.urlsafe_b64decode(address.encode('utf8'))
72 | else:
73 | raise Exception("Not an address")
74 | if not calcCRC(address_bytes[:-2]) == address_bytes[-2:]:
75 | raise Exception("Wrong checksum")
76 | tag = address_bytes[0]
77 | if tag & 0x80:
78 | test_only = True
79 | tag = tag ^ 0x80
80 | else:
81 | test_only = False
82 | tag = tag.to_bytes(1, 'big')
83 | if tag == bounceable_tag:
84 | bounceable = True
85 | elif tag == non_bounceable_tag:
86 | bounceable = False
87 | else:
88 | raise Exception("Unknown tag")
89 | if address_bytes[1:2] == b'\xff':
90 | workchain = -1
91 | else:
92 | workchain = address_bytes[1]
93 | hx = hex(int.from_bytes(address_bytes[2:-2], "big"))[2:]
94 | hx = (64-len(hx))*"0"+hx
95 | raw_form = str(workchain)+":"+hx
96 | account = account_forms(raw_form, test_only)
97 | account['given_type'] = "friendly_"+("bounceable" if bounceable else "non_bounceable")
98 | return account
99 |
100 |
101 | def detect_address(unknown_form):
102 | if is_hex(unknown_form):
103 | return account_forms("-1:"+unknown_form)
104 | elif (":" in unknown_form) and is_int(unknown_form.split(":")[0]) and is_hex(unknown_form.split(":")[1]):
105 | return account_forms(unknown_form)
106 | else:
107 | return read_friendly_address(unknown_form)
108 |
109 |
110 | def prepare_address(unknown_form):
111 | address = detect_address(unknown_form)
112 | if 'non_bounceable' in address['given_type']:
113 | return address["non_bounceable"]["b64"]
114 | return address["bounceable"]["b64"]
115 |
--------------------------------------------------------------------------------
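The helpers above convert between the raw workchain:hex form and the user-friendly base64 forms (bounceable / non-bounceable, with a CRC16 checksum). A small round-trip sketch, reusing the account address from the notebook example (lowercased, since the raw hex is normalized to lower case on the way back):

    from pytonlib.utils.address import detect_address, prepare_address

    raw = '0:c4cac12f5bc7eef4cf5ec84ee68ccf860921a06ca0395ec558e53e37b13c3b08'
    forms = detect_address(raw)
    # forms contains 'raw_form', 'bounceable'/'non_bounceable' ('b64' and 'b64url'),
    # 'given_type' and 'test_only'

    friendly = prepare_address(raw)  # bounceable base64 form
    assert detect_address(friendly)['raw_form'] == raw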
/pytonlib/utils/common.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import functools
4 | import base64
5 | import asyncio
6 | import struct
7 | import crc16
8 | import codecs
9 | import logging
10 |
11 | from functools import wraps
12 |
13 | logger = logging.getLogger(__name__)
14 |
15 |
16 | def b64str_to_bytes(b64str):
17 | b64bytes = codecs.encode(b64str, "utf8")
18 | return codecs.decode(b64bytes, "base64")
19 |
20 |
21 | def b64str_to_hex(b64str):
22 | _bytes = b64str_to_bytes(b64str)
23 | _hex = codecs.encode(_bytes, "hex")
24 | return codecs.decode(_hex, "utf8")
25 |
26 |
27 | def hex_to_b64str(x):
28 | return codecs.encode(codecs.decode(x, 'hex'), 'base64').decode().replace("\n", "")
29 |
30 |
31 | def hash_to_hex(b64_or_hex_hash):
32 | """
33 | Detect encoding of transactions hash and if necessary convert it to hex.
34 | """
35 | if len(b64_or_hex_hash) == 44:
36 | # Hash is base64
37 | return b64str_to_hex(b64_or_hex_hash)
38 | if len(b64_or_hex_hash) == 64:
39 | # Hash is hex
40 | return b64_or_hex_hash
41 | raise ValueError("Invalid hash")
42 |
43 |
44 | def pubkey_b64_to_hex(b64_key):
45 | """
46 |     Convert tonlib's pubkey in format f'I{"H"*16}' (i.e. prefix:key) to the upper-hex filename under which it is stored in the keystore
47 | :param b64_key: base64 encoded 36 bytes of public key
48 | :return:
49 | """
50 | bin_key = base64.b64decode(b64_key)
51 | words = 18
52 | ints_key = struct.unpack(f'{"H"*words}', bin_key)
53 | key = [x.to_bytes(2, byteorder='little') for x in ints_key]
54 | key = b''.join(key)
55 | key = [((x & 0x0F) << 4 | (x & 0xF0) >> 4).to_bytes(1, byteorder='little') for x in key]
56 | name = b''.join(key)
57 | return name.hex().upper()
58 |
59 |
60 | def parallelize(f):
61 | @functools.wraps(f)
62 | def wrapper(self, *args, **kwds):
63 | if self._style == 'futures':
64 | return self._executor.submit(f, self, *args, **kwds)
65 | if self._style == 'asyncio':
66 | loop = asyncio.get_event_loop()
67 | return loop.run_in_executor(self._executor, functools.partial(f, self, *args, **kwds))
68 | raise RuntimeError(self._style)
69 | return wrapper
70 |
71 |
72 | def coro_result(coro):
73 | return asyncio.get_event_loop().run_until_complete(coro)
74 |
75 |
76 | def raw_to_userfriendly(address, tag=0x11):
77 | workchain_id, key = address.split(':')
78 | workchain_id = int(workchain_id)
79 | key = bytearray.fromhex(key)
80 |
81 | short_ints = [j * 256 + i for i, j in zip(*[iter(key)] * 2)]
82 | payload = struct.pack(f'Bb{"H"*16}', tag, workchain_id, *short_ints)
83 | crc = crc16.crc16xmodem(payload)
84 |
85 | e_key = payload + struct.pack('>H', crc)
86 | return base64.urlsafe_b64encode(e_key).decode("utf-8")
87 |
88 |
89 | def userfriendly_to_raw(address):
90 | k = base64.urlsafe_b64decode(address)[1:34]
91 | workchain_id = struct.unpack('b', k[:1])[0]
92 | key = k[1:].hex().upper()
93 | return f'{workchain_id}:{key}'
94 |
95 |
96 | def str_b64encode(s):
97 | return base64.b64encode(s.encode('utf-8')).decode('utf-8') if s and isinstance(s, str) else None
98 |
99 |
100 | # repeat
101 | def retry_async(repeats=3, last_archval=False, raise_error=True):
102 | def decorator(func):
103 | @wraps(func)
104 | async def wrapper(*args, **kwargs):
105 | result = None
106 | exception = None
107 | for i in range(repeats):
108 | try:
109 | kwargs_loc = kwargs.copy()
110 | if i == repeats - 1 and last_archval:
111 | logger.info('Retry with archival node')
112 | kwargs_loc['archival'] = True
113 | result = await func(*args, **kwargs_loc)
114 | exception = None
115 | except Exception as ee:
116 | logger.warning(f'Retry. Attempt {i+1}')
117 | exception = ee
118 | if exception is not None and raise_error:
119 | raise exception
120 | return result
121 | #end def
122 | return wrapper
123 | return decorator
124 |
--------------------------------------------------------------------------------
/pytonlib/utils/tlb.py:
--------------------------------------------------------------------------------
1 | from tvm_valuetypes.cell import deserialize_boc, Cell
2 | from tvm_valuetypes.dict_utils import parse_hashmap
3 | import codecs
4 | from copy import copy
5 | from bitarray import bitarray
6 | from bitarray.util import ba2int, ba2hex, hex2ba
7 | import math
8 | import json
9 | from hashlib import sha256
10 |
11 | class Slice:
12 | def __init__(self, cell: Cell):
13 | self._data = cell.data.data
14 | self._data_offset = 0
15 | self._refs = cell.refs
16 | self._refs_offset = 0
17 |
18 | def prefetch_next(self, bits_count: int):
19 | return self._data[self._data_offset : self._data_offset + bits_count]
20 |
21 | def read_next(self, bits_count: int):
22 | result = self._data[self._data_offset : self._data_offset + bits_count]
23 | self._data_offset += bits_count
24 | return result
25 |
26 | def read_next_ref(self):
27 | cell = self._refs[self._refs_offset]
28 | self._refs_offset += 1
29 | return Slice(cell)
30 |
31 | def read_uint(self, bits_count: int):
32 | return ba2int(self.read_next(bits_count), signed=False)
33 |
34 | def read_var_uint(self, max_len: int):
35 | """
36 | var_uint$_ {n:#} len:(#< n) value:(uint (len * 8))
37 | = VarUInteger n;
38 | """
39 | header_bits = math.ceil(math.log2(max_len))
40 | uint_len = ba2int(self.read_next(header_bits), signed=False)
41 | if uint_len == 0:
42 | return 0
43 | return ba2int(self.read_next(uint_len * 8), signed=False)
44 |
45 | def bits_left(self):
46 | return len(self._data) - self._data_offset
47 |
48 | def refs_left(self):
49 | return len(self._refs) - self._refs_offset
50 |
51 | def raise_if_not_empty(self):
52 | assert self.bits_left() == 0, f"Parsing error - slice has {self.bits_left()} unread bits left."
53 | assert self.refs_left() == 0, f"Parsing error - slice has {self.refs_left()} unread refs left."
54 |
55 |
56 | class CurrencyCollection:
57 | """
58 | nanograms$_ amount:(VarUInteger 16) = Grams;
59 | extra_currencies$_ dict:(HashmapE 32 (VarUInteger 32))
60 | = ExtraCurrencyCollection;
61 | currencies$_ grams:Grams other:ExtraCurrencyCollection
62 | = CurrencyCollection;
63 | """
64 | def __init__(self, slice: Slice):
65 | self.grams = slice.read_var_uint(16)
66 | extra_currency_collection_empty = slice.read_next(1)
67 | if extra_currency_collection_empty == bitarray('1'):
68 | extra_currency_collection = slice.read_next_ref() # TODO: parse hashmap
69 |
70 |
71 | class TrStoragePhase:
72 | """
73 | tr_phase_storage$_ storage_fees_collected:Grams
74 | storage_fees_due:(Maybe Grams)
75 | status_change:AccStatusChange
76 | = TrStoragePhase;
77 | """
78 | def __init__(self, cell_slice: Slice):
79 | self.storage_fees_collected = cell_slice.read_var_uint(16)
80 | self.storage_fees_due = cell_slice.read_var_uint(16) if cell_slice.read_next(1).any() else None
81 | account_status_change = cell_slice.read_next(1)
82 | if account_status_change == bitarray('0'):
83 | self.status_change = 'acst_unchanged'
84 | else:
85 | account_status_change += cell_slice.read_next(1)
86 | if account_status_change == bitarray('10'):
87 | self.status_change = 'acst_frozen'
88 | else:
89 | self.status_change = 'acst_deleted'
90 |
91 | class TrCreditPhase:
92 | """
93 | tr_phase_credit$_ due_fees_collected:(Maybe Grams)
94 | credit:CurrencyCollection = TrCreditPhase;
95 | """
96 | def __init__(self, cell_slice: Slice):
97 | self.due_fees_collected = cell_slice.read_var_uint(16) if cell_slice.read_next(1).any() else None
98 | self.credit = CurrencyCollection(cell_slice)
99 |
100 | class TrComputePhase:
101 | """
102 | tr_phase_compute_skipped$0 reason:ComputeSkipReason
103 | = TrComputePhase;
104 | tr_phase_compute_vm$1 success:Bool msg_state_used:Bool
105 | account_activated:Bool gas_fees:Grams
106 | ^[ gas_used:(VarUInteger 7)
107 | gas_limit:(VarUInteger 7) gas_credit:(Maybe (VarUInteger 3))
108 | mode:int8 exit_code:int32 exit_arg:(Maybe int32)
109 | vm_steps:uint32
110 | vm_init_state_hash:bits256 vm_final_state_hash:bits256 ]
111 | = TrComputePhase;
112 | cskip_no_state$00 = ComputeSkipReason;
113 | cskip_bad_state$01 = ComputeSkipReason;
114 | cskip_no_gas$10 = ComputeSkipReason;
115 | """
116 | def __init__(self, cell_slice: Slice):
117 | if cell_slice.read_next(1).any():
118 | self.type = 'tr_phase_compute_vm'
119 | self.success = cell_slice.read_next(1).any()
120 | self.msg_state_used = cell_slice.read_next(1).any()
121 | self.account_activated = cell_slice.read_next(1).any()
122 | self.gas_fees = cell_slice.read_var_uint(16)
123 |
124 | subcell_slice = cell_slice.read_next_ref()
125 | self.gas_used = subcell_slice.read_var_uint(7)
126 | self.gas_limit = subcell_slice.read_var_uint(7)
127 | self.gas_credit = subcell_slice.read_var_uint(3) if subcell_slice.read_next(1).any() else None
128 | self.mode = ba2int(subcell_slice.read_next(8), signed=True)
129 | self.exit_code = ba2int(subcell_slice.read_next(32), signed=True)
130 | self.exit_arg = ba2int(subcell_slice.read_next(32), signed=True) if subcell_slice.read_next(1).any() else None
131 | self.vm_steps = ba2int(subcell_slice.read_next(32), signed=False)
132 | self.vm_init_state_hash = ba2hex(subcell_slice.read_next(256))
133 | self.vm_final_state_hash = ba2hex(subcell_slice.read_next(256))
134 | assert subcell_slice.bits_left() == 0
135 | else:
136 | self.type = 'tr_phase_compute_skipped'
137 | reason = cell_slice.read_next(2)
138 | if reason == bitarray('00'):
139 | self.reason = 'cskip_no_state'
140 | elif reason == bitarray('01'):
141 | self.reason = 'cskip_bad_state'
142 | elif reason == bitarray('10'):
143 | self.reason = 'cskip_no_gas'
144 |
145 | class StorageUsedShort:
146 | """
147 | storage_used_short$_ cells:(VarUInteger 7)
148 | bits:(VarUInteger 7) = StorageUsedShort;
149 | """
150 | def __init__(self, cell_slice: Slice):
151 | self.cells = cell_slice.read_var_uint(7)
152 | self.bits = cell_slice.read_var_uint(7)
153 |
154 | class TrActionPhase:
155 | """
156 | tr_phase_action$_ success:Bool valid:Bool no_funds:Bool
157 | status_change:AccStatusChange
158 | total_fwd_fees:(Maybe Grams) total_action_fees:(Maybe Grams)
159 | result_code:int32 result_arg:(Maybe int32) tot_actions:uint16
160 | spec_actions:uint16 skipped_actions:uint16 msgs_created:uint16
161 | action_list_hash:bits256 tot_msg_size:StorageUsedShort
162 | = TrActionPhase;
163 | """
164 | def __init__(self, cell_slice: Slice):
165 | self.success = cell_slice.read_next(1).any()
166 | self.valid = cell_slice.read_next(1).any()
167 | self.no_funds = cell_slice.read_next(1).any()
168 | account_status_change = cell_slice.read_next(1)
169 | if account_status_change == bitarray('0'):
170 | self.status_change = 'acst_unchanged'
171 | else:
172 | account_status_change += cell_slice.read_next(1)
173 | if account_status_change == bitarray('10'):
174 | self.status_change = 'acst_frozen'
175 | else:
176 | self.status_change = 'acst_deleted'
177 | self.total_fwd_fees = cell_slice.read_var_uint(16) if cell_slice.read_next(1).any() else None
178 | self.total_action_fees = cell_slice.read_var_uint(16) if cell_slice.read_next(1).any() else None
179 | self.result_code = ba2int(cell_slice.read_next(32), signed=True)
180 | self.result_arg = ba2int(cell_slice.read_next(32), signed=True) if cell_slice.read_next(1).any() else None
181 | self.tot_actions = ba2int(cell_slice.read_next(16), signed=False)
182 | self.spec_actions = ba2int(cell_slice.read_next(16), signed=False)
183 | self.skipped_actions = ba2int(cell_slice.read_next(16), signed=False)
184 | self.msgs_created = ba2int(cell_slice.read_next(16), signed=False)
185 | self.action_list_hash = ba2hex(cell_slice.read_next(256))
186 | self.tot_msg_size = StorageUsedShort(cell_slice)
187 |
188 | class TrBouncePhase:
189 | """
190 | tr_phase_bounce_negfunds$00 = TrBouncePhase;
191 | tr_phase_bounce_nofunds$01 msg_size:StorageUsedShort
192 | req_fwd_fees:Grams = TrBouncePhase;
193 | tr_phase_bounce_ok$1 msg_size:StorageUsedShort
194 | msg_fees:Grams fwd_fees:Grams = TrBouncePhase;
195 | """
196 | def __init__(self, cell_slice: Slice):
197 | prefix = cell_slice.read_next(1)
198 | if prefix == bitarray('1'):
199 | self.type = 'tr_phase_bounce_ok'
200 | self.msg_size = StorageUsedShort(cell_slice)
201 | self.msg_fees = cell_slice.read_var_uint(16)
202 | self.fwd_fees = cell_slice.read_var_uint(16)
203 | else:
204 | prefix += cell_slice.read_next(1)
205 | if prefix == bitarray('00'):
206 | self.type = 'tr_phase_bounce_negfunds'
207 | else:
208 | self.type = 'tr_phase_bounce_nofunds'
209 | self.msg_size = StorageUsedShort(cell_slice)
210 | self.req_fwd_fees = cell_slice.read_var_uint(16)
211 |
212 | class SplitMergeInfo:
213 | """
214 | split_merge_info$_ cur_shard_pfx_len:(## 6)
215 | acc_split_depth:(## 6) this_addr:bits256 sibling_addr:bits256
216 | = SplitMergeInfo;
217 | """
218 | def __init__(self, cell_slice: Slice):
219 | self.cur_shard_pfx_len = ba2int(cell_slice.read_next(6), signed=False)
220 | self.acc_split_depth = ba2int(cell_slice.read_next(6), signed=False)
221 | self.this_addr = ba2hex(cell_slice.read_next(256))
222 | self.sibling_addr = ba2hex(cell_slice.read_next(256))
223 |
224 | class TransactionDescr:
225 | """
226 | trans_ord$0000 credit_first:Bool
227 | storage_ph:(Maybe TrStoragePhase)
228 | credit_ph:(Maybe TrCreditPhase)
229 | compute_ph:TrComputePhase action:(Maybe ^TrActionPhase)
230 | aborted:Bool bounce:(Maybe TrBouncePhase)
231 | destroyed:Bool
232 | = TransactionDescr;
233 |
234 | trans_storage$0001 storage_ph:TrStoragePhase
235 | = TransactionDescr;
236 |
237 | trans_tick_tock$001 is_tock:Bool storage_ph:TrStoragePhase
238 | compute_ph:TrComputePhase action:(Maybe ^TrActionPhase)
239 | aborted:Bool destroyed:Bool = TransactionDescr;
240 |
241 | trans_split_prepare$0100 split_info:SplitMergeInfo
242 | storage_ph:(Maybe TrStoragePhase)
243 | compute_ph:TrComputePhase action:(Maybe ^TrActionPhase)
244 | aborted:Bool destroyed:Bool
245 | = TransactionDescr;
246 |
247 | trans_split_install$0101 split_info:SplitMergeInfo
248 | prepare_transaction:^Transaction
249 | installed:Bool = TransactionDescr;
250 |
251 | trans_merge_prepare$0110 split_info:SplitMergeInfo
252 | storage_ph:TrStoragePhase aborted:Bool
253 | = TransactionDescr;
254 |
255 | trans_merge_install$0111 split_info:SplitMergeInfo
256 | prepare_transaction:^Transaction
257 | storage_ph:(Maybe TrStoragePhase)
258 | credit_ph:(Maybe TrCreditPhase)
259 | compute_ph:TrComputePhase action:(Maybe ^TrActionPhase)
260 | aborted:Bool destroyed:Bool
261 | = TransactionDescr;
262 | """
263 | def __init__(self, cell_slice: Slice):
264 | prefix = cell_slice.read_next(3)
265 | if prefix == bitarray('001'):
266 | self._init_tick_tock(cell_slice)
267 | else:
268 | prefix += cell_slice.read_next(1)
269 | if prefix == bitarray('0000'):
270 | self._init_ord(cell_slice)
271 | elif prefix == bitarray('0001'):
272 | self._init_storage(cell_slice)
273 | elif prefix == bitarray('0100'):
274 | self._init_split_prepare(cell_slice)
275 | elif prefix == bitarray('0110'):
276 | self._init_merge_prepare(cell_slice)
277 | elif prefix == bitarray('0111'):
278 | self._init_merge_install(cell_slice)
279 |
280 | def _init_ord(self, cell_slice: Slice):
281 | self.type = 'trans_ord'
282 | self.credit_first = cell_slice.read_next(1).any()
283 | self.storage_ph = TrStoragePhase(cell_slice) if cell_slice.read_next(1).any() else None
284 | self.credit_ph = TrCreditPhase(cell_slice) if cell_slice.read_next(1).any() else None
285 | self.compute_ph = TrComputePhase(cell_slice)
286 | self.action = TrActionPhase(cell_slice.read_next_ref()) if cell_slice.read_next(1).any() else None
287 | self.aborted = cell_slice.read_next(1).any()
288 | self.bounce = TrBouncePhase(cell_slice) if cell_slice.read_next(1).any() else None
289 | self.destroyed = cell_slice.read_next(1).any()
290 |
291 | def _init_storage(self, cell_slice: Slice):
292 | self.type = 'trans_storage'
293 | self.storage_ph = TrStoragePhase(cell_slice)
294 |
295 | def _init_tick_tock(self, cell_slice: Slice):
296 | self.type = 'trans_tick_tock'
297 | self.is_tock = cell_slice.read_next(1).any()
298 | self.storage_ph = TrStoragePhase(cell_slice)
299 | self.compute_ph = TrComputePhase(cell_slice)
300 | self.action = TrActionPhase(cell_slice.read_next_ref()) if cell_slice.read_next(1).any() else None
301 | self.aborted = cell_slice.read_next(1).any()
302 | self.destroyed = cell_slice.read_next(1).any()
303 |
304 | def _init_split_prepare(self, cell_slice: Slice):
305 | self.type = 'trans_split_prepare'
306 | self.split_info = SplitMergeInfo(cell_slice)
307 | self.storage_ph = TrStoragePhase(cell_slice) if cell_slice.read_next(1).any() else None
308 | self.compute_ph = TrComputePhase(cell_slice)
309 | self.action = TrActionPhase(cell_slice.read_next_ref()) if cell_slice.read_next(1).any() else None
310 | self.aborted = cell_slice.read_next(1).any()
311 | self.destroyed = cell_slice.read_next(1).any()
312 |
313 | def _init_merge_prepare(self, cell_slice: Slice):
314 | self.type = 'trans_merge_prepare'
315 | self.split_info = SplitMergeInfo(cell_slice)
316 | self.storage_ph = TrStoragePhase(cell_slice)
317 | self.aborted = cell_slice.read_next(1).any()
318 |
319 | def _init_merge_install(self, cell_slice: Slice):
320 | self.type = 'trans_merge_install'
321 | self.split_info = SplitMergeInfo(cell_slice)
322 | self.prepare_transaction = Transaction(cell_slice.read_next_ref())
323 | self.storage_ph = TrStoragePhase(cell_slice) if cell_slice.read_next(1).any() else None
324 | self.credit_ph = TrCreditPhase(cell_slice) if cell_slice.read_next(1).any() else None
325 | self.compute_ph = TrComputePhase(cell_slice)
326 | self.action = TrActionPhase(cell_slice.read_next_ref()) if cell_slice.read_next(1).any() else None
327 | self.aborted = cell_slice.read_next(1).any()
328 | self.destroyed = cell_slice.read_next(1).any()
329 |
330 | class AccountStatus:
331 | """
332 | acc_state_uninit$00 = AccountStatus;
333 | acc_state_frozen$01 = AccountStatus;
334 | acc_state_active$10 = AccountStatus;
335 | acc_state_nonexist$11 = AccountStatus;
336 | """
337 | def __init__(self, cell_slice):
338 | prefix = cell_slice.read_next(2)
339 | if prefix == bitarray('00'):
340 | self.type = 'acc_state_uninit'
341 | elif prefix == bitarray('01'):
342 | self.type = 'acc_state_frozen'
343 | elif prefix == bitarray('10'):
344 | self.type = 'acc_state_active'
345 | elif prefix == bitarray('11'):
346 | self.type = 'acc_state_nonexist'
347 | class HASH_UPDATE:
348 | """
349 | update_hashes#72 {X:Type} old_hash:bits256 new_hash:bits256
350 | = HASH_UPDATE X;
351 | """
352 | def __init__(self, cell_slice):
353 | prefix = cell_slice.read_next(8)
354 | if prefix != bitarray('01110010'):
355 | raise ValueError(f'HASH_UPDATE must have prefix 0x72 (but has {prefix})')
356 | self.old_hash = ba2hex(cell_slice.read_next(256))
357 | self.new_hash = ba2hex(cell_slice.read_next(256))
358 |
359 | class Transaction:
360 | """
361 | transaction$0111 account_addr:bits256 lt:uint64
362 | prev_trans_hash:bits256 prev_trans_lt:uint64 now:uint32
363 | outmsg_cnt:uint15
364 | orig_status:AccountStatus end_status:AccountStatus
365 | ^[ in_msg:(Maybe ^(Message Any)) out_msgs:(HashmapE 15 ^(Message Any)) ]
366 | total_fees:CurrencyCollection state_update:^(HASH_UPDATE Account)
367 | description:^TransactionDescr = Transaction;
368 | """
369 | def __init__(self, cell_slice):
370 | prefix = cell_slice.read_next(4)
371 | if prefix != bitarray('0111'):
372 | raise ValueError(f'Transaction must have prefix 0111 (but has {prefix})')
373 |
374 | self.account_addr = ba2hex(cell_slice.read_next(256))
375 | self.lt = ba2int(cell_slice.read_next(64), signed=False)
376 | self.prev_trans_hash = ba2hex(cell_slice.read_next(256))
377 | self.prev_trans_lt = ba2int(cell_slice.read_next(64), signed=False)
378 | self.now = ba2int(cell_slice.read_next(32), signed=False)
379 | self.outmsg_cnt = ba2int(cell_slice.read_next(15), signed=False)
380 |
381 | self.orig_status = AccountStatus(cell_slice)
382 | self.end_status = AccountStatus(cell_slice)
383 |
384 | messages = cell_slice.read_next_ref() # TODO: parse messages
385 |
386 | self.total_fees = CurrencyCollection(cell_slice)
387 |
388 | state_update_cell_slice = cell_slice.read_next_ref() # TODO: parse state update
389 | self.state_update = HASH_UPDATE(state_update_cell_slice)
390 | state_update_cell_slice.raise_if_not_empty()
391 |
392 | description_cell_slice = cell_slice.read_next_ref()
393 | self.description = TransactionDescr(description_cell_slice)
394 | description_cell_slice.raise_if_not_empty()
395 |
396 | class MsgAddress:
397 | def parse(cell_slice):
398 | prefix = cell_slice.prefetch_next(2)
399 | if prefix == bitarray('00') or prefix == bitarray('01'):
400 | return MsgAddressExt(cell_slice)
401 | else:
402 | return MsgAddressInt(cell_slice)
403 |
404 | class MsgAddressExt:
405 | """
406 | addr_none$00 = MsgAddressExt;
407 | addr_extern$01 len:(## 9) external_address:(bits len)
408 | = MsgAddressExt;
409 | """
410 | def __init__(self, cell_slice):
411 | prefix = cell_slice.read_next(2)
412 | if prefix == bitarray('00'):
413 | self.type = 'addr_none'
414 | elif prefix == bitarray('01'):
415 | self.type = 'addr_extern'
416 | cell_slice.read_next(cell_slice.bits_left()) #TODO: parse len and external_address
417 |
418 | class MsgAddressInt:
419 | """
420 | anycast_info$_ depth:(#<= 30) { depth >= 1 }
421 | rewrite_pfx:(bits depth) = Anycast;
422 | addr_std$10 anycast:(Maybe Anycast)
423 | workchain_id:int8 address:bits256 = MsgAddressInt;
424 | addr_var$11 anycast:(Maybe Anycast) addr_len:(## 9)
425 | workchain_id:int32 address:(bits addr_len) = MsgAddressInt;
426 | """
427 | def __init__(self, cell_slice):
428 | prefix = cell_slice.read_next(2)
429 | if prefix == bitarray('10'):
430 | self.type = 'addr_std'
431 | elif prefix == bitarray('11'):
432 | self.type = 'addr_var'
433 | else:
434 | raise ValueError(f'MsgAddressInt must have prefix 10 or 11 (but has {prefix})')
435 |
436 | if cell_slice.read_next(1).any():
437 | raise NotImplementedError('Anycast not supported yet')
438 |
439 | if self.type == 'addr_std':
440 | self.workchain_id = ba2int(cell_slice.read_next(8), signed=True)
441 | self.address = ba2hex(cell_slice.read_next(256))
442 | else:
443 |             addr_len = ba2int(cell_slice.read_next(9), signed=False)  # addr_len:(## 9)
444 | self.workchain_id = ba2int(cell_slice.read_next(32), signed=True)
445 | self.address = ba2hex(cell_slice.read_next(addr_len))
446 |
447 | class TokenData:
448 | attributes = ['uri', 'name', 'description', 'image', 'image_data', 'symbol', 'decimals', 'amount_style', 'render_type']
449 | attributes_hashes = {}
450 | for attr in attributes:
451 | attributes_hashes[sha256(attr.encode('utf-8')).hexdigest()] = attr
452 |
453 | def __init__(self, cell_slice):
454 | prefix = cell_slice.read_next(8)
455 | if prefix == bitarray('00000000'):
456 | self.type = 'onchain'
457 | if cell_slice.read_next(1).any():
458 | child_slice = cell_slice.read_next_ref()
459 | hashmap_cell = Cell()
460 | hashmap_cell.data.data = child_slice._data
461 | hashmap_cell.refs = child_slice._refs
462 | hashmap = {}
463 | parse_hashmap(hashmap_cell, 256, hashmap, bitarray())
464 | self.data = self._parse_attributes(hashmap)
465 | else:
466 | self.data = {}
467 | elif prefix == bitarray('00000001'):
468 | self.type = 'offchain'
469 | data = cell_slice.read_next(cell_slice.bits_left())
470 | while cell_slice.refs_left() > 0:
471 | cell_slice = cell_slice.read_next_ref()
472 | data += cell_slice.read_next(cell_slice.bits_left())
473 | self.data = data.tobytes().decode('ascii')
474 | else:
475 | raise ValueError('Unexpected content prefix')
476 |
477 | def _parse_attributes(self, hashmap: dict):
478 | res = {}
479 | for attr_hash_bitstr, value_cell in hashmap.items():
480 | attr_hash_hex = ba2hex(bitarray(attr_hash_bitstr))
481 | attr_name = TokenData.attributes_hashes.get(attr_hash_hex)
482 | if attr_name is None:
483 | attr_name = attr_hash_hex
484 | res[attr_name] = self._parse_content_data(value_cell)
485 | return res
486 |
487 | def _parse_content_data(self, cell: Cell, encoding='utf-8'):
488 | if len(cell.data.data) > 0:
489 | # TODO: Check if it complies with Token Data standard
490 | cell_slice = Slice(cell)
491 | else:
492 | cell_slice = Slice(cell.refs[0])
493 | prefix = cell_slice.read_next(8)
494 | if prefix == bitarray('00000000'):
495 | #snake
496 | data = cell_slice.read_next(cell_slice.bits_left())
497 | while cell_slice.refs_left() > 0:
498 | cell_slice = cell_slice.read_next_ref()
499 | data += cell_slice.read_next(cell_slice.bits_left())
500 | return data.tobytes().decode(encoding)
501 | elif prefix == bitarray('00000001'):
502 | #chunks
503 | data = bitarray()
504 | if cell_slice.read_next(1).any():
505 | child_slice = cell_slice.read_next_ref()
506 | hashmap_cell = Cell()
507 | hashmap_cell.data.data = child_slice._data
508 | hashmap_cell.refs = child_slice._refs
509 | hashmap = {}
510 | parse_hashmap(hashmap_cell, 32, hashmap, bitarray())
511 | for ind in range(len(hashmap)):
512 | ind_bitstr = f'{ind:032b}'
513 | chunk_cell = hashmap[ind_bitstr]
514 | assert chunk_cell.data.data == bitarray()
515 | assert len(chunk_cell.refs) == 1
516 | data += chunk_cell.refs[0].data.data
517 | else:
518 | raise ValueError(f'Unexpected content data prefix: {prefix}')
519 | return data.tobytes().decode(encoding)
520 |
521 |
522 | class DNSRecord:
523 | """
524 | dns_smc_address#9fd3 smc_addr:MsgAddressInt flags:(## 8) { flags <= 1 }
525 | cap_list:flags . 0?SmcCapList = DNSRecord;
526 | dns_next_resolver#ba93 resolver:MsgAddressInt = DNSRecord;
527 | dns_adnl_address#ad01 adnl_addr:bits256 flags:(## 8) { flags <= 1 }
528 | proto_list:flags . 0?ProtoList = DNSRecord;
529 | dns_storage_address#7473 bag_id:bits256 = DNSRecord;
530 | """
531 | def __init__(self, cell_slice):
532 | prefix = ba2hex(cell_slice.read_next(16))
533 | if prefix == '9fd3':
534 | self.smc_addr = MsgAddressInt(cell_slice)
535 | flags = ba2int(cell_slice.read_next(8))
536 | if flags & 1:
537 | #TODO: parse SmcCapList
538 | cell_slice.read_next(cell_slice.bits_left())
539 | elif prefix == 'ad01':
540 | self.adnl_addr = ba2hex(cell_slice.read_next(256))
541 | flags = ba2int(cell_slice.read_next(8))
542 | if flags & 1:
543 | #TODO: parse ProtoList
544 | cell_slice.read_next(cell_slice.bits_left())
545 | elif prefix == 'ba93':
546 | self.resolver = MsgAddressInt(cell_slice)
547 | elif prefix == '7473':
548 | self.bag_id = ba2hex(cell_slice.read_next(256))
549 | else:
550 | raise ValueError(f'Unexpected content data prefix: {prefix}')
551 |
552 | class DNSRecordSet:
553 | attributes = ['wallet', 'site', 'storage', 'dns_next_resolver']
554 | attributes_hashes = {}
555 | for attr in attributes:
556 | attributes_hashes[sha256(attr.encode('utf-8')).hexdigest()] = attr
557 |
558 | def __init__(self, cell_slice):
559 | prefix = cell_slice.read_next(8)
560 | assert prefix == bitarray('00000000'), 'dns data expected to be onchain'
561 | if cell_slice.read_next(1).any():
562 | child_slice = cell_slice.read_next_ref()
563 | hashmap_cell = Cell()
564 | hashmap_cell.data.data = child_slice._data
565 | hashmap_cell.refs = child_slice._refs
566 | hashmap = {}
567 | parse_hashmap(hashmap_cell, 256, hashmap, bitarray())
568 | self.data = self._parse_attributes(hashmap)
569 | else:
570 | self.data = {}
571 |
572 | def _parse_attributes(self, hashmap: dict):
573 | res = {}
574 | for attr_hash_bitstr, value_cell in hashmap.items():
575 | attr_hash_hex = ba2hex(bitarray(attr_hash_bitstr))
576 | attr_name = DNSRecordSet.attributes_hashes.get(attr_hash_hex)
577 | if attr_name is None:
578 | attr_name = attr_hash_hex
579 |             assert len(value_cell.refs) == 1, 'dict value should contain exactly 1 ref'
580 | res[attr_name] = DNSRecord(Slice(value_cell.refs[0]))
581 | return res
582 |
583 | class TextCommentMessage:
584 | def __init__(self, cell_slice):
585 | prefix = cell_slice.read_next(32)
586 | if prefix != hex2ba('00000000'):
587 | raise ValueError('Unexpected content prefix')
588 | if cell_slice.prefetch_next(8) == hex2ba('ff'):
589 | raise ValueError('Text comment cannot start with byte 0xff')
590 | data = cell_slice.read_next(cell_slice.bits_left()).tobytes()
591 | self.text_comment = codecs.decode(data, 'utf8')
592 | while cell_slice.refs_left() > 0:
593 | if cell_slice.refs_left() > 1:
594 | raise ValueError('Unexpected number of subcells in simple comment message')
595 | cell_slice = cell_slice.read_next_ref()
596 | data = cell_slice.read_next(cell_slice.bits_left()).tobytes()
597 | self.text_comment += codecs.decode(data, 'utf8')
598 |
599 | self.text_comment = self.text_comment.replace('\x00', '')
600 |
601 | class BinaryCommentMessage:
602 | def __init__(self, cell_slice):
603 | prefix = cell_slice.read_next(40)
604 | if prefix != hex2ba('00000000ff'):
605 | raise ValueError('Unexpected content prefix')
606 | data_ba = cell_slice.read_next(cell_slice.bits_left())
607 | while cell_slice.refs_left() > 0:
608 | if cell_slice.refs_left() > 1:
609 | raise ValueError('Unexpected number of subcells in binary comment message')
610 | cell_slice = cell_slice.read_next_ref()
611 | data_ba += cell_slice.read_next(cell_slice.bits_left())
612 | self.hex_comment = ba2hex(data_ba)
613 |
614 | class CommentMessage:
615 | @classmethod
616 | def parse(cls, cell_slice: Slice):
617 | if cell_slice.prefetch_next(32) != hex2ba('00000000'):
618 | raise ValueError('Unexpected content prefix')
619 | if cell_slice.bits_left() >= 40 and cell_slice.prefetch_next(40)[32:40] == hex2ba('ff'):
620 | return BinaryCommentMessage(cell_slice)
621 | else:
622 | return TextCommentMessage(cell_slice)
623 |
624 | class NftTransferMessage:
625 | def __init__(self, cell_slice):
626 | prefix = cell_slice.read_next(32)
627 | if prefix != hex2ba('5fcc3d14'):
628 | raise ValueError('Unexpected content prefix')
629 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
630 | self.new_owner = MsgAddress.parse(cell_slice)
631 | self.response_destination = MsgAddress.parse(cell_slice)
632 | if cell_slice.read_next(1).any():
633 | cell_slice.read_next_ref() #TODO: read custom_payload
634 | self.forward_amount = cell_slice.read_var_uint(16)
635 | if cell_slice.read_next(1).any(): #TODO: read forward_payload
636 | cell_slice.read_next_ref()
637 | else:
638 | cell_slice.read_next(cell_slice.bits_left())
639 |
640 | class NftOwnershipAssignedMessage:
641 | def __init__(self, cell_slice):
642 | prefix = cell_slice.read_next(32)
643 | if prefix != hex2ba('05138d91'):
644 | raise ValueError('Unexpected content prefix')
645 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
646 | self.prev_owner = MsgAddress.parse(cell_slice)
647 | if cell_slice.read_next(1).any(): #TODO: read forward_payload
648 | cell_slice.read_next_ref()
649 | else:
650 | cell_slice.read_next(cell_slice.bits_left())
651 |
652 | class NftExcessesMessage:
653 | def __init__(self, cell_slice):
654 | prefix = cell_slice.read_next(32)
655 | if prefix != hex2ba('d53276db'):
656 | raise ValueError('Unexpected content prefix')
657 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
658 |
659 | class NftGetStaticDataMessage:
660 | def __init__(self, cell_slice):
661 | prefix = cell_slice.read_next(32)
662 | if prefix != hex2ba('2fcb26a2'):
663 | raise ValueError('Unexpected content prefix')
664 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
665 |
666 | class NftReportStaticDataMessage:
667 | def __init__(self, cell_slice):
668 | prefix = cell_slice.read_next(32)
669 | if prefix != hex2ba('8b771735'):
670 | raise ValueError('Unexpected content prefix')
671 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
672 | self.index = ba2int(cell_slice.read_next(256), signed=False)
673 | self.collection = MsgAddress.parse(cell_slice)
674 |
675 | class JettonTransferMessage:
676 | def __init__(self, cell_slice):
677 | prefix = cell_slice.read_next(32)
678 | if prefix != hex2ba('0f8a7ea5'):
679 | raise ValueError('Unexpected content prefix')
680 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
681 | self.amount = cell_slice.read_var_uint(16)
682 | self.destination = MsgAddress.parse(cell_slice)
683 | self.response_destination = MsgAddress.parse(cell_slice)
684 | if cell_slice.read_next(1).any():
685 | cell_slice.read_next_ref() #TODO: read custom_payload
686 | self.forward_ton_amount = cell_slice.read_var_uint(16)
687 | if cell_slice.read_next(1).any(): #TODO: read forward_payload
688 | cell_slice.read_next_ref()
689 | else:
690 | cell_slice.read_next(cell_slice.bits_left())
691 |
692 |
693 | class JettonTransferNotificationMessage:
694 | def __init__(self, cell_slice):
695 | prefix = cell_slice.read_next(32)
696 | if prefix != hex2ba('7362d09c'):
697 | raise ValueError('Unexpected content prefix')
698 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
699 | self.amount = cell_slice.read_var_uint(16)
700 | self.sender = MsgAddress.parse(cell_slice)
701 | if cell_slice.read_next(1).any(): #TODO: read forward_payload
702 | cell_slice.read_next_ref()
703 | else:
704 | cell_slice.read_next(cell_slice.bits_left())
705 |
706 |
707 | class JettonExcessesMessage:
708 | def __init__(self, cell_slice):
709 | prefix = cell_slice.read_next(32)
710 | if prefix != hex2ba('d53276db'):
711 | raise ValueError('Unexpected content prefix')
712 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
713 |
714 | class JettonBurnMessage:
715 | def __init__(self, cell_slice):
716 | prefix = cell_slice.read_next(32)
717 | if prefix != hex2ba('595f07bc'):
718 | raise ValueError('Unexpected content prefix')
719 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
720 | self.amount = cell_slice.read_var_uint(16)
721 | self.response_destination = MsgAddress.parse(cell_slice)
722 | if cell_slice.read_next(1).any():
723 | cell_slice.read_next_ref() #TODO: read custom_payload
724 |
725 | class JettonInternalTransferMessage:
726 | def __init__(self, cell_slice):
727 | prefix = cell_slice.read_next(32)
728 | if prefix != hex2ba('178d4519'):
729 | raise ValueError('Unexpected content prefix')
730 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
731 | self.amount = cell_slice.read_var_uint(16)
732 | self.from_ = MsgAddress.parse(cell_slice)
733 | self.response_address = MsgAddress.parse(cell_slice)
734 | self.forward_ton_amount = cell_slice.read_var_uint(16)
735 | if cell_slice.read_next(1).any(): #TODO: read forward_payload
736 | cell_slice.read_next_ref()
737 | else:
738 | cell_slice.read_next(cell_slice.bits_left())
739 |
740 | class JettonBurnNotificationMessage:
741 | def __init__(self, cell_slice):
742 | prefix = cell_slice.read_next(32)
743 | if prefix != hex2ba('7bdd97de'):
744 | raise ValueError('Unexpected content prefix')
745 | self.query_id = ba2int(cell_slice.read_next(64), signed=False)
746 | self.amount = cell_slice.read_var_uint(16)
747 | self.sender = MsgAddress.parse(cell_slice)
748 | self.response_destination = MsgAddress.parse(cell_slice)
749 |
750 | # Deprecated, use boc_to_object
751 | def parse_transaction(b64_tx_data: str) -> dict:
752 | transaction_boc = codecs.decode(codecs.encode(b64_tx_data, 'utf-8'), 'base64')
753 | cell = deserialize_boc(transaction_boc)
754 | cell_slice = Slice(cell)
755 | tx = Transaction(cell_slice)
756 | cell_slice.raise_if_not_empty()
757 |
758 | return json.loads(json.dumps(tx, default=lambda o: o.__dict__))
759 |
760 | # Deprecated, use boc_to_object
761 | def parse_tlb_object(b64_boc: str, tlb_type: type):
762 | boc = codecs.decode(codecs.encode(b64_boc, 'utf-8'), 'base64')
763 | cell = deserialize_boc(boc)
764 | cell_slice = Slice(cell)
765 | parse_cons = getattr(tlb_type, "parse", None)
766 | if callable(parse_cons):
767 | object = parse_cons(cell_slice)
768 | else:
769 | object = tlb_type(cell_slice)
770 | cell_slice.raise_if_not_empty()
771 | return json.loads(json.dumps(object, default=lambda o: o.__dict__))
772 |
773 | def boc_to_object(b64_boc: str, tlb_type: type):
774 | boc = codecs.decode(codecs.encode(b64_boc, 'utf-8'), 'base64')
775 | cell = deserialize_boc(boc)
776 | cell_slice = Slice(cell)
777 | parse_cons = getattr(tlb_type, "parse", None)
778 | if callable(parse_cons):
779 | object = parse_cons(cell_slice)
780 | else:
781 | object = tlb_type(cell_slice)
782 | cell_slice.raise_if_not_empty()
783 | return object
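A short sketch of driving the parsers above. The hand-built addr_none cell relies only on the Cell and Slice plumbing already used in this module, so it runs without any network access; the commented-out part shows the boc_to_object entry point, where tx_b64 stands in for a real base64-encoded transaction BOC and is not real data.

    from bitarray import bitarray
    from tvm_valuetypes.cell import Cell
    from pytonlib.utils.tlb import Slice, MsgAddress

    # Hand-build the smallest MsgAddress value (addr_none$00) and run it through
    # the dispatcher: MsgAddress.parse prefetches the 2-bit prefix and picks
    # MsgAddressExt for '00'.
    cell = Cell()
    cell.data.data = bitarray('00')
    addr = MsgAddress.parse(Slice(cell))
    print(addr.type)  # 'addr_none'

    # For real data the usual entry point is boc_to_object(b64_boc, SomeType),
    # e.g. boc_to_object(tx_b64, Transaction) where tx_b64 is a base64
    # transaction cell returned by a liteserver.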
--------------------------------------------------------------------------------
/pytonlib/utils/tokens.py:
--------------------------------------------------------------------------------
1 | from pytonlib.utils.tlb import parse_tlb_object, MsgAddress, MsgAddressInt, TokenData, DNSRecordSet
2 | from pytonlib.utils.address import detect_address
3 |
4 | def read_stack_num(entry: list):
5 | assert entry[0] == 'num'
6 | return int(entry[1], 16)
7 |
8 | def read_stack_cell(entry: list):
9 | assert entry[0] == 'cell'
10 | return entry[1]['bytes']
11 |
12 | def parse_jetton_master_data(stack: list):
13 | total_supply = read_stack_num(stack[0])
14 | mintable = bool(read_stack_num(stack[1]))
15 | admin_address = parse_tlb_object(read_stack_cell(stack[2]), MsgAddress)
16 | if admin_address['type'] == 'addr_std':
17 | admin_address_friendly = detect_address(f"{admin_address['workchain_id']}:{admin_address['address']}")['bounceable']['b64url']
18 | elif admin_address['type'] == 'addr_none':
19 | admin_address_friendly = None
20 | else:
21 |         raise NotImplementedError('Admin address not supported')
22 |
23 | jetton_content = parse_tlb_object(read_stack_cell(stack[3]), TokenData)
24 | jetton_wallet_code = read_stack_cell(stack[4])
25 | return {
26 | 'total_supply': total_supply,
27 | 'mintable': mintable,
28 | 'admin_address': admin_address_friendly,
29 | 'jetton_content': jetton_content,
30 | 'jetton_wallet_code': jetton_wallet_code
31 | }
32 |
33 | def parse_jetton_wallet_data(stack: list):
34 | balance = read_stack_num(stack[0])
35 | owner = parse_tlb_object(read_stack_cell(stack[1]), MsgAddress)
36 | if owner['type'] == 'addr_std':
37 | owner_friendly = detect_address(f"{owner['workchain_id']}:{owner['address']}")['bounceable']['b64url']
38 | else:
39 | raise NotImplementedError('Owner address not supported')
40 |
41 | jetton = parse_tlb_object(read_stack_cell(stack[2]), MsgAddress)
42 | if jetton['type'] == 'addr_std':
43 | jetton_friendly = detect_address(f"{jetton['workchain_id']}:{jetton['address']}")['bounceable']['b64url']
44 | else:
45 | raise NotImplementedError('Jetton address not supported')
46 | jetton_wallet_code = read_stack_cell(stack[3])
47 | return {
48 | 'balance': balance,
49 | 'owner': owner_friendly,
50 | 'jetton': jetton_friendly,
51 | 'jetton_wallet_code': jetton_wallet_code
52 | }
53 |
54 | def parse_single_address_stack(stack: list):
55 |     address = parse_tlb_object(read_stack_cell(stack[0]), MsgAddress)
56 |     if address['type'] == 'addr_std':
57 |         address_friendly = detect_address(f"{address['workchain_id']}:{address['address']}")['bounceable']['b64']
58 |     else:
59 |         raise NotImplementedError('Only addr_std addresses are supported')
60 |     return address_friendly
61 |
62 | def parse_jetton_wallet_address_data(stack: list):
63 | return parse_single_address_stack(stack)
64 |
65 | def parse_nft_item_address_data(stack: list):
66 | return parse_single_address_stack(stack)
67 |
68 | def parse_nft_collection_data(stack: list):
69 | next_item_index = read_stack_num(stack[0])
70 | collection_content = parse_tlb_object(read_stack_cell(stack[1]), TokenData)
71 | owner_address = parse_tlb_object(read_stack_cell(stack[2]), MsgAddress)
72 | if owner_address['type'] == 'addr_std':
73 | owner_address_friendly = detect_address(f"{owner_address['workchain_id']}:{owner_address['address']}")['bounceable']['b64url']
74 | elif owner_address['type'] == 'addr_none':
75 | owner_address_friendly = None
76 | else:
77 | raise NotImplementedError('Owner address not supported')
78 | return {
79 | 'next_item_index': next_item_index,
80 | 'collection_content': collection_content,
81 | 'owner_address': owner_address_friendly
82 | }
83 |
84 | def parse_nft_item_data(stack: list):
85 | init = bool(read_stack_num(stack[0]))
86 | index = read_stack_num(stack[1])
87 |
88 | collection_address = parse_tlb_object(read_stack_cell(stack[2]), MsgAddress)
89 | if collection_address['type'] == 'addr_std':
90 | collection_address_friendly = detect_address(f"{collection_address['workchain_id']}:{collection_address['address']}")['bounceable']['b64url']
91 | elif collection_address['type'] == 'addr_none':
92 | collection_address_friendly = None
93 | else:
94 | raise NotImplementedError('Collection address not supported')
95 |
96 | owner_address = parse_tlb_object(read_stack_cell(stack[3]), MsgAddress)
97 | if owner_address['type'] == 'addr_std':
98 | owner_address_friendly = detect_address(f"{owner_address['workchain_id']}:{owner_address['address']}")['bounceable']['b64url']
99 | elif owner_address['type'] == 'addr_none':
100 | owner_address_friendly = None
101 | else:
102 | raise NotImplementedError('Owner address not supported')
103 |
104 | if collection_address['type'] == 'addr_none':
105 | individual_content = parse_tlb_object(read_stack_cell(stack[4]), TokenData)
106 | else:
107 | individual_content = read_stack_cell(stack[4])
108 | return {
109 | 'init': init,
110 | 'index': index,
111 | 'owner_address': owner_address_friendly,
112 | 'collection_address': collection_address_friendly,
113 | 'individual_content': individual_content
114 | }
115 |
116 | def parse_nft_content(stack: list):
117 | return parse_tlb_object(read_stack_cell(stack[0]), TokenData)
118 |
119 | def parse_dns_content(stack: list):
120 | return parse_tlb_object(read_stack_cell(stack[0]), DNSRecordSet)
121 |
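A brief sketch of how these helpers are fed. Stack entries follow tonlib's run-get-method format (['num', <hex>] for integers, ['cell', {'bytes': <base64 BOC>}] for cells). The read_stack_num call below is runnable as written; the parse_jetton_master_data call is shown against a hypothetical stack that is assumed to come from a get_jetton_data call on a jetton master.

    from pytonlib.utils.tokens import read_stack_num, parse_jetton_master_data

    # integer stack entries carry hex strings
    print(read_stack_num(['num', '0x3e8']))  # 1000

    # with a real get_jetton_data stack (five entries in the order parsed above),
    # the master data would be extracted like this:
    # jetton = parse_jetton_master_data(stack)
    # print(jetton['total_supply'], jetton['admin_address'], jetton['mintable'])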
--------------------------------------------------------------------------------
/pytonlib/utils/wallet.py:
--------------------------------------------------------------------------------
1 | import codecs
2 |
3 | from hashlib import sha256 as hasher
4 | from tvm_valuetypes.cell import deserialize_boc
5 |
6 | def empty_extractor(result, data):
7 | pass
8 |
9 | def seqno_extractor(result, data):
10 | data_cell = deserialize_boc(codecs.decode(codecs.encode(data["data"], 'utf-8'), 'base64'))
11 | seqno = int.from_bytes(data_cell.data.data[0:32].tobytes(), 'big')
12 | result['seqno'] = seqno
13 |
14 |
15 | def v3_extractor(result, data):
16 | seqno_extractor(result, data)
17 | data_cell = deserialize_boc(codecs.decode(codecs.encode(data["data"], 'utf-8'), 'base64'))
18 | wallet_id = int.from_bytes(data_cell.data.data[32:64].tobytes(), 'big')
19 | result['wallet_id'] = wallet_id
20 |
21 | def v5_extractor(result, data):
22 | data_cell = deserialize_boc(codecs.decode(codecs.encode(data["data"], 'utf-8'), 'base64'))
23 | is_signature_allowed = bool.from_bytes(data_cell.data.data[0:1].tobytes(), 'big')
24 | seqno = int.from_bytes(data_cell.data.data[1:33].tobytes(), 'big')
25 | wallet_id = int.from_bytes(data_cell.data.data[33:65].tobytes(), 'big')
26 | result['is_signature_allowed'] = is_signature_allowed
27 | result['seqno'] = seqno
28 | result['wallet_id'] = wallet_id
29 |
30 | def sha256(x):
31 | if not isinstance(x, bytes):
32 | x = codecs.encode(x, 'utf-8')
33 | h = hasher()
34 | h.update(x)
35 | return h.digest()
36 |
37 |
38 | wallet_v1_r1 = "te6cckEBAQEARAAAhP8AIN2k8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVEH98Ik="
39 | wallet_v1_r2 = "te6cckEBAQEAUwAAov8AIN0gggFMl7qXMO1E0NcLH+Ck8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVNDieG8="
40 | wallet_v1_r3 = "te6cckEBAQEAXwAAuv8AIN0gggFMl7ohggEznLqxnHGw7UTQ0x/XC//jBOCk8mCBAgDXGCDXCx/tRNDTH9P/0VESuvKhIvkBVBBE+RDyovgAAdMfMSDXSpbTB9QC+wDe0aTIyx/L/8ntVLW4bkI="
41 | wallet_v2_r1 = "te6cckEBAQEAVwAAqv8AIN0gggFMl7qXMO1E0NcLH+Ck8mCDCNcYINMf0x8B+CO78mPtRNDTH9P/0VExuvKhA/kBVBBC+RDyovgAApMg10qW0wfUAvsA6NGkyMsfy//J7VShNwu2"
42 | wallet_v2_r2 = "te6cckEBAQEAYwAAwv8AIN0gggFMl7ohggEznLqxnHGw7UTQ0x/XC//jBOCk8mCDCNcYINMf0x8B+CO78mPtRNDTH9P/0VExuvKhA/kBVBBC+RDyovgAApMg10qW0wfUAvsA6NGkyMsfy//J7VQETNeh"
43 | wallet_v3_r1 = "te6cckEBAQEAYgAAwP8AIN0gggFMl7qXMO1E0NcLH+Ck8mCDCNcYINMf0x/TH/gjE7vyY+1E0NMf0x/T/9FRMrryoVFEuvKiBPkBVBBV+RDyo/gAkyDXSpbTB9QC+wDo0QGkyMsfyx/L/8ntVD++buA="
44 | wallet_v3_r2 = "te6cckEBAQEAcQAA3v8AIN0gggFMl7ohggEznLqxn3Gw7UTQ0x/THzHXC//jBOCk8mCDCNcYINMf0x/TH/gjE7vyY+1E0NMf0x/T/9FRMrryoVFEuvKiBPkBVBBV+RDyo/gAkyDXSpbTB9QC+wDo0QGkyMsfyx/L/8ntVBC9ba0="
45 | wallet_v4_r1 = "te6cckECFQEAAvUAART/APSkE/S88sgLAQIBIAIDAgFIBAUE+PKDCNcYINMf0x/THwL4I7vyY+1E0NMf0x/T//QE0VFDuvKhUVG68qIF+QFUEGT5EPKj+AAkpMjLH1JAyx9SMMv/UhD0AMntVPgPAdMHIcAAn2xRkyDXSpbTB9QC+wDoMOAhwAHjACHAAuMAAcADkTDjDQOkyMsfEssfy/8REhMUA+7QAdDTAwFxsJFb4CHXScEgkVvgAdMfIYIQcGx1Z70ighBibG5jvbAighBkc3RyvbCSXwPgAvpAMCD6RAHIygfL/8nQ7UTQgQFA1yH0BDBcgQEI9ApvoTGzkl8F4ATTP8glghBwbHVnupEx4w0kghBibG5juuMABAYHCAIBIAkKAFAB+gD0BDCCEHBsdWeDHrFwgBhQBcsFJ88WUAP6AvQAEstpyx9SEMs/AFL4J28ighBibG5jgx6xcIAYUAXLBSfPFiT6AhTLahPLH1Iwyz8B+gL0AACSghBkc3Ryuo41BIEBCPRZMO1E0IEBQNcgyAHPFvQAye1UghBkc3Rygx6xcIAYUATLBVjPFiL6AhLLassfyz+UEDRfBOLJgED7AAIBIAsMAFm9JCtvaiaECAoGuQ+gIYRw1AgIR6STfSmRDOaQPp/5g3gSgBt4EBSJhxWfMYQCAVgNDgARuMl+1E0NcLH4AD2ynftRNCBAUDXIfQEMALIygfL/8nQAYEBCPQKb6ExgAgEgDxAAGa3OdqJoQCBrkOuF/8AAGa8d9qJoQBBrkOuFj8AAbtIH+gDU1CL5AAXIygcVy//J0Hd0gBjIywXLAiLPFlAF+gIUy2sSzMzJcfsAyEAUgQEI9FHypwIAbIEBCNcYyFQgJYEBCPRR8qeCEG5vdGVwdIAYyMsFywJQBM8WghAF9eEA+gITy2oSyx/JcfsAAgBygQEI1xgwUgKBAQj0WfKn+CWCEGRzdHJwdIAYyMsFywJQBc8WghAF9eEA+gIUy2oTyx8Syz/Jc/sAAAr0AMntVEap808="
46 | wallet_v4_r2 = "te6cckECFAEAAtQAART/APSkE/S88sgLAQIBIAIDAgFIBAUE+PKDCNcYINMf0x/THwL4I7vyZO1E0NMf0x/T//QE0VFDuvKhUVG68qIF+QFUEGT5EPKj+AAkpMjLH1JAyx9SMMv/UhD0AMntVPgPAdMHIcAAn2xRkyDXSpbTB9QC+wDoMOAhwAHjACHAAuMAAcADkTDjDQOkyMsfEssfy/8QERITAubQAdDTAyFxsJJfBOAi10nBIJJfBOAC0x8hghBwbHVnvSKCEGRzdHK9sJJfBeAD+kAwIPpEAcjKB8v/ydDtRNCBAUDXIfQEMFyBAQj0Cm+hMbOSXwfgBdM/yCWCEHBsdWe6kjgw4w0DghBkc3RyupJfBuMNBgcCASAICQB4AfoA9AQw+CdvIjBQCqEhvvLgUIIQcGx1Z4MesXCAGFAEywUmzxZY+gIZ9ADLaRfLH1Jgyz8gyYBA+wAGAIpQBIEBCPRZMO1E0IEBQNcgyAHPFvQAye1UAXKwjiOCEGRzdHKDHrFwgBhQBcsFUAPPFiP6AhPLassfyz/JgED7AJJfA+ICASAKCwBZvSQrb2omhAgKBrkPoCGEcNQICEekk30pkQzmkD6f+YN4EoAbeBAUiYcVnzGEAgFYDA0AEbjJftRNDXCx+AA9sp37UTQgQFA1yH0BDACyMoHy//J0AGBAQj0Cm+hMYAIBIA4PABmtznaiaEAga5Drhf/AABmvHfaiaEAQa5DrhY/AAG7SB/oA1NQi+QAFyMoHFcv/ydB3dIAYyMsFywIizxZQBfoCFMtrEszMyXP7AMhAFIEBCPRR8qcCAHCBAQjXGPoA0z/IVCBHgQEI9FHyp4IQbm90ZXB0gBjIywXLAlAGzxZQBPoCFMtqEssfyz/Jc/sAAgBsgQEI1xj6ANM/MFIkgQEI9Fnyp4IQZHN0cnB0gBjIywXLAlAFzxZQA/oCE8tqyx8Syz/Jc/sAAAr0AMntVGliJeU="
47 | wallet_v5_r1 = "te6cckECFAEAAoEAART/APSkE/S88sgLAQIBIAIDAgFIBAUBAvIOAtzQINdJwSCRW49jINcLHyCCEGV4dG69IYIQc2ludL2wkl8D4IIQZXh0brqOtIAg1yEB0HTXIfpAMPpE+Cj6RDBYvZFb4O1E0IEBQdch9AWDB/QOb6ExkTDhgEDXIXB/2zzgMSDXSYECgLmRMOBw4hAPAgEgBgcCASAICQAZvl8PaiaECAoOuQ+gLAIBbgoLAgFIDA0AGa3OdqJoQCDrkOuF/8AAGa8d9qJoQBDrkOuFj8AAF7Ml+1E0HHXIdcLH4AARsmL7UTQ1woAgAR4g1wsfghBzaWduuvLgin8PAeaO8O2i7fshgwjXIgKDCNcjIIAg1yHTH9Mf0x/tRNDSANMfINMf0//XCgAK+QFAzPkQmiiUXwrbMeHywIffArNQB7Dy0IRRJbry4IVQNrry4Ib4I7vy0IgikvgA3gGkf8jKAMsfAc8Wye1UIJL4D95w2zzYEAP27aLt+wL0BCFukmwhjkwCIdc5MHCUIccAs44tAdcoIHYeQ2wg10nACPLgkyDXSsAC8uCTINcdBscSwgBSMLDy0InXTNc5MAGk6GwShAe78uCT10rAAPLgk+1V4tIAAcAAkVvg69csCBQgkXCWAdcsCBwS4lIQseMPINdKERITAJYB+kAB+kT4KPpEMFi68uCR7UTQgQFB1xj0BQSdf8jKAEAEgwf0U/Lgi44UA4MH9Fvy4Iwi1woAIW4Bs7Dy0JDiyFADzxYS9ADJ7VQAcjDXLAgkji0h8uCS0gDtRNDSAFETuvLQj1RQMJExnAGBAUDXIdcKAPLgjuLIygBYzxbJ7VST8sCN4gAQk1vbMeHXTNCon9ZI"
48 | nominator_pool_v1 = "te6cckECOgEACcIAART/APSkE/S88sgLAQIBYgIDAgLOBAUCASATFAIBIAYHAGVCHXSasCcFIDqgCOI6oDA/ABFKACpFMBuo4TI9dKwAGcWwHUMNAg10mrAhJw3t4C5GwhgEfz4J28QAtDTA/pAMCD6RANxsI8iMTMzINdJwj+PFIAg1yHTHzCCEE5zdEu6Ats8sOMAkl8D4uAD0x/bPFYSwACAhCB8JAE80wcBptAgwv/y4UkgwQrcpvkgwv/y4UkgwRDcpuAgwv8hwRCw8uFJgAzDbPFYQwAGTcFcR3hBMEDtKmNs8CFUz2zwfDBIELOMPVUDbPBBcEEsQOkl4EFYQRRA0QDMKCwwNA6JXEhEQ0wchwHkiwG6xIsBkI8B3sSGx8uBAILOeIdFWFsAA8r1WFS698r7eIsBk4wAiwHeSVxfjDREWjhMwBBEVBAMRFAMCERMCVxFXEV8D4w0ODxADNBER0z9WFlYW2zzjDwsREAsQvxC+EL0QvBCrISIjACjIgQEAECbPARPLD8sPAfoCAfoCyQEE2zwSAtiBAQBWFlKi9A5voSCzlRESpBES3lYSLrvy4EGCEDuaygABERsBoSDCAPLgQhEajoLbPJMwcCDiVhPAAJQBVhqglFYaoAHiUwGgLL7y4EMq12V1VhS2A6oAtgm58uBEAds8gQEAElYXQLv0QwgvJQOkVhHAAI8hVhUEEDkQKAERGAEREds8AVYYoYISVAvkAL6OhFYT2zzejqNXF4EBAFYVUpL0Dm+hMfLgRciBAQASVhZAmfRDVhPbPE8HAuJPH1B3BikwMAL+VhTA/1YULbqws46dERTAAPLgeYEBAFYTUnL0Dm+h8uB62zwwwgDy4HuSVxTiERSAIPACAdERE8B5VhNWEYMH9A5voSCzjhmCEDuaygBWE9dllYAPeqmE5AERGAG+8uB7klcX4lYWlfQE0x8wlDBt+CPiVhQigwf0Dm+hMfLQfC8RAWz4IwPIygATyx8CERQBgwf0Q8j0AAEREgHLHwIBERIBD4MH9EMREo6DDds8kT3iDBEQDBC/ELwwAEoMyMsHG8sPUAn6AlAH+gIVzBP0APQAyx/L/8sHyx/LH/QAye1UAgEgFRYCASAZGgEJu/Gds8gfAgFiFxgBda877Z4riC+HtqzBg/oHN9D5cEL6Ahg/xw/AgIApEHo+N9KQT4FpAGmPmBEst4GoAjeBAciZcQDZ8y3AHwEJrIttnkAzAgFuGxwBXbvQXbPFcQXw9tf44fIoMH9HxvpSCOEAL0BDHTHzBSEG8CUANvAgKRMuIBs+YwMYHwIBIB0eAReuPu2eCDevh5i3WcAfAnaqOds8XwZQml8JbX+OqYEBAFIw9HxvpSCOmALbPIEBAFRjgPQOb6ExI1UgbwRQA28CApEy4gGz5hNfAx8vAkSrWds8XwZQml8JgQEAI1n0Dm+h8uBW2zyBAQBEMPQOb6ExHy8BVO1E0NMH0w/6APoA1AHQ2zwF9AT0BNMf0//TB9Mf0x/0BDAQvBCrEJoQiSAAHIEBANcB0w/TD/oA+gAwAB4BwP9x+DPQgQEA1wNYurAB6FtXElcSVxJXEvgAghD5b3MkUuC6jrk7ERFwCaFTgMEBmlCIoCDBAJI3J96OFjBTBaiBJxCpBFMBvJIwIN5RiKAIoQfiUHfbPCcKEREKCAqSVxLiKsABjhmCEO5vRUxS0LqScDveghDzdEhMHbqScjrekTziJAS4VhPCAFYUwQiwghBHZXQkVhUBurGCEE5zdEtWFQG6sfLgRlYTwAEwVhPAAo8k0wcQORAoVhgCARESAds8VhmhghJUC+QAvo6EVhTbPN4REEhw3lYTwAPjAFYTwAYmMCcoA7pwf46YgQEAUjD0fG+lII6HAts8MBOgApEy4gGz5jBtf483gQEAUkD0fG+lII8mAts8JcIAn1R3FamEEqAgwQCSMHDeAd6gcNs8gQEAVBIBUFX0QwKRMuIBs+YUXwQvLyUADshY+gIB+gIBcnB/IY6wgQEAVCJw9HxvpTIhjpwyVEETSHBSZts8Uhe6BaRTBL6SfzbeEDhHY0VQ3gGzIrES5l8EASkCaIEBANcBgQEAVGKg9A5voTHy4EdJMBhWGAEREts8AVYZoYISVAvkAL6OhFYU2zzeERBIcBIpMATWjyAkwQPy4HHbPGwh+QBTYL2ZNDUDpEQT+CMDkTDiVhTbPN5WE8AHjrf4I3+OLFYUgwf0fG+lII4cAvQEMdMfMFIwoYIIJ40AvJogERaDB/RbMBEV3pEy4gGz5ltWFNs83oIQR2V0JFYUAbo0MDAqA7KBAQBUZVD0Dm+h8rzbPKCCElQL5ABSMKFSELyTMGwU4IEBAFRGZvRbMIEBAFRGVfRbMAGlUSShghA7msoAUlC+jxFwUAbbPG2AEBAjECZw2zwQI5I0NOJDMC85OATgjzAkwgHy4G8kwgL4IyWhJKY8vLHy4HCCEEdldCTIyx9SIMs/yds8cIAYgEAQNBAj2zzeVhPABI4jVhbA/1YWL7qw8uBJghA7msoAAREZAaEgwgDy4EpR7qAOERjeVhPABZJXFOMNghBOc3RLVhMBujc4KywEqFYRwADy4EpWFsD/VhYvurDy4Ev6ACHCAPLgTinbPIISVAvkAFYaAaEBoVIgu/LgTFHxoSDBAJIwcN5/L9s8bYAQJFlw2zxWGFihVhmhghJUC+QAvi05OC4BTo4XMAURFgUEERUEAxEUAwIREwJXEVcRXwTjDQ8REA8Q7xDeEM0QvDEBPnB/jpiBAQBSMPR8b6UgjocC2zygE6ACkTLiAbPmMDEvARyOhBEU2zySVxTiDRETDTAACvoA+gAwARRwbYAQgEByoNs8OATWPl8FD8D/Uea6HrDy4E4IwADy4E8l8uBQghA7msoAH77y4FYJ+gAg2zyCEDuaygBSMKGCGHRqUogAUkC+8uBRghJUC+QAAREQAaFSMLvy4FJTX77y4FMu2zxSYL7y4FQtbvLgVXHbPDH5AHAyMzQ1ABzT/zHTH9MfMdP/MdQx0QCEgCj4MyBumFuCGBeEEbIA4NDTBzH6ANMf0w/TD9MPMdMPMdMP0w8wUFOoqwdQM6irB1AjqKsHWairB1IgqbQfoLYIACaAIvgzINDTBwHAEvKJ0x/THzBYA1zbPNs8ERDIyx8cyz9QBs8WyYAYcQQREAQQONs8DhEQDh8QPhAtELwQe1CZB0MTNjc4ACKAD/gz0NMfMdMfMdMfMdcLHwEacfgz0IEBANcDfwHbPDkASCJusyCRcZFw4gPIywVQBs8WUAT6AstqA5NYzAGRMOIByQH7AAAcdMjLAhLKB4EBAM8BydDKWCmU"
49 |
50 | wallets = {sha256(wallet_v1_r1): {'type': 'wallet v1 r1', 'data_extractor': seqno_extractor},
51 | sha256(wallet_v1_r2): {'type': 'wallet v1 r2', 'data_extractor': seqno_extractor},
52 | sha256(wallet_v1_r3): {'type': 'wallet v1 r3', 'data_extractor': seqno_extractor},
53 | sha256(wallet_v2_r1): {'type': 'wallet v2 r1', 'data_extractor': seqno_extractor},
54 | sha256(wallet_v2_r2): {'type': 'wallet v2 r2', 'data_extractor': seqno_extractor},
55 | sha256(wallet_v3_r1): {'type': 'wallet v3 r1', 'data_extractor': v3_extractor},
56 | sha256(wallet_v3_r2): {'type': 'wallet v3 r2', 'data_extractor': v3_extractor},
57 | sha256(wallet_v4_r1): {'type': 'wallet v4 r1', 'data_extractor': v3_extractor},
58 | sha256(wallet_v4_r2): {'type': 'wallet v4 r2', 'data_extractor': v3_extractor},
59 | sha256(wallet_v5_r1): {'type': 'wallet v5 r1', 'data_extractor': v5_extractor},
60 | sha256(nominator_pool_v1): {'type': 'nominator pool v1', 'data_extractor': empty_extractor}
61 | }
62 |
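The wallets table above is keyed by sha256 digests of the base64-encoded wallet code exactly as stored in this file, so a known code string can be looked up directly. The commented part sketches the intended lookup against a raw account state (a dict with base64 'code' and 'data' fields); that account_state dict is an assumption about the caller, not something defined here.

    from pytonlib.utils.wallet import wallets, sha256, wallet_v3_r2

    info = wallets[sha256(wallet_v3_r2)]
    print(info['type'])  # 'wallet v3 r2'

    # Given an account state dict with base64 'code' and 'data' fields, the
    # matching extractor fills fields such as seqno/wallet_id into a result dict:
    # result = {}
    # known = wallets.get(sha256(account_state['code']))
    # if known is not None:
    #     result['wallet_type'] = known['type']
    #     known['data_extractor'](result, account_state)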
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | crc16>=0.1.1
2 | tvm_valuetypes==0.0.12
3 | requests>=2.27.1
4 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from setuptools import setup, find_packages
4 | from os.path import dirname, join
5 |
6 | with open(join(dirname(__file__), "README.md"), "r") as f:
7 | long_description = f.read()
8 |
9 |
10 | version = os.environ.get('PYTONLIB_VERSION', 'dev')
11 |
12 |
13 | setup(
14 | author='K-Dimentional Tree',
15 | author_email='kdimentionaltree@gmail.com',
16 | name='pytonlib',
17 | version=version,
18 | packages=find_packages('.', exclude=['tests']),
19 | install_requires=[
20 | 'crc16>=0.1.1',
21 | 'tvm_valuetypes==0.0.12',
22 | 'requests>=2.27.1'
23 | ],
24 | package_data={
25 | 'pytonlib': ['distlib/linux/*',
26 | 'distlib/darwin/*',
27 | 'distlib/freebsd/*',
28 | 'distlib/windows/*',],
29 | 'pytonlib.utils': [],
30 | '': ['requirements.txt']
31 | },
32 | zip_safe=True,
33 | python_requires='>=3.7',
34 | classifiers=[
35 | "Development Status :: 3 - Alpha",
36 | "Intended Audience :: Developers",
37 | "Programming Language :: Python :: 3.9",
38 | "Programming Language :: Python :: 3.10",
39 | "Programming Language :: Python :: 3.11",
40 | "Programming Language :: Python :: 3.12",
41 | "License :: Other/Proprietary License",
42 | "Topic :: Software Development :: Libraries"
43 | ],
44 | url="https://github.com/toncenter/pytonlib",
45 | description="Python API for TON (Telegram Open Network)",
46 | long_description_content_type="text/markdown",
47 | long_description=long_description,
48 | )
49 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toncenter/pytonlib/3fc302da6612b6b0895c9e69442dbe259e98dc80/tests/__init__.py
--------------------------------------------------------------------------------
/tests/pytonlib/test_tonlibclient.py:
--------------------------------------------------------------------------------
1 | import requests
2 | import pytest
3 | import pytest_asyncio
4 | import asyncio
5 |
6 | from time import time
7 | from pathlib import Path
8 | from pytonlib.client import TonlibClient
9 |
10 |
11 | @pytest.fixture
12 | def tonlib_config():
13 | url = 'https://ton-blockchain.github.io/global.config.json'
14 | return requests.get(url).json()
15 |
16 |
17 | @pytest.fixture
18 | def ton_keystore():
19 |     return "/tmp/ton_keystore"
20 |
21 |
22 | @pytest.fixture
23 | def ls_index():
24 | return 3
25 |
26 |
27 | @pytest_asyncio.fixture
28 | async def tonlib_client(tonlib_config, ton_keystore, ls_index):
29 | loop = asyncio.get_running_loop()
30 | Path(ton_keystore).mkdir(parents=True, exist_ok=True)
31 | client = TonlibClient(ls_index=ls_index,
32 | config=tonlib_config,
33 | keystore=ton_keystore,
34 | loop=loop,
35 | verbosity_level=0,
36 | tonlib_timeout=30)
37 | await client.init()
38 | return client
39 |
40 |
41 | # tests
42 | @pytest.mark.asyncio
43 | async def test_get_masterchain_info(tonlib_client: TonlibClient):
44 | exception = None
45 | try:
46 | res = await tonlib_client.get_masterchain_info()
47 | assert res['@type'] == 'blocks.masterchainInfo'
48 | except Exception as ee:
49 | exception = ee
50 | finally:
51 | await tonlib_client.close()
52 | assert exception is None
53 |
54 |
55 | @pytest.mark.asyncio
56 | async def test_sync_tonlib_method(tonlib_client: TonlibClient):
57 | exception = None
58 | try:
59 | res = await tonlib_client.sync_tonlib()
60 | assert res['@type'] == 'ton.blockIdExt'
61 | except Exception as ee:
62 | exception = ee
63 | finally:
64 | await tonlib_client.close()
65 | assert exception is None
66 |
67 |
68 | @pytest.mark.asyncio
69 | async def test_get_block_header(tonlib_client: TonlibClient):
70 | exception = None
71 | try:
72 | masterchain_block = await tonlib_client.get_masterchain_info()
73 | res = await tonlib_client.get_block_header(**masterchain_block['last'])
74 | assert res['@type'] == 'blocks.header'
75 | except Exception as ee:
76 | exception = ee
77 | finally:
78 | await tonlib_client.close()
79 | assert exception is None
80 |
81 |
82 | @pytest.mark.asyncio
83 | async def test_get_shards(tonlib_client: TonlibClient):
84 | exception = None
85 | try:
86 | masterchain_info = await tonlib_client.get_masterchain_info()
87 | shards = await tonlib_client.get_shards(master_seqno=masterchain_info['last']['seqno'])
88 | assert shards['@type'] == 'blocks.shards'
89 | except Exception as ee:
90 | exception = ee
91 | finally:
92 | await tonlib_client.close()
93 | assert exception is None
94 |
95 |
96 | @pytest.mark.asyncio
97 | async def test_get_transactions(tonlib_client: TonlibClient):
98 | exception = None
99 | try:
100 | masterchain_info = await tonlib_client.get_masterchain_info()
101 |
102 | txs = await tonlib_client.get_block_transactions(**masterchain_info['last'], count=10)
103 | assert txs['@type'] == 'blocks.transactions'
104 |
105 | tx = await tonlib_client.get_transactions(**txs['transactions'][0], limit=1)
106 | assert tx[0]['@type'] == 'raw.transaction'
107 | except Exception as ee:
108 | exception = ee
109 | finally:
110 | await tonlib_client.close()
111 | assert exception is None
112 |
113 |
114 | @pytest.mark.asyncio
115 | async def test_correct_close(tonlib_client: TonlibClient):
116 | exception = None
117 | try:
118 | masterchain_info = await tonlib_client.get_masterchain_info()
119 | raise RuntimeError('Test error')
120 | except Exception as ee:
121 | exception = ee
122 | finally:
123 | await tonlib_client.close()
124 | assert exception.args == ('Test error',)
125 |
126 |
127 | def test_sync_code(tonlib_config, ton_keystore, ls_index):
128 | async def main():
129 | loop = asyncio.get_running_loop()
130 | exception = None
131 | try:
132 | Path(ton_keystore).mkdir(parents=True, exist_ok=True)
133 | client = TonlibClient(ls_index=ls_index,
134 | config=tonlib_config,
135 | keystore=ton_keystore,
136 | loop=loop,
137 | verbosity_level=0)
138 | await client.init()
139 | except Exception as ee:
140 | exception = ee
141 | finally:
142 | await client.close()
143 | assert exception is None
144 | asyncio.run(main())
145 |
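For use outside pytest, the fixtures above translate into a small standalone script. It reuses the same constructor arguments and public config URL as the tests, and it needs network access plus the bundled tonlibjson library to run.

    import asyncio
    import requests
    from pathlib import Path
    from pytonlib.client import TonlibClient

    async def main():
        # same public mainnet config and keystore path as the fixtures above
        config = requests.get('https://ton-blockchain.github.io/global.config.json').json()
        keystore = '/tmp/ton_keystore'
        Path(keystore).mkdir(parents=True, exist_ok=True)
        client = TonlibClient(ls_index=0,
                              config=config,
                              keystore=keystore,
                              loop=asyncio.get_running_loop(),
                              tonlib_timeout=30)
        await client.init()
        try:
            print(await client.get_masterchain_info())
        finally:
            await client.close()

    asyncio.run(main())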
--------------------------------------------------------------------------------