├── .dockerignore
├── .github
│   └── workflows
│       ├── build-docker.yml
│       └── build-wheel.yml
├── .gitignore
├── LICENSE
├── README.md
├── configure.py
├── docker-compose.cache.yaml
├── docker-compose.filebeat.yaml
├── docker-compose.yaml
├── scripts
│   ├── deploy.sh
│   ├── setup.sh
│   └── swarm_config.sh
└── ton-http-api
    ├── .docker
    │   ├── Dockerfile
    │   └── entrypoint.sh
    ├── README.md
    ├── pyTON
    │   ├── __init__.py
    │   ├── __main__.py
    │   ├── cache.py
    │   ├── main.py
    │   ├── manager.py
    │   ├── models.py
    │   ├── settings.py
    │   └── worker.py
    ├── requirements.txt
    └── setup.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | #PyCharm
107 | .idea/$CACHE_FILE$
108 | .idea/.gitignore
109 | .idea/encodings.xml
110 | .idea/inspectionProfiles/
111 | .idea/misc.xml
112 | .idea/modules.xml
113 | .idea/ton_client.iml
114 | .idea/vcs.xml
115 |
116 | ## tonlib chaotically created temp blkstate files
117 | *.blkstate
118 |
119 | ## logs
120 | /logs
121 |
122 | .vscode
123 | /private
124 | .DS_Store
125 |
126 | /sandbox
127 |
128 | ## custom
129 | /.vscode
130 | /docs
131 | /logs
132 | /private
133 | /sandbox
134 | /venv
135 | /notebooks
136 |
--------------------------------------------------------------------------------
/.github/workflows/build-docker.yml:
--------------------------------------------------------------------------------
1 | name: Build Docker Image
2 |
3 | on:
4 | push:
5 |
6 | jobs:
7 | set_version:
8 | runs-on: ubuntu-20.04
9 | steps:
10 | - name: Checkout
11 | uses: actions/checkout@v2
12 | with:
13 | fetch-depth: 0
14 | - name: Get branch name
15 | id: branch_name
16 | run: |
17 | GIT_BRANCH=`git symbolic-ref --short HEAD | sed "s/[^[:alnum:].]//g"`
18 | echo "::set-output name=GIT_BRANCH::${GIT_BRANCH}"
19 | - name: Get semantic version
20 | uses: paulhatch/semantic-version@v4.0.2
21 | id: versioning
22 | with:
23 | tag_prefix: "v"
24 | major_pattern: "[MAJOR]"
25 | minor_pattern: "[MINOR]"
26 | format: "${major}.${minor}.${patch}.dev${increment}"
27 | bump_each_commit: false
28 | outputs:
29 | version: ${{ github.ref == 'refs/heads/master' && steps.versioning.outputs.version_tag || steps.versioning.outputs.version}}
30 | buildx:
31 | runs-on: ubuntu-latest
32 | needs: [ set_version ]
33 | steps:
34 | - name: Checkout
35 | uses: actions/checkout@v2
36 | with:
37 | fetch-depth: 0
38 | - name: Set up QEMU
39 | uses: docker/setup-qemu-action@v2
40 | - name: Set up Docker Buildx
41 | id: buildx
42 | uses: docker/setup-buildx-action@v2
43 | - name: Inspect builder
44 | run: |
45 | echo "Name: ${{ steps.buildx.outputs.name }}"
46 | echo "Endpoint: ${{ steps.buildx.outputs.endpoint }}"
47 | echo "Status: ${{ steps.buildx.outputs.status }}"
48 | echo "Flags: ${{ steps.buildx.outputs.flags }}"
49 | echo "Platforms: ${{ steps.buildx.outputs.platforms }}"
50 | - name: Prepare
51 | id: prepare
52 | run: |
53 | DOCKER_IMAGE=toncenter/ton-http-api
54 |
55 | if ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
56 | then
57 | DOCKER_PLATFORMS=linux/amd64,linux/arm64
58 | else
59 | DOCKER_PLATFORMS=linux/amd64
60 | fi
61 | VERSION=${{ needs.set_version.outputs.version }}
62 |
63 | TAGS="-t ${DOCKER_IMAGE}:${VERSION} -t ${DOCKER_IMAGE}:latest"
64 |
65 | echo ::set-output name=docker_image::${DOCKER_IMAGE}
66 | echo ::set-output name=version::${VERSION}
67 | echo ::set-output name=buildx_args::--platform ${DOCKER_PLATFORMS} \
68 | ${TAGS} --file ton-http-api/.docker/Dockerfile ton-http-api \
69 | --cache-from=${DOCKER_IMAGE}:latest
70 |
71 | echo "Pulling latest image for cache"
72 | IFS=', ' read -r -a PLATFORMS <<< "${DOCKER_PLATFORMS}"
73 | for platform in "${PLATFORMS[@]}"
74 | do
75 | echo "Pulling image for platform ${platform}"
76 |             docker pull --platform ${platform} ${DOCKER_IMAGE}:latest || true
77 | done
78 | - name: Docker Buildx (build)
79 | run: |
80 | docker buildx build --output "type=image,push=false" ${{ steps.prepare.outputs.buildx_args }}
81 | - name: Login to DockerHub
82 | if: success() && github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' && github.ref == 'refs/heads/master'
83 | uses: docker/login-action@v1
84 | with:
85 | username: ${{ secrets.DOCKER_USERNAME }}
86 | password: ${{ secrets.DOCKER_PASSWORD }}
87 | - name: Docker Buildx (push)
88 | if: success() && github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' && github.ref == 'refs/heads/master'
89 | run: |
90 | docker buildx build --output "type=image,push=true" ${{ steps.prepare.outputs.buildx_args }}
91 | - name: Inspect image
92 | if: always() && github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' && github.ref == 'refs/heads/master'
93 | run: |
94 | docker buildx imagetools inspect ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.version }}
95 |
--------------------------------------------------------------------------------
/.github/workflows/build-wheel.yml:
--------------------------------------------------------------------------------
1 | name: Build package
2 |
3 | on:
4 | push:
5 |
6 | jobs:
7 | set_version:
8 | runs-on: ubuntu-20.04
9 | steps:
10 | - name: Checkout
11 | uses: actions/checkout@v2
12 | with:
13 | fetch-depth: 0
14 | - name: Get branch name
15 | id: branch_name
16 | run: |
17 | GIT_BRANCH=`git symbolic-ref --short HEAD | sed "s/[^[:alnum:].]//g"`
18 | echo "::set-output name=GIT_BRANCH::${GIT_BRANCH}"
19 | - name: Get semantic version
20 | uses: paulhatch/semantic-version@v4.0.2
21 | id: versioning
22 | with:
23 | tag_prefix: "v"
24 | major_pattern: "[MAJOR]"
25 | minor_pattern: "[MINOR]"
26 | format: "${major}.${minor}.${patch}.dev${increment}"
27 | bump_each_commit: false
28 | outputs:
29 | version: ${{ github.ref == 'refs/heads/master' && steps.versioning.outputs.version_tag || steps.versioning.outputs.version}}
30 | build_wheel:
31 | runs-on: ubuntu-20.04
32 | needs: [ set_version ]
33 | steps:
34 | - name: Checkout source
35 | uses: actions/checkout@v2
36 | - name: Set up Python 3.8
37 | uses: actions/setup-python@v1
38 | with:
39 | python-version: 3.8
40 | - name: Install build dependencies
41 | shell: bash -l {0}
42 | working-directory: ./ton-http-api/
43 | run: |
44 | python -m pip install build wheel
45 | python -m pip install -r requirements.txt
46 | - name: Build distributions
47 | shell: bash -l {0}
48 | working-directory: ./ton-http-api/
49 | run: TON_HTTP_API_VERSION=${{ needs.set_version.outputs.version }} python setup.py sdist bdist_wheel
50 | - name: Check distributions
51 | shell: bash -l {0}
52 | working-directory: ./ton-http-api/
53 | run: |
54 | echo $(pwd)
55 | ls $(pwd)/
56 | - name: Upload artifacts
57 | uses: actions/upload-artifact@v3
58 | with:
59 | name: ton-http-api-package
60 | path: |
61 | ton-http-api/dist/*.whl
62 | ton-http-api/dist/*.tar.gz
63 | upload-pypi:
64 | runs-on: ubuntu-20.04
65 | needs: [ set_version, build_wheel ]
66 | steps:
67 | - name: Download artifacts
68 | uses: actions/download-artifact@v3
69 | with:
70 | name: ton-http-api-package
71 | path: dist
72 | - name: Check distributions
73 | shell: bash -l {0}
74 | run: ls -la dist/
75 | - name: Create release tag
76 | if: ${{ github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' && github.ref == 'refs/heads/master' }}
77 | uses: actions/github-script@v3
78 | with:
79 | github-token: ${{ github.token }}
80 | script: |
81 | github.git.createRef({
82 | owner: context.repo.owner,
83 | repo: context.repo.repo,
84 | ref: 'refs/tags/${{ needs.set_version.outputs.version }}',
85 | sha: context.sha
86 | })
87 | - name: Publish package to test PyPI
88 | if: ${{ github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' }}
89 | uses: pypa/gh-action-pypi-publish@release/v1
90 | with:
91 | user: __token__
92 | password: ${{ secrets.TEST_PYPI_PASSWORD }}
93 | repository_url: https://test.pypi.org/legacy/
94 | - name: Publish package to PyPI
95 | if: ${{ github.repository == 'toncenter/ton-http-api' && github.event_name == 'push' && github.ref == 'refs/heads/master' }}
96 | uses: pypa/gh-action-pypi-publish@release/v1
97 | with:
98 | user: __token__
99 | password: ${{ secrets.PYPI_PASSWORD }}
100 | repository_url: https://upload.pypi.org/legacy/
101 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 |
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 |
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 |
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 |
50 | # Translations
51 | *.mo
52 | *.pot
53 |
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 |
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 |
97 | # Rope project settings
98 | .ropeproject
99 |
100 | # mkdocs documentation
101 | /site
102 |
103 | # mypy
104 | .mypy_cache/
105 |
106 | #PyCharm
107 | .idea/
108 | .idea/$CACHE_FILE$
109 | .idea/.gitignore
110 | .idea/encodings.xml
111 | .idea/inspectionProfiles/
112 | .idea/misc.xml
113 | .idea/modules.xml
114 | .idea/ton_client.iml
115 | .idea/vcs.xml
116 |
117 | ## tonlib chaotically created temp blkstate files
118 | *.blkstate
119 |
120 | ## logs
121 | /logs
122 |
123 | .vscode/
124 | /private
125 | .DS_Store
126 |
127 | /sandbox
128 | /ton_keystore
129 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 | Copyright (C)
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see .
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 | Copyright (C)
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | .
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | .
675 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | # HTTP API for [The Open Network](https://ton.org)
4 |
5 | [](https://pypi.org/project/ton-http-api/)
6 | [](https://pypi.org/project/ton-http-api/)
7 | [](https://hub.docker.com/repository/docker/toncenter/ton-http-api)
8 | [](https://hub.docker.com/repository/docker/toncenter/ton-http-api)
9 | 
10 |
 11 | Since TON nodes use their own ADNL binary transport protocol, an intermediate service is needed for an HTTP connection.
12 |
 13 | TON HTTP API is such an intermediate service: receiving requests via HTTP, it accesses the lite servers of the TON network using `tonlibjson`.
14 |
15 | You can use the ready-made [toncenter.com](https://toncenter.com) service or start your own instance.
16 |
17 | ## Building and running
18 |
19 | Recommended hardware:
20 | - CPU architecture: x86_64 or arm64.
21 | - HTTP API only: 1 vCPU, 2 GB RAM.
22 | - HTTP API with cache enabled: 2 vCPUs, 4 GB RAM.
23 |
24 | There are two main ways to run TON HTTP API:
25 | - __Local__ *(experimental)*: works on following platforms: Ubuntu Linux (x86_64, arm64), MacOSX (Intel x86_64, Apple M1 arm64) and Windows (x86_64).
26 | - __Docker Compose__: flexible configuration, recommended for production environments, works on any x86_64 and arm64 OS with Docker available.
27 |
28 | ### Local run *(experimental)*
 29 | **Note:** This is a simple but not stable way to run the service. We do not recommend using it in production.
30 | - (Windows only, first time) Install OpenSSL v1.1.1 for win64 from [here](https://slproweb.com/products/Win32OpenSSL.html).
31 | - Install package: `pip install ton-http-api`.
32 | - Run service with `ton-http-api`. This command will run service with [mainnet](https://ton.org/global-config.json) configuration.
33 | - Run `ton-http-api --help` to show parameters list.
34 |
35 | ### Docker Compose
36 | - (First time) Install required tools: `docker`, `docker-compose`, `curl`.
37 | - For Ubuntu: run `scripts/setup.sh` from the root of the repo.
38 | - For MacOS and Windows: install [Docker Desktop](https://www.docker.com/products/docker-desktop/).
39 | - **Note:** we recommend to use Docker Compose V2.
40 | - Download TON configuration files to private folder:
41 | ```bash
42 | mkdir private
43 | curl -sL https://ton-blockchain.github.io/global.config.json > private/mainnet.json
44 | curl -sL https://ton-blockchain.github.io/testnet-global.config.json > private/testnet.json
45 | ```
46 | - Run `./configure.py` to create `.env` file with necessary environment variables (see [Configuration](#Configuration) for details).
47 | - Build services: `docker-compose build`.
48 | - Or pull latest images: `docker-compose pull`.
49 | - Run services: `docker-compose up -d`.
50 | - Stop services: `docker-compose down`.
51 |
52 | ### Configuration
53 |
54 | You should specify environment parameters and run `./configure.py` to create `.env` file.
55 | ```bash
56 | export TON_API_LITESERVER_CONFIG=private/testnet.json
57 | ./configure.py
58 | ```
59 |
60 | The service supports the following environment variables:
61 | #### Webserver settings
62 | - `TON_API_HTTP_PORT` *(default: 80)*
63 |
64 | Port for HTTP connections of API service.
65 |
66 | - `TON_API_ROOT_PATH` *(default: /)*
67 |
68 | If you use a proxy server such as Nginx or Traefik you might change the default API path prefix (e.g. `/api/v2`). If so you have to pass the path prefix to the API service in this variable.
69 |
70 | - `TON_API_WEBSERVERS_WORKERS` *(default: 1)*
71 |
 72 | Number of webserver processes. If your server is under high load, try increasing this value to increase RPS. We recommend setting it to number of CPU cores / 2.
73 |
74 | - `TON_API_GET_METHODS_ENABLED` *(default: 1)*
75 |
76 | Enables `runGetMethod` endpoint.
77 |
78 | - `TON_API_JSON_RPC_ENABLED` *(default: 1)*
79 |
80 | Enables `jsonRPC` endpoint.
81 |
82 | - `TON_API_LOGS_JSONIFY` *(default: 0)*
83 |
84 | Enables printing all logs in json format.
85 |
86 | - `TON_API_LOGS_LEVEL` *(default: ERROR)*
87 |
88 | Defines log verbosity level. Values allowed: `DEBUG`,`INFO`,`WARNING`,`ERROR`,`CRITICAL`.
89 |
90 | - `TON_API_GUNICORN_FLAGS` *(default: empty)*
91 |
92 | Additional Gunicorn [command line arguments](https://docs.gunicorn.org/en/stable/settings.html).
93 |
94 | #### Tonlib settings
95 | - `TON_API_TONLIB_LITESERVER_CONFIG` *(default docker: private/mainnet.json local: https://ton.org/global-config.json)*
96 |
 97 | Path to config file with lite servers information. In case of native run you can pass a URL to download the config. Docker supports only a path to a file.
98 |
99 | - `TON_API_TONLIB_KEYSTORE` *(default docker: /tmp/ton_keystore local: ./ton_keystore/)*
100 |
101 | Path to tonlib keystore.
102 |
103 | - `TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER` *(default: 50)*
104 |
105 | Number of maximum parallel requests count per worker.
106 |
107 | - `TON_API_TONLIB_CDLL_PATH` *(default: empty)*
108 |
109 | Path to tonlibjson binary. It could be useful if you want to run service on unsupported platform and have built the `libtonlibjson` library manually.
110 |
111 | - `TON_API_TONLIB_REQUEST_TIMEOUT` *(default: 10)*
112 |
113 | Timeout for liteserver requests.
114 |
115 | #### Cache configuration
116 | - `TON_API_CACHE_ENABLED` *(default: 0)*
117 |
118 | Enables caching lite server responses with Redis.
119 |
120 | - `TON_API_CACHE_REDIS_ENDPOINT` *(default: localhost, docker: cache_redis)*
121 |
122 | Redis cache service host.
123 |
124 | - `TON_API_CACHE_REDIS_PORT` *(default: 6379)*
125 |
126 | Redis cache service port.
127 |
128 | - `TON_API_CACHE_REDIS_TIMEOUT` *(default: 1)*
129 |
130 | Redis cache timeout.
131 |
132 |
133 | ## FAQ
134 | #### How to point the service to my own lite server?
135 |
136 | To point the HTTP API to your own lite server you should set `TON_API_TONLIB_LITESERVER_CONFIG` to config file with your only lite server.
137 |
138 | - If you use MyTonCtrl on your node you can generate config file with these commands:
139 | ```
140 | $ mytonctrl
141 | MyTonCtrl> installer
142 | MyTonInstaller> clcf
143 | ```
144 | Config file will be saved at `/usr/bin/ton/local.config.json`.
145 | - If you don't use MyTonCtrl: copy `private/mainnet.json` and overwrite section `liteservers` with your liteservers ip, port and public key. To get public key from `liteserver.pub` file use the following script:
146 | ```
147 | python -c 'import codecs; f=open("liteserver.pub", "rb+"); pub=f.read()[4:]; print(str(codecs.encode(pub,"base64")).replace("\n",""))'
148 | ```
149 | - Once config file is created assign variable `TON_API_TONLIB_LITESERVER_CONFIG` to its path, run `./configure.py` and rebuild the project.
150 |
151 | #### How to run multiple API instances on single machine?
152 |
153 | - Clone the repo as many times as many instances you need to the folders with different names (otherwise docker compose containers will conflict).
154 | - Configure each instance to use unique port (env variable `TON_API_HTTP_PORT`)
155 | - Build and run every instance.
156 |
157 | #### How to update tonlibjson library?
158 |
159 | Binary file `libtonlibjson` now moved to [pytonlib](https://github.com/toncenter/pytonlib).
160 | - Docker Compose: `docker-compose build --no-cache`.
161 | - Local run: `pip install -U ton-http-api`.
162 |
--------------------------------------------------------------------------------
/configure.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | import os
4 | import sys
5 |
6 |
# Default environment for a local / docker-compose deployment.
# Each entry may be overridden by an OS environment variable of the same name
# (see main() below). All values are strings because they are written
# verbatim into the generated .env file.
LOCAL_ENV = {
    'TON_API_CACHE_ENABLED': '0',
    'TON_API_CACHE_REDIS_ENDPOINT': 'cache_redis',
    'TON_API_CACHE_REDIS_PORT': '6379',
    'TON_API_CACHE_REDIS_TIMEOUT': '1',
    'TON_API_LOGS_JSONIFY': '0',
    'TON_API_LOGS_LEVEL': 'ERROR',
    'TON_API_GET_METHODS_ENABLED': '1',
    'TON_API_HTTP_PORT': '80',
    'TON_API_JSON_RPC_ENABLED': '1',
    'TON_API_ROOT_PATH': '/',
    'TON_API_WEBSERVERS_WORKERS': '1',
    'TON_API_TONLIB_LITESERVER_CONFIG': 'private/mainnet.json',
    'TON_API_TONLIB_KEYSTORE': '/tmp/ton_keystore/',
    'TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER': '50',
    'TON_API_TONLIB_CDLL_PATH': '',
    'TON_API_TONLIB_REQUEST_TIMEOUT': '10',
    'TON_API_GUNICORN_FLAGS': ''
}
26 |
27 |
def strtobool(val):
    """Convert a boolean-like string to a bool.

    Accepts (case-insensitively) 'y', 'yes', 't', 'true', 'on', '1' as True
    and 'n', 'no', 'f', 'false', 'off', '0' as False. Mirrors the behavior of
    the deprecated distutils.util.strtobool, but returns real booleans.

    Raises:
        ValueError: if ``val`` is not a recognized boolean string.
    """
    lowered = val.lower()  # normalize once instead of once per comparison
    if lowered in ('y', 'yes', 't', 'true', 'on', '1'):
        return True
    if lowered in ('n', 'no', 'f', 'false', 'off', '0'):
        return False
    raise ValueError(f"Invalid bool value {val}")
34 |
def main():
    """Generate a .env file for docker-compose from defaults and env overrides.

    Starts from LOCAL_ENV, overrides any key that is set in the process
    environment, selects the compose file list based on the cache flag, and
    writes the result as KEY=VALUE lines to .env next to this script.
    """
    # Copy the defaults so repeated calls never mutate the module-level
    # LOCAL_ENV constant (the original aliased it and modified it in place).
    default_env = dict(LOCAL_ENV)

    for var in default_env:
        value = os.getenv(var)
        if value is not None:  # compare with `is not None`, not `!= None`
            default_env[var] = value

    # COMPOSE_FILE may list several files separated by ':' (compose convention).
    compose_file = 'docker-compose.yaml'
    if strtobool(default_env['TON_API_CACHE_ENABLED']):
        compose_file += ':docker-compose.cache.yaml'
    default_env['COMPOSE_FILE'] = compose_file

    env_content = ''.join(f'{k}={v}\n' for k, v in default_env.items())

    # Write .env next to this script so docker-compose picks it up
    # from the repository root.
    with open(os.path.join(sys.path[0], ".env"), "w") as f:
        f.write(env_content)

    print(".env file created.")

if __name__ == '__main__':
    main()
60 |
--------------------------------------------------------------------------------
/docker-compose.cache.yaml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | cache_redis:
4 | image: redis:latest
5 | restart: unless-stopped
6 | networks:
7 | - internal
8 |
--------------------------------------------------------------------------------
/docker-compose.filebeat.yaml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | main:
4 | labels:
5 | co.elastic.logs/enabled: "true"
6 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | version: '3.7'
2 | services:
3 | main:
4 | image: toncenter/ton-http-api
5 | build:
6 | context: ton-http-api
7 | dockerfile: .docker/Dockerfile
8 | ports:
9 | - ${TON_API_HTTP_PORT:-8081}:8081
10 | environment:
11 | - TON_API_CACHE_ENABLED
12 | - TON_API_CACHE_REDIS_ENDPOINT
13 | - TON_API_CACHE_REDIS_PORT
14 | - TON_API_CACHE_REDIS_TIMEOUT
15 | - TON_API_LOGS_JSONIFY
16 | - TON_API_LOGS_LEVEL
17 | - TON_API_TONLIB_LITESERVER_CONFIG=/run/secrets/liteserver_config
18 | - TON_API_TONLIB_KEYSTORE
19 | - TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER
20 | - TON_API_TONLIB_CDLL_PATH
21 | - TON_API_TONLIB_REQUEST_TIMEOUT
22 | - TON_API_GET_METHODS_ENABLED
23 | - TON_API_JSON_RPC_ENABLED
24 | - TON_API_ROOT_PATH
25 | restart: unless-stopped
26 | networks:
27 | - internal
28 | secrets:
29 | - liteserver_config
30 | command: -c "gunicorn -k uvicorn.workers.UvicornWorker -w ${TON_API_WEBSERVERS_WORKERS} --bind 0.0.0.0:8081 ${TON_API_GUNICORN_FLAGS} pyTON.main:app"
31 | healthcheck:
32 | test: curl -sS http://127.0.0.1:8081${TON_API_ROOT_PATH}/healthcheck || echo 1
33 | interval: 15s
34 | timeout: 3s
35 | retries: 12
36 | secrets:
37 | liteserver_config:
38 | file: ${TON_API_TONLIB_LITESERVER_CONFIG:-./private/mainnet.json}
39 | networks:
40 | internal:
41 |
--------------------------------------------------------------------------------
/scripts/deploy.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | docker stack deploy -c <(scripts/swarm_config.sh) ton-http-api
5 |
--------------------------------------------------------------------------------
/scripts/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | echo "Setup Actions"
5 | apt update --yes
6 | apt install --yes curl
7 |
8 | # install docker and compose
9 | curl -fsSL https://get.docker.com -o /tmp/get-docker.sh && sh /tmp/get-docker.sh
10 | curl -L "https://github.com/docker/compose/releases/download/v2.6.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose
11 | chmod +x /usr/local/bin/docker-compose
12 |
13 | # Download the latest version of config.json
14 | mkdir private
15 |
16 | curl -sL https://ton-blockchain.github.io/global.config.json > private/mainnet.json
17 | curl -sL https://ton-blockchain.github.io/testnet-global.config.json > private/testnet.json
18 |
--------------------------------------------------------------------------------
/scripts/swarm_config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | docker-compose config | sed -E "s/cpus: ([0-9\\.]+)/cpus: '\\1'/"
5 |
--------------------------------------------------------------------------------
/ton-http-api/.docker/Dockerfile:
--------------------------------------------------------------------------------
FROM ubuntu:20.04

# Run `apt-get update` and install in a single layer: a separate cached
# `update` layer can go stale and make later installs fetch missing packages.
# Clearing the apt lists afterwards keeps the layer smaller.
RUN apt-get update && \
    DEBIAN_FRONTEND=noninteractive TZ=Etc/UTC apt-get install -y \
        tzdata git cmake wget python3 python3-pip libsecp256k1-dev libsodium-dev && \
    rm -rf /var/lib/apt/lists/*

# python requirements
# COPY (not ADD) — no archive extraction or URL fetching is needed here.
# Copied separately from the app so this layer stays cached across code changes.
COPY ./requirements.txt /tmp/requirements.txt
RUN python3 -m pip install --no-cache-dir -r /tmp/requirements.txt

# app
COPY . /app
WORKDIR /app

# entrypoint
ENTRYPOINT [ "/bin/bash" ]
CMD [ "/app/.docker/entrypoint.sh" ]
--------------------------------------------------------------------------------
/ton-http-api/.docker/entrypoint.sh:
--------------------------------------------------------------------------------
#!/bin/bash

set -e

echo "Running api with ${TON_API_WEBSERVERS_WORKERS:-1} workers"
echo "ENVIRONMENT:"
printenv

# exec replaces this shell so gunicorn becomes PID 1 and receives SIGTERM
# directly on `docker stop`, allowing graceful worker shutdown.
# TON_API_GUNICORN_FLAGS (documented in the README but previously never
# applied here) carries optional extra gunicorn arguments; it is left
# unquoted on purpose so it word-splits into separate arguments.
exec gunicorn -k uvicorn.workers.UvicornWorker -w "${TON_API_WEBSERVERS_WORKERS:-1}" --bind 0.0.0.0:8081 ${TON_API_GUNICORN_FLAGS} pyTON.main:app
--------------------------------------------------------------------------------
/ton-http-api/README.md:
--------------------------------------------------------------------------------
1 | 
2 |
3 | # HTTP API for [The Open Network](https://ton.org)
4 |
5 | Since TON nodes use their own ADNL binary transport protocol, an intermediate service is needed for an HTTP connection.
6 |
7 | TON HTTP API is such an intermediate service: receiving requests via HTTP, it accesses the lite servers of the TON network using `tonlibjson`.
8 |
9 | You can use the ready-made [toncenter.com](https://toncenter.com) service or start your own instance.
10 |
11 | ## Building and running
12 |
13 | Recommended hardware:
14 | - CPU architecture: x86_64 or arm64.
15 | - HTTP API only: 1 vCPU, 2 GB RAM.
16 | - HTTP API with cache enabled: 2 vCPUs, 4 GB RAM.
17 |
18 | There are two main ways to run TON HTTP API:
19 | - __Local__ *(experimental)*: works on following platforms: Ubuntu Linux (x86_64, arm64), MacOSX (Intel x86_64, Apple M1 arm64) and Windows (x86_64).
20 | - __Docker Compose__: flexible configuration, recommended for production environments, works on any x86_64 and arm64 OS with Docker available.
21 |
22 | ### Local run *(experimental)*
23 | **Note:** This is a simple but less stable way to run the service. We do not recommend using it in production.
24 | - (Windows only, first time) Install OpenSSL v1.1.1 for win64 from [here](https://slproweb.com/products/Win32OpenSSL.html).
25 | - Install package: `pip install ton-http-api`.
26 | - Run service with `ton-http-api`. This command will run service with [mainnet](https://ton.org/global-config.json) configuration.
27 | - Run `ton-http-api --help` to show parameters list.
28 |
29 | ### Docker Compose
30 | - (First time) Install required tools: `docker`, `docker-compose`, `curl`.
31 | - For Ubuntu: run `scripts/setup.sh` from the root of the repo.
32 | - For MacOS and Windows: install [Docker Desktop](https://www.docker.com/products/docker-desktop/).
33 | - **Note:** we recommend to use Docker Compose V2.
34 | - Download TON configuration files to private folder:
35 | ```bash
36 | mkdir private
37 | curl -sL https://ton-blockchain.github.io/global.config.json > private/mainnet.json
38 | curl -sL https://ton-blockchain.github.io/testnet-global.config.json > private/testnet.json
39 | ```
40 | - Run `./configure.py` to create `.env` file with necessary environment variables (see [Configuration](#Configuration) for details).
41 | - Build services: `docker-compose build`.
42 | - Or pull latest images: `docker-compose pull`.
43 | - Run services: `docker-compose up -d`.
44 | - Stop services: `docker-compose down`.
45 |
46 | ### Configuration
47 |
48 | You should specify environment parameters and run `./configure.py` to create `.env` file.
49 | ```bash
50 | export TON_API_LITESERVER_CONFIG=private/testnet.json
51 | ./configure.py
52 | ```
53 |
54 | The service supports the following environment variables:
55 | #### Webserver settings
56 | - `TON_API_HTTP_PORT` *(default: 80)*
57 |
58 | Port for HTTP connections of API service.
59 |
60 | - `TON_API_ROOT_PATH` *(default: /)*
61 |
62 | If you use a proxy server such as Nginx or Traefik you might change the default API path prefix (e.g. `/api/v2`). If so you have to pass the path prefix to the API service in this variable.
63 |
64 | - `TON_API_WEBSERVERS_WORKERS` *(default: 1)*
65 |
66 |   Number of webserver processes. If your server is under high load, try increasing this value to increase RPS. We recommend setting it to half the number of CPU cores.
67 |
68 | - `TON_API_GET_METHODS_ENABLED` *(default: 1)*
69 |
70 | Enables `runGetMethod` endpoint.
71 |
72 | - `TON_API_JSON_RPC_ENABLED` *(default: 1)*
73 |
74 | Enables `jsonRPC` endpoint.
75 |
76 | - `TON_API_LOGS_JSONIFY` *(default: 0)*
77 |
78 | Enables printing all logs in json format.
79 |
80 | - `TON_API_LOGS_LEVEL` *(default: ERROR)*
81 |
82 | Defines log verbosity level. Values allowed: `DEBUG`,`INFO`,`WARNING`,`ERROR`,`CRITICAL`.
83 |
84 | - `TON_API_GUNICORN_FLAGS` *(default: empty)*
85 |
86 | Additional Gunicorn [command line arguments](https://docs.gunicorn.org/en/stable/settings.html).
87 |
88 | #### Tonlib settings
89 | - `TON_API_TONLIB_LITESERVER_CONFIG` *(default docker: private/mainnet.json local: https://ton.org/global-config.json)*
90 |
91 |   Path to the config file with lite server information. For a native run you can pass a URL to download the config. Docker supports only a path to a file.
92 |
93 | - `TON_API_TONLIB_KEYSTORE` *(default docker: /tmp/ton_keystore local: ./ton_keystore/)*
94 |
95 | Path to tonlib keystore.
96 |
97 | - `TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER` *(default: 50)*
98 |
99 | Number of maximum parallel requests count per worker.
100 |
101 | - `TON_API_TONLIB_CDLL_PATH` *(default: empty)*
102 |
103 | Path to tonlibjson binary. It could be useful if you want to run service on unsupported platform and have built the `libtonlibjson` library manually.
104 |
105 | - `TON_API_TONLIB_REQUEST_TIMEOUT` *(default: 10)*
106 |
107 | Timeout for liteserver requests.
108 |
109 | #### Cache configuration
110 | - `TON_API_CACHE_ENABLED` *(default: 0)*
111 |
112 | Enables caching lite server responses with Redis.
113 |
114 | - `TON_API_CACHE_REDIS_ENDPOINT` *(default: localhost, docker: cache_redis)*
115 |
116 | Redis cache service host.
117 |
118 | - `TON_API_CACHE_REDIS_PORT` *(default: 6379)*
119 |
120 | Redis cache service port.
121 |
122 | - `TON_API_CACHE_REDIS_TIMEOUT` *(default: 1)*
123 |
124 |   Redis cache service timeout.
125 |
126 |
127 | ## FAQ
128 | #### How to point the service to my own lite server?
129 |
130 | To point the HTTP API to your own lite server you should set `TON_API_TONLIB_LITESERVER_CONFIG` to config file with your only lite server.
131 |
132 | - If you use MyTonCtrl on your node you can generate config file with these commands:
133 | ```
134 | $ mytonctrl
135 | MyTonCtrl> installer
136 | MyTonInstaller> clcf
137 | ```
138 | Config file will be saved at `/usr/bin/ton/local.config.json`.
139 | - If you don't use MyTonCtrl: copy `private/mainnet.json` and overwrite section `liteservers` with your liteservers ip, port and public key. To get public key from `liteserver.pub` file use the following script:
140 | ```
141 | python -c 'import codecs; f=open("liteserver.pub", "rb+"); pub=f.read()[4:]; print(str(codecs.encode(pub,"base64")).replace("\n",""))'
142 | ```
143 | - Once config file is created assign variable `TON_API_TONLIB_LITESERVER_CONFIG` to its path, run `./configure.py` and rebuild the project.
144 |
145 | #### How to run multiple API instances on single machine?
146 |
147 | - Clone the repo as many times as many instances you need to the folders with different names (otherwise docker compose containers will conflict).
148 | - Configure each instance to use unique port (env variable `TON_API_HTTP_PORT`)
149 | - Build and run every instance.
150 |
151 | #### How to update tonlibjson library?
152 |
153 | The `libtonlibjson` binary has moved to [pytonlib](https://github.com/toncenter/pytonlib).
154 | - Docker Compose: `docker-compose build --no-cache`.
155 | - Local run: `pip install -U ton-http-api`.
156 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/artemd24/ton-http-api/7aee2b8e471b1908b0cb8ac13cf1cd7feda26c98/ton-http-api/pyTON/__init__.py
--------------------------------------------------------------------------------
/ton-http-api/pyTON/__main__.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import uvicorn
3 | import os
4 |
5 |
def setup_environment(args):
    """Export parsed CLI arguments as TON_API_* environment variables.

    pyTON reads its configuration from the environment, so the argparse
    namespace must be translated into env vars before pyTON.main is imported.
    """
    env = {
        'TON_API_CACHE_ENABLED': '1' if args.cache else '0',
        'TON_API_CACHE_REDIS_ENDPOINT': args.cache_redis_endpoint,
        'TON_API_CACHE_REDIS_PORT': str(args.cache_redis_port),
        'TON_API_LOGS_LEVEL': args.logs_level,
        'TON_API_LOGS_JSONIFY': '1' if args.logs_jsonify else '0',
        'TON_API_ROOT_PATH': args.root,
        'TON_API_GET_METHODS_ENABLED': '1' if args.get_methods else '0',
        'TON_API_JSON_RPC_ENABLED': '1' if args.json_rpc else '0',
        'TON_API_TONLIB_LITESERVER_CONFIG': args.liteserver_config,
        'TON_API_TONLIB_KEYSTORE': args.tonlib_keystore,
        'TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER': str(args.parallel_requests_per_liteserver),
    }
    # The CDLL path is optional; leave the variable unset when not provided.
    if args.cdll_path is not None:
        env['TON_API_TONLIB_CDLL_PATH'] = args.cdll_path
    os.environ.update(env)
24 |
25 |
def main():
    """CLI entry point: parse arguments and launch the HTTP API with uvicorn."""
    parser = argparse.ArgumentParser('ton-http-api')

    group = parser.add_argument_group('webserver')
    group.add_argument('--host', type=str, default='0.0.0.0', help='HTTP API host')
    group.add_argument('--port', type=int, default=8081, help='HTTP API port')
    group.add_argument('--root', type=str, default='/', help='HTTP API root, default: /')
    group.add_argument('--no-get-methods', action='store_false', default=True, dest='get_methods', help='Disable runGetMethod endpoint')
    group.add_argument('--no-json-rpc', action='store_false', default=True, dest='json_rpc', help='Disable jsonRPC endpoint')

    group = parser.add_argument_group('tonlib')
    group.add_argument('--liteserver-config', type=str, default='https://ton.org/global-config.json', help='Liteserver config JSON path')
    group.add_argument('--tonlib-keystore', type=str, default='./ton_keystore/', help='Keystore path for tonlibjson')
    group.add_argument('--parallel-requests-per-liteserver', type=int, default=50, help='Maximum parallel requests per liteserver')
    group.add_argument('--cdll-path', type=str, default=None, help='Path to tonlibjson binary')

    group = parser.add_argument_group('cache')
    group.add_argument('--cache', default=False, action='store_true', help='Enable cache')
    group.add_argument('--cache-redis-endpoint', type=str, default='localhost', help='Cache Redis endpoint')
    group.add_argument('--cache-redis-port', type=int, default=6379, help='Cache Redis port')

    group = parser.add_argument_group('logs')
    group.add_argument('--logs-level', type=str, choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], default='ERROR', help='Logging level')
    group.add_argument('--logs-jsonify', default=False, action='store_true', help='Print logs in JSON format')

    group = parser.add_argument_group('other')
    group.add_argument('--version', default=False, action='store_true', help='Show version of PyPI package')

    args = parser.parse_args()

    if args.version:
        # Delegate version reporting to pip's package metadata.
        os.system('pip3 show ton-http-api')
        return

    # The environment must be populated before pyTON.main is imported,
    # because that module reads its settings from the environment at import time.
    setup_environment(args)

    from pyTON.main import app
    uvicorn.run(app, host=args.host, port=args.port, log_level=args.logs_level.lower())


if __name__ == '__main__':
    main()
69 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/cache.py:
--------------------------------------------------------------------------------
1 | import redis.asyncio
2 | import ring
3 |
4 | from ring.func.asyncio import Aioredis2Storage
5 | from pyTON.settings import RedisCacheSettings
6 |
7 |
class TonlibResultRedisStorage(Aioredis2Storage):
    """Redis-backed ring storage that skips caching of tonlib error replies."""

    async def set(self, key, value, expire=...):
        # Tonlib responses carry their type in '@type'; a value without
        # '@type' is conservatively treated as an error and not cached,
        # so transient liteserver failures are never served from cache.
        if value.get('@type', 'error') == 'error':
            return None
        # Ellipsis default for `expire` mirrors the base class signature.
        return await super().set(key, value, expire)
13 |
14 |
class CacheManager:
    """Base interface for cache managers.

    Implementations provide ``cached``, a decorator factory applied to
    tonlib request methods; this base version is only a stub.
    """

    def cached(self, expire=0, check_error=True):
        """Return a caching decorator (stub: implemented by subclasses)."""
18 |
19 |
class DisabledCacheManager:
    """Cache manager used when caching is disabled: decoration is a no-op."""

    def cached(self, expire=0, check_error=True):
        """Return a pass-through decorator.

        The previous implementation wrapped the target in a plain sync
        wrapper, which added a useless call frame and discarded the
        function's name and signature (hurting introspection of decorated
        endpoints). Returning the function unchanged is behaviorally
        equivalent — the wrapper only forwarded the call — and preserves
        metadata.
        """
        def g(func):
            return func
        return g
27 |
28 |
class RedisCacheManager:
    """Cache manager backed by Redis via the ``ring`` caching library."""

    def __init__(self, cache_settings: RedisCacheSettings):
        self.cache_settings = cache_settings
        # One shared async Redis client, reused by every cached function.
        self.cache_redis = redis.asyncio.from_url(f"redis://{cache_settings.redis.endpoint}:{cache_settings.redis.port}")

    def cached(self, expire=0, check_error=True):
        # check_error=True uses the storage that refuses to cache tonlib
        # error responses (see TonlibResultRedisStorage in this module).
        storage_class = TonlibResultRedisStorage if check_error else Aioredis2Storage
        def g(func):
            return ring.aioredis(self.cache_redis, coder='pickle', expire=expire, storage_class=storage_class)(func)
        return g
39 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/main.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python3
2 | import os
3 | import sys
4 | sys.setrecursionlimit(2048)
5 |
6 | import json
7 | import asyncio
8 | import base64
9 | import inspect
10 | import inject
11 | import codecs
12 |
13 | from functools import wraps
14 |
15 | from typing import Optional, Union, Dict, Any, List
16 | from fastapi import FastAPI, Depends, Response, Request, BackgroundTasks
17 | from fastapi.params import Body, Query, Param
18 | from fastapi.exceptions import HTTPException, RequestValidationError
19 | from starlette.exceptions import HTTPException as StarletteHTTPException
20 | from fastapi.responses import JSONResponse
21 | from fastapi import status
22 |
23 | from tvm_valuetypes.cell import deserialize_cell_from_object
24 |
25 | from pyTON.models import TonResponse, TonResponseJsonRPC, TonRequestJsonRPC
26 | from pyTON.manager import TonlibManager
27 | from pyTON.cache import CacheManager, RedisCacheManager, DisabledCacheManager
28 | from pyTON.settings import Settings, RedisCacheSettings
29 |
30 | from pytonlib.utils.address import detect_address as __detect_address, prepare_address as _prepare_address
31 | from pytonlib.utils.wallet import wallets as known_wallets, sha256
32 | from pytonlib import TonlibException
33 |
34 | from loguru import logger
35 |
36 |
37 | # inject configuration
38 |
def main_config(binder):
    """Configure dependency-injection bindings: Settings and a CacheManager.

    Binds RedisCacheManager when caching is enabled (Redis is the only
    supported backend) and the no-op DisabledCacheManager otherwise.
    Raises RuntimeError if caching is enabled with a non-Redis settings type.
    """
    settings = Settings.from_environment()
    binder.bind(Settings, settings)

    # cache
    if settings.cache.enabled:
        if not isinstance(settings.cache, RedisCacheSettings):
            raise RuntimeError('Only Redis cache supported')
        # (removed a stray debug `print(settings.cache)` left from development)
        binder.bind(CacheManager, RedisCacheManager(settings.cache))
    else:
        binder.bind(CacheManager, DisabledCacheManager())
55 |
# Install DI bindings; configure_once is a no-op if inject is already configured.
inject.configure_once(main_config)


# main service
# `description` is rendered verbatim on the generated OpenAPI docs page.
description = """
This API enables HTTP access to TON blockchain - getting accounts and wallets information, looking up blocks and transactions, sending messages to the blockchain, calling get methods of smart contracts, and more.

In addition to REST API, all methods are available through [JSON-RPC endpoint](#json%20rpc) with `method` equal to method name and `params` passed as a dictionary.

The response contains a JSON object, which always has a boolean field `ok` and either `error` or `result`. If `ok` equals true, the request was successful and the result of the query can be found in the `result` field. In case of an unsuccessful request, `ok` equals false and the error is explained in the `error`.

API Key should be sent either as `api_key` query parameter or `X-API-Key` header.
"""

# Tag descriptions shown in the generated OpenAPI docs; endpoints below
# reference these tags by name.
tags_metadata = [
    {
        "name": "accounts",
        "description": "Information about accounts.",
    },
    {
        "name": "blocks",
        "description": "Information about blocks.",
    },
    {
        "name": "transactions",
        "description": "Fetching and locating transactions.",
    },
    {
        "name": "get config",
        "description": "Get blockchain config"
    },
    {
        "name": "run method",
        "description": "Run get method of smart contract.",
    },
    {
        "name": "send",
        "description": "Send data to blockchain.",
    },
    {
        "name": "json rpc",
        "description": "JSON-RPC endpoint.",
    },
]

# Settings instance resolved from the DI container configured above.
settings = inject.instance(Settings)

app = FastAPI(
    title="TON HTTP API",
    description=description,
    version='2.0.0',
    docs_url='/',
    responses={
        422: {'description': 'Validation Error'},
        504: {'description': 'Lite Server Timeout'}
    },
    root_path=settings.webserver.api_root_path,
    openapi_tags=tags_metadata
)


# Global TonlibManager instance; created in the startup event handler below.
tonlib = None
118 |
@app.on_event("startup")
async def startup():
    """Configure logging and spawn the TonlibManager on application startup."""
    # Replace loguru's default handler with one honoring the configured
    # level and JSON serialization; enqueue=True routes records through a
    # queue so logging from multiple processes does not interleave.
    logger.remove()
    logger.add(sys.stdout, level=settings.logging.level, enqueue=True, serialize=settings.logging.jsonify)

    # setup tonlib multiclient
    global tonlib

    loop = asyncio.get_event_loop()
    cache_manager = inject.instance(CacheManager)
    tonlib = TonlibManager(tonlib_settings=settings.tonlib,
                           dispatcher=None,
                           cache_manager=cache_manager,
                           loop=loop)

    await asyncio.sleep(2) # wait for manager to spawn all workers and report their status
135 |
@app.on_event("shutdown")
async def shutdown_event():
    """Stop the TonlibManager (and its workers) when the app shuts down."""
    await tonlib.shutdown()
139 |
140 | # Exception handlers
@app.exception_handler(StarletteHTTPException)
async def http_exception_handler(request, exc):
    """Render HTTP errors in the standard TonResponse envelope, keeping the status code."""
    body = TonResponse(ok=False, error=str(exc.detail), code=exc.status_code)
    return JSONResponse(body.dict(exclude_none=True), status_code=body.code)
145 |
146 |
@app.exception_handler(RequestValidationError)
async def validation_exception_handler(request, exc):
    """Map request-validation failures to a 422 TonResponse envelope."""
    http_code = status.HTTP_422_UNPROCESSABLE_ENTITY
    body = TonResponse(ok=False, error=f"Validation error: {exc}", code=http_code)
    return JSONResponse(body.dict(exclude_none=True), status_code=http_code)
151 |
152 |
@app.exception_handler(asyncio.TimeoutError)
async def timeout_exception_handler(request, exc):
    """Report liteserver request timeouts (raised via wrap_result) as 504."""
    http_code = status.HTTP_504_GATEWAY_TIMEOUT
    body = TonResponse(ok=False, error="Liteserver timeout", code=http_code)
    return JSONResponse(body.dict(exclude_none=True), status_code=http_code)
157 |
158 |
@app.exception_handler(TonlibException)
async def tonlib_error_result_exception_handler(request, exc):
    """Map tonlib-level failures to a 500 TonResponse envelope."""
    http_code = status.HTTP_500_INTERNAL_SERVER_ERROR
    body = TonResponse(ok=False, error=str(exc), code=http_code)
    return JSONResponse(body.dict(exclude_none=True), status_code=http_code)
163 |
164 |
@app.exception_handler(Exception)
async def fastapi_generic_exception_handler(request, exc):
    """Last-resort handler: report any unexpected exception as 503."""
    http_code = status.HTTP_503_SERVICE_UNAVAILABLE
    body = TonResponse(ok=False, error=str(exc), code=http_code)
    return JSONResponse(body.dict(exclude_none=True), status_code=http_code)
169 |
170 |
171 | # Helper functions
def _detect_address(address):
    """Parse *address* with pytonlib's detect_address, mapping any parse
    failure to HTTP 416 so clients get a consistent "Incorrect address" error.
    """
    try:
        return __detect_address(address)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the widest safe net here.
        raise HTTPException(status_code=416, detail="Incorrect address")
177 |
def prepare_address(address):
    """Normalize *address* with pytonlib's prepare_address, mapping any parse
    failure to HTTP 416 so clients get a consistent "Incorrect address" error.
    """
    try:
        return _prepare_address(address)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the widest safe net here.
        raise HTTPException(status_code=416, detail="Incorrect address")
183 |
def address_state(account_info):
    """Classify a raw account state as 'uninitialized', 'frozen' or 'active'.

    An account without code is uninitialized unless it has a frozen_hash,
    in which case it is frozen; any account with code is active.
    """
    code = account_info.get("code", "")
    # tonlib may report code as an int (no code) or an empty string.
    if isinstance(code, int) or len(code) == 0:
        frozen_hash = account_info.get("frozen_hash", "")
        return "uninitialized" if len(frozen_hash) == 0 else "frozen"
    return "active"
191 |
def wrap_result(func):
    """Decorator: await *func* under the configured liteserver timeout and
    wrap its return value into a successful TonResponse envelope.

    On timeout, asyncio.TimeoutError propagates to the 504 exception handler.
    """
    @wraps(func)
    async def wrapper(*args, **kwargs):
        timeout = settings.tonlib.request_timeout
        payload = await asyncio.wait_for(func(*args, **kwargs), timeout)
        return TonResponse(ok=True, result=payload)
    return wrapper
198 |
# Registry of JSON-RPC method name -> callable, populated by @json_rpc below
# and used by the jsonRPC endpoint to dispatch calls.
json_rpc_methods = {}

def json_rpc(method):
    """Decorator factory: register the endpoint under *method* for JSON-RPC.

    The registered wrapper normalizes JSON-RPC params to match the FastAPI
    endpoint signature (fills defaults, unwraps Query/Body defaults, coerces
    big ints sent as strings) before calling the underlying function. The
    endpoint itself is returned unmodified, so @app.get sees the plain function.
    """
    def g(func):
        @wraps(func)
        def f(**kwargs):
            sig = inspect.signature(func)
            for k, v in sig.parameters.items():
                # Add function's default value parameters to kwargs.
                if k not in kwargs and v.default is not inspect._empty:
                    default_val = v.default

                    # FastAPI Query/Body objects wrap the real default value;
                    # Ellipsis means the parameter is required.
                    if isinstance(default_val, Param) or isinstance(default_val, Body):
                        if default_val.default == ...:
                            raise TypeError("Non-optional argument expected")
                        kwargs[k] = default_val.default
                    else:
                        kwargs[k] = default_val

                # Some values (e.g. lt, shard) don't fit in json int and can be sent as str.
                # Coerce such str to int.
                if (v.annotation is int or v.annotation is Optional[int]) and type(kwargs[k]) is str:
                    try:
                        kwargs[k] = int(kwargs[k])
                    except ValueError:
                        raise TypeError(f"Can't parse integer in parameter {k}")

            return func(**kwargs)

        json_rpc_methods[method] = f
        return func
    return g
231 |
232 | # Endpoints
@app.get('/healthcheck', include_in_schema=False)
async def healthcheck():
    """Liveness probe: always responds with plain 'OK' (hidden from docs)."""
    return 'OK'
236 |
237 |
# Internal diagnostics endpoint (hidden from docs): reports the state of
# TonlibManager's liteserver workers.
@app.get('/getWorkerState', response_model=TonResponse, include_in_schema=False)
@wrap_result
async def get_worker_state():
    return tonlib.get_workers_state()
242 |
243 |
@app.get('/getAddressInformation', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getAddressInformation')
@wrap_result
async def get_address_information(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Get basic information about the address: balance, code, data, last_transaction_id.
    """
    account = await tonlib.raw_get_account_state(prepare_address(address))
    account["state"] = address_state(account)
    # Negative balances are clamped to zero before being returned.
    if "balance" in account and int(account["balance"]) < 0:
        account["balance"] = 0
    return account
259 |
@app.get('/getExtendedAddressInformation', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getExtendedAddressInformation')
@wrap_result
async def get_extended_address_information(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Similar to previous one but tries to parse additional information for known contract types. This method is based on tonlib's function *getAccountState*. For detecting wallets we recommend to use *getWalletInformation*.
    """
    # Normalize the address, then delegate parsing to tonlib's generic state call.
    return await tonlib.generic_get_account_state(prepare_address(address))
272 |
@app.get('/getWalletInformation', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getWalletInformation')
@wrap_result
async def get_wallet_information(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Retrieve wallet information. This method parses contract state and currently supports more wallet types than getExtendedAddressInformation: simple wallet, standard wallet, v3 wallet, v4 wallet.
    """
    address = prepare_address(address)
    result = await tonlib.raw_get_account_state(address)
    # Response skeleton; wallet-specific fields are filled in below.
    res = {'wallet': False, 'balance': 0, 'account_state': None, 'wallet_type': None, 'seqno': None}
    res["account_state"] = address_state(result)
    # Negative or missing balances are reported as 0.
    res["balance"] = result["balance"] if (result["balance"] and int(result["balance"]) > 0) else 0
    if "last_transaction_id" in result:
        res["last_transaction_id"] = result["last_transaction_id"]
    # Identify the wallet type by the sha256 of its code.
    # NOTE(review): result["code"] is read unconditionally here — presumably
    # raw_get_account_state always returns a "code" key (possibly empty);
    # confirm for uninitialized accounts.
    ci = sha256(result["code"])
    if ci in known_wallets:
        res["wallet"] = True
        wallet_handler = known_wallets[ci]
        res["wallet_type"] = wallet_handler["type"]
        # The extractor fills wallet-type-specific fields (e.g. seqno) into res.
        wallet_handler["data_extractor"](res, result)
    return res
296 |
@app.get('/getTransactions', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts', 'transactions'])
@json_rpc('getTransactions')
@wrap_result
async def get_transactions(
    address: str = Query(..., description="Identifier of target TON account in any form."),
    limit: Optional[int] = Query(default=10, description="Maximum number of transactions in response.", gt=0, le=100),
    lt: Optional[int] = Query(default=None, description="Logical time of transaction to start with, must be sent with *hash*."),
    hash: Optional[str] = Query(default=None, description="Hash of transaction to start with, in *base64* or *hex* encoding , must be sent with *lt*."),
    to_lt: Optional[int] = Query(default=0, description="Logical time of transaction to finish with (to get tx from *lt* to *to_lt*)."),
    archival: bool = Query(default=False, description="By default getTransaction request is processed by any available liteserver. If *archival=true* only liteservers with full history are used.")
):
    """
    Get transaction history of a given address.
    """
    address = prepare_address(address)
    # lt+hash together select the starting transaction; to_lt bounds the range.
    return await tonlib.get_transactions(address, from_transaction_lt=lt, from_transaction_hash=hash, to_transaction_lt=to_lt, limit=limit, archival=archival)
313 |
@app.get('/getAddressBalance', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getAddressBalance')
@wrap_result
async def get_address_balance(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Get balance (in nanotons) of a given address.
    """
    account = await tonlib.raw_get_account_state(prepare_address(address))
    # Negative balances are clamped to zero before being returned.
    if "balance" in account and int(account["balance"]) < 0:
        account["balance"] = 0
    return account["balance"]
328 |
@app.get('/getAddressState', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getAddressState')
@wrap_result
async def get_address(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Get state of a given address. State can be either *unitialized*, *active* or *frozen*.
    """
    # Classify the raw account state via the shared address_state helper.
    account = await tonlib.raw_get_account_state(prepare_address(address))
    return address_state(account)
341 |
@app.get('/packAddress', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('packAddress')
@wrap_result
async def pack_address(
    address: str = Query(..., description="Identifier of target TON account in raw form.", example="0:83DFD552E63729B472FCBCC8C45EBCC6691702558B68EC7527E1BA403A0F31A8")
):
    """
    Convert an address from raw to human-readable format.
    """
    # prepare_address returns the user-friendly form (416 on bad input).
    return prepare_address(address)
352 |
@app.get('/unpackAddress', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('unpackAddress')
@wrap_result
async def unpack_address(
    address: str = Query(..., description="Identifier of target TON account in user-friendly form", example="EQCD39VS5jcptHL8vMjEXrzGaRcCVYto7HUn4bpAOg8xqB2N")
):
    """
    Convert an address from human-readable to raw format.
    """
    # _detect_address parses the address (416 on bad input) and exposes all forms.
    return _detect_address(address)["raw_form"]
363 |
@app.get('/getMasterchainInfo', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('getMasterchainInfo')
@wrap_result
async def get_masterchain_info():
    """
    Get up-to-date masterchain state.
    """
    return await tonlib.getMasterchainInfo()
372 |
@app.get('/getMasterchainBlockSignatures', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('getMasterchainBlockSignatures')
@wrap_result
async def get_masterchain_block_signatures(
    seqno: int
):
    """
    Get signatures of a masterchain block by its seqno.
    """
    # (docstring previously copy-pasted from getMasterchainInfo)
    return await tonlib.getMasterchainBlockSignatures(seqno)
383 |
@app.get('/getShardBlockProof', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('getShardBlockProof')
@wrap_result
async def get_shard_block_proof(
    workchain: int = Query(..., description="Block workchain id"),
    shard: int = Query(..., description="Block shard id"),
    seqno: int = Query(..., description="Block seqno"),
    from_seqno: Optional[int] = Query(None, description="Seqno of masterchain block starting from which proof is required. If not specified latest masterchain block is used."),
):
    """
    Get merkle proof of shardchain block.
    """
    return await tonlib.getShardBlockProof(workchain, shard, seqno, from_seqno)
397 |
@app.get('/getConsensusBlock', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('getConsensusBlock')
@wrap_result
async def get_consensus_block():
    """
    Get consensus block and its update timestamp.
    """
    return await tonlib.getConsensusBlock()
406 |
@app.get('/lookupBlock', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('lookupBlock')
@wrap_result
async def lookup_block(
    workchain: int = Query(..., description="Workchain id to look up block in"),
    shard: int = Query(..., description="Shard id to look up block in"),
    seqno: Optional[int] = Query(None, description="Block's height"),
    lt: Optional[int] = Query(None, description="Block's logical time"),
    unixtime: Optional[int] = Query(None, description="Block's unixtime")
):
    """
    Look up block by either *seqno*, *lt* or *unixtime*.
    """
    # Exactly which selector (seqno/lt/unixtime) is honored is decided by tonlib.
    return await tonlib.lookupBlock(workchain, shard, seqno, lt, unixtime)
421 |
@app.get('/shards', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('shards')
@wrap_result
async def shards(
    seqno: int = Query(..., description="Masterchain seqno to fetch shards of.")
):
    """
    Get shards information.
    """
    return await tonlib.getShards(seqno)
432 |
@app.get('/getBlockTransactions', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks','transactions'])
@json_rpc('getBlockTransactions')
@wrap_result
async def get_block_transactions(
    workchain: int,
    shard: int,
    seqno: int,
    root_hash: Optional[str] = None,
    file_hash: Optional[str] = None,
    after_lt: Optional[int] = None,
    after_hash: Optional[str] = None,
    count: int = 40
):
    """
    Get transactions of the given block.
    """
    # Note: tonlib's parameter order differs from the endpoint's — `count`
    # is passed before the optional hashes.
    return await tonlib.getBlockTransactions(workchain, shard, seqno, count, root_hash, file_hash, after_lt, after_hash)
450 |
@app.get('/getBlockHeader', response_model=TonResponse, response_model_exclude_none=True, tags=['blocks'])
@json_rpc('getBlockHeader')
@wrap_result
async def get_block_header(
    workchain: int,
    shard: int,
    seqno: int,
    root_hash: Optional[str] = None,
    file_hash: Optional[str] = None
):
    """
    Get metadata of a given block.
    """
    return await tonlib.getBlockHeader(workchain, shard, seqno, root_hash, file_hash)
465 |
@app.get('/getConfigParam', response_model=TonResponse, response_model_exclude_none=True, tags=['get config'])
@json_rpc('getConfigParam')
@wrap_result
async def get_config_param(
    config_id: int = Query(..., description="Config id"),
    seqno: Optional[int] = Query(None, description="Masterchain seqno. If not specified, latest blockchain state will be used.")
):
    """
    Get config by id.
    """
    param = await tonlib.get_config_param(config_id, seqno)
    return param
477 |
@app.get('/getTokenData', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('getTokenData')
@wrap_result
async def get_token_data(
    address: str = Query(..., description="Address of NFT collection/item or Jetton master/wallet smart contract")
):
    """
    Get NFT or Jetton information.
    """
    # Normalize the address to the canonical form before querying tonlib.
    normalized = prepare_address(address)
    return await tonlib.get_token_data(normalized)
489 |
@app.get('/tryLocateTx', response_model=TonResponse, response_model_exclude_none=True, tags=['transactions'])
@json_rpc('tryLocateTx')
@wrap_result
async def get_try_locate_tx(
    source: str,
    destination: str,
    created_lt: int
):
    """
    Locate outcoming transaction of *destination* address by incoming message.
    """
    located = await tonlib.tryLocateTxByIncomingMessage(source, destination, created_lt)
    return located
502 |
@app.get('/tryLocateResultTx', response_model=TonResponse, response_model_exclude_none=True, tags=['transactions'])
@json_rpc('tryLocateResultTx')
@wrap_result
async def get_try_locate_result_tx(
    source: str,
    destination: str,
    created_lt: int
):
    """
    Same as previous. Locate outcoming transaction of *destination* address by incoming message
    """
    # Alias of /tryLocateTx: both delegate to the same tonlib lookup.
    located = await tonlib.tryLocateTxByIncomingMessage(source, destination, created_lt)
    return located
515 |
@app.get('/tryLocateSourceTx', response_model=TonResponse, response_model_exclude_none=True, tags=['transactions'])
@json_rpc('tryLocateSourceTx')
@wrap_result
async def get_try_locate_source_tx(
    source: str,
    destination: str,
    created_lt: int
):
    """
    Locate incoming transaction of *source* address by outcoming message.
    """
    located = await tonlib.tryLocateTxByOutcomingMessage(source, destination, created_lt)
    return located
528 |
@app.get('/detectAddress', response_model=TonResponse, response_model_exclude_none=True, tags=['accounts'])
@json_rpc('detectAddress')
@wrap_result
async def detect_address(
    address: str = Query(..., description="Identifier of target TON account in any form.")
):
    """
    Get all possible address forms.
    """
    # Pure local computation — no liteserver round-trip needed.
    return _detect_address(address)
539 |
@app.post('/sendBoc', response_model=TonResponse, response_model_exclude_none=True, tags=['send'])
@json_rpc('sendBoc')
@wrap_result
async def send_boc(
    boc: str = Body(..., embed=True, description="b64 encoded bag of cells")
):
    """
    Send serialized boc file: fully packed and serialized external message to blockchain.
    """
    boc_bytes = base64.b64decode(boc)
    return await tonlib.raw_send_message(boc_bytes)
551 |
@app.post('/sendBocReturnHash', response_model=TonResponse, response_model_exclude_none=True, tags=['send'])
@json_rpc('sendBocReturnHash')
@wrap_result
async def send_boc_return_hash(
    boc: str = Body(..., embed=True, description="b64 encoded bag of cells")
):
    """
    Send serialized boc file: fully packed and serialized external message to blockchain. The method returns message hash.
    """
    boc_bytes = base64.b64decode(boc)
    return await tonlib.raw_send_message_return_hash(boc_bytes)
563 |
async def send_boc_unsafe_task(boc_bytes: bytes):
    """
    Best-effort background sender: push `boc_bytes` to the network every
    5 seconds for 1 minute, ignoring individual send failures.
    """
    send_interval = 5
    send_duration = 60
    for _ in range(int(send_duration / send_interval)):
        try:
            await tonlib.raw_send_message(boc_bytes)
        except asyncio.CancelledError:
            # Never swallow task cancellation (the original bare `except:` did).
            raise
        except Exception:
            # Deliberate best-effort: a failed attempt is simply retried.
            pass
        await asyncio.sleep(send_interval)
573 |
@app.post('/sendBocUnsafe', response_model=TonResponse, response_model_exclude_none=True, include_in_schema=False, tags=['send'])
@json_rpc('sendBocUnsafe')
@wrap_result
async def send_boc_unsafe(
    background_tasks: BackgroundTasks,
    boc: str = Body(..., embed=True, description="b64 encoded bag of cells")
):
    """
    Unsafe send serialized boc file: fully packed and serialized external message to blockchain. This method creates
    background task that sends boc to network every 5 seconds for 1 minute.
    """
    boc_bytes = base64.b64decode(boc)
    # Schedule the retry loop; respond immediately without waiting for delivery.
    background_tasks.add_task(send_boc_unsafe_task, boc_bytes)
    return {'@type': 'ok', '@extra': '0:0:0'}
588 |
@app.post('/sendCellSimple', response_model=TonResponse, response_model_exclude_none=True, include_in_schema=False, tags=['send'])
@json_rpc('sendCellSimple')
@wrap_result
async def send_cell(
    cell: Dict[str, Any] = Body(..., embed=True, description="Cell serialized as object")
):
    """
    (Deprecated) Send cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`, that is fully packed but not serialized external message.
    """
    try:
        cell = deserialize_cell_from_object(cell)
        boc = codecs.encode(cell.serialize_boc(), 'base64')
    # Narrowed from a bare `except:` which would also turn
    # KeyboardInterrupt/SystemExit/CancelledError into an HTTP 400.
    except Exception:
        raise HTTPException(status_code=400, detail="Error while parsing cell")
    return await tonlib.raw_send_message(boc)
604 |
@app.post('/sendQuery', response_model=TonResponse, response_model_exclude_none=True, tags=['send'])
@json_rpc('sendQuery')
@wrap_result
async def send_query(
    address: str = Body(..., description="Address in any format"),
    body: str = Body(..., description="b64-encoded boc-serialized cell with message body"),
    init_code: str = Body(default='', description="b64-encoded boc-serialized cell with init-code"),
    init_data: str = Body(default='', description="b64-encoded boc-serialized cell with init-data")
):
    """
    Send query - unpacked external message. This method takes address, body and init-params (if any), packs it to external message and sends to network. All params should be boc-serialized.
    """
    address = prepare_address(address)

    # Decode each b64-encoded field into raw boc bytes.
    def _b64_to_bytes(text):
        return codecs.decode(codecs.encode(text, "utf-8"), 'base64')

    body_bytes = _b64_to_bytes(body)
    code_bytes = _b64_to_bytes(init_code)
    data_bytes = _b64_to_bytes(init_data)
    return await tonlib.raw_create_and_send_query(address, body_bytes, init_code=code_bytes, init_data=data_bytes)
622 |
@app.post('/sendQuerySimple', response_model=TonResponse, response_model_exclude_none=True, include_in_schema=False, tags=['send'])
@json_rpc('sendQuerySimple')
@wrap_result
async def send_query_cell(
    address: str = Body(..., description="Address in any format"),
    body: str = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
    init_data: Optional[Dict[str, Any]] = Body(default=None, description='init-data cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`')
):
    """
    (Deprecated) Send query - unpacked external message. This method gets address, body and init-params (if any), packs it to external message and sends to network. Body, init-code and init-data should be passed as objects.
    """
    address = prepare_address(address)
    try:
        body = deserialize_cell_from_object(body).serialize_boc(has_idx=False)
        qcode, qdata = b'', b''
        if init_code is not None:
            qcode = deserialize_cell_from_object(init_code).serialize_boc(has_idx=False)
        if init_data is not None:
            qdata = deserialize_cell_from_object(init_data).serialize_boc(has_idx=False)
    # Narrowed from a bare `except:` so BaseExceptions (cancellation, SIGINT)
    # are not masked as an HTTP 400.
    except Exception:
        raise HTTPException(status_code=400, detail="Error while parsing cell object")
    return await tonlib.raw_create_and_send_query(address, body, init_code=qcode, init_data=qdata)
646 |
@app.post('/estimateFee', response_model=TonResponse, response_model_exclude_none=True, tags=['send'])
@json_rpc('estimateFee')
@wrap_result
async def estimate_fee(
    address: str = Body(..., description='Address in any format'),
    body: str = Body(..., description='b64-encoded cell with message body'),
    init_code: str = Body(default='', description='b64-encoded cell with init-code'),
    init_data: str = Body(default='', description='b64-encoded cell with init-data'),
    ignore_chksig: bool = Body(default=True, description='If true during test query processing assume that all chksig operations return True')
):
    """
    Estimate fees required for query processing. *body*, *init-code* and *init-data* accepted in serialized format (b64-encoded).
    """
    address = prepare_address(address)

    # Decode each b64-encoded field into raw boc bytes.
    def _b64_to_bytes(text):
        return codecs.decode(codecs.encode(text, "utf-8"), 'base64')

    body_bytes = _b64_to_bytes(body)
    code_bytes = _b64_to_bytes(init_code)
    data_bytes = _b64_to_bytes(init_data)
    return await tonlib.raw_estimate_fees(address, body_bytes, init_code=code_bytes, init_data=data_bytes, ignore_chksig=ignore_chksig)
665 |
@app.post('/estimateFeeSimple', response_model=TonResponse, response_model_exclude_none=True, include_in_schema=False, tags=['send'])
@json_rpc('estimateFeeSimple')
@wrap_result
async def estimate_fee_cell(
    address: str = Body(..., description='Address in any format'),
    body: Dict[str, Any] = Body(..., description='Body cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
    init_code: Optional[Dict[str, Any]] = Body(default=None, description='init-code cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
    init_data: Optional[Dict[str, Any]] = Body(default=None, description='init-data cell as object: `{"data": {"b64": "...", "len": int }, "refs": [...subcells...]}`'),
    ignore_chksig: bool = Body(default=True, description='If true during test query processing assume that all chksig operations return True')
):
    """
    (Deprecated) Estimate fees required for query processing. *body*, *init-code* and *init-data* accepted in unserialized format (as objects).
    """
    address = prepare_address(address)
    try:
        body = deserialize_cell_from_object(body).serialize_boc(has_idx=False)
        qcode, qdata = b'', b''
        if init_code is not None:
            qcode = deserialize_cell_from_object(init_code).serialize_boc(has_idx=False)
        if init_data is not None:
            qdata = deserialize_cell_from_object(init_data).serialize_boc(has_idx=False)
    # Narrowed from a bare `except:` so BaseExceptions are not masked as HTTP 400.
    except Exception:
        raise HTTPException(status_code=400, detail="Error while parsing cell object")
    return await tonlib.raw_estimate_fees(address, body, init_code=qcode, init_data=qdata, ignore_chksig=ignore_chksig)
690 |
691 |
if settings.webserver.get_methods:
    @app.post('/runGetMethod', response_model=TonResponse, response_model_exclude_none=True, tags=["run method"])
    @json_rpc('runGetMethod')
    @wrap_result
    async def run_get_method(
        address: str = Body(..., description='Contract address'),
        method: Union[str, int] = Body(..., description='Method name or method id'),
        stack: List[List[Any]] = Body(..., description="Array of stack elements: `[['num',3], ['cell', cell_object], ['slice', slice_object]]`")
    ):
        """
        Run get method on smart contract.
        """
        normalized_address = prepare_address(address)
        return await tonlib.raw_run_method(normalized_address, method, stack)
706 |
707 |
if settings.webserver.json_rpc:
    @app.post('/jsonRPC', response_model=TonResponseJsonRPC, response_model_exclude_none=True, tags=['json rpc'])
    async def jsonrpc_handler(json_rpc: TonRequestJsonRPC, request: Request, response: Response, background_tasks: BackgroundTasks):
        """
        All methods in the API are available through JSON-RPC protocol ([spec](https://www.jsonrpc.org/specification)).
        """
        params = json_rpc.params
        method = json_rpc.method
        _id = json_rpc.id

        if method not in json_rpc_methods:
            response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
            return TonResponseJsonRPC(ok=False, error='Unknown method', id=_id)
        handler = json_rpc_methods[method]

        try:
            # Inspect the handler signature once (the original inspected it twice)
            # and inject framework objects the handler declares.
            handler_params = inspect.signature(handler).parameters.keys()
            if 'request' in handler_params:
                params['request'] = request
            if 'background_tasks' in handler_params:
                params['background_tasks'] = background_tasks

            result = await handler(**params)
        except TypeError as e:
            # Raised when `params` don't match the handler signature.
            response.status_code = status.HTTP_422_UNPROCESSABLE_ENTITY
            return TonResponseJsonRPC(ok=False, error=f'TypeError: {e}', id=_id)

        return TonResponseJsonRPC(ok=result.ok, result=result.result, error=result.error, code=result.code, id=_id)
735 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/manager.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import time
3 | import traceback
4 | import random
5 | import queue
6 |
7 | from collections import defaultdict
8 | from collections.abc import Mapping
9 | from copy import deepcopy
10 | from concurrent.futures import ThreadPoolExecutor
11 |
12 | from pyTON.worker import TonlibWorker
13 | from pyTON.models import TonlibWorkerMsgType, TonlibClientResult, ConsensusBlock
14 | from pyTON.cache import CacheManager, DisabledCacheManager
15 | from pyTON.settings import TonlibSettings
16 |
17 | from pytonlib import TonlibError
18 |
19 | from typing import Optional, Dict, Any
20 | from dataclasses import dataclass
21 | from datetime import datetime
22 |
23 | from loguru import logger
24 |
25 |
26 | class TonlibManager:
    def __init__(self,
                 tonlib_settings: TonlibSettings,
                 dispatcher: Optional["Dispatcher"]=None,
                 cache_manager: Optional["CacheManager"]=None,
                 loop: Optional[asyncio.BaseEventLoop]=None):
        """
        Spawn one TonlibWorker process per configured liteserver and start the
        background watchdog tasks.

        :param tonlib_settings: settings shared by all workers (deep-copied per worker)
        :param dispatcher: optional request dispatcher (unused here, stored as-is)
        :param cache_manager: response cache; a no-op cache is used when omitted
        :param loop: event loop to schedule tasks on; defaults to the running loop
        """
        self.tonlib_settings = tonlib_settings
        self.dispatcher = dispatcher
        self.cache_manager = cache_manager or DisabledCacheManager()

        # Per-ls_index worker info dicts, in-flight request futures, background tasks.
        self.workers = {}
        self.futures = {}
        self.tasks = {}
        self.consensus_block = ConsensusBlock()

        # cache setup — must happen before any request method can be called,
        # since it replaces the bound methods with cached wrappers.
        self.setup_cache()

        # Pool sized to the liteserver count; used for blocking queue operations.
        self.threadpool_executor = ThreadPoolExecutor(max_workers=max(32, len(self.tonlib_settings.liteserver_config['liteservers']) * 4))

        # workers spawn
        self.loop = loop or asyncio.get_running_loop()
        for ls_index in range(len(self.tonlib_settings.liteserver_config['liteservers'])):
            self.spawn_worker(ls_index)

        # running tasks
        self.tasks['check_working'] = self.loop.create_task(self.check_working())
        self.tasks['check_children_alive'] = self.loop.create_task(self.check_children_alive())
54 |
55 | async def shutdown(self):
56 | for i in self.futures:
57 | self.futures[i].cancel()
58 |
59 | self.tasks['check_working'].cancel()
60 | await self.tasks['check_working']
61 |
62 | self.tasks['check_children_alive'].cancel()
63 | await self.tasks['check_children_alive']
64 |
65 | await asyncio.wait([self.loop.create_task(self.worker_control(i, enabled=False)) for i in self.workers])
66 |
67 | self.threadpool_executor.shutdown()
68 |
69 | def setup_cache(self):
70 | self.raw_get_transactions = self.cache_manager.cached(expire=5)(self.raw_get_transactions)
71 | self.get_transactions = self.cache_manager.cached(expire=15, check_error=False)(self.get_transactions)
72 | self.raw_get_account_state = self.cache_manager.cached(expire=5)(self.raw_get_account_state)
73 | self.generic_get_account_state = self.cache_manager.cached(expire=5)(self.generic_get_account_state)
74 | self.raw_run_method = self.cache_manager.cached(expire=5)(self.raw_run_method)
75 | self.raw_estimate_fees = self.cache_manager.cached(expire=5)(self.raw_estimate_fees)
76 | self.getMasterchainInfo = self.cache_manager.cached(expire=1)(self.getMasterchainInfo)
77 | self.getMasterchainBlockSignatures = self.cache_manager.cached(expire=5)(self.getMasterchainBlockSignatures)
78 | self.getShardBlockProof = self.cache_manager.cached(expire=5)(self.getShardBlockProof)
79 | self.lookupBlock = self.cache_manager.cached(expire=600)(self.lookupBlock)
80 | self.getShards = self.cache_manager.cached(expire=600)(self.getShards)
81 | self.raw_getBlockTransactions = self.cache_manager.cached(expire=600)(self.raw_getBlockTransactions)
82 | self.getBlockTransactions = self.cache_manager.cached(expire=600)(self.getBlockTransactions)
83 | self.getBlockHeader = self.cache_manager.cached(expire=600)(self.getBlockHeader)
84 | self.get_config_param = self.cache_manager.cached(expire=5)(self.get_config_param)
85 | self.get_token_data = self.cache_manager.cached(expire=15)(self.get_token_data)
86 | self.tryLocateTxByOutcomingMessage = self.cache_manager.cached(expire=600, check_error=False)(self.tryLocateTxByOutcomingMessage)
87 | self.tryLocateTxByIncomingMessage = self.cache_manager.cached(expire=600, check_error=False)(self.tryLocateTxByIncomingMessage)
88 |
89 | def spawn_worker(self, ls_index, force_restart=False):
90 | if ls_index in self.workers:
91 | worker_info = self.workers[ls_index]
92 | if not force_restart and worker_info.is_alive():
93 | logger.warning('Worker for liteserver #{ls_index} already exists', ls_index=ls_index)
94 | return
95 | try:
96 | worker_info['reader'].cancel()
97 | worker_info['worker'].exit_event.set()
98 | worker_info['worker'].output_queue.cancel_join_thread()
99 | worker_info['worker'].input_queue.cancel_join_thread()
100 | worker_info['worker'].output_queue.close()
101 | worker_info['worker'].input_queue.close()
102 | worker_info['worker'].join(timeout=3)
103 | except Exception as ee:
104 | logger.error('Failed to delete existing process: {exc}', exc=ee)
105 | # running new worker
106 | if not ls_index in self.workers:
107 | self.workers[ls_index] = {
108 | 'is_working': False,
109 | 'is_enabled': True,
110 | 'restart_count': -1,
111 | 'tasks_count': 0
112 | }
113 |
114 | tonlib_settings = deepcopy(self.tonlib_settings)
115 | tonlib_settings.keystore += f'worker_{ls_index}'
116 | self.workers[ls_index]['worker'] = TonlibWorker(ls_index, tonlib_settings)
117 | self.workers[ls_index]['reader'] = self.loop.create_task(self.read_results(ls_index))
118 | self.workers[ls_index]['worker'].start()
119 | self.workers[ls_index]['restart_count'] += 1
120 |
121 | async def worker_control(self, ls_index, enabled):
122 | if enabled == False:
123 | self.workers[ls_index]['reader'].cancel()
124 | self.workers[ls_index]['worker'].exit_event.set()
125 |
126 | self.workers[ls_index]['worker'].output_queue.cancel_join_thread()
127 | self.workers[ls_index]['worker'].input_queue.cancel_join_thread()
128 | self.workers[ls_index]['worker'].output_queue.close()
129 | self.workers[ls_index]['worker'].input_queue.close()
130 |
131 | self.workers[ls_index]['worker'].join()
132 |
133 | await self.workers[ls_index]['reader']
134 |
135 | self.workers[ls_index]['is_enabled'] = enabled
136 |
137 | def log_liteserver_task(self, task_result: TonlibClientResult):
138 | result_type = None
139 | if isinstance(task_result.result, Mapping):
140 | result_type = task_result.result.get('@type', 'unknown')
141 | else:
142 | result_type = type(task_result.result).__name__
143 | details = {}
144 |
145 | rec = {
146 | 'timestamp': datetime.utcnow(),
147 | 'elapsed': task_result.elapsed_time,
148 | 'task_id': task_result.task_id,
149 | 'method': task_result.method,
150 | 'liteserver_info': task_result.liteserver_info,
151 | 'result_type': result_type,
152 | 'exception': task_result.exception
153 | }
154 |
155 | logger.info("Received result of type: {result_type}, method: {method}, task_id: {task_id}", **rec)
156 |
157 | async def read_results(self, ls_index):
158 | worker = self.workers[ls_index]['worker']
159 | while True:
160 | try:
161 | try:
162 | msg_type, msg_content = await self.loop.run_in_executor(self.threadpool_executor, worker.output_queue.get, True, 1)
163 | except queue.Empty:
164 | continue
165 | if msg_type == TonlibWorkerMsgType.TASK_RESULT:
166 | task_id = msg_content.task_id
167 |
168 | if task_id in self.futures and not self.futures[task_id].done():
169 | if msg_content.exception is not None:
170 | self.futures[task_id].set_exception(msg_content.exception)
171 | if msg_content.result is not None:
172 | self.futures[task_id].set_result(msg_content.result)
173 | else:
174 | logger.warning("TonlibManager received result from TonlibWorker #{ls_index:03d} whose task '{task_id}' doesn't exist or is done.", ls_index=ls_index, task_id=task_id)
175 |
176 | self.log_liteserver_task(msg_content)
177 |
178 | if msg_type == TonlibWorkerMsgType.LAST_BLOCK_UPDATE:
179 | worker.last_block = msg_content
180 |
181 | if msg_type == TonlibWorkerMsgType.ARCHIVAL_UPDATE:
182 | worker.is_archival = msg_content
183 | except asyncio.CancelledError:
184 | logger.info("Task read_results from TonlibWorker #{ls_index:03d} was cancelled", ls_index=ls_index)
185 | return
186 | except:
187 | logger.error("read_results exception {format_exc}", format_exc=traceback.format_exc())
188 |
189 | async def check_working(self):
190 | while True:
191 | try:
192 | last_blocks = [self.workers[ls_index]['worker'].last_block for ls_index in self.workers]
193 | best_block = max([i for i in last_blocks])
194 | consensus_block_seqno = 0
195 | # detect 'consensus':
196 | # it is no more than 3 blocks less than best block
197 | # at least 60% of ls know it
198 | # it is not earlier than prev
199 | last_blocks_non_zero = [i for i in last_blocks if i != 0]
200 | strats = [sum([1 if ls == (best_block-i) else 0 for ls in last_blocks_non_zero]) for i in range(4)]
201 | total_suitable = sum(strats)
202 | sm = 0
203 | for i, am in enumerate(strats):
204 | sm += am
205 | if sm >= total_suitable * 0.6:
206 | consensus_block_seqno = best_block - i
207 | break
208 | if consensus_block_seqno > self.consensus_block.seqno:
209 | self.consensus_block.seqno = consensus_block_seqno
210 | self.consensus_block.timestamp = datetime.utcnow().timestamp()
211 | for ls_index in self.workers:
212 | self.workers[ls_index]['is_working'] = last_blocks[ls_index] >= self.consensus_block.seqno
213 |
214 | await asyncio.sleep(1)
215 | except asyncio.CancelledError:
216 | logger.info('Task check_working was cancelled')
217 | return
218 | except:
219 | logger.critical('Task check_working dead: {format_exc}', format_exc=traceback.format_exc())
220 |
221 | async def check_children_alive(self):
222 | while True:
223 | try:
224 | for ls_index in self.workers:
225 | worker_info = self.workers[ls_index]
226 | worker_info['is_enabled'] = worker_info['is_enabled'] or time.time() > worker_info.get('time_to_alive', 1e10)
227 | if worker_info['restart_count'] >= 3:
228 | worker_info['is_enabled'] = False
229 | worker_info['time_to_alive'] = time.time() + 10 * 60
230 | worker_info['restart_count'] = 0
231 | if not worker_info['worker'].is_alive() and worker_info['is_enabled']:
232 | logger.error("TonlibWorker #{ls_index:03d} is dead!!! Exit code: {exit_code}", ls_index=ls_index, exit_code=self.workers[ls_index]['worker'].exitcode)
233 | self.spawn_worker(ls_index, force_restart=True)
234 | await asyncio.sleep(1)
235 | except asyncio.CancelledError:
236 | logger.info('Task check_children_alive was cancelled')
237 | return
238 | except:
239 | logger.critical('Task check_children_alive dead: {format_exc}', format_exc=traceback.format_exc())
240 |
241 | def get_workers_state(self):
242 | result = {}
243 | for ls_index, worker_info in self.workers.items():
244 | result[ls_index] = {
245 | 'ls_index': ls_index,
246 | **self.tonlib_settings.liteserver_config['liteservers'][ls_index],
247 | 'is_working': worker_info['is_working'],
248 | 'is_archival': worker_info['worker'].is_archival,
249 | 'is_enabled': worker_info['is_enabled'],
250 | 'last_block': worker_info['worker'].last_block,
251 | 'restart_count': worker_info['restart_count'],
252 | 'tasks_count': worker_info['tasks_count']
253 | }
254 | return result
255 |
256 | def select_worker(self, ls_index=None, archival=None, count=1):
257 | if count == 1 and ls_index is not None and self.workers[ls_index]['is_working']:
258 | return ls_index
259 |
260 | suitable = [ls_index for ls_index, worker_info in self.workers.items() if worker_info['is_working'] and
261 | (archival is None or worker_info['worker'].is_archival == archival)]
262 | random.shuffle(suitable)
263 | if len(suitable) < count:
264 | logger.warning('Required number of workers is not reached: found {found} of {count}', found=len(suitable), count=count)
265 | if len(suitable) == 0:
266 | raise RuntimeError(f'No working liteservers with ls_index={ls_index}, archival={archival}')
267 | return suitable[:count] if count > 1 else suitable[0]
268 |
269 | async def dispatch_request_to_worker(self, method, ls_index, *args, **kwargs):
270 | task_id = "{}:{}".format(time.time(), random.random())
271 | timeout = time.time() + self.tonlib_settings.request_timeout
272 | self.workers[ls_index]['tasks_count'] += 1
273 |
274 | logger.info("Sending request method: {method}, task_id: {task_id}, ls_index: {ls_index}",
275 | method=method, task_id=task_id, ls_index=ls_index)
276 | await self.loop.run_in_executor(self.threadpool_executor, self.workers[ls_index]['worker'].input_queue.put, (task_id, timeout, method, args, kwargs))
277 |
278 | try:
279 | self.futures[task_id] = self.loop.create_future()
280 | await self.futures[task_id]
281 | return self.futures[task_id].result()
282 | finally:
283 | self.futures.pop(task_id)
284 |
285 | def dispatch_request(self, method, *args, **kwargs):
286 | ls_index = self.select_worker()
287 | return self.dispatch_request_to_worker(method, ls_index, *args, **kwargs)
288 |
289 | def dispatch_archival_request(self, method, *args, **kwargs):
290 | ls_index = None
291 | try:
292 | ls_index = self.select_worker(archival=True)
293 | except RuntimeError as ee:
294 | logger.warning(f'Method {method} failed to execute on archival node: {ee}')
295 | ls_index = self.select_worker(archival=False)
296 | return self.dispatch_request_to_worker(method, ls_index, *args, **kwargs)
297 |
298 | async def raw_get_transactions(self, account_address: str, from_transaction_lt: str, from_transaction_hash: str, archival: bool):
299 | method = 'raw_get_transactions'
300 | if archival:
301 | return await self.dispatch_archival_request(method, account_address, from_transaction_lt, from_transaction_hash)
302 | else:
303 | return await self.dispatch_request(method, account_address, from_transaction_lt, from_transaction_hash)
304 |
305 | async def get_transactions(self, account_address, from_transaction_lt=None, from_transaction_hash=None, to_transaction_lt=0, limit=10, decode_messages=True, archival=False):
306 | """
307 | Return all transactions between from_transaction_lt and to_transaction_lt
308 | if to_transaction_lt and to_transaction_hash are not defined returns all transactions
309 | if from_transaction_lt and from_transaction_hash are not defined latest transactions are returned
310 | """
311 | method = 'get_transactions'
312 | if archival:
313 | return await self.dispatch_archival_request(method, account_address, from_transaction_lt, from_transaction_hash, to_transaction_lt, limit, decode_messages)
314 | else:
315 | return await self.dispatch_request(method, account_address, from_transaction_lt, from_transaction_hash, to_transaction_lt, limit, decode_messages)
316 |
317 | async def raw_get_account_state(self, address: str):
318 | method = 'raw_get_account_state'
319 | try:
320 | addr = await self.dispatch_request(method, address)
321 | except TonlibError:
322 | addr = await self.dispatch_archival_request(method, address)
323 | return addr
324 |
325 | async def generic_get_account_state(self, address: str):
326 | return await self.dispatch_request('generic_get_account_state', address)
327 |
328 | async def get_token_data(self, address: str):
329 | return await self.dispatch_request('get_token_data', address)
330 |
331 | async def raw_run_method(self, address, method, stack_data, output_layout=None):
332 | return await self.dispatch_request('raw_run_method', address, method, stack_data, output_layout)
333 |
334 | async def _send_message(self, serialized_boc, method):
335 | ls_index_list = self.select_worker(count=4)
336 | result = None
337 | try:
338 | task_ids = []
339 | for ls_index in ls_index_list:
340 | task_id = "{}:{}".format(time.time(), random.random())
341 | timeout = time.time() + self.tonlib_settings.request_timeout
342 | await self.loop.run_in_executor(self.threadpool_executor, self.workers[ls_index]['worker'].input_queue.put, (task_id, timeout, method, [serialized_boc], {}))
343 |
344 | self.futures[task_id] = self.loop.create_future()
345 | task_ids.append(task_id)
346 |
347 | done, _ = await asyncio.wait([self.futures[task_id] for task_id in task_ids], return_when=asyncio.FIRST_COMPLETED)
348 | result = list(done)[0].result()
349 | finally:
350 | for task_id in task_ids:
351 | self.futures.pop(task_id)
352 |
353 | return result
354 |
355 | async def raw_send_message(self, serialized_boc):
356 | return await self._send_message(serialized_boc, 'raw_send_message')
357 |
358 | async def raw_send_message_return_hash(self, serialized_boc):
359 | return await self._send_message(serialized_boc, 'raw_send_message_return_hash')
360 |
361 | async def _raw_create_query(self, destination, body, init_code=b'', init_data=b''):
362 | return await self.dispatch_request('_raw_create_query', destination, body, init_code, init_data)
363 |
364 | async def _raw_send_query(self, query_info):
365 | return await self.dispatch_request('_raw_send_query', query_info)
366 |
367 | async def raw_create_and_send_query(self, destination, body, init_code=b'', init_data=b''):
368 | return await self.dispatch_request('raw_create_and_send_query', destination, body, init_code, init_data)
369 |
370 | async def raw_create_and_send_message(self, destination, body, initial_account_state=b''):
371 | return await self.dispatch_request('raw_create_and_send_message', destination, body, initial_account_state)
372 |
373 | async def raw_estimate_fees(self, destination, body, init_code=b'', init_data=b'', ignore_chksig=True):
374 | return await self.dispatch_request('raw_estimate_fees', destination, body, init_code, init_data, ignore_chksig)
375 |
376 | async def getMasterchainInfo(self):
377 | return await self.dispatch_request('get_masterchain_info')
378 |
379 | async def getMasterchainBlockSignatures(self, seqno):
380 | return await self.dispatch_request('get_masterchain_block_signatures', seqno)
381 |
382 | async def getShardBlockProof(self, workchain, shard, seqno, from_seqno):
383 | return await self.dispatch_request('get_shard_block_proof', workchain, shard, seqno, from_seqno)
384 |
385 | async def getConsensusBlock(self):
386 | return {
387 | "consensus_block": self.consensus_block.seqno,
388 | "timestamp": self.consensus_block.timestamp
389 | }
390 |
391 | async def lookupBlock(self, workchain, shard, seqno=None, lt=None, unixtime=None):
392 | method = 'lookup_block'
393 | if workchain == -1 and seqno and self.consensus_block.seqno - seqno < 2000:
394 | return await self.dispatch_request(method, workchain, shard, seqno, lt, unixtime)
395 | else:
396 | return await self.dispatch_archival_request(method, workchain, shard, seqno, lt, unixtime)
397 |
398 | async def getShards(self, master_seqno=None, lt=None, unixtime=None):
399 | method = 'get_shards'
400 | if master_seqno and self.consensus_block.seqno - master_seqno < 2000:
401 | return await self.dispatch_request(method, master_seqno)
402 | else:
403 | return await self.dispatch_archival_request(method, master_seqno)
404 |
    async def raw_getBlockTransactions(self, fullblock, count, after_tx):
        """Fetch transactions of a block; always dispatched to archival nodes."""
        return await self.dispatch_archival_request('raw_get_block_transactions', fullblock, count, after_tx)
407 |
    async def getBlockTransactions(self, workchain, shard, seqno, count, root_hash=None, file_hash=None, after_lt=None, after_hash=None):
        """Fetch transactions of a block by id; always dispatched to archival nodes."""
        return await self.dispatch_archival_request('get_block_transactions', workchain, shard, seqno, count, root_hash, file_hash, after_lt, after_hash)
410 |
411 | async def getBlockHeader(self, workchain, shard, seqno, root_hash=None, file_hash=None):
412 | method = 'get_block_header'
413 | if workchain == -1 and seqno and self.consensus_block.seqno - seqno < 2000:
414 | return await self.dispatch_request(method, workchain, shard, seqno, root_hash, file_hash)
415 | else:
416 | return await self.dispatch_archival_request(method, workchain, shard, seqno, root_hash, file_hash)
417 |
418 | async def get_config_param(self, config_id: int, seqno: Optional[int]):
419 | seqno = seqno or self.consensus_block.seqno
420 | method = 'get_config_param'
421 | if self.consensus_block.seqno - seqno < 2000:
422 | return await self.dispatch_request(method, config_id, seqno)
423 | else:
424 | return await self.dispatch_archival_request(method, config_id, seqno)
425 |
    async def tryLocateTxByOutcomingMessage(self, source, destination, creation_lt):
        """Locate the tx that sent a message; always dispatched to archival nodes."""
        return await self.dispatch_archival_request('try_locate_tx_by_outcoming_message', source, destination, creation_lt)
428 |
    async def tryLocateTxByIncomingMessage(self, source, destination, creation_lt):
        """Locate the tx that received a message; always dispatched to archival nodes."""
        return await self.dispatch_archival_request('try_locate_tx_by_incoming_message', source, destination, creation_lt)
431 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/models.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Union, Dict, Any, List
2 | from pydantic import BaseModel
3 |
4 | from enum import Enum
5 | from dataclasses import dataclass
6 |
7 |
@dataclass
class TonlibClientResult:
    """Outcome of one tonlib method call executed by a TonlibWorker."""
    task_id: str                            # id of the originating request
    method: str                             # tonlib method that was invoked
    elapsed_time: float                     # seconds the call took to execute
    params: Optional[Any] = None            # [args, kwargs] the call was made with
    result: Optional[Any] = None            # return value on success
    exception: Optional[Exception] = None   # exception raised on failure, if any
    liteserver_info: Optional[Any] = None   # `info` dict of the serving worker
17 |
18 |
class TonlibWorkerMsgType(Enum):
    """Type tag for messages a TonlibWorker puts on its output queue."""
    TASK_RESULT = 0        # payload is a TonlibClientResult
    LAST_BLOCK_UPDATE = 1  # payload is the worker's latest masterchain seqno
    ARCHIVAL_UPDATE = 2    # payload is a bool: is the liteserver archival
23 |
24 |
@dataclass
class ConsensusBlock:
    """Latest known masterchain block, used as the freshness reference when
    choosing between regular and archival liteserver dispatch."""
    seqno: int = 0      # masterchain block sequence number
    timestamp: int = 0  # unix time the block was observed/produced
29 |
30 |
class TonResponse(BaseModel):
    """Response envelope returned by the HTTP API."""
    ok: bool                                   # whether the request succeeded
    result: Union[str, list, dict, None] = None  # payload on success
    error: Optional[str] = None                # error message on failure
    code: Optional[int] = None                 # numeric error code on failure
36 |
37 |
class TonResponseJsonRPC(TonResponse):
    """TonResponse extended with JSON-RPC 2.0 envelope fields."""
    jsonrpc: str = "2.0"       # protocol version marker
    id: Optional[str] = None   # echoes the request id, if any
41 |
42 |
class TonRequestJsonRPC(BaseModel):
    """Incoming JSON-RPC request body."""
    method: str
    # NOTE: pydantic copies field defaults per instance, so the mutable {}
    # default is safe here (unlike a plain-function default argument).
    params: dict = {}
    id: Optional[str] = None
    jsonrpc: Optional[str] = None
48 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/settings.py:
--------------------------------------------------------------------------------
1 | import os
2 | import requests
3 | import json
4 |
5 | from typing import Optional
6 | from dataclasses import dataclass
7 | from loguru import logger
8 |
9 |
def strtobool(val):
    """Parse a human-friendly boolean string ('yes'/'no', 'on'/'off', ...).

    Raises ValueError for any unrecognized value.
    """
    normalized = val.lower()
    if normalized in {'y', 'yes', 't', 'true', 'on', '1'}:
        return True
    if normalized in {'n', 'no', 'f', 'false', 'off', '0'}:
        return False
    raise ValueError(f"Invalid bool value {val}")
16 |
17 |
@dataclass
class TonlibSettings:
    """Configuration for the tonlib workers."""
    parallel_requests_per_liteserver: int  # per-worker request concurrency cap
    keystore: str                          # directory for tonlib's keystore
    liteserver_config_path: str            # local path or http(s) URL of the global config
    cdll_path: Optional[str]               # explicit path to the tonlib shared library, if any
    request_timeout: int                   # per-request timeout, seconds
    verbosity_level: int                   # tonlib verbosity (0..4)

    @property
    def liteserver_config(self):
        """Lazily load the liteserver config (from URL or file) and cache it."""
        if not hasattr(self, '_liteserver_config'):
            if self.liteserver_config_path.startswith(('https://', 'http://')):
                # A timeout is mandatory here: requests.get without one can
                # block the process forever if the config host hangs.
                self._liteserver_config = requests.get(self.liteserver_config_path, timeout=30).json()
            else:
                with open(self.liteserver_config_path, 'r') as f:
                    self._liteserver_config = json.load(f)
        return self._liteserver_config

    @classmethod
    def from_environment(cls):
        """Build settings from TON_API_* environment variables."""
        verbosity_level = 0
        if os.environ.get('TON_API_LOGS_LEVEL') == 'DEBUG':
            verbosity_level = 4
        return cls(parallel_requests_per_liteserver=int(os.environ.get('TON_API_TONLIB_PARALLEL_REQUESTS_PER_LITESERVER', '50')),
                   keystore=os.environ.get('TON_API_TONLIB_KEYSTORE', './ton_keystore/'),
                   liteserver_config_path=os.environ.get('TON_API_TONLIB_LITESERVER_CONFIG', 'https://ton.org/global-config.json'),
                   cdll_path=os.environ.get('TON_API_TONLIB_CDLL_PATH', None),
                   request_timeout=int(os.environ.get('TON_API_TONLIB_REQUEST_TIMEOUT', '10')),
                   verbosity_level=verbosity_level)
48 |
49 |
@dataclass
class RedisSettings:
    """Connection parameters for a Redis instance."""
    endpoint: str
    port: int
    timeout: Optional[int]=None

    @classmethod
    def from_environment(cls, settings_type):
        """Build Redis settings for the given subsystem.

        Only 'cache' is recognized; any other settings_type yields None
        (preserved historical behavior, now explicit).
        """
        if settings_type != 'cache':
            return None
        return cls(endpoint=os.environ.get('TON_API_CACHE_REDIS_ENDPOINT', 'localhost'),
                   port=int(os.environ.get('TON_API_CACHE_REDIS_PORT', '6379')),
                   timeout=int(os.environ.get('TON_API_CACHE_REDIS_TIMEOUT', '1')))
62 |
63 |
@dataclass
class LoggingSettings:
    """Log output configuration."""
    jsonify: bool  # emit structured JSON logs instead of plain text
    level: str     # log level name, e.g. 'WARNING'

    @classmethod
    def from_environment(cls):
        """Read logging options from TON_API_LOGS_* environment variables."""
        jsonify_logs = strtobool(os.environ.get('TON_API_LOGS_JSONIFY', '0'))
        log_level = os.environ.get('TON_API_LOGS_LEVEL', 'WARNING')
        return cls(jsonify=jsonify_logs, level=log_level)
73 |
74 |
@dataclass
class CacheSettings:
    """Base cache configuration."""
    enabled: bool  # whether response caching is active

    @classmethod
    def from_environment(cls):
        """The plain (non-Redis) variant never enables caching."""
        return cls(enabled=False)
82 |
83 |
@dataclass
class RedisCacheSettings(CacheSettings):
    """Cache configuration backed by a Redis instance."""
    redis: Optional[RedisSettings]

    @classmethod
    def from_environment(cls):
        """Read the cache toggle and Redis connection info from the environment."""
        cache_enabled = strtobool(os.environ.get('TON_API_CACHE_ENABLED', '0'))
        redis_settings = RedisSettings.from_environment('cache')
        return cls(enabled=cache_enabled, redis=redis_settings)
92 |
93 |
@dataclass
class WebServerSettings:
    """Web server root path and feature toggles."""
    api_root_path: str  # URL prefix the API is mounted under
    get_methods: bool   # expose plain GET method endpoints
    json_rpc: bool      # expose the JSON-RPC endpoint

    @classmethod
    def from_environment(cls):
        """Read web server options from TON_API_* environment variables."""
        root_path = os.environ.get('TON_API_ROOT_PATH', '/')
        get_methods_enabled = strtobool(os.environ.get('TON_API_GET_METHODS_ENABLED', '1'))
        json_rpc_enabled = strtobool(os.environ.get('TON_API_JSON_RPC_ENABLED', '1'))
        return cls(api_root_path=root_path,
                   get_methods=get_methods_enabled,
                   json_rpc=json_rpc_enabled)
105 |
106 |
@dataclass
class Settings:
    """Top-level bundle of all application settings."""
    tonlib: TonlibSettings
    webserver: WebServerSettings
    cache: CacheSettings
    logging: LoggingSettings

    @classmethod
    def from_environment(cls):
        """Assemble every sub-settings object from environment variables.

        The cache flavor (plain vs Redis-backed) is chosen by the
        TON_API_CACHE_ENABLED toggle.
        """
        redis_cache = strtobool(os.environ.get('TON_API_CACHE_ENABLED', '0'))
        cache_cls = RedisCacheSettings if redis_cache else CacheSettings
        return cls(tonlib=TonlibSettings.from_environment(),
                   webserver=WebServerSettings.from_environment(),
                   logging=LoggingSettings.from_environment(),
                   cache=cache_cls.from_environment())
123 |
--------------------------------------------------------------------------------
/ton-http-api/pyTON/worker.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import random
3 | import sys
4 | import time
5 | import queue
6 | import multiprocessing as mp
7 |
8 | from pyTON.settings import TonlibSettings
9 | from pyTON.models import TonlibWorkerMsgType, TonlibClientResult
10 | from pytonlib import TonlibClient, TonlibException, BlockNotFound
11 | from datetime import datetime
12 | from concurrent.futures import ThreadPoolExecutor
13 | from pathlib import Path
14 |
15 | from enum import Enum
16 | from dataclasses import dataclass
17 | from typing import Any, Dict, Optional
18 |
19 | from loguru import logger
20 |
21 |
class TonlibWorker(mp.Process):
    """Child process that owns one TonlibClient bound to a single liteserver.

    Work items arrive on `input_queue` as (task_id, timeout, method, args,
    kwargs) tuples; results and liteserver status updates are pushed onto
    `output_queue` tagged with a TonlibWorkerMsgType.
    """
    def __init__(self,
                 ls_index: int,
                 tonlib_settings: TonlibSettings,
                 input_queue: Optional[mp.Queue]=None,
                 output_queue: Optional[mp.Queue]=None):
        super(TonlibWorker, self).__init__(daemon=True)

        self.input_queue = input_queue or mp.Queue()
        self.output_queue = output_queue or mp.Queue()
        self.exit_event = mp.Event()

        self.ls_index = ls_index
        self.tonlib_settings = tonlib_settings

        # The attributes below are (re)initialized inside run(), i.e. in the
        # child process.
        self.last_block = -1
        self.is_archival = False
        self.semaphore = None
        self.loop = None
        self.tasks = {}
        self.tonlib = None
        self.threadpool_executor = None

        self.timeout_count = 0
        self.is_dead = False

    def run(self):
        """Process entry point: init tonlib and drive the service tasks."""
        # Queue.get/put are blocking calls, so they are pushed onto this pool
        # to keep the event loop responsive.
        self.threadpool_executor = ThreadPoolExecutor(max_workers=16)

        # Create a fresh event loop for this process and make it the current
        # one, so code relying on asyncio.get_event_loop() sees the same loop
        # we drive below. (Previously two distinct loops were created: one
        # set as current and a different one actually run.)
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        Path(self.tonlib_settings.keystore).mkdir(parents=True, exist_ok=True)

        # init tonlib
        self.tonlib = TonlibClient(ls_index=self.ls_index,
                                   config=self.tonlib_settings.liteserver_config,
                                   keystore=self.tonlib_settings.keystore,
                                   loop=self.loop,
                                   cdll_path=self.tonlib_settings.cdll_path,
                                   verbosity_level=self.tonlib_settings.verbosity_level)

        try:
            self.loop.run_until_complete(self.tonlib.init())
        except Exception as e:
            logger.error("TonlibWorker #{ls_index:03d} failed to init and sync tonlib: {exc}", ls_index=self.ls_index, exc=e)
            self.shutdown(11)  # raises SystemExit

        # creating tasks
        self.tasks['report_last_block'] = self.loop.create_task(self.report_last_block())
        self.tasks['report_archival'] = self.loop.create_task(self.report_archival())
        self.tasks['main_loop'] = self.loop.create_task(self.main_loop())
        self.tasks['sync_tonlib'] = self.loop.create_task(self.sync_tonlib())

        # Run until any service task finishes: either a requested shutdown
        # (exit_event set) or a crashed task.
        self.loop.run_until_complete(asyncio.wait([
            self.tasks['report_last_block'], self.tasks['report_archival'], self.tasks['main_loop'], self.tasks['sync_tonlib']], return_when=asyncio.FIRST_COMPLETED))

        self.shutdown(0 if self.exit_event.is_set() else 12)

    def shutdown(self, code: int):
        """Cancel service tasks, release the queues and exit with `code`."""
        self.exit_event.set()

        # Cancel every service task, then give the loop a chance to run their
        # cancellation handlers. (This previously awaited an undefined name
        # `to_cancel`; the resulting NameError was swallowed by a bare
        # except, so cancelled tasks were never actually awaited.)
        for task in self.tasks.values():
            task.cancel()
        try:
            self.loop.run_until_complete(
                asyncio.gather(*self.tasks.values(), return_exceptions=True))
        except Exception:
            pass

        self.threadpool_executor.shutdown()

        # Avoid blocking process exit on the queues' feeder threads.
        self.output_queue.cancel_join_thread()
        self.input_queue.cancel_join_thread()
        self.output_queue.close()
        self.input_queue.close()
        sys.exit(code)

    @property
    def info(self):
        """Descriptor of the liteserver this worker is connected to."""
        return {
            'ip_int': f"{self.tonlib_settings.liteserver_config['liteservers'][self.ls_index]['ip']}",
            'port': f"{self.tonlib_settings.liteserver_config['liteservers'][self.ls_index]['port']}",
            'last_block': self.last_block,
            'archival': self.is_archival,
            'number': self.ls_index,
        }

    async def report_last_block(self):
        """Poll the masterchain head every second and publish it upstream."""
        while not self.exit_event.is_set():
            last_block = -1
            try:
                masterchain_info = await self.tonlib.get_masterchain_info()
                last_block = masterchain_info["last"]["seqno"]
                self.timeout_count = 0
            except TonlibException as e:
                logger.error("TonlibWorker #{ls_index:03d} report_last_block exception of type {exc_type}: {exc}", ls_index=self.ls_index, exc_type=type(e).__name__, exc=e)
                self.timeout_count += 1

            # Ten consecutive failures: treat the liteserver as dead and let
            # run() tear this worker down.
            if self.timeout_count >= 10:
                raise RuntimeError(f'TonlibWorker #{self.ls_index:03d} got {self.timeout_count} timeouts in report_last_block')

            self.last_block = last_block
            await self.loop.run_in_executor(self.threadpool_executor, self.output_queue.put, (TonlibWorkerMsgType.LAST_BLOCK_UPDATE, self.last_block))
            await asyncio.sleep(1)

    async def report_archival(self):
        """Every 10 minutes, probe whether the liteserver is archival."""
        while not self.exit_event.is_set():
            try:
                # Reading transactions of a random very old masterchain block
                # only succeeds on archival nodes.
                await self.tonlib.get_block_transactions(-1, -9223372036854775808, random.randint(2, 4096), count=10)
                self.is_archival = True
            except BlockNotFound as e:
                self.is_archival = False
            except TonlibException as e:
                logger.error("TonlibWorker #{ls_index:03d} report_archival exception of type {exc_type}: {exc}", ls_index=self.ls_index, exc_type=type(e).__name__, exc=e)

            await self.loop.run_in_executor(self.threadpool_executor, self.output_queue.put, (TonlibWorkerMsgType.ARCHIVAL_UPDATE, self.is_archival))
            await asyncio.sleep(600)

    async def main_loop(self):
        """Pull tasks off the input queue and execute them concurrently."""
        while not self.exit_event.is_set():
            try:
                task_id, timeout, method, args, kwargs = await self.loop.run_in_executor(self.threadpool_executor, self.input_queue.get, True, 1)
            except queue.Empty:
                continue

            self.loop.create_task(self.process_task(task_id, timeout, method, args, kwargs))

    async def process_task(self, task_id, timeout, method, args, kwargs):
        """Execute one tonlib call and push a TonlibClientResult upstream.

        `timeout` is an absolute unix timestamp; tasks received after it are
        answered with asyncio.TimeoutError without touching tonlib.
        """
        result = None
        exception = None

        start_time = datetime.now()
        if time.time() < timeout:
            try:
                result = await getattr(self.tonlib, method)(*args, **kwargs)
            except Exception as e:
                exception = e
                logger.warning("TonlibWorker #{ls_index:03d} raised exception of type {exc_type} while executing task. Method: {method}, args: {args}, kwargs: {kwargs}, exception: {exc}",
                               ls_index=self.ls_index, method=method, args=args, kwargs=kwargs, exc_type=type(e).__name__, exc=e)
            else:
                logger.debug("TonlibWorker #{ls_index:03d} got result {method} for task '{task_id}'", ls_index=self.ls_index, method=method, task_id=task_id)
        else:
            exception = asyncio.TimeoutError()
            logger.warning("TonlibWorker #{ls_index:03d} received task '{task_id}' after timeout", ls_index=self.ls_index, task_id=task_id)
        end_time = datetime.now()
        elapsed_time = (end_time - start_time).total_seconds()

        # result
        tonlib_task_result = TonlibClientResult(task_id,
                                                method,
                                                elapsed_time=elapsed_time,
                                                params=[args, kwargs],
                                                result=result,
                                                exception=exception,
                                                liteserver_info=self.info)
        await self.loop.run_in_executor(self.threadpool_executor, self.output_queue.put, (TonlibWorkerMsgType.TASK_RESULT, tonlib_task_result))

    async def sync_tonlib(self):
        """Run the initial tonlib sync, then idle until shutdown is requested."""
        await self.tonlib.sync_tonlib()

        while not self.exit_event.is_set():
            await asyncio.sleep(1)
185 |
--------------------------------------------------------------------------------
/ton-http-api/requirements.txt:
--------------------------------------------------------------------------------
1 | redis==5.0.1
2 | loguru==0.6.0
3 | fastapi==0.99.1
4 | pydantic==1.10.14
5 | requests==2.28.0
6 | ring==0.10.1
7 | uvicorn==0.17.6
8 | gunicorn==20.1.0
9 | pytonlib==0.0.58
10 | inject==4.3.1
11 |
--------------------------------------------------------------------------------
/ton-http-api/setup.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from setuptools import setup, find_packages
4 | from os.path import dirname, join, pardir
5 |
6 |
# Long description for PyPI comes straight from the package README.
# The encoding is pinned to UTF-8 so the build does not depend on the
# platform's default locale encoding.
with open(join(dirname(__file__), "README.md"), "r", encoding="utf-8") as f:
    long_description = f.read()

# Release version is injected by CI; local builds fall back to 0.0.0.
version = os.environ.get('TON_HTTP_API_VERSION', '0.0.0')

setup(
    author='K-Dimentional Tree',
    author_email='kdimentionaltree@gmail.com',
    name='ton-http-api',
    version=version,
    packages=find_packages('.', exclude=['tests']),
    install_requires=[
        'redis==5.0.1',
        'loguru==0.6.0',
        'fastapi==0.99.1',
        'pydantic==1.10.14',
        'requests==2.28.0',
        'ring==0.10.1',
        'uvicorn==0.17.6',
        'gunicorn==20.1.0',
        'pytonlib==0.0.58',
        'inject==4.3.1'
    ],
    package_data={},
    zip_safe=True,
    python_requires='>=3.7',
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "License :: Other/Proprietary License",
        "Topic :: Software Development :: Libraries"
    ],
    url="https://github.com/toncenter/ton-http-api",
    description="HTTP API for TON (The Open Network)",
    long_description_content_type="text/markdown",
    long_description=long_description,
    entry_points={
        'console_scripts': [
            'ton-http-api = pyTON.__main__:main'
        ]
    }
)
52 |
--------------------------------------------------------------------------------