├── .github
│   └── workflows
│       ├── lint.yaml
│       └── tests.yaml
├── .gitignore
├── LICENSE
├── README.md
├── poetry.lock
├── pyproject.toml
├── tests
│   ├── __init__.py
│   ├── test_api.py
│   ├── test_config.py
│   ├── test_core.py
│   ├── test_http.py
│   └── test_install.py
└── xray_node
    ├── __init__.py
    ├── api
    │   ├── __init__.py
    │   ├── entities.py
    │   ├── sspanel.py
    │   └── v2board.py
    ├── config.py
    ├── core
    │   ├── __init__.py
    │   ├── cfg.py
    │   └── xray.py
    ├── exceptions.py
    ├── main.py
    ├── mdb
    │   ├── __init__.py
    │   └── models.py
    └── utils
        ├── __init__.py
        ├── consts.py
        ├── http.py
        ├── install.py
        ├── log.py
        └── port.py
/.github/workflows/lint.yaml:
--------------------------------------------------------------------------------
1 | name: Lint
2 |
3 | on: [ push, pull_request ]
4 |
5 | jobs:
6 | black:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v2
10 | - uses: actions/setup-python@v2
11 | - uses: psf/black@stable
--------------------------------------------------------------------------------
/.github/workflows/tests.yaml:
--------------------------------------------------------------------------------
1 | name: UnitTest
2 |
3 | on: [ push, pull_request ]
4 |
5 | jobs:
6 | pytest:
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v2
10 | - uses: actions/setup-python@v2
11 | with:
12 | python-version: 3.8
13 | - name: Install dependencies
14 | run: pip install poetry && poetry install
15 | - name: pytest
16 | run: poetry run pytest .
--------------------------------------------------------------------------------
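
The tests workflow above installs the project with Poetry and runs the pytest suite. Judging from poetry.lock, that suite relies on pytest-asyncio and pytest-httpserver alongside httpx. Below is a minimal sketch of the kind of async test `poetry run pytest .` would execute — the test name, endpoint path, and response shape are illustrative assumptions, not taken from the repository's tests:

```python
# Hypothetical async test in the style the CI workflow runs; the endpoint
# "/api/users" and the JSON shape are placeholders, not the project's real API.
import httpx
import pytest


@pytest.mark.asyncio
async def test_fetch_users(httpserver):  # `httpserver` fixture from pytest-httpserver
    # Serve a fake panel endpoint that returns a user list.
    httpserver.expect_request("/api/users").respond_with_json({"users": [{"id": 1}]})

    async with httpx.AsyncClient() as client:
        resp = await client.get(httpserver.url_for("/api/users"))

    assert resp.status_code == 200
    assert resp.json()["users"][0]["id"] == 1
```
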
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | share/python-wheels/
26 | *.egg-info/
27 | .installed.cfg
28 | *.egg
29 | MANIFEST
30 |
31 | # PyInstaller
32 | # Usually these files are written by a python script from a template
33 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
34 | *.manifest
35 | *.spec
36 |
37 | # Installer logs
38 | pip-log.txt
39 | pip-delete-this-directory.txt
40 |
41 | # Unit test / coverage reports
42 | htmlcov/
43 | .tox/
44 | .nox/
45 | .coverage
46 | .coverage.*
47 | .cache
48 | nosetests.xml
49 | coverage.xml
50 | *.cover
51 | *.py,cover
52 | .hypothesis/
53 | .pytest_cache/
54 | cover/
55 |
56 | # Translations
57 | *.mo
58 | *.pot
59 |
60 | # Django stuff:
61 | *.log
62 | local_settings.py
63 | db.sqlite3
64 | db.sqlite3-journal
65 |
66 | # Flask stuff:
67 | instance/
68 | .webassets-cache
69 |
70 | # Scrapy stuff:
71 | .scrapy
72 |
73 | # Sphinx documentation
74 | docs/_build/
75 |
76 | # PyBuilder
77 | .pybuilder/
78 | target/
79 |
80 | # Jupyter Notebook
81 | .ipynb_checkpoints
82 |
83 | # IPython
84 | profile_default/
85 | ipython_config.py
86 |
87 | # pyenv
88 | # For a library or package, you might want to ignore these files since the code is
89 | # intended to run in multiple environments; otherwise, check them in:
90 | # .python-version
91 |
92 | # pipenv
93 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
94 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
95 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
96 | # install all needed dependencies.
97 | #Pipfile.lock
98 |
99 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
100 | __pypackages__/
101 |
102 | # Celery stuff
103 | celerybeat-schedule
104 | celerybeat.pid
105 |
106 | # SageMath parsed files
107 | *.sage.py
108 |
109 | # Environments
110 | .env
111 | .venv
112 | env/
113 | venv/
114 | ENV/
115 | env.bak/
116 | venv.bak/
117 |
118 | # Spyder project settings
119 | .spyderproject
120 | .spyproject
121 |
122 | # Rope project settings
123 | .ropeproject
124 |
125 | # mkdocs documentation
126 | /site
127 |
128 | # mypy
129 | .mypy_cache/
130 | .dmypy.json
131 | dmypy.json
132 |
133 | # Pyre type checker
134 | .pyre/
135 |
136 | # pytype static type analyzer
137 | .pytype/
138 |
139 | # Cython debug symbols
140 | cython_debug/
141 |
142 | # IDE
143 | .idea/
144 |
145 | # MacOS
146 | .DS_Store
147 |
148 | # Custom
149 | xnode.toml
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU AFFERO GENERAL PUBLIC LICENSE
2 | Version 3, 19 November 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU Affero General Public License is a free, copyleft license for
11 | software and other kinds of works, specifically designed to ensure
12 | cooperation with the community in the case of network server software.
13 |
14 | The licenses for most software and other practical works are designed
15 | to take away your freedom to share and change the works. By contrast,
16 | our General Public Licenses are intended to guarantee your freedom to
17 | share and change all versions of a program--to make sure it remains free
18 | software for all its users.
19 |
20 | When we speak of free software, we are referring to freedom, not
21 | price. Our General Public Licenses are designed to make sure that you
22 | have the freedom to distribute copies of free software (and charge for
23 | them if you wish), that you receive source code or can get it if you
24 | want it, that you can change the software or use pieces of it in new
25 | free programs, and that you know you can do these things.
26 |
27 | Developers that use our General Public Licenses protect your rights
28 | with two steps: (1) assert copyright on the software, and (2) offer
29 | you this License which gives you legal permission to copy, distribute
30 | and/or modify the software.
31 |
32 | A secondary benefit of defending all users' freedom is that
33 | improvements made in alternate versions of the program, if they
34 | receive widespread use, become available for other developers to
35 | incorporate. Many developers of free software are heartened and
36 | encouraged by the resulting cooperation. However, in the case of
37 | software used on network servers, this result may fail to come about.
38 | The GNU General Public License permits making a modified version and
39 | letting the public access it on a server without ever releasing its
40 | source code to the public.
41 |
42 | The GNU Affero General Public License is designed specifically to
43 | ensure that, in such cases, the modified source code becomes available
44 | to the community. It requires the operator of a network server to
45 | provide the source code of the modified version running there to the
46 | users of that server. Therefore, public use of a modified version, on
47 | a publicly accessible server, gives the public access to the source
48 | code of the modified version.
49 |
50 | An older license, called the Affero General Public License and
51 | published by Affero, was designed to accomplish similar goals. This is
52 | a different license, not a version of the Affero GPL, but Affero has
53 | released a new version of the Affero GPL which permits relicensing under
54 | this license.
55 |
56 | The precise terms and conditions for copying, distribution and
57 | modification follow.
58 |
59 | TERMS AND CONDITIONS
60 |
61 | 0. Definitions.
62 |
63 | "This License" refers to version 3 of the GNU Affero General Public License.
64 |
65 | "Copyright" also means copyright-like laws that apply to other kinds of
66 | works, such as semiconductor masks.
67 |
68 | "The Program" refers to any copyrightable work licensed under this
69 | License. Each licensee is addressed as "you". "Licensees" and
70 | "recipients" may be individuals or organizations.
71 |
72 | To "modify" a work means to copy from or adapt all or part of the work
73 | in a fashion requiring copyright permission, other than the making of an
74 | exact copy. The resulting work is called a "modified version" of the
75 | earlier work or a work "based on" the earlier work.
76 |
77 | A "covered work" means either the unmodified Program or a work based
78 | on the Program.
79 |
80 | To "propagate" a work means to do anything with it that, without
81 | permission, would make you directly or secondarily liable for
82 | infringement under applicable copyright law, except executing it on a
83 | computer or modifying a private copy. Propagation includes copying,
84 | distribution (with or without modification), making available to the
85 | public, and in some countries other activities as well.
86 |
87 | To "convey" a work means any kind of propagation that enables other
88 | parties to make or receive copies. Mere interaction with a user through
89 | a computer network, with no transfer of a copy, is not conveying.
90 |
91 | An interactive user interface displays "Appropriate Legal Notices"
92 | to the extent that it includes a convenient and prominently visible
93 | feature that (1) displays an appropriate copyright notice, and (2)
94 | tells the user that there is no warranty for the work (except to the
95 | extent that warranties are provided), that licensees may convey the
96 | work under this License, and how to view a copy of this License. If
97 | the interface presents a list of user commands or options, such as a
98 | menu, a prominent item in the list meets this criterion.
99 |
100 | 1. Source Code.
101 |
102 | The "source code" for a work means the preferred form of the work
103 | for making modifications to it. "Object code" means any non-source
104 | form of a work.
105 |
106 | A "Standard Interface" means an interface that either is an official
107 | standard defined by a recognized standards body, or, in the case of
108 | interfaces specified for a particular programming language, one that
109 | is widely used among developers working in that language.
110 |
111 | The "System Libraries" of an executable work include anything, other
112 | than the work as a whole, that (a) is included in the normal form of
113 | packaging a Major Component, but which is not part of that Major
114 | Component, and (b) serves only to enable use of the work with that
115 | Major Component, or to implement a Standard Interface for which an
116 | implementation is available to the public in source code form. A
117 | "Major Component", in this context, means a major essential component
118 | (kernel, window system, and so on) of the specific operating system
119 | (if any) on which the executable work runs, or a compiler used to
120 | produce the work, or an object code interpreter used to run it.
121 |
122 | The "Corresponding Source" for a work in object code form means all
123 | the source code needed to generate, install, and (for an executable
124 | work) run the object code and to modify the work, including scripts to
125 | control those activities. However, it does not include the work's
126 | System Libraries, or general-purpose tools or generally available free
127 | programs which are used unmodified in performing those activities but
128 | which are not part of the work. For example, Corresponding Source
129 | includes interface definition files associated with source files for
130 | the work, and the source code for shared libraries and dynamically
131 | linked subprograms that the work is specifically designed to require,
132 | such as by intimate data communication or control flow between those
133 | subprograms and other parts of the work.
134 |
135 | The Corresponding Source need not include anything that users
136 | can regenerate automatically from other parts of the Corresponding
137 | Source.
138 |
139 | The Corresponding Source for a work in source code form is that
140 | same work.
141 |
142 | 2. Basic Permissions.
143 |
144 | All rights granted under this License are granted for the term of
145 | copyright on the Program, and are irrevocable provided the stated
146 | conditions are met. This License explicitly affirms your unlimited
147 | permission to run the unmodified Program. The output from running a
148 | covered work is covered by this License only if the output, given its
149 | content, constitutes a covered work. This License acknowledges your
150 | rights of fair use or other equivalent, as provided by copyright law.
151 |
152 | You may make, run and propagate covered works that you do not
153 | convey, without conditions so long as your license otherwise remains
154 | in force. You may convey covered works to others for the sole purpose
155 | of having them make modifications exclusively for you, or provide you
156 | with facilities for running those works, provided that you comply with
157 | the terms of this License in conveying all material for which you do
158 | not control copyright. Those thus making or running the covered works
159 | for you must do so exclusively on your behalf, under your direction
160 | and control, on terms that prohibit them from making any copies of
161 | your copyrighted material outside their relationship with you.
162 |
163 | Conveying under any other circumstances is permitted solely under
164 | the conditions stated below. Sublicensing is not allowed; section 10
165 | makes it unnecessary.
166 |
167 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
168 |
169 | No covered work shall be deemed part of an effective technological
170 | measure under any applicable law fulfilling obligations under article
171 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
172 | similar laws prohibiting or restricting circumvention of such
173 | measures.
174 |
175 | When you convey a covered work, you waive any legal power to forbid
176 | circumvention of technological measures to the extent such circumvention
177 | is effected by exercising rights under this License with respect to
178 | the covered work, and you disclaim any intention to limit operation or
179 | modification of the work as a means of enforcing, against the work's
180 | users, your or third parties' legal rights to forbid circumvention of
181 | technological measures.
182 |
183 | 4. Conveying Verbatim Copies.
184 |
185 | You may convey verbatim copies of the Program's source code as you
186 | receive it, in any medium, provided that you conspicuously and
187 | appropriately publish on each copy an appropriate copyright notice;
188 | keep intact all notices stating that this License and any
189 | non-permissive terms added in accord with section 7 apply to the code;
190 | keep intact all notices of the absence of any warranty; and give all
191 | recipients a copy of this License along with the Program.
192 |
193 | You may charge any price or no price for each copy that you convey,
194 | and you may offer support or warranty protection for a fee.
195 |
196 | 5. Conveying Modified Source Versions.
197 |
198 | You may convey a work based on the Program, or the modifications to
199 | produce it from the Program, in the form of source code under the
200 | terms of section 4, provided that you also meet all of these conditions:
201 |
202 | a) The work must carry prominent notices stating that you modified
203 | it, and giving a relevant date.
204 |
205 | b) The work must carry prominent notices stating that it is
206 | released under this License and any conditions added under section
207 | 7. This requirement modifies the requirement in section 4 to
208 | "keep intact all notices".
209 |
210 | c) You must license the entire work, as a whole, under this
211 | License to anyone who comes into possession of a copy. This
212 | License will therefore apply, along with any applicable section 7
213 | additional terms, to the whole of the work, and all its parts,
214 | regardless of how they are packaged. This License gives no
215 | permission to license the work in any other way, but it does not
216 | invalidate such permission if you have separately received it.
217 |
218 | d) If the work has interactive user interfaces, each must display
219 | Appropriate Legal Notices; however, if the Program has interactive
220 | interfaces that do not display Appropriate Legal Notices, your
221 | work need not make them do so.
222 |
223 | A compilation of a covered work with other separate and independent
224 | works, which are not by their nature extensions of the covered work,
225 | and which are not combined with it such as to form a larger program,
226 | in or on a volume of a storage or distribution medium, is called an
227 | "aggregate" if the compilation and its resulting copyright are not
228 | used to limit the access or legal rights of the compilation's users
229 | beyond what the individual works permit. Inclusion of a covered work
230 | in an aggregate does not cause this License to apply to the other
231 | parts of the aggregate.
232 |
233 | 6. Conveying Non-Source Forms.
234 |
235 | You may convey a covered work in object code form under the terms
236 | of sections 4 and 5, provided that you also convey the
237 | machine-readable Corresponding Source under the terms of this License,
238 | in one of these ways:
239 |
240 | a) Convey the object code in, or embodied in, a physical product
241 | (including a physical distribution medium), accompanied by the
242 | Corresponding Source fixed on a durable physical medium
243 | customarily used for software interchange.
244 |
245 | b) Convey the object code in, or embodied in, a physical product
246 | (including a physical distribution medium), accompanied by a
247 | written offer, valid for at least three years and valid for as
248 | long as you offer spare parts or customer support for that product
249 | model, to give anyone who possesses the object code either (1) a
250 | copy of the Corresponding Source for all the software in the
251 | product that is covered by this License, on a durable physical
252 | medium customarily used for software interchange, for a price no
253 | more than your reasonable cost of physically performing this
254 | conveying of source, or (2) access to copy the
255 | Corresponding Source from a network server at no charge.
256 |
257 | c) Convey individual copies of the object code with a copy of the
258 | written offer to provide the Corresponding Source. This
259 | alternative is allowed only occasionally and noncommercially, and
260 | only if you received the object code with such an offer, in accord
261 | with subsection 6b.
262 |
263 | d) Convey the object code by offering access from a designated
264 | place (gratis or for a charge), and offer equivalent access to the
265 | Corresponding Source in the same way through the same place at no
266 | further charge. You need not require recipients to copy the
267 | Corresponding Source along with the object code. If the place to
268 | copy the object code is a network server, the Corresponding Source
269 | may be on a different server (operated by you or a third party)
270 | that supports equivalent copying facilities, provided you maintain
271 | clear directions next to the object code saying where to find the
272 | Corresponding Source. Regardless of what server hosts the
273 | Corresponding Source, you remain obligated to ensure that it is
274 | available for as long as needed to satisfy these requirements.
275 |
276 | e) Convey the object code using peer-to-peer transmission, provided
277 | you inform other peers where the object code and Corresponding
278 | Source of the work are being offered to the general public at no
279 | charge under subsection 6d.
280 |
281 | A separable portion of the object code, whose source code is excluded
282 | from the Corresponding Source as a System Library, need not be
283 | included in conveying the object code work.
284 |
285 | A "User Product" is either (1) a "consumer product", which means any
286 | tangible personal property which is normally used for personal, family,
287 | or household purposes, or (2) anything designed or sold for incorporation
288 | into a dwelling. In determining whether a product is a consumer product,
289 | doubtful cases shall be resolved in favor of coverage. For a particular
290 | product received by a particular user, "normally used" refers to a
291 | typical or common use of that class of product, regardless of the status
292 | of the particular user or of the way in which the particular user
293 | actually uses, or expects or is expected to use, the product. A product
294 | is a consumer product regardless of whether the product has substantial
295 | commercial, industrial or non-consumer uses, unless such uses represent
296 | the only significant mode of use of the product.
297 |
298 | "Installation Information" for a User Product means any methods,
299 | procedures, authorization keys, or other information required to install
300 | and execute modified versions of a covered work in that User Product from
301 | a modified version of its Corresponding Source. The information must
302 | suffice to ensure that the continued functioning of the modified object
303 | code is in no case prevented or interfered with solely because
304 | modification has been made.
305 |
306 | If you convey an object code work under this section in, or with, or
307 | specifically for use in, a User Product, and the conveying occurs as
308 | part of a transaction in which the right of possession and use of the
309 | User Product is transferred to the recipient in perpetuity or for a
310 | fixed term (regardless of how the transaction is characterized), the
311 | Corresponding Source conveyed under this section must be accompanied
312 | by the Installation Information. But this requirement does not apply
313 | if neither you nor any third party retains the ability to install
314 | modified object code on the User Product (for example, the work has
315 | been installed in ROM).
316 |
317 | The requirement to provide Installation Information does not include a
318 | requirement to continue to provide support service, warranty, or updates
319 | for a work that has been modified or installed by the recipient, or for
320 | the User Product in which it has been modified or installed. Access to a
321 | network may be denied when the modification itself materially and
322 | adversely affects the operation of the network or violates the rules and
323 | protocols for communication across the network.
324 |
325 | Corresponding Source conveyed, and Installation Information provided,
326 | in accord with this section must be in a format that is publicly
327 | documented (and with an implementation available to the public in
328 | source code form), and must require no special password or key for
329 | unpacking, reading or copying.
330 |
331 | 7. Additional Terms.
332 |
333 | "Additional permissions" are terms that supplement the terms of this
334 | License by making exceptions from one or more of its conditions.
335 | Additional permissions that are applicable to the entire Program shall
336 | be treated as though they were included in this License, to the extent
337 | that they are valid under applicable law. If additional permissions
338 | apply only to part of the Program, that part may be used separately
339 | under those permissions, but the entire Program remains governed by
340 | this License without regard to the additional permissions.
341 |
342 | When you convey a copy of a covered work, you may at your option
343 | remove any additional permissions from that copy, or from any part of
344 | it. (Additional permissions may be written to require their own
345 | removal in certain cases when you modify the work.) You may place
346 | additional permissions on material, added by you to a covered work,
347 | for which you have or can give appropriate copyright permission.
348 |
349 | Notwithstanding any other provision of this License, for material you
350 | add to a covered work, you may (if authorized by the copyright holders of
351 | that material) supplement the terms of this License with terms:
352 |
353 | a) Disclaiming warranty or limiting liability differently from the
354 | terms of sections 15 and 16 of this License; or
355 |
356 | b) Requiring preservation of specified reasonable legal notices or
357 | author attributions in that material or in the Appropriate Legal
358 | Notices displayed by works containing it; or
359 |
360 | c) Prohibiting misrepresentation of the origin of that material, or
361 | requiring that modified versions of such material be marked in
362 | reasonable ways as different from the original version; or
363 |
364 | d) Limiting the use for publicity purposes of names of licensors or
365 | authors of the material; or
366 |
367 | e) Declining to grant rights under trademark law for use of some
368 | trade names, trademarks, or service marks; or
369 |
370 | f) Requiring indemnification of licensors and authors of that
371 | material by anyone who conveys the material (or modified versions of
372 | it) with contractual assumptions of liability to the recipient, for
373 | any liability that these contractual assumptions directly impose on
374 | those licensors and authors.
375 |
376 | All other non-permissive additional terms are considered "further
377 | restrictions" within the meaning of section 10. If the Program as you
378 | received it, or any part of it, contains a notice stating that it is
379 | governed by this License along with a term that is a further
380 | restriction, you may remove that term. If a license document contains
381 | a further restriction but permits relicensing or conveying under this
382 | License, you may add to a covered work material governed by the terms
383 | of that license document, provided that the further restriction does
384 | not survive such relicensing or conveying.
385 |
386 | If you add terms to a covered work in accord with this section, you
387 | must place, in the relevant source files, a statement of the
388 | additional terms that apply to those files, or a notice indicating
389 | where to find the applicable terms.
390 |
391 | Additional terms, permissive or non-permissive, may be stated in the
392 | form of a separately written license, or stated as exceptions;
393 | the above requirements apply either way.
394 |
395 | 8. Termination.
396 |
397 | You may not propagate or modify a covered work except as expressly
398 | provided under this License. Any attempt otherwise to propagate or
399 | modify it is void, and will automatically terminate your rights under
400 | this License (including any patent licenses granted under the third
401 | paragraph of section 11).
402 |
403 | However, if you cease all violation of this License, then your
404 | license from a particular copyright holder is reinstated (a)
405 | provisionally, unless and until the copyright holder explicitly and
406 | finally terminates your license, and (b) permanently, if the copyright
407 | holder fails to notify you of the violation by some reasonable means
408 | prior to 60 days after the cessation.
409 |
410 | Moreover, your license from a particular copyright holder is
411 | reinstated permanently if the copyright holder notifies you of the
412 | violation by some reasonable means, this is the first time you have
413 | received notice of violation of this License (for any work) from that
414 | copyright holder, and you cure the violation prior to 30 days after
415 | your receipt of the notice.
416 |
417 | Termination of your rights under this section does not terminate the
418 | licenses of parties who have received copies or rights from you under
419 | this License. If your rights have been terminated and not permanently
420 | reinstated, you do not qualify to receive new licenses for the same
421 | material under section 10.
422 |
423 | 9. Acceptance Not Required for Having Copies.
424 |
425 | You are not required to accept this License in order to receive or
426 | run a copy of the Program. Ancillary propagation of a covered work
427 | occurring solely as a consequence of using peer-to-peer transmission
428 | to receive a copy likewise does not require acceptance. However,
429 | nothing other than this License grants you permission to propagate or
430 | modify any covered work. These actions infringe copyright if you do
431 | not accept this License. Therefore, by modifying or propagating a
432 | covered work, you indicate your acceptance of this License to do so.
433 |
434 | 10. Automatic Licensing of Downstream Recipients.
435 |
436 | Each time you convey a covered work, the recipient automatically
437 | receives a license from the original licensors, to run, modify and
438 | propagate that work, subject to this License. You are not responsible
439 | for enforcing compliance by third parties with this License.
440 |
441 | An "entity transaction" is a transaction transferring control of an
442 | organization, or substantially all assets of one, or subdividing an
443 | organization, or merging organizations. If propagation of a covered
444 | work results from an entity transaction, each party to that
445 | transaction who receives a copy of the work also receives whatever
446 | licenses to the work the party's predecessor in interest had or could
447 | give under the previous paragraph, plus a right to possession of the
448 | Corresponding Source of the work from the predecessor in interest, if
449 | the predecessor has it or can get it with reasonable efforts.
450 |
451 | You may not impose any further restrictions on the exercise of the
452 | rights granted or affirmed under this License. For example, you may
453 | not impose a license fee, royalty, or other charge for exercise of
454 | rights granted under this License, and you may not initiate litigation
455 | (including a cross-claim or counterclaim in a lawsuit) alleging that
456 | any patent claim is infringed by making, using, selling, offering for
457 | sale, or importing the Program or any portion of it.
458 |
459 | 11. Patents.
460 |
461 | A "contributor" is a copyright holder who authorizes use under this
462 | License of the Program or a work on which the Program is based. The
463 | work thus licensed is called the contributor's "contributor version".
464 |
465 | A contributor's "essential patent claims" are all patent claims
466 | owned or controlled by the contributor, whether already acquired or
467 | hereafter acquired, that would be infringed by some manner, permitted
468 | by this License, of making, using, or selling its contributor version,
469 | but do not include claims that would be infringed only as a
470 | consequence of further modification of the contributor version. For
471 | purposes of this definition, "control" includes the right to grant
472 | patent sublicenses in a manner consistent with the requirements of
473 | this License.
474 |
475 | Each contributor grants you a non-exclusive, worldwide, royalty-free
476 | patent license under the contributor's essential patent claims, to
477 | make, use, sell, offer for sale, import and otherwise run, modify and
478 | propagate the contents of its contributor version.
479 |
480 | In the following three paragraphs, a "patent license" is any express
481 | agreement or commitment, however denominated, not to enforce a patent
482 | (such as an express permission to practice a patent or covenant not to
483 | sue for patent infringement). To "grant" such a patent license to a
484 | party means to make such an agreement or commitment not to enforce a
485 | patent against the party.
486 |
487 | If you convey a covered work, knowingly relying on a patent license,
488 | and the Corresponding Source of the work is not available for anyone
489 | to copy, free of charge and under the terms of this License, through a
490 | publicly available network server or other readily accessible means,
491 | then you must either (1) cause the Corresponding Source to be so
492 | available, or (2) arrange to deprive yourself of the benefit of the
493 | patent license for this particular work, or (3) arrange, in a manner
494 | consistent with the requirements of this License, to extend the patent
495 | license to downstream recipients. "Knowingly relying" means you have
496 | actual knowledge that, but for the patent license, your conveying the
497 | covered work in a country, or your recipient's use of the covered work
498 | in a country, would infringe one or more identifiable patents in that
499 | country that you have reason to believe are valid.
500 |
501 | If, pursuant to or in connection with a single transaction or
502 | arrangement, you convey, or propagate by procuring conveyance of, a
503 | covered work, and grant a patent license to some of the parties
504 | receiving the covered work authorizing them to use, propagate, modify
505 | or convey a specific copy of the covered work, then the patent license
506 | you grant is automatically extended to all recipients of the covered
507 | work and works based on it.
508 |
509 | A patent license is "discriminatory" if it does not include within
510 | the scope of its coverage, prohibits the exercise of, or is
511 | conditioned on the non-exercise of one or more of the rights that are
512 | specifically granted under this License. You may not convey a covered
513 | work if you are a party to an arrangement with a third party that is
514 | in the business of distributing software, under which you make payment
515 | to the third party based on the extent of your activity of conveying
516 | the work, and under which the third party grants, to any of the
517 | parties who would receive the covered work from you, a discriminatory
518 | patent license (a) in connection with copies of the covered work
519 | conveyed by you (or copies made from those copies), or (b) primarily
520 | for and in connection with specific products or compilations that
521 | contain the covered work, unless you entered into that arrangement,
522 | or that patent license was granted, prior to 28 March 2007.
523 |
524 | Nothing in this License shall be construed as excluding or limiting
525 | any implied license or other defenses to infringement that may
526 | otherwise be available to you under applicable patent law.
527 |
528 | 12. No Surrender of Others' Freedom.
529 |
530 | If conditions are imposed on you (whether by court order, agreement or
531 | otherwise) that contradict the conditions of this License, they do not
532 | excuse you from the conditions of this License. If you cannot convey a
533 | covered work so as to satisfy simultaneously your obligations under this
534 | License and any other pertinent obligations, then as a consequence you may
535 | not convey it at all. For example, if you agree to terms that obligate you
536 | to collect a royalty for further conveying from those to whom you convey
537 | the Program, the only way you could satisfy both those terms and this
538 | License would be to refrain entirely from conveying the Program.
539 |
540 | 13. Remote Network Interaction; Use with the GNU General Public License.
541 |
542 | Notwithstanding any other provision of this License, if you modify the
543 | Program, your modified version must prominently offer all users
544 | interacting with it remotely through a computer network (if your version
545 | supports such interaction) an opportunity to receive the Corresponding
546 | Source of your version by providing access to the Corresponding Source
547 | from a network server at no charge, through some standard or customary
548 | means of facilitating copying of software. This Corresponding Source
549 | shall include the Corresponding Source for any work covered by version 3
550 | of the GNU General Public License that is incorporated pursuant to the
551 | following paragraph.
552 |
553 | Notwithstanding any other provision of this License, you have
554 | permission to link or combine any covered work with a work licensed
555 | under version 3 of the GNU General Public License into a single
556 | combined work, and to convey the resulting work. The terms of this
557 | License will continue to apply to the part which is the covered work,
558 | but the work with which it is combined will remain governed by version
559 | 3 of the GNU General Public License.
560 |
561 | 14. Revised Versions of this License.
562 |
563 | The Free Software Foundation may publish revised and/or new versions of
564 | the GNU Affero General Public License from time to time. Such new versions
565 | will be similar in spirit to the present version, but may differ in detail to
566 | address new problems or concerns.
567 |
568 | Each version is given a distinguishing version number. If the
569 | Program specifies that a certain numbered version of the GNU Affero General
570 | Public License "or any later version" applies to it, you have the
571 | option of following the terms and conditions either of that numbered
572 | version or of any later version published by the Free Software
573 | Foundation. If the Program does not specify a version number of the
574 | GNU Affero General Public License, you may choose any version ever published
575 | by the Free Software Foundation.
576 |
577 | If the Program specifies that a proxy can decide which future
578 | versions of the GNU Affero General Public License can be used, that proxy's
579 | public statement of acceptance of a version permanently authorizes you
580 | to choose that version for the Program.
581 |
582 | Later license versions may give you additional or different
583 | permissions. However, no additional obligations are imposed on any
584 | author or copyright holder as a result of your choosing to follow a
585 | later version.
586 |
587 | 15. Disclaimer of Warranty.
588 |
589 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
590 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
591 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
592 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
593 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
594 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
595 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
596 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
597 |
598 | 16. Limitation of Liability.
599 |
600 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
601 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
602 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
603 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
604 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
605 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
606 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
607 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
608 | SUCH DAMAGES.
609 |
610 | 17. Interpretation of Sections 15 and 16.
611 |
612 | If the disclaimer of warranty and limitation of liability provided
613 | above cannot be given local legal effect according to their terms,
614 | reviewing courts shall apply local law that most closely approximates
615 | an absolute waiver of all civil liability in connection with the
616 | Program, unless a warranty or assumption of liability accompanies a
617 | copy of the Program in return for a fee.
618 |
619 | END OF TERMS AND CONDITIONS
620 |
621 | How to Apply These Terms to Your New Programs
622 |
623 | If you develop a new program, and you want it to be of the greatest
624 | possible use to the public, the best way to achieve this is to make it
625 | free software which everyone can redistribute and change under these terms.
626 |
627 | To do so, attach the following notices to the program. It is safest
628 | to attach them to the start of each source file to most effectively
629 | state the exclusion of warranty; and each file should have at least
630 | the "copyright" line and a pointer to where the full notice is found.
631 |
632 | <one line to give the program's name and a brief idea of what it does.>
633 | Copyright (C) <year>  <name of author>
634 |
635 | This program is free software: you can redistribute it and/or modify
636 | it under the terms of the GNU Affero General Public License as published
637 | by the Free Software Foundation, either version 3 of the License, or
638 | (at your option) any later version.
639 |
640 | This program is distributed in the hope that it will be useful,
641 | but WITHOUT ANY WARRANTY; without even the implied warranty of
642 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
643 | GNU Affero General Public License for more details.
644 |
645 | You should have received a copy of the GNU Affero General Public License
646 | along with this program. If not, see <https://www.gnu.org/licenses/>.
647 |
648 | Also add information on how to contact you by electronic and paper mail.
649 |
650 | If your software can interact with users remotely through a computer
651 | network, you should also make sure that it provides a way for users to
652 | get its source. For example, if your program is a web application, its
653 | interface could display a "Source" link that leads users to an archive
654 | of the code. There are many ways you could offer source, and different
655 | solutions will be better for different programs; see section 13 for the
656 | specific requirements.
657 |
658 | You should also get your employer (if you work as a programmer) or school,
659 | if any, to sign a "copyright disclaimer" for the program, if necessary.
660 | For more information on this, and how to apply and follow the GNU AGPL, see
661 | <https://www.gnu.org/licenses/>.
662 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # xray-node
2 |
3 | 
4 | 
5 | 
6 | 
7 | [](https://t.me/laoshan_tech)
8 | [](https://t.me/laoshan_tech_discuss)
9 |
10 | 
11 |
12 | ## Introduction
13 |
14 | A multi-user proxy backend written in Python on top of [xray-core](https://github.com/XTLS/Xray-core). It supports dynamic user management and traffic statistics; rate limiting and traffic auditing are not supported yet.
15 |
16 | > _Still under development; full functionality is not available yet._
17 |
18 | ## Features
19 |
20 | - The complete feature set provided by xray-core
21 |   - VMess
22 |   - VLESS
23 |   - SS (with single-port multi-user support)
24 |   - Trojan
25 | - Automatic installation and upgrade of xray-core
26 | - Support for multiple user management panels (in development)
27 |   - SSPanel
28 |   - django-sspanel
29 |   - v2board
--------------------------------------------------------------------------------
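
As the README notes, the node is panel-driven: it pulls its user list from a management panel such as SSPanel or v2board and reports traffic back. Below is a minimal sketch of such a polling loop using httpx (a dependency pinned in poetry.lock); the endpoint path, query parameter, and response fields are assumptions for illustration, not the actual panel API implemented in `xray_node/api`:

```python
# Illustrative polling sketch only; the endpoint and field names are
# hypothetical and do not reflect the real SSPanel/v2board API handlers.
import asyncio

import httpx


async def poll_users(panel_url: str, node_id: int, token: str, interval: int = 60) -> None:
    """Periodically fetch the user list this node should serve."""
    async with httpx.AsyncClient(base_url=panel_url, timeout=10) as client:
        while True:
            resp = await client.get(f"/api/node/{node_id}/users", params={"token": token})
            resp.raise_for_status()
            users = resp.json().get("users", [])
            # In the real project this is where users would be synced into
            # xray-core (via its gRPC APIs) and persisted in the local database.
            print(f"fetched {len(users)} users")
            await asyncio.sleep(interval)


if __name__ == "__main__":
    asyncio.run(poll_users("https://panel.example.com", node_id=1, token="secret"))
```
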
/poetry.lock:
--------------------------------------------------------------------------------
1 | [[package]]
2 | name = "aiosqlite"
3 | version = "0.17.0"
4 | description = "asyncio bridge to the standard sqlite3 module"
5 | category = "main"
6 | optional = false
7 | python-versions = ">=3.6"
8 |
9 | [package.dependencies]
10 | typing_extensions = ">=3.7.2"
11 |
12 | [package.source]
13 | type = "legacy"
14 | url = "https://mirrors.aliyun.com/pypi/simple"
15 | reference = "aliyun"
16 |
17 | [[package]]
18 | name = "appdirs"
19 | version = "1.4.4"
20 | description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
21 | category = "dev"
22 | optional = false
23 | python-versions = "*"
24 |
25 | [package.source]
26 | type = "legacy"
27 | url = "https://mirrors.aliyun.com/pypi/simple"
28 | reference = "aliyun"
29 |
30 | [[package]]
31 | name = "atomicwrites"
32 | version = "1.4.0"
33 | description = "Atomic file writes."
34 | category = "dev"
35 | optional = false
36 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
37 |
38 | [package.source]
39 | type = "legacy"
40 | url = "https://mirrors.aliyun.com/pypi/simple"
41 | reference = "aliyun"
42 |
43 | [[package]]
44 | name = "attrs"
45 | version = "21.4.0"
46 | description = "Classes Without Boilerplate"
47 | category = "dev"
48 | optional = false
49 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
50 |
51 | [package.extras]
52 | dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
53 | docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
54 | tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
55 | tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
56 |
57 | [package.source]
58 | type = "legacy"
59 | url = "https://mirrors.aliyun.com/pypi/simple"
60 | reference = "aliyun"
61 |
62 | [[package]]
63 | name = "black"
64 | version = "20.8b1"
65 | description = "The uncompromising code formatter."
66 | category = "dev"
67 | optional = false
68 | python-versions = ">=3.6"
69 |
70 | [package.dependencies]
71 | appdirs = "*"
72 | click = ">=7.1.2"
73 | mypy_extensions = ">=0.4.3"
74 | pathspec = ">=0.6,<1"
75 | regex = ">=2020.1.8"
76 | toml = ">=0.10.1"
77 | typed-ast = ">=1.4.0"
78 | typing_extensions = ">=3.7.4"
79 |
80 | [package.extras]
81 | colorama = ["colorama (>=0.4.3)"]
82 | d = ["aiohttp (>=3.3.2)", "aiohttp-cors"]
83 |
84 | [package.source]
85 | type = "legacy"
86 | url = "https://mirrors.aliyun.com/pypi/simple"
87 | reference = "aliyun"
88 |
89 | [[package]]
90 | name = "certifi"
91 | version = "2021.10.8"
92 | description = "Python package for providing Mozilla's CA Bundle."
93 | category = "main"
94 | optional = false
95 | python-versions = "*"
96 |
97 | [package.source]
98 | type = "legacy"
99 | url = "https://mirrors.aliyun.com/pypi/simple"
100 | reference = "aliyun"
101 |
102 | [[package]]
103 | name = "click"
104 | version = "7.1.2"
105 | description = "Composable command line interface toolkit"
106 | category = "main"
107 | optional = false
108 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
109 |
110 | [package.source]
111 | type = "legacy"
112 | url = "https://mirrors.aliyun.com/pypi/simple"
113 | reference = "aliyun"
114 |
115 | [[package]]
116 | name = "colorama"
117 | version = "0.4.4"
118 | description = "Cross-platform colored terminal text."
119 | category = "main"
120 | optional = false
121 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
122 |
123 | [package.source]
124 | type = "legacy"
125 | url = "https://mirrors.aliyun.com/pypi/simple"
126 | reference = "aliyun"
127 |
128 | [[package]]
129 | name = "googleapis-common-protos"
130 | version = "1.54.0"
131 | description = "Common protobufs used in Google APIs"
132 | category = "main"
133 | optional = false
134 | python-versions = ">=3.6"
135 |
136 | [package.dependencies]
137 | protobuf = ">=3.12.0"
138 |
139 | [package.extras]
140 | grpc = ["grpcio (>=1.0.0)"]
141 |
142 | [package.source]
143 | type = "legacy"
144 | url = "https://mirrors.aliyun.com/pypi/simple"
145 | reference = "aliyun"
146 |
147 | [[package]]
148 | name = "grpcio"
149 | version = "1.43.0"
150 | description = "HTTP/2-based RPC framework"
151 | category = "main"
152 | optional = false
153 | python-versions = ">=3.6"
154 |
155 | [package.dependencies]
156 | six = ">=1.5.2"
157 |
158 | [package.extras]
159 | protobuf = ["grpcio-tools (>=1.43.0)"]
160 |
161 | [package.source]
162 | type = "legacy"
163 | url = "https://mirrors.aliyun.com/pypi/simple"
164 | reference = "aliyun"
165 |
166 | [[package]]
167 | name = "grpcio-status"
168 | version = "1.43.0"
169 | description = "Status proto mapping for gRPC"
170 | category = "main"
171 | optional = false
172 | python-versions = ">=3.6"
173 |
174 | [package.dependencies]
175 | googleapis-common-protos = ">=1.5.5"
176 | grpcio = ">=1.43.0"
177 | protobuf = ">=3.6.0"
178 |
179 | [package.source]
180 | type = "legacy"
181 | url = "https://mirrors.aliyun.com/pypi/simple"
182 | reference = "aliyun"
183 |
184 | [[package]]
185 | name = "grpcio-tools"
186 | version = "1.43.0"
187 | description = "Protobuf code generator for gRPC"
188 | category = "main"
189 | optional = false
190 | python-versions = ">=3.6"
191 |
192 | [package.dependencies]
193 | grpcio = ">=1.43.0"
194 | protobuf = ">=3.5.0.post1,<4.0dev"
195 |
196 | [package.source]
197 | type = "legacy"
198 | url = "https://mirrors.aliyun.com/pypi/simple"
199 | reference = "aliyun"
200 |
201 | [[package]]
202 | name = "h11"
203 | version = "0.13.0"
204 | description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
205 | category = "main"
206 | optional = false
207 | python-versions = ">=3.6"
208 |
209 | [package.source]
210 | type = "legacy"
211 | url = "https://mirrors.aliyun.com/pypi/simple"
212 | reference = "aliyun"
213 |
214 | [[package]]
215 | name = "httpcore"
216 | version = "0.12.3"
217 | description = "A minimal low-level HTTP client."
218 | category = "main"
219 | optional = false
220 | python-versions = ">=3.6"
221 |
222 | [package.dependencies]
223 | h11 = "<1.0.0"
224 | sniffio = ">=1.0.0,<2.0.0"
225 |
226 | [package.extras]
227 | http2 = ["h2 (>=3,<5)"]
228 |
229 | [package.source]
230 | type = "legacy"
231 | url = "https://mirrors.aliyun.com/pypi/simple"
232 | reference = "aliyun"
233 |
234 | [[package]]
235 | name = "httpx"
236 | version = "0.17.1"
237 | description = "The next generation HTTP client."
238 | category = "main"
239 | optional = false
240 | python-versions = ">=3.6"
241 |
242 | [package.dependencies]
243 | certifi = "*"
244 | httpcore = ">=0.12.1,<0.13"
245 | rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
246 | sniffio = "*"
247 |
248 | [package.extras]
249 | brotli = ["brotlipy (>=0.7.0,<0.8.0)"]
250 | http2 = ["h2 (>=3.0.0,<4.0.0)"]
251 |
252 | [package.source]
253 | type = "legacy"
254 | url = "https://mirrors.aliyun.com/pypi/simple"
255 | reference = "aliyun"
256 |
257 | [[package]]
258 | name = "humanize"
259 | version = "3.13.1"
260 | description = "Python humanize utilities"
261 | category = "main"
262 | optional = false
263 | python-versions = ">=3.6"
264 |
265 | [package.extras]
266 | tests = ["freezegun", "pytest", "pytest-cov"]
267 |
268 | [package.source]
269 | type = "legacy"
270 | url = "https://mirrors.aliyun.com/pypi/simple"
271 | reference = "aliyun"
272 |
273 | [[package]]
274 | name = "idna"
275 | version = "3.3"
276 | description = "Internationalized Domain Names in Applications (IDNA)"
277 | category = "main"
278 | optional = false
279 | python-versions = ">=3.5"
280 |
281 | [package.source]
282 | type = "legacy"
283 | url = "https://mirrors.aliyun.com/pypi/simple"
284 | reference = "aliyun"
285 |
286 | [[package]]
287 | name = "iniconfig"
288 | version = "1.1.1"
289 | description = "iniconfig: brain-dead simple config-ini parsing"
290 | category = "dev"
291 | optional = false
292 | python-versions = "*"
293 |
294 | [package.source]
295 | type = "legacy"
296 | url = "https://mirrors.aliyun.com/pypi/simple"
297 | reference = "aliyun"
298 |
299 | [[package]]
300 | name = "iso8601"
301 | version = "0.1.16"
302 | description = "Simple module to parse ISO 8601 dates"
303 | category = "main"
304 | optional = false
305 | python-versions = "*"
306 |
307 | [package.source]
308 | type = "legacy"
309 | url = "https://mirrors.aliyun.com/pypi/simple"
310 | reference = "aliyun"
311 |
312 | [[package]]
313 | name = "loguru"
314 | version = "0.5.3"
315 | description = "Python logging made (stupidly) simple"
316 | category = "main"
317 | optional = false
318 | python-versions = ">=3.5"
319 |
320 | [package.dependencies]
321 | colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""}
322 | win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
323 |
324 | [package.extras]
325 | dev = ["codecov (>=2.0.15)", "colorama (>=0.3.4)", "flake8 (>=3.7.7)", "tox (>=3.9.0)", "tox-travis (>=0.12)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "Sphinx (>=2.2.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "black (>=19.10b0)", "isort (>=5.1.1)"]
326 |
327 | [package.source]
328 | type = "legacy"
329 | url = "https://mirrors.aliyun.com/pypi/simple"
330 | reference = "aliyun"
331 |
332 | [[package]]
333 | name = "mypy-extensions"
334 | version = "0.4.3"
335 | description = "Experimental type system extensions for programs checked with the mypy typechecker."
336 | category = "dev"
337 | optional = false
338 | python-versions = "*"
339 |
340 | [package.source]
341 | type = "legacy"
342 | url = "https://mirrors.aliyun.com/pypi/simple"
343 | reference = "aliyun"
344 |
345 | [[package]]
346 | name = "packaging"
347 | version = "21.3"
348 | description = "Core utilities for Python packages"
349 | category = "dev"
350 | optional = false
351 | python-versions = ">=3.6"
352 |
353 | [package.dependencies]
354 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5"
355 |
356 | [package.source]
357 | type = "legacy"
358 | url = "https://mirrors.aliyun.com/pypi/simple"
359 | reference = "aliyun"
360 |
361 | [[package]]
362 | name = "pathspec"
363 | version = "0.9.0"
364 | description = "Utility library for gitignore style pattern matching of file paths."
365 | category = "dev"
366 | optional = false
367 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7"
368 |
369 | [package.source]
370 | type = "legacy"
371 | url = "https://mirrors.aliyun.com/pypi/simple"
372 | reference = "aliyun"
373 |
374 | [[package]]
375 | name = "pluggy"
376 | version = "1.0.0"
377 | description = "plugin and hook calling mechanisms for python"
378 | category = "dev"
379 | optional = false
380 | python-versions = ">=3.6"
381 |
382 | [package.extras]
383 | dev = ["pre-commit", "tox"]
384 | testing = ["pytest", "pytest-benchmark"]
385 |
386 | [package.source]
387 | type = "legacy"
388 | url = "https://mirrors.aliyun.com/pypi/simple"
389 | reference = "aliyun"
390 |
391 | [[package]]
392 | name = "protobuf"
393 | version = "3.19.3"
394 | description = "Protocol Buffers"
395 | category = "main"
396 | optional = false
397 | python-versions = ">=3.5"
398 |
399 | [package.source]
400 | type = "legacy"
401 | url = "https://mirrors.aliyun.com/pypi/simple"
402 | reference = "aliyun"
403 |
404 | [[package]]
405 | name = "psutil"
406 | version = "5.9.0"
407 | description = "Cross-platform lib for process and system monitoring in Python."
408 | category = "main"
409 | optional = false
410 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
411 |
412 | [package.extras]
413 | test = ["ipaddress", "mock", "unittest2", "enum34", "pywin32", "wmi"]
414 |
415 | [package.source]
416 | type = "legacy"
417 | url = "https://mirrors.aliyun.com/pypi/simple"
418 | reference = "aliyun"
419 |
420 | [[package]]
421 | name = "py"
422 | version = "1.11.0"
423 | description = "library with cross-python path, ini-parsing, io, code, log facilities"
424 | category = "dev"
425 | optional = false
426 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
427 |
428 | [package.source]
429 | type = "legacy"
430 | url = "https://mirrors.aliyun.com/pypi/simple"
431 | reference = "aliyun"
432 |
433 | [[package]]
434 | name = "pyparsing"
435 | version = "3.0.7"
436 | description = "Python parsing module"
437 | category = "dev"
438 | optional = false
439 | python-versions = ">=3.6"
440 |
441 | [package.extras]
442 | diagrams = ["jinja2", "railroad-diagrams"]
443 |
444 | [package.source]
445 | type = "legacy"
446 | url = "https://mirrors.aliyun.com/pypi/simple"
447 | reference = "aliyun"
448 |
449 | [[package]]
450 | name = "pypika-tortoise"
451 | version = "0.1.3"
452 | description = "Forked from pypika and streamline just for tortoise-orm"
453 | category = "main"
454 | optional = false
455 | python-versions = ">=3.7,<4.0"
456 |
457 | [package.source]
458 | type = "legacy"
459 | url = "https://mirrors.aliyun.com/pypi/simple"
460 | reference = "aliyun"
461 |
462 | [[package]]
463 | name = "pytest"
464 | version = "6.2.5"
465 | description = "pytest: simple powerful testing with Python"
466 | category = "dev"
467 | optional = false
468 | python-versions = ">=3.6"
469 |
470 | [package.dependencies]
471 | atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""}
472 | attrs = ">=19.2.0"
473 | colorama = {version = "*", markers = "sys_platform == \"win32\""}
474 | iniconfig = "*"
475 | packaging = "*"
476 | pluggy = ">=0.12,<2.0"
477 | py = ">=1.8.2"
478 | toml = "*"
479 |
480 | [package.extras]
481 | testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"]
482 |
483 | [package.source]
484 | type = "legacy"
485 | url = "https://mirrors.aliyun.com/pypi/simple"
486 | reference = "aliyun"
487 |
488 | [[package]]
489 | name = "pytest-asyncio"
490 | version = "0.14.0"
491 | description = "Pytest support for asyncio."
492 | category = "dev"
493 | optional = false
494 | python-versions = ">= 3.5"
495 |
496 | [package.dependencies]
497 | pytest = ">=5.4.0"
498 |
499 | [package.extras]
500 | testing = ["async-generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"]
501 |
502 | [package.source]
503 | type = "legacy"
504 | url = "https://mirrors.aliyun.com/pypi/simple"
505 | reference = "aliyun"
506 |
507 | [[package]]
508 | name = "pytest-httpserver"
509 | version = "1.0.3"
510 | description = "pytest-httpserver is a httpserver for pytest"
511 | category = "main"
512 | optional = false
513 | python-versions = ">=3.6,<4.0"
514 |
515 | [package.dependencies]
516 | Werkzeug = ">=2.0.0"
517 |
518 | [package.extras]
519 | dev = ["autopep8", "coverage", "flake8", "ipdb", "mypy", "pytest-cov", "pytest", "reno", "requests", "sphinx", "sphinx-rtd-theme", "types-requests"]
520 | test = ["coverage", "flake8", "mypy", "pytest-cov", "pytest", "requests", "types-requests"]
521 | doc = ["reno", "sphinx", "sphinx-rtd-theme"]
522 |
523 | [package.source]
524 | type = "legacy"
525 | url = "https://mirrors.aliyun.com/pypi/simple"
526 | reference = "aliyun"
527 |
528 | [[package]]
529 | name = "pytest-ordering"
530 | version = "0.6"
531 | description = "pytest plugin to run your tests in a specific order"
532 | category = "dev"
533 | optional = false
534 | python-versions = "*"
535 |
536 | [package.dependencies]
537 | pytest = "*"
538 |
539 | [package.source]
540 | type = "legacy"
541 | url = "https://mirrors.aliyun.com/pypi/simple"
542 | reference = "aliyun"
543 |
544 | [[package]]
545 | name = "pytz"
546 | version = "2021.3"
547 | description = "World timezone definitions, modern and historical"
548 | category = "main"
549 | optional = false
550 | python-versions = "*"
551 |
552 | [package.source]
553 | type = "legacy"
554 | url = "https://mirrors.aliyun.com/pypi/simple"
555 | reference = "aliyun"
556 |
557 | [[package]]
558 | name = "regex"
559 | version = "2022.1.18"
560 | description = "Alternative regular expression module, to replace re."
561 | category = "dev"
562 | optional = false
563 | python-versions = "*"
564 |
565 | [package.source]
566 | type = "legacy"
567 | url = "https://mirrors.aliyun.com/pypi/simple"
568 | reference = "aliyun"
569 |
570 | [[package]]
571 | name = "rfc3986"
572 | version = "1.5.0"
573 | description = "Validating URI References per RFC 3986"
574 | category = "main"
575 | optional = false
576 | python-versions = "*"
577 |
578 | [package.dependencies]
579 | idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
580 |
581 | [package.extras]
582 | idna2008 = ["idna"]
583 |
584 | [package.source]
585 | type = "legacy"
586 | url = "https://mirrors.aliyun.com/pypi/simple"
587 | reference = "aliyun"
588 |
589 | [[package]]
590 | name = "six"
591 | version = "1.16.0"
592 | description = "Python 2 and 3 compatibility utilities"
593 | category = "main"
594 | optional = false
595 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
596 |
597 | [package.source]
598 | type = "legacy"
599 | url = "https://mirrors.aliyun.com/pypi/simple"
600 | reference = "aliyun"
601 |
602 | [[package]]
603 | name = "sniffio"
604 | version = "1.2.0"
605 | description = "Sniff out which async library your code is running under"
606 | category = "main"
607 | optional = false
608 | python-versions = ">=3.5"
609 |
610 | [package.source]
611 | type = "legacy"
612 | url = "https://mirrors.aliyun.com/pypi/simple"
613 | reference = "aliyun"
614 |
615 | [[package]]
616 | name = "toml"
617 | version = "0.10.2"
618 | description = "Python Library for Tom's Obvious, Minimal Language"
619 | category = "dev"
620 | optional = false
621 | python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
622 |
623 | [package.source]
624 | type = "legacy"
625 | url = "https://mirrors.aliyun.com/pypi/simple"
626 | reference = "aliyun"
627 |
628 | [[package]]
629 | name = "tomlkit"
630 | version = "0.7.2"
631 | description = "Style preserving TOML library"
632 | category = "main"
633 | optional = false
634 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
635 |
636 | [package.source]
637 | type = "legacy"
638 | url = "https://mirrors.aliyun.com/pypi/simple"
639 | reference = "aliyun"
640 |
641 | [[package]]
642 | name = "tortoise-orm"
643 | version = "0.17.8"
644 | description = "Easy async ORM for python, built with relations in mind"
645 | category = "main"
646 | optional = false
647 | python-versions = ">=3.7,<4.0"
648 |
649 | [package.dependencies]
650 | aiosqlite = ">=0.16.0,<0.18.0"
651 | iso8601 = ">=0.1.13,<0.2.0"
652 | pypika-tortoise = ">=0.1.1,<0.2.0"
653 | pytz = "*"
654 |
655 | [package.extras]
656 | aiomysql = ["aiomysql"]
657 | asyncmy = ["asyncmy"]
658 | asyncpg = ["asyncpg"]
659 | accel = ["ciso8601 (>=2.1.2,<3.0.0)", "python-rapidjson", "uvloop (>=0.14.0,<0.15.0)"]
660 |
661 | [package.source]
662 | type = "legacy"
663 | url = "https://mirrors.aliyun.com/pypi/simple"
664 | reference = "aliyun"
665 |
666 | [[package]]
667 | name = "typed-ast"
668 | version = "1.5.2"
669 | description = "a fork of Python 2 and 3 ast modules with type comment support"
670 | category = "dev"
671 | optional = false
672 | python-versions = ">=3.6"
673 |
674 | [package.source]
675 | type = "legacy"
676 | url = "https://mirrors.aliyun.com/pypi/simple"
677 | reference = "aliyun"
678 |
679 | [[package]]
680 | name = "typing-extensions"
681 | version = "4.0.1"
682 | description = "Backported and Experimental Type Hints for Python 3.6+"
683 | category = "main"
684 | optional = false
685 | python-versions = ">=3.6"
686 |
687 | [package.source]
688 | type = "legacy"
689 | url = "https://mirrors.aliyun.com/pypi/simple"
690 | reference = "aliyun"
691 |
692 | [[package]]
693 | name = "uvloop"
694 | version = "0.15.3"
695 | description = "Fast implementation of asyncio event loop on top of libuv"
696 | category = "main"
697 | optional = false
698 | python-versions = ">=3.7"
699 |
700 | [package.extras]
701 | dev = ["Cython (>=0.29.20,<0.30.0)", "pytest (>=3.6.0)", "Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx_rtd_theme (>=0.2.4,<0.3.0)", "aiohttp", "flake8 (>=3.8.4,<3.9.0)", "psutil", "pycodestyle (>=2.6.0,<2.7.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"]
702 | docs = ["Sphinx (>=1.7.3,<1.8.0)", "sphinxcontrib-asyncio (>=0.2.0,<0.3.0)", "sphinx_rtd_theme (>=0.2.4,<0.3.0)"]
703 | test = ["aiohttp", "flake8 (>=3.8.4,<3.9.0)", "psutil", "pycodestyle (>=2.6.0,<2.7.0)", "pyOpenSSL (>=19.0.0,<19.1.0)", "mypy (>=0.800)"]
704 |
705 | [package.source]
706 | type = "legacy"
707 | url = "https://mirrors.aliyun.com/pypi/simple"
708 | reference = "aliyun"
709 |
710 | [[package]]
711 | name = "werkzeug"
712 | version = "2.0.2"
713 | description = "The comprehensive WSGI web application library."
714 | category = "main"
715 | optional = false
716 | python-versions = ">=3.6"
717 |
718 | [package.extras]
719 | watchdog = ["watchdog"]
720 |
721 | [package.source]
722 | type = "legacy"
723 | url = "https://mirrors.aliyun.com/pypi/simple"
724 | reference = "aliyun"
725 |
726 | [[package]]
727 | name = "win32-setctime"
728 | version = "1.1.0"
729 | description = "A small Python utility to set file creation time on Windows"
730 | category = "main"
731 | optional = false
732 | python-versions = ">=3.5"
733 |
734 | [package.extras]
735 | dev = ["pytest (>=4.6.2)", "black (>=19.3b0)"]
736 |
737 | [package.source]
738 | type = "legacy"
739 | url = "https://mirrors.aliyun.com/pypi/simple"
740 | reference = "aliyun"
741 |
742 | [[package]]
743 | name = "xray-rpc"
744 | version = "1.5.2"
745 | description = "gRPC files generated from Xray source code."
746 | category = "main"
747 | optional = false
748 | python-versions = ">=3.8,<4.0"
749 |
750 | [package.dependencies]
751 | grpcio = ">=1.37.0,<2.0.0"
752 | grpcio-tools = ">=1.37.0,<2.0.0"
753 | httpx = ">=0.17.1,<0.18.0"
754 |
755 | [package.source]
756 | type = "legacy"
757 | url = "https://mirrors.aliyun.com/pypi/simple"
758 | reference = "aliyun"
759 |
760 | [metadata]
761 | lock-version = "1.1"
762 | python-versions = "^3.8"
763 | content-hash = "ef6750467fa606f8a5ead7d92c5c6666f7774304f894c200aee262a1a59d2ef2"
764 |
765 | [metadata.files]
766 | aiosqlite = [
767 | {file = "aiosqlite-0.17.0-py3-none-any.whl", hash = "sha256:6c49dc6d3405929b1d08eeccc72306d3677503cc5e5e43771efc1e00232e8231"},
768 | {file = "aiosqlite-0.17.0.tar.gz", hash = "sha256:f0e6acc24bc4864149267ac82fb46dfb3be4455f99fe21df82609cc6e6baee51"},
769 | ]
770 | appdirs = [
771 | {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
772 | {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
773 | ]
774 | atomicwrites = [
775 | {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
776 | {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
777 | ]
778 | attrs = [
779 | {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
780 | {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
781 | ]
782 | black = [
783 | {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"},
784 | ]
785 | certifi = [
786 | {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"},
787 | {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"},
788 | ]
789 | click = [
790 | {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"},
791 | {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"},
792 | ]
793 | colorama = [
794 | {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"},
795 | {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"},
796 | ]
797 | googleapis-common-protos = [
798 | {file = "googleapis-common-protos-1.54.0.tar.gz", hash = "sha256:a4031d6ec6c2b1b6dc3e0be7e10a1bd72fb0b18b07ef9be7b51f2c1004ce2437"},
799 | {file = "googleapis_common_protos-1.54.0-py2.py3-none-any.whl", hash = "sha256:e54345a2add15dc5e1a7891c27731ff347b4c33765d79b5ed7026a6c0c7cbcae"},
800 | ]
801 | grpcio = [
802 | {file = "grpcio-1.43.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:a4e786a8ee8b30b25d70ee52cda6d1dbba2a8ca2f1208d8e20ed8280774f15c8"},
803 | {file = "grpcio-1.43.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:af9c3742f6c13575c0d4147a8454da0ff5308c4d9469462ff18402c6416942fe"},
804 | {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:fdac966699707b5554b815acc272d81e619dd0999f187cd52a61aef075f870ee"},
805 | {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e463b4aa0a6b31cf2e57c4abc1a1b53531a18a570baeed39d8d7b65deb16b7e"},
806 | {file = "grpcio-1.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11d05402e0ac3a284443d8a432d3dfc76a6bd3f7b5858cddd75617af2d7bd9b"},
807 | {file = "grpcio-1.43.0-cp310-cp310-win32.whl", hash = "sha256:c36f418c925a41fccada8f7ae9a3d3e227bfa837ddbfddd3d8b0ac252d12dda9"},
808 | {file = "grpcio-1.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:772b943f34374744f70236bbbe0afe413ed80f9ae6303503f85e2b421d4bca92"},
809 | {file = "grpcio-1.43.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:cbc9b83211d905859dcf234ad39d7193ff0f05bfc3269c364fb0d114ee71de59"},
810 | {file = "grpcio-1.43.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:fb7229fa2a201a0c377ff3283174ec966da8f9fd7ffcc9a92f162d2e7fc9025b"},
811 | {file = "grpcio-1.43.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:17b75f220ee6923338155b4fcef4c38802b9a57bc57d112c9599a13a03e99f8d"},
812 | {file = "grpcio-1.43.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:6620a5b751b099b3b25553cfc03dfcd873cda06f9bb2ff7e9948ac7090e20f05"},
813 | {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:1898f999383baac5fcdbdef8ea5b1ef204f38dc211014eb6977ac6e55944d738"},
814 | {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47b6821238d8978014d23b1132713dac6c2d72cbb561cf257608b1673894f90a"},
815 | {file = "grpcio-1.43.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80398e9fb598060fa41050d1220f5a2440fe74ff082c36dda41ac3215ebb5ddd"},
816 | {file = "grpcio-1.43.0-cp36-cp36m-win32.whl", hash = "sha256:0110310eff07bb69782f53b7a947490268c4645de559034c43c0a635612e250f"},
817 | {file = "grpcio-1.43.0-cp36-cp36m-win_amd64.whl", hash = "sha256:45401d00f2ee46bde75618bf33e9df960daa7980e6e0e7328047191918c98504"},
818 | {file = "grpcio-1.43.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:af78ac55933811e6a25141336b1f2d5e0659c2f568d44d20539b273792563ca7"},
819 | {file = "grpcio-1.43.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8b2b9dc4d7897566723b77422e11c009a0ebd397966b165b21b89a62891a9fdf"},
820 | {file = "grpcio-1.43.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:77ef653f966934b3bfdd00e4f2064b68880eb40cf09b0b99edfa5ee22a44f559"},
821 | {file = "grpcio-1.43.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e95b5d62ec26d0cd0b90c202d73e7cb927c369c3358e027225239a4e354967dc"},
822 | {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:04239e8f71db832c26bbbedb4537b37550a39d77681d748ab4678e58dd6455d6"},
823 | {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b4a7152187a49767a47d1413edde2304c96f41f7bc92cc512e230dfd0fba095"},
824 | {file = "grpcio-1.43.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8cc936a29c65ab39714e1ba67a694c41218f98b6e2a64efb83f04d9abc4386b"},
825 | {file = "grpcio-1.43.0-cp37-cp37m-win32.whl", hash = "sha256:577e024c8dd5f27cd98ba850bc4e890f07d4b5942e5bc059a3d88843a2f48f66"},
826 | {file = "grpcio-1.43.0-cp37-cp37m-win_amd64.whl", hash = "sha256:138f57e3445d4a48d9a8a5af1538fdaafaa50a0a3c243f281d8df0edf221dc02"},
827 | {file = "grpcio-1.43.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:08cf25f2936629db062aeddbb594bd76b3383ab0ede75ef0461a3b0bc3a2c150"},
828 | {file = "grpcio-1.43.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:01f4b887ed703fe82ebe613e1d2dadea517891725e17e7a6134dcd00352bd28c"},
829 | {file = "grpcio-1.43.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0aa8285f284338eb68962fe1a830291db06f366ea12f213399b520c062b01f65"},
830 | {file = "grpcio-1.43.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:0edbfeb6729aa9da33ce7e28fb7703b3754934115454ae45e8cc1db601756fd3"},
831 | {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:c354017819201053d65212befd1dcb65c2d91b704d8977e696bae79c47cd2f82"},
832 | {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50cfb7e1067ee5e00b8ab100a6b7ea322d37ec6672c0455106520b5891c4b5f5"},
833 | {file = "grpcio-1.43.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57f1aeb65ed17dfb2f6cd717cc109910fe395133af7257a9c729c0b9604eac10"},
834 | {file = "grpcio-1.43.0-cp38-cp38-win32.whl", hash = "sha256:fa26a8bbb3fe57845acb1329ff700d5c7eaf06414c3e15f4cb8923f3a466ef64"},
835 | {file = "grpcio-1.43.0-cp38-cp38-win_amd64.whl", hash = "sha256:ade8b79a6b6aea68adb9d4bfeba5d647667d842202c5d8f3ba37ac1dc8e5c09c"},
836 | {file = "grpcio-1.43.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:124e718faf96fe44c98b05f3f475076be8b5198bb4c52a13208acf88a8548ba9"},
837 | {file = "grpcio-1.43.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2f96142d0abc91290a63ba203f01649e498302b1b6007c67bad17f823ecde0cf"},
838 | {file = "grpcio-1.43.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:31e6e489ccd8f08884b9349a39610982df48535881ec34f05a11c6e6b6ebf9d0"},
839 | {file = "grpcio-1.43.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0e731f660e1e68238f56f4ce11156f02fd06dc58bc7834778d42c0081d4ef5ad"},
840 | {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1f16725a320460435a8a5339d8b06c4e00d307ab5ad56746af2e22b5f9c50932"},
841 | {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4b4543e13acb4806917d883d0f70f21ba93b29672ea81f4aaba14821aaf9bb0"},
842 | {file = "grpcio-1.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:594aaa0469f4fca7773e80d8c27bf1298e7bbce5f6da0f084b07489a708f16ab"},
843 | {file = "grpcio-1.43.0-cp39-cp39-win32.whl", hash = "sha256:5449ae564349e7a738b8c38583c0aad954b0d5d1dd3cea68953bfc32eaee11e3"},
844 | {file = "grpcio-1.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:bdf41550815a831384d21a498b20597417fd31bd084deb17d31ceb39ad9acc79"},
845 | {file = "grpcio-1.43.0.tar.gz", hash = "sha256:735d9a437c262ab039d02defddcb9f8f545d7009ae61c0114e19dda3843febe5"},
846 | ]
847 | grpcio-status = [
848 | {file = "grpcio-status-1.43.0.tar.gz", hash = "sha256:21759006f36a7ffbff187d4191f4118c072d8aa9fa6823a11aad7842a3c6ccd0"},
849 | {file = "grpcio_status-1.43.0-py3-none-any.whl", hash = "sha256:9036b24f5769adafdc3e91d9434c20e9ede0b30f50cc6bff105c0f414bb9e0e0"},
850 | ]
851 | grpcio-tools = [
852 | {file = "grpcio-tools-1.43.0.tar.gz", hash = "sha256:f42f1d713096808b1b0472dd2a3749b712d13f0092dab9442d9c096446e860b2"},
853 | {file = "grpcio_tools-1.43.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:766771ef5b60ebcba0a3bdb302dd92fda988552eb8508451ff6d97371eac38e5"},
854 | {file = "grpcio_tools-1.43.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:178a881db5de0f89abf3aeeb260ecfd1116cc31f88fb600a45fb5b19c3323b33"},
855 | {file = "grpcio_tools-1.43.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:019f55929e963214471825c7a4cdab7a57069109d5621b24e4db7b428b5fe47d"},
856 | {file = "grpcio_tools-1.43.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6c0e1d1b47554c580882d392b739df91a55b6a8ec696b2b2e1bbc127d63df2c"},
857 | {file = "grpcio_tools-1.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c5c80098fa69593b828d119973744de03c3f9a6935df8a02e4329a39b7072f5"},
858 | {file = "grpcio_tools-1.43.0-cp310-cp310-win32.whl", hash = "sha256:53f7dcaa4218df1b64b39d0fc7236a8270e8ab2db4ab8cd1d2fda0e6d4544946"},
859 | {file = "grpcio_tools-1.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:5be6d402b0cafef20ba3abb3baa37444961d9a9c4a6434d3d7c1f082f7697deb"},
860 | {file = "grpcio_tools-1.43.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:8953fdebef6905d7ff13a5a376b21b6fecd808d18bf4f0d3990ffe4a215d56eb"},
861 | {file = "grpcio_tools-1.43.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:18870dcc8369ac4c37213e6796d8dc20494ea770670204f5e573f88e69eaaf0b"},
862 | {file = "grpcio_tools-1.43.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:010a4be6a2fccbd6741a4809c5da7f2e39a1e9e227745e6b495be567638bbeb9"},
863 | {file = "grpcio_tools-1.43.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:426f16b6b14d533ce61249a18fbcd1a23a4fa0c71a6d7ab347b1c7f862847bb8"},
864 | {file = "grpcio_tools-1.43.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:f974cb0bea88bac892c3ed16da92c6ac88cff0fea17f24bf0e1892eb4d27cd00"},
865 | {file = "grpcio_tools-1.43.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55c2e604536e06248e2f81e549737fb3a180c8117832e494a0a8a81fbde44837"},
866 | {file = "grpcio_tools-1.43.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f97f9ffa49348fb24692751d2d4455ef2968bd07fe536d65597caaec14222629"},
867 | {file = "grpcio_tools-1.43.0-cp36-cp36m-win32.whl", hash = "sha256:6eaf97414237b8670ae9fa623879a26eabcc4c635b550c79a81e17eb600d6ae3"},
868 | {file = "grpcio_tools-1.43.0-cp36-cp36m-win_amd64.whl", hash = "sha256:04f100c1f6a7c72c537760c33582f6970070bd6fa6676b529bccfa31cc58bc79"},
869 | {file = "grpcio_tools-1.43.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:9dbb6d1f58f26d88ae689f1b49de84cfaf4786c81c01b9001d3ceea178116a07"},
870 | {file = "grpcio_tools-1.43.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:63862a441a77f6326ea9fe4bb005882f0e363441a5968d9cf8621c34d3dadc2b"},
871 | {file = "grpcio_tools-1.43.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6dea0cb2e79b67593553ed8662f70e4310599fa8850fc0e056b19fcb63572b7f"},
872 | {file = "grpcio_tools-1.43.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3eb4aa5b0e578c3d9d9da8e37a2ef73654287a498b8081543acd0db7f0ec1a9c"},
873 | {file = "grpcio_tools-1.43.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:09464c6b17663088144b7e6ea10e9465efdcee03d4b2ffefab39a799bd8360f8"},
874 | {file = "grpcio_tools-1.43.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2458d6b0404f83d95aef00cec01f310d30e9719564a25be50e39b259f6a2da5d"},
875 | {file = "grpcio_tools-1.43.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e9bb5da437364b7dcd2d3c6850747081ecbec0ba645c96c6d471f7e21fdcadb"},
876 | {file = "grpcio_tools-1.43.0-cp37-cp37m-win32.whl", hash = "sha256:2737f749a6ab965748629e619b35f3e1cbe5820fc79e34c88f73cb99efc71dde"},
877 | {file = "grpcio_tools-1.43.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c39cbe7b902bb92f9afaa035091f5e2b8be35acbac501fec8cb6a0be7d7cdbbd"},
878 | {file = "grpcio_tools-1.43.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:05550ba473cff7c09e905fcfb2263fd1f7600389660194ec022b5d5a3802534b"},
879 | {file = "grpcio_tools-1.43.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:ce13a922db8f5f95c5041d3a4cbf04d942b353f0cba9b251a674f69a31a2d3a6"},
880 | {file = "grpcio_tools-1.43.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:f19d40690c97365c1c1bde81474e6f496d7ab76f87e6d2889c72ad01bac98f2d"},
881 | {file = "grpcio_tools-1.43.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba3da574eb08fcaed541b3fc97ce217360fd86d954fa9ad6a604803d57a2e049"},
882 | {file = "grpcio_tools-1.43.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:efd1eb5880001f5189cfa3a774675cc9bbc8cc51586a3e90fe796394ac8626b8"},
883 | {file = "grpcio_tools-1.43.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:234c7a5af653357df5c616e013173eddda6193146c8ab38f3108c4784f66be26"},
884 | {file = "grpcio_tools-1.43.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7e3662f62d410b3f81823b5fa0f79c6e0e250977a1058e4131867b85138a661"},
885 | {file = "grpcio_tools-1.43.0-cp38-cp38-win32.whl", hash = "sha256:5f2e584d7644ef924e9e042fa151a3bb9f7c28ef1ae260ee6c9cb327982b5e94"},
886 | {file = "grpcio_tools-1.43.0-cp38-cp38-win_amd64.whl", hash = "sha256:98dcb5b756855110fb661ccd6a93a716610b7efcd5720a3aec01358a1a892c30"},
887 | {file = "grpcio_tools-1.43.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:61ef6cb6ccf9b9c27bb85fffc5338194bcf444df502196c2ad0ff8df4706d41e"},
888 | {file = "grpcio_tools-1.43.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:1def9b68ac9e62674929bc6590a33d89635f1cf16016657d9e16a69f41aa5c36"},
889 | {file = "grpcio_tools-1.43.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:b68cc0c95a0f8c757e8d69b5fa46111d5c9d887ae62af28f827649b1d1b70fe1"},
890 | {file = "grpcio_tools-1.43.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:e956b5c3b586d7b27eae49fb06f544a26288596fe12e22ffec768109717276d1"},
891 | {file = "grpcio_tools-1.43.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:671e61bbc91d8d568f12c3654bb5a91fce9f3fdfd5ec2cfc60c2d3a840449aa6"},
892 | {file = "grpcio_tools-1.43.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7173ed19854d1066bce9bdc09f735ca9c13e74a25d47a1cc5d1fe803b53bffb"},
893 | {file = "grpcio_tools-1.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1adb0dbcc1c10b86dcda910b8f56e39210e401bcee923dba166ba923a5f4696a"},
894 | {file = "grpcio_tools-1.43.0-cp39-cp39-win32.whl", hash = "sha256:ebfb94ddb454a6dc3a505d9531dc81c948e6364e181b8795bfad3f3f479974dc"},
895 | {file = "grpcio_tools-1.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:d21928b680e6e29538688cffbf53f3d5a53cff0ec8f0c33139641700045bdf1a"},
896 | ]
897 | h11 = [
898 | {file = "h11-0.13.0-py3-none-any.whl", hash = "sha256:8ddd78563b633ca55346c8cd41ec0af27d3c79931828beffb46ce70a379e7442"},
899 | {file = "h11-0.13.0.tar.gz", hash = "sha256:70813c1135087a248a4d38cc0e1a0181ffab2188141a93eaf567940c3957ff06"},
900 | ]
901 | httpcore = [
902 | {file = "httpcore-0.12.3-py3-none-any.whl", hash = "sha256:93e822cd16c32016b414b789aeff4e855d0ccbfc51df563ee34d4dbadbb3bcdc"},
903 | {file = "httpcore-0.12.3.tar.gz", hash = "sha256:37ae835fb370049b2030c3290e12ed298bf1473c41bb72ca4aa78681eba9b7c9"},
904 | ]
905 | httpx = [
906 | {file = "httpx-0.17.1-py3-none-any.whl", hash = "sha256:d379653bd457e8257eb0df99cb94557e4aac441b7ba948e333be969298cac272"},
907 | {file = "httpx-0.17.1.tar.gz", hash = "sha256:cc2a55188e4b25272d2bcd46379d300f632045de4377682aa98a8a6069d55967"},
908 | ]
909 | humanize = [
910 | {file = "humanize-3.13.1-py3-none-any.whl", hash = "sha256:a6f7cc1597db69a4e571ad5e19b4da07ee871da5a9de2b233dbfab02d98e9754"},
911 | {file = "humanize-3.13.1.tar.gz", hash = "sha256:12f113f2e369dac7f35d3823f49262934f4a22a53a6d3d4c86b736f50db88c7b"},
912 | ]
913 | idna = [
914 | {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
915 | {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
916 | ]
917 | iniconfig = [
918 | {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
919 | {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
920 | ]
921 | iso8601 = [
922 | {file = "iso8601-0.1.16-py2.py3-none-any.whl", hash = "sha256:906714829fedbc89955d52806c903f2332e3948ed94e31e85037f9e0226b8376"},
923 | {file = "iso8601-0.1.16.tar.gz", hash = "sha256:36532f77cc800594e8f16641edae7f1baf7932f05d8e508545b95fc53c6dc85b"},
924 | ]
925 | loguru = [
926 | {file = "loguru-0.5.3-py3-none-any.whl", hash = "sha256:f8087ac396b5ee5f67c963b495d615ebbceac2796379599820e324419d53667c"},
927 | {file = "loguru-0.5.3.tar.gz", hash = "sha256:b28e72ac7a98be3d28ad28570299a393dfcd32e5e3f6a353dec94675767b6319"},
928 | ]
929 | mypy-extensions = [
930 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
931 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
932 | ]
933 | packaging = [
934 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"},
935 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"},
936 | ]
937 | pathspec = [
938 | {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"},
939 | {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"},
940 | ]
941 | pluggy = [
942 | {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
943 | {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
944 | ]
945 | protobuf = [
946 | {file = "protobuf-3.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1cb2ed66aac593adbf6dca4f07cd7ee7e2958b17bbc85b2cc8bc564ebeb258ec"},
947 | {file = "protobuf-3.19.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:898bda9cd37ec0c781b598891e86435de80c3bfa53eb483a9dac5a11ec93e942"},
948 | {file = "protobuf-3.19.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ad761ef3be34c8bdc7285bec4b40372a8dad9e70cfbdc1793cd3cf4c1a4ce74"},
949 | {file = "protobuf-3.19.3-cp310-cp310-win32.whl", hash = "sha256:2cddcbcc222f3144765ccccdb35d3621dc1544da57a9aca7e1944c1a4fe3db11"},
950 | {file = "protobuf-3.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:6202df8ee8457cb00810c6e76ced480f22a1e4e02c899a14e7b6e6e1de09f938"},
951 | {file = "protobuf-3.19.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:397d82f1c58b76445469c8c06b8dee1ff67b3053639d054f52599a458fac9bc6"},
952 | {file = "protobuf-3.19.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e54b8650e849ee8e95e481024bff92cf98f5ec61c7650cb838d928a140adcb63"},
953 | {file = "protobuf-3.19.3-cp36-cp36m-win32.whl", hash = "sha256:3bf3a07d17ba3511fe5fa916afb7351f482ab5dbab5afe71a7a384274a2cd550"},
954 | {file = "protobuf-3.19.3-cp36-cp36m-win_amd64.whl", hash = "sha256:afa8122de8064fd577f49ae9eef433561c8ace97a0a7b969d56e8b1d39b5d177"},
955 | {file = "protobuf-3.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18c40a1b8721026a85187640f1786d52407dc9c1ba8ec38accb57a46e84015f6"},
956 | {file = "protobuf-3.19.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:af7238849fa79285d448a24db686517570099739527a03c9c2971cce99cc5ae2"},
957 | {file = "protobuf-3.19.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e765e6dfbbb02c55e4d6d1145743401a84fc0b508f5a81b2c5a738cf86353139"},
958 | {file = "protobuf-3.19.3-cp37-cp37m-win32.whl", hash = "sha256:c781402ed5396ab56358d7b866d78c03a77cbc26ba0598d8bb0ac32084b1a257"},
959 | {file = "protobuf-3.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:544fe9705189b249380fae07952d220c97f5c6c9372a6f936cc83a79601dcb70"},
960 | {file = "protobuf-3.19.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84bf3aa3efb00dbe1c7ed55da0f20800b0662541e582d7e62b3e1464d61ed365"},
961 | {file = "protobuf-3.19.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:3f80a3491eaca767cdd86cb8660dc778f634b44abdb0dffc9b2a8e8d0cd617d0"},
962 | {file = "protobuf-3.19.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9401d96552befcc7311f5ef8f0fa7dba0ef5fd805466b158b141606cd0ab6a8"},
963 | {file = "protobuf-3.19.3-cp38-cp38-win32.whl", hash = "sha256:ef02d112c025e83db5d1188a847e358beab3e4bbfbbaf10eaf69e67359af51b2"},
964 | {file = "protobuf-3.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:1291a0a7db7d792745c99d4657b4c5c4942695c8b1ac1bfb993a34035ec123f7"},
965 | {file = "protobuf-3.19.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49677e5e9c7ea1245a90c2e8a00d304598f22ea3aa0628f0e0a530a9e70665fa"},
966 | {file = "protobuf-3.19.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:df2ba379ee42427e8fcc6a0a76843bff6efb34ef5266b17f95043939b5e25b69"},
967 | {file = "protobuf-3.19.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2acd7ca329be544d1a603d5f13a4e34a3791c90d651ebaf130ba2e43ae5397c6"},
968 | {file = "protobuf-3.19.3-cp39-cp39-win32.whl", hash = "sha256:b53519b2ebec70cfe24b4ddda21e9843f0918d7c3627a785393fb35d402ab8ad"},
969 | {file = "protobuf-3.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:8ceaf5fdb72c8e1fcb7be9f2b3b07482ce058a3548180c0bdd5c7e4ac5e14165"},
970 | {file = "protobuf-3.19.3-py2.py3-none-any.whl", hash = "sha256:f6d4b5b7595a57e69eb7314c67bef4a3c745b4caf91accaf72913d8e0635111b"},
971 | {file = "protobuf-3.19.3.tar.gz", hash = "sha256:d975a6314fbf5c524d4981e24294739216b5fb81ef3c14b86fb4b045d6690907"},
972 | ]
973 | psutil = [
974 | {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:55ce319452e3d139e25d6c3f85a1acf12d1607ddedea5e35fb47a552c051161b"},
975 | {file = "psutil-5.9.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7336292a13a80eb93c21f36bde4328aa748a04b68c13d01dfddd67fc13fd0618"},
976 | {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:cb8d10461c1ceee0c25a64f2dd54872b70b89c26419e147a05a10b753ad36ec2"},
977 | {file = "psutil-5.9.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:7641300de73e4909e5d148e90cc3142fb890079e1525a840cf0dfd39195239fd"},
978 | {file = "psutil-5.9.0-cp27-none-win32.whl", hash = "sha256:ea42d747c5f71b5ccaa6897b216a7dadb9f52c72a0fe2b872ef7d3e1eacf3ba3"},
979 | {file = "psutil-5.9.0-cp27-none-win_amd64.whl", hash = "sha256:ef216cc9feb60634bda2f341a9559ac594e2eeaadd0ba187a4c2eb5b5d40b91c"},
980 | {file = "psutil-5.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90a58b9fcae2dbfe4ba852b57bd4a1dded6b990a33d6428c7614b7d48eccb492"},
981 | {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d41f8b3e9ebb6b6110057e40019a432e96aae2008951121ba4e56040b84f3"},
982 | {file = "psutil-5.9.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:742c34fff804f34f62659279ed5c5b723bb0195e9d7bd9907591de9f8f6558e2"},
983 | {file = "psutil-5.9.0-cp310-cp310-win32.whl", hash = "sha256:8293942e4ce0c5689821f65ce6522ce4786d02af57f13c0195b40e1edb1db61d"},
984 | {file = "psutil-5.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:9b51917c1af3fa35a3f2dabd7ba96a2a4f19df3dec911da73875e1edaf22a40b"},
985 | {file = "psutil-5.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9805fed4f2a81de98ae5fe38b75a74c6e6ad2df8a5c479594c7629a1fe35f56"},
986 | {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c51f1af02334e4b516ec221ee26b8fdf105032418ca5a5ab9737e8c87dafe203"},
987 | {file = "psutil-5.9.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32acf55cb9a8cbfb29167cd005951df81b567099295291bcfd1027365b36591d"},
988 | {file = "psutil-5.9.0-cp36-cp36m-win32.whl", hash = "sha256:e5c783d0b1ad6ca8a5d3e7b680468c9c926b804be83a3a8e95141b05c39c9f64"},
989 | {file = "psutil-5.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d62a2796e08dd024b8179bd441cb714e0f81226c352c802fca0fd3f89eeacd94"},
990 | {file = "psutil-5.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d00a664e31921009a84367266b35ba0aac04a2a6cad09c550a89041034d19a0"},
991 | {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7779be4025c540d1d65a2de3f30caeacc49ae7a2152108adeaf42c7534a115ce"},
992 | {file = "psutil-5.9.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:072664401ae6e7c1bfb878c65d7282d4b4391f1bc9a56d5e03b5a490403271b5"},
993 | {file = "psutil-5.9.0-cp37-cp37m-win32.whl", hash = "sha256:df2c8bd48fb83a8408c8390b143c6a6fa10cb1a674ca664954de193fdcab36a9"},
994 | {file = "psutil-5.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1d7b433519b9a38192dfda962dd8f44446668c009833e1429a52424624f408b4"},
995 | {file = "psutil-5.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3400cae15bdb449d518545cbd5b649117de54e3596ded84aacabfbb3297ead2"},
996 | {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2237f35c4bbae932ee98902a08050a27821f8f6dfa880a47195e5993af4702d"},
997 | {file = "psutil-5.9.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1070a9b287846a21a5d572d6dddd369517510b68710fca56b0e9e02fd24bed9a"},
998 | {file = "psutil-5.9.0-cp38-cp38-win32.whl", hash = "sha256:76cebf84aac1d6da5b63df11fe0d377b46b7b500d892284068bacccf12f20666"},
999 | {file = "psutil-5.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:3151a58f0fbd8942ba94f7c31c7e6b310d2989f4da74fcbf28b934374e9bf841"},
1000 | {file = "psutil-5.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:539e429da49c5d27d5a58e3563886057f8fc3868a5547b4f1876d9c0f007bccf"},
1001 | {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58c7d923dc209225600aec73aa2c4ae8ea33b1ab31bc11ef8a5933b027476f07"},
1002 | {file = "psutil-5.9.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3611e87eea393f779a35b192b46a164b1d01167c9d323dda9b1e527ea69d697d"},
1003 | {file = "psutil-5.9.0-cp39-cp39-win32.whl", hash = "sha256:4e2fb92e3aeae3ec3b7b66c528981fd327fb93fd906a77215200404444ec1845"},
1004 | {file = "psutil-5.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:7d190ee2eaef7831163f254dc58f6d2e2a22e27382b936aab51c835fc080c3d3"},
1005 | {file = "psutil-5.9.0.tar.gz", hash = "sha256:869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"},
1006 | ]
1007 | py = [
1008 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
1009 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
1010 | ]
1011 | pyparsing = [
1012 | {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"},
1013 | {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"},
1014 | ]
1015 | pypika-tortoise = [
1016 | {file = "pypika-tortoise-0.1.3.tar.gz", hash = "sha256:ecdf2d6e0aeb0e15880d9e2ead41362ec7320f37fb25a3a71664c2e1105ad218"},
1017 | {file = "pypika_tortoise-0.1.3-py3-none-any.whl", hash = "sha256:28fb2715a94ff2f3bc1c4ef6cc46c385c244c27d100aac760231bf612361d5ba"},
1018 | ]
1019 | pytest = [
1020 | {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"},
1021 | {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"},
1022 | ]
1023 | pytest-asyncio = [
1024 | {file = "pytest-asyncio-0.14.0.tar.gz", hash = "sha256:9882c0c6b24429449f5f969a5158b528f39bde47dc32e85b9f0403965017e700"},
1025 | {file = "pytest_asyncio-0.14.0-py3-none-any.whl", hash = "sha256:2eae1e34f6c68fc0a9dc12d4bea190483843ff4708d24277c41568d6b6044f1d"},
1026 | ]
1027 | pytest-httpserver = [
1028 | {file = "pytest_httpserver-1.0.3-py3-none-any.whl", hash = "sha256:f613b4dbb18cb2b2d78a8f6a66f22c4fea92167684d9ae355e33776759794827"},
1029 | {file = "pytest_httpserver-1.0.3.tar.gz", hash = "sha256:87561c4fa6a7bc306d76d1979a3eb9d54eb26bfb2f3f51f1643bf3c090ce629d"},
1030 | ]
1031 | pytest-ordering = [
1032 | {file = "pytest-ordering-0.6.tar.gz", hash = "sha256:561ad653626bb171da78e682f6d39ac33bb13b3e272d406cd555adb6b006bda6"},
1033 | {file = "pytest_ordering-0.6-py2-none-any.whl", hash = "sha256:27fba3fc265f5d0f8597e7557885662c1bdc1969497cd58aff6ed21c3b617de2"},
1034 | {file = "pytest_ordering-0.6-py3-none-any.whl", hash = "sha256:3f314a178dbeb6777509548727dc69edf22d6d9a2867bf2d310ab85c403380b6"},
1035 | ]
1036 | pytz = [
1037 | {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"},
1038 | {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"},
1039 | ]
1040 | regex = [
1041 | {file = "regex-2022.1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:34316bf693b1d2d29c087ee7e4bb10cdfa39da5f9c50fa15b07489b4ab93a1b5"},
1042 | {file = "regex-2022.1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7a0b9f6a1a15d494b35f25ed07abda03209fa76c33564c09c9e81d34f4b919d7"},
1043 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f99112aed4fb7cee00c7f77e8b964a9b10f69488cdff626ffd797d02e2e4484f"},
1044 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a2bf98ac92f58777c0fafc772bf0493e67fcf677302e0c0a630ee517a43b949"},
1045 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8618d9213a863c468a865e9d2ec50221015f7abf52221bc927152ef26c484b4c"},
1046 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b52cc45e71657bc4743a5606d9023459de929b2a198d545868e11898ba1c3f59"},
1047 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e12949e5071c20ec49ef00c75121ed2b076972132fc1913ddf5f76cae8d10b4"},
1048 | {file = "regex-2022.1.18-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b02e3e72665cd02afafb933453b0c9f6c59ff6e3708bd28d0d8580450e7e88af"},
1049 | {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:abfcb0ef78df0ee9df4ea81f03beea41849340ce33a4c4bd4dbb99e23ec781b6"},
1050 | {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6213713ac743b190ecbf3f316d6e41d099e774812d470422b3a0f137ea635832"},
1051 | {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:61ebbcd208d78658b09e19c78920f1ad38936a0aa0f9c459c46c197d11c580a0"},
1052 | {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:b013f759cd69cb0a62de954d6d2096d648bc210034b79b1881406b07ed0a83f9"},
1053 | {file = "regex-2022.1.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9187500d83fd0cef4669385cbb0961e227a41c0c9bc39219044e35810793edf7"},
1054 | {file = "regex-2022.1.18-cp310-cp310-win32.whl", hash = "sha256:94c623c331a48a5ccc7d25271399aff29729fa202c737ae3b4b28b89d2b0976d"},
1055 | {file = "regex-2022.1.18-cp310-cp310-win_amd64.whl", hash = "sha256:1a171eaac36a08964d023eeff740b18a415f79aeb212169080c170ec42dd5184"},
1056 | {file = "regex-2022.1.18-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:49810f907dfe6de8da5da7d2b238d343e6add62f01a15d03e2195afc180059ed"},
1057 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d2f5c3f7057530afd7b739ed42eb04f1011203bc5e4663e1e1d01bb50f813e3"},
1058 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85ffd6b1cb0dfb037ede50ff3bef80d9bf7fa60515d192403af6745524524f3b"},
1059 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba37f11e1d020969e8a779c06b4af866ffb6b854d7229db63c5fdddfceaa917f"},
1060 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e27ea1ebe4a561db75a880ac659ff439dec7f55588212e71700bb1ddd5af9"},
1061 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:37978254d9d00cda01acc1997513f786b6b971e57b778fbe7c20e30ae81a97f3"},
1062 | {file = "regex-2022.1.18-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e54a1eb9fd38f2779e973d2f8958fd575b532fe26013405d1afb9ee2374e7ab8"},
1063 | {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:768632fd8172ae03852e3245f11c8a425d95f65ff444ce46b3e673ae5b057b74"},
1064 | {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:de2923886b5d3214be951bc2ce3f6b8ac0d6dfd4a0d0e2a4d2e5523d8046fdfb"},
1065 | {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1333b3ce73269f986b1fa4d5d395643810074dc2de5b9d262eb258daf37dc98f"},
1066 | {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:d19a34f8a3429bd536996ad53597b805c10352a8561d8382e05830df389d2b43"},
1067 | {file = "regex-2022.1.18-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d2f355a951f60f0843f2368b39970e4667517e54e86b1508e76f92b44811a8a"},
1068 | {file = "regex-2022.1.18-cp36-cp36m-win32.whl", hash = "sha256:2245441445099411b528379dee83e56eadf449db924648e5feb9b747473f42e3"},
1069 | {file = "regex-2022.1.18-cp36-cp36m-win_amd64.whl", hash = "sha256:25716aa70a0d153cd844fe861d4f3315a6ccafce22b39d8aadbf7fcadff2b633"},
1070 | {file = "regex-2022.1.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7e070d3aef50ac3856f2ef5ec7214798453da878bb5e5a16c16a61edf1817cc3"},
1071 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22709d701e7037e64dae2a04855021b62efd64a66c3ceed99dfd684bfef09e38"},
1072 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9099bf89078675c372339011ccfc9ec310310bf6c292b413c013eb90ffdcafc"},
1073 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04611cc0f627fc4a50bc4a9a2e6178a974c6a6a4aa9c1cca921635d2c47b9c87"},
1074 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:552a39987ac6655dad4bf6f17dd2b55c7b0c6e949d933b8846d2e312ee80005a"},
1075 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e031899cb2bc92c0cf4d45389eff5b078d1936860a1be3aa8c94fa25fb46ed8"},
1076 | {file = "regex-2022.1.18-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2dacb3dae6b8cc579637a7b72f008bff50a94cde5e36e432352f4ca57b9e54c4"},
1077 | {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e5c31d70a478b0ca22a9d2d76d520ae996214019d39ed7dd93af872c7f301e52"},
1078 | {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb804c7d0bfbd7e3f33924ff49757de9106c44e27979e2492819c16972ec0da2"},
1079 | {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:36b2d700a27e168fa96272b42d28c7ac3ff72030c67b32f37c05616ebd22a202"},
1080 | {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:16f81025bb3556eccb0681d7946e2b35ff254f9f888cff7d2120e8826330315c"},
1081 | {file = "regex-2022.1.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:da80047524eac2acf7c04c18ac7a7da05a9136241f642dd2ed94269ef0d0a45a"},
1082 | {file = "regex-2022.1.18-cp37-cp37m-win32.whl", hash = "sha256:6ca45359d7a21644793de0e29de497ef7f1ae7268e346c4faf87b421fea364e6"},
1083 | {file = "regex-2022.1.18-cp37-cp37m-win_amd64.whl", hash = "sha256:38289f1690a7e27aacd049e420769b996826f3728756859420eeee21cc857118"},
1084 | {file = "regex-2022.1.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6014038f52b4b2ac1fa41a58d439a8a00f015b5c0735a0cd4b09afe344c94899"},
1085 | {file = "regex-2022.1.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b5d6f9aed3153487252d00a18e53f19b7f52a1651bc1d0c4b5844bc286dfa52"},
1086 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9d24b03daf7415f78abc2d25a208f234e2c585e5e6f92f0204d2ab7b9ab48e3"},
1087 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf594cc7cc9d528338d66674c10a5b25e3cde7dd75c3e96784df8f371d77a298"},
1088 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd914db437ec25bfa410f8aa0aa2f3ba87cdfc04d9919d608d02330947afaeab"},
1089 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b6840b6448203228a9d8464a7a0d99aa8fa9f027ef95fe230579abaf8a6ee1"},
1090 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11772be1eb1748e0e197a40ffb82fb8fd0d6914cd147d841d9703e2bef24d288"},
1091 | {file = "regex-2022.1.18-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a602bdc8607c99eb5b391592d58c92618dcd1537fdd87df1813f03fed49957a6"},
1092 | {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7e26eac9e52e8ce86f915fd33380f1b6896a2b51994e40bb094841e5003429b4"},
1093 | {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:519c0b3a6fbb68afaa0febf0d28f6c4b0a1074aefc484802ecb9709faf181607"},
1094 | {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3c7ea86b9ca83e30fa4d4cd0eaf01db3ebcc7b2726a25990966627e39577d729"},
1095 | {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:51f02ca184518702975b56affde6c573ebad4e411599005ce4468b1014b4786c"},
1096 | {file = "regex-2022.1.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:385ccf6d011b97768a640e9d4de25412204fbe8d6b9ae39ff115d4ff03f6fe5d"},
1097 | {file = "regex-2022.1.18-cp38-cp38-win32.whl", hash = "sha256:1f8c0ae0a0de4e19fddaaff036f508db175f6f03db318c80bbc239a1def62d02"},
1098 | {file = "regex-2022.1.18-cp38-cp38-win_amd64.whl", hash = "sha256:760c54ad1b8a9b81951030a7e8e7c3ec0964c1cb9fee585a03ff53d9e531bb8e"},
1099 | {file = "regex-2022.1.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:93c20777a72cae8620203ac11c4010365706062aa13aaedd1a21bb07adbb9d5d"},
1100 | {file = "regex-2022.1.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6aa427c55a0abec450bca10b64446331b5ca8f79b648531138f357569705bc4a"},
1101 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38baee6bdb7fe1b110b6b3aaa555e6e872d322206b7245aa39572d3fc991ee4"},
1102 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:752e7ddfb743344d447367baa85bccd3629c2c3940f70506eb5f01abce98ee68"},
1103 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8acef4d8a4353f6678fd1035422a937c2170de58a2b29f7da045d5249e934101"},
1104 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c73d2166e4b210b73d1429c4f1ca97cea9cc090e5302df2a7a0a96ce55373f1c"},
1105 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24c89346734a4e4d60ecf9b27cac4c1fee3431a413f7aa00be7c4d7bbacc2c4d"},
1106 | {file = "regex-2022.1.18-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:596f5ae2eeddb79b595583c2e0285312b2783b0ec759930c272dbf02f851ff75"},
1107 | {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ecfe51abf7f045e0b9cdde71ca9e153d11238679ef7b5da6c82093874adf3338"},
1108 | {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1d6301f5288e9bdca65fab3de6b7de17362c5016d6bf8ee4ba4cbe833b2eda0f"},
1109 | {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:93cce7d422a0093cfb3606beae38a8e47a25232eea0f292c878af580a9dc7605"},
1110 | {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cf0db26a1f76aa6b3aa314a74b8facd586b7a5457d05b64f8082a62c9c49582a"},
1111 | {file = "regex-2022.1.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:defa0652696ff0ba48c8aff5a1fac1eef1ca6ac9c660b047fc8e7623c4eb5093"},
1112 | {file = "regex-2022.1.18-cp39-cp39-win32.whl", hash = "sha256:6db1b52c6f2c04fafc8da17ea506608e6be7086715dab498570c3e55e4f8fbd1"},
1113 | {file = "regex-2022.1.18-cp39-cp39-win_amd64.whl", hash = "sha256:ebaeb93f90c0903233b11ce913a7cb8f6ee069158406e056f884854c737d2442"},
1114 | {file = "regex-2022.1.18.tar.gz", hash = "sha256:97f32dc03a8054a4c4a5ab5d761ed4861e828b2c200febd4e46857069a483916"},
1115 | ]
1116 | rfc3986 = [
1117 | {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
1118 | {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
1119 | ]
1120 | six = [
1121 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
1122 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
1123 | ]
1124 | sniffio = [
1125 | {file = "sniffio-1.2.0-py3-none-any.whl", hash = "sha256:471b71698eac1c2112a40ce2752bb2f4a4814c22a54a3eed3676bc0f5ca9f663"},
1126 | {file = "sniffio-1.2.0.tar.gz", hash = "sha256:c4666eecec1d3f50960c6bdf61ab7bc350648da6c126e3cf6898d8cd4ddcd3de"},
1127 | ]
1128 | toml = [
1129 | {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
1130 | {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
1131 | ]
1132 | tomlkit = [
1133 | {file = "tomlkit-0.7.2-py2.py3-none-any.whl", hash = "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117"},
1134 | {file = "tomlkit-0.7.2.tar.gz", hash = "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754"},
1135 | ]
1136 | tortoise-orm = [
1137 | {file = "tortoise-orm-0.17.8.tar.gz", hash = "sha256:1f5020e9964d32a4d6ed685d466b5d7285de328a63ee92ee988c1e4baf8fefbf"},
1138 | {file = "tortoise_orm-0.17.8-py3-none-any.whl", hash = "sha256:f18c41bb83be4748a6ca259ed7309ca954b35f5790971824bbc79a11d2b1ef3b"},
1139 | ]
1140 | typed-ast = [
1141 | {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"},
1142 | {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"},
1143 | {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"},
1144 | {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"},
1145 | {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"},
1146 | {file = "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"},
1147 | {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"},
1148 | {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"},
1149 | {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"},
1150 | {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"},
1151 | {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"},
1152 | {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"},
1153 | {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"},
1154 | {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"},
1155 | {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"},
1156 | {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"},
1157 | {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"},
1158 | {file = "typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"},
1159 | {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"},
1160 | {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"},
1161 | {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"},
1162 | {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"},
1163 | {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"},
1164 | {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"},
1165 | ]
1166 | typing-extensions = [
1167 | {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"},
1168 | {file = "typing_extensions-4.0.1.tar.gz", hash = "sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"},
1169 | ]
1170 | uvloop = [
1171 | {file = "uvloop-0.15.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:e71fb9038bfcd7646ca126c5ef19b17e48d4af9e838b2bcfda7a9f55a6552a32"},
1172 | {file = "uvloop-0.15.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7522df4e45e4f25b50adbbbeb5bb9847495c438a628177099d2721f2751ff825"},
1173 | {file = "uvloop-0.15.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae2b325c0f6d748027f7463077e457006b4fdb35a8788f01754aadba825285ee"},
1174 | {file = "uvloop-0.15.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:0de811931e90ae2da9e19ce70ffad73047ab0c1dba7c6e74f9ae1a3aabeb89bd"},
1175 | {file = "uvloop-0.15.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7f4b8a905df909a407c5791fb582f6c03b0d3b491ecdc1cdceaefbc9bf9e08f6"},
1176 | {file = "uvloop-0.15.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d8ffe44ae709f839c54bacf14ed283f41bee90430c3b398e521e10f8d117b3a"},
1177 | {file = "uvloop-0.15.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:63a3288abbc9c8ee979d7e34c34e780b2fbab3e7e53d00b6c80271119f277399"},
1178 | {file = "uvloop-0.15.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5cda65fc60a645470b8525ce014516b120b7057b576fa876cdfdd5e60ab1efbb"},
1179 | {file = "uvloop-0.15.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ff05116ede1ebdd81802df339e5b1d4cab1dfbd99295bf27e90b4cec64d70e9"},
1180 | {file = "uvloop-0.15.3.tar.gz", hash = "sha256:905f0adb0c09c9f44222ee02f6b96fd88b493478fffb7a345287f9444e926030"},
1181 | ]
1182 | werkzeug = [
1183 | {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"},
1184 | {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"},
1185 | ]
1186 | win32-setctime = [
1187 | {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = "sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"},
1188 | {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"},
1189 | ]
1190 | xray-rpc = [
1191 | {file = "xray-rpc-1.5.2.tar.gz", hash = "sha256:ea9aba5742a4a390811feb61330a94ed8b68f746ff1db4dad504d5eacb8b9a69"},
1192 | {file = "xray_rpc-1.5.2-py3-none-any.whl", hash = "sha256:94042813b92ea3061206b82da87984c3a01e902a4868c5731466d152289a531d"},
1193 | ]
1194 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "xray-node"
3 | version = "0.0.1"
4 | description = ""
5 | authors = ["guyue-ls "]
6 | license = "AGPLv3"
7 | packages = [
8 | { include = "xray_node" },
9 | ]
10 | readme = "README.md"
11 | repository = "https://github.com/laoshan-tech/xray-node"
12 | homepage = "https://github.com/laoshan-tech/xray-node"
13 |
14 | [tool.poetry.dependencies]
15 | python = "^3.8"
16 | httpx = "^0.17.1"
17 | psutil = "^5.8.0"
18 | tomlkit = "^0.7.0"
19 | grpcio = "^1.37.0"
20 | grpcio-tools = "^1.37.0"
21 | xray-rpc = "^1.4.2"
22 | uvloop = [
23 | { version = "^0.15.2", platform = "linux|macos" },
24 | ]
25 | grpcio-status = "^1.37.0"
26 | click = "^7.1.2"
27 | tortoise-orm = "^0.17.2"
28 | pytest-httpserver = "^1.0.0"
29 | humanize = "^3.7.1"
30 | loguru = "^0.5.3"
31 |
32 | [tool.poetry.dev-dependencies]
33 | black = "^20.8b1"
34 | pytest-asyncio = "^0.14.0"
35 | pytest-ordering = "^0.6"
36 |
37 | [[tool.poetry.source]]
38 | name = "aliyun"
39 | url = "https://mirrors.aliyun.com/pypi/simple/"
40 | default = true
41 |
42 | [tool.poetry.scripts]
43 | xnode = "xray_node.main:main"
44 |
45 | [tool.black]
46 | line-length = 120
47 | target-version = ["py38"]
48 |
49 | [build-system]
50 | requires = ["poetry-core>=1.0.0"]
51 | build-backend = "poetry.core.masonry.api"
52 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/laoshan-tech/xray-node/b9874e191796b5c9326442bf803f671c6735acb8/tests/__init__.py
--------------------------------------------------------------------------------
/tests/test_api.py:
--------------------------------------------------------------------------------
1 | from urllib.parse import urlparse
2 |
3 | import pytest
4 | from pytest_httpserver import HTTPServer
5 |
6 | from xray_node.api.sspanel import SSPanelAPI
7 | from xray_node.utils.consts import NodeTypeEnum
8 |
9 |
10 | class TestSSPanel(object):
11 | HTTPServer.DEFAULT_LISTEN_PORT = 50650
12 |
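  |     # Sample SSPanel "server" field strings, in the formats parsed by SSPanelAPI.parse_ss / parse_vmess / parse_trojan below.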
13 | ss_server = "hk.aaa.com;port=12345#23456"
14 | vmess_server = (
15 | "1.3.5.7;443;2;ws;tls;path=/v2ray|server=hk.domain.com|host=hk.domain.com|outside_port=34567|inside_port=12345"
16 | )
17 | trojan_server = "gz.aaa.com;port=443#12345|host=hk.aaa.com"
18 |
19 | api = SSPanelAPI(endpoint=f"http://127.0.0.1:{HTTPServer.DEFAULT_LISTEN_PORT}/", api_key="sspanel_test", node_id=2)
20 |
21 | def test_parse_ss(self):
22 | node = self.api.parse_ss(raw_data=self.ss_server)
23 | assert node.node_id == 2
24 | assert node.listen_port == 23456
25 |
26 | def test_parse_vmess(self):
27 | node = self.api.parse_vmess(raw_data=self.vmess_server)
28 | assert node.node_id == 2
29 | assert node.host == "hk.domain.com"
30 | assert node.listen_port == 12345
31 | assert node.transport == "ws"
32 | assert node.path == "/v2ray"
33 | assert node.enable_tls is True
34 | assert node.tls_type == "tls"
35 |
36 | def test_parse_trojan(self):
37 | node = self.api.parse_trojan(raw_data=self.trojan_server)
38 | assert node.node_id == 2
39 | assert node.listen_port == 12345
40 | assert node.host == "hk.aaa.com"
41 |
42 | @pytest.mark.asyncio
43 | async def test_fetch_info(self, httpserver: HTTPServer):
44 | node_uri = urlparse(self.api.fetch_node_info_api).path
45 | node_handler = httpserver.expect_request(uri=node_uri, query_string=f"key={self.api.mu_key}")
46 | node_handler.respond_with_json(
47 | response_json={
48 | "ret": 1,
49 | "data": {
50 | "node_group": 0,
51 | "node_class": 0,
52 | "node_speedlimit": 0,
53 | "traffic_rate": 1,
54 | "mu_only": 1,
55 | "sort": 0,
56 | "server": "1.1.1.1",
57 | "disconnect_time": 60,
58 | "type": "SSPanel-UIM",
59 | },
60 | }
61 | )
62 |
63 | user_uri = urlparse(self.api.fetch_user_list_api).path
64 | user_handler = httpserver.expect_request(
65 | uri=user_uri, query_string=f"key={self.api.mu_key}&node_id={self.api.node_id}"
66 | )
67 | user_handler.respond_with_json(
68 | response_json={
69 | "ret": 1,
70 | "data": [
71 | {
72 | "id": 1,
73 | "passwd": "tVuKjjs0o04CxCac",
74 | "u": 0,
75 | "d": 0,
76 | "transfer_enable": 1073741824,
77 | "port": 1025,
78 | "method": "rc4-md5",
79 | "node_speedlimit": 0,
80 | "node_connector": 0,
81 | "protocol": "origin",
82 | "protocol_param": None,
83 | "obfs": "plain",
84 | "obfs_param": None,
85 | "is_multi_user": 0,
86 | },
87 | {
88 | "id": 2,
89 | "passwd": "Km9smW54mcZglG0L",
90 | "u": 0,
91 | "d": 0,
92 | "transfer_enable": 1073741824,
93 | "port": 14001,
94 | "method": "aes-256-gcm",
95 | "node_speedlimit": 0,
96 | "node_connector": 0,
97 | "protocol": "origin",
98 | "protocol_param": "",
99 | "obfs": "plain",
100 | "obfs_param": "",
101 | "is_multi_user": 1,
102 | },
103 | {
104 | "id": 3,
105 | "passwd": "oZGK1wrsvOClrJld",
106 | "u": 0,
107 | "d": 0,
108 | "transfer_enable": 1073741824,
109 | "port": 38375,
110 | "method": "chacha20-ietf",
111 | "node_speedlimit": 0,
112 | "node_connector": 0,
113 | "protocol": "auth_aes128_sha1",
114 | "protocol_param": "",
115 | "obfs": "http_simple",
116 | "obfs_param": "",
117 | "is_multi_user": 0,
118 | },
119 | ],
120 | }
121 | )
122 |
123 | node = await self.api.fetch_node_info()
124 | users = await self.api.fetch_user_list()
125 |
126 | assert self.api.node_type == NodeTypeEnum.Shadowsocks
127 | assert node.node_id == 2
128 | assert node.listen_port == 14001
129 | assert node.method == "aes-256-gcm"
130 | assert len(users) == 3
131 |
--------------------------------------------------------------------------------
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import pytest
4 |
5 | from xray_node.config import init_config, Config
6 |
7 |
8 | class TestConfig(object):
9 | @pytest.mark.asyncio
10 | async def test_init_config(self):
11 | fn = Path("./xnode.toml")
12 | init_config(target=fn)
13 | assert fn.exists()
14 |
15 | def test_config_cls(self):
16 | cfg = Config()
17 | assert len(cfg.clients) > 0
18 | assert cfg.local_api_host == "127.0.0.1"
19 | assert cfg.local_api_port == 10085
20 |
--------------------------------------------------------------------------------
/tests/test_core.py:
--------------------------------------------------------------------------------
1 | import uuid
2 |
3 | import pytest
4 |
5 | from xray_node.core.xray import Xray, VMessInbound, VLESSInbound, ShadowsocksInbound, TrojanInbound
6 | from xray_node.utils.consts import CIPHER_TYPE_DICT
7 | from xray_node.utils.install import XrayFile
8 | from xray_node.utils.port import check_port_alive
9 |
10 |
11 | class TestXray(object):
12 | xray_f = XrayFile()
13 | xray = Xray(xray_f=xray_f)
14 |
15 | email = "a@test.com"
16 | host = "127.0.0.1"
17 | vmess_p = 60001
18 | vless_p = 60002
19 | ss_p = 60003
20 | trojan_p = 60004
21 |
22 | @pytest.mark.asyncio
23 | async def test_start_xray(self):
24 | await self.xray.start()
25 | assert await self.xray.is_running() is True
26 |
27 | @pytest.mark.asyncio
28 | async def test_add_inbound(self):
29 | vmess_proto = VMessInbound()
30 | await self.xray.add_inbound(
31 | inbound_tag="vmess_test", address=self.host, port=self.vmess_p, protocol=vmess_proto
32 | )
33 | assert await check_port_alive(host=self.host, port=self.vmess_p) is True
34 |
35 | vless_proto = VLESSInbound()
36 | await self.xray.add_inbound(
37 | inbound_tag="vless_test", address=self.host, port=self.vless_p, protocol=vless_proto
38 | )
39 | assert await check_port_alive(host=self.host, port=self.vless_p) is True
40 |
41 | ss_proto = ShadowsocksInbound()
42 | await self.xray.add_inbound(inbound_tag="ss_test", address=self.host, port=self.ss_p, protocol=ss_proto)
43 | assert await check_port_alive(host=self.host, port=self.ss_p) is True
44 |
45 | trojan_proto = TrojanInbound()
46 | await self.xray.add_inbound(
47 | inbound_tag="trojan_test", address=self.host, port=self.trojan_p, protocol=trojan_proto
48 | )
49 | assert await check_port_alive(host=self.host, port=self.trojan_p) is True
50 |
51 | @pytest.mark.asyncio
52 | async def test_remove_inbound(self):
53 | await self.xray.remove_inbound(inbound_tag="vmess_test")
54 | assert await check_port_alive(host=self.host, port=self.vmess_p) is False
55 |
56 | await self.xray.remove_inbound(inbound_tag="vless_test")
57 | assert await check_port_alive(host=self.host, port=self.vless_p) is False
58 |
59 | await self.xray.remove_inbound(inbound_tag="ss_test")
60 | assert await check_port_alive(host=self.host, port=self.ss_p) is False
61 |
62 | await self.xray.remove_inbound(inbound_tag="trojan_test")
63 | assert await check_port_alive(host=self.host, port=self.trojan_p) is False
64 |
65 | @pytest.mark.asyncio
66 | async def test_stop_xray(self):
67 | await self.xray.stop()
68 | assert await self.xray.is_running() is False
69 |
--------------------------------------------------------------------------------
/tests/test_http.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import pytest
4 |
5 | from xray_node.utils import http
6 |
7 |
8 | @pytest.mark.run(order=0)
9 | class TestHttp(object):
10 | @pytest.mark.asyncio
11 | async def test_download(self):
12 | url = "https://cdn.jsdelivr.net/gh/jquery/jquery/dist/jquery.min.js"
13 | target = Path(__file__).parent / "download.test"
14 | assert await http.download(url=url, target=target) is True
15 | target.unlink()
16 |
--------------------------------------------------------------------------------
/tests/test_install.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from xray_node.utils import install
4 |
5 |
6 | @pytest.mark.run(order=1)
7 | class TestInstall(object):
8 | xray_file = install.XrayFile()
9 |
10 | def test_prepare_install(self):
11 | assert install._prepare_install(xray_f=self.xray_file) is True
12 |
13 | @pytest.mark.asyncio
14 | async def test_download_xray_zip(self):
15 | assert await install._download_xray_zip(xray_f=self.xray_file) is True
16 |
17 | @pytest.mark.asyncio
18 | async def test_unzip_xray(self):
19 | assert await install._unzip_xray_core(xray_f=self.xray_file) is True
20 |
21 | @pytest.mark.asyncio
22 | async def test_install(self):
23 | assert await install.install_xray(use_cdn=True) is True
24 |
--------------------------------------------------------------------------------
/xray_node/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/laoshan-tech/xray-node/b9874e191796b5c9326442bf803f671c6735acb8/xray_node/__init__.py
--------------------------------------------------------------------------------
/xray_node/api/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from json import JSONDecodeError
4 | from typing import Union, Type, TYPE_CHECKING
5 |
6 | import httpx
7 | from loguru import logger
8 |
9 | from xray_node.exceptions import UnsupportedAPI, APIStatusError, APIContentError
10 | from xray_node.utils import http
11 |
12 | if TYPE_CHECKING:
13 | from xray_node.api.sspanel import SSPanelAPI
14 | from xray_node.api.v2board import V2BoardAPI
15 |
16 |
17 | class BaseAPI(object):
18 | """
19 |     Base class for panel API clients.
20 | """
21 |
22 | session = http.client
23 |
24 | def __init__(self, endpoint: str, node_id: int, api_key: str, node_type: Union[str, None]):
25 | self.endpoint = endpoint
26 | self.node_id = node_id
27 | self.api_key = api_key
28 | self.node_type = node_type
29 | self.fetch_user_list_api = ""
30 | self.report_user_online_ip_api = ""
31 |
32 | def _prepare_api(self) -> None:
33 | """
34 |         Assemble the panel API URLs.
35 | :return:
36 | """
37 | return
38 |
39 | @staticmethod
40 | def parse_resp(req: httpx.Response) -> dict:
41 | """
42 |         Parse the HTTP response.
43 | :param req:
44 | :return:
45 | """
46 | if req.status_code >= 400:
47 |             logger.error(f"Request to {req.url} failed, response: {req.text}")
48 | raise APIStatusError(msg=req.status_code)
49 | else:
50 | try:
51 | d = req.json()
52 | return d
53 | except JSONDecodeError:
54 |                 logger.error(f"Failed to parse JSON from {req.url}, response: {req.text}")
55 | raise APIContentError(msg=req.text)
56 |
57 | async def fetch_user_list(self) -> list:
58 | """
59 |         Fetch the user list.
60 | :return:
61 | """
62 | raise NotImplementedError("fetch_user_list method not defined")
63 |
64 | async def fetch_node_info(self):
65 | """
66 |         Fetch the node info.
67 | :return:
68 | """
69 | raise NotImplementedError("fetch_node_info method not defined")
70 |
71 | async def report_user_traffic(self, traffic_data: list) -> bool:
72 | """
73 |         Report user traffic data.
74 | :param traffic_data:
75 | :return:
76 | """
77 | raise NotImplementedError("report_user_traffic method not defined")
78 |
79 | async def report_user_stats(self, stats_data: list) -> bool:
80 | """
81 |         Report user status data.
82 | :param stats_data:
83 | :return:
84 | """
85 | raise NotImplementedError("report_user_stats method not defined")
86 |
87 |
88 | def get_api_cls_by_name(panel_type: str) -> Union[Type[SSPanelAPI], Type[V2BoardAPI]]:
89 | """
90 |     Get the API client class for the given panel type.
91 | :param panel_type:
92 | :return:
93 | """
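  |     # Usage sketch (hypothetical values): get_api_cls_by_name("sspanel")(endpoint="https://panel.example.com/", node_id=1, api_key="key")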
94 | from xray_node.api.sspanel import SSPanelAPI
95 | from xray_node.api.v2board import V2BoardAPI
96 |
97 | panel_cls_dict = {"sspanel": SSPanelAPI, "v2board": V2BoardAPI}
98 |
99 | cls = panel_cls_dict.get(panel_type)
100 | if cls is None:
101 | raise UnsupportedAPI(msg=panel_type)
102 | else:
103 | return cls
104 |
--------------------------------------------------------------------------------
/xray_node/api/entities.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 |
3 |
4 | @dataclass
5 | class GenericUser(object):
6 | user_id: int
7 | panel_name: str
8 | node_id: int
9 | email: str
10 | speed_limit: int
11 |
12 |
13 | @dataclass
14 | class SSUser(GenericUser):
15 | password: str
16 | method: str
17 | is_multi_user: bool = False
18 | listen_port: int = 0
19 |
20 |
21 | @dataclass
22 | class VMessUser(GenericUser):
23 | uuid: str
24 |
25 |
26 | @dataclass
27 | class VLessUser(VMessUser):
28 | pass
29 |
30 |
31 | @dataclass
32 | class TrojanUser(VMessUser):
33 | flow: str = "xtls-rprx-direct"
34 |
35 |
36 | @dataclass
37 | class GenericNode(object):
38 | node_id: int
39 | panel_name: str
40 | listen_port: int
41 | listen_host: str
42 |
43 |
44 | @dataclass
45 | class SSNode(GenericNode):
46 | method: str = "unknown"
47 |
48 |
49 | @dataclass
50 | class VMessNode(GenericNode):
51 | alter_id: int
52 | transport: str
53 | enable_tls: bool
54 | tls_type: str
55 | path: str
56 | host: str
57 |
58 |
59 | @dataclass
60 | class VLessNode(VMessNode):
61 | pass
62 |
63 |
64 | @dataclass
65 | class TrojanNode(GenericNode):
66 | host: str
67 | enable_xtls: bool
68 | enable_vless: bool
69 |
70 |
71 | @dataclass
72 | class SSPanelOnlineIPData(object):
73 | user_id: int
74 | ip: list
75 |
76 |
77 | @dataclass
78 | class SSPanelTrafficData(object):
79 | user_id: int
80 | upload: int
81 | download: int
82 |
83 |
84 | @dataclass
85 | class V2BoardTrafficData(object):
86 | user_id: int
87 | upload: int
88 | download: int
89 |
--------------------------------------------------------------------------------
/xray_node/api/sspanel.py:
--------------------------------------------------------------------------------
1 | import time
2 | from typing import List, Union
3 | from urllib.parse import urljoin, urlparse
4 |
5 | import psutil
6 | from loguru import logger
7 |
8 | from xray_node.api import BaseAPI, entities
9 | from xray_node.exceptions import FetchNodeInfoError, ReportNodeStatsError, ReportUserTrafficError, UnsupportedNode
10 | from xray_node.utils.consts import SSPANEL_NODE_TYPE, NodeTypeEnum
11 |
12 |
13 | class SSPanelAPI(BaseAPI):
14 | """
15 | SSPanel
16 | """
17 |
18 | def __init__(self, endpoint: str, node_id: int, api_key: str):
19 | super(SSPanelAPI, self).__init__(endpoint=endpoint, node_id=node_id, api_key=api_key, node_type=None)
20 | self.mu_key = self.api_key
21 |
22 | self.node = None
23 | self.multi_user: Union[None, entities.SSUser] = None
24 | self._prepare_api()
25 |
26 | def _prepare_api(self) -> None:
27 | self.fetch_node_info_api = urljoin(base=self.endpoint, url=f"/mod_mu/nodes/{self.node_id}/info")
28 | self.fetch_user_list_api = urljoin(base=self.endpoint, url=f"/mod_mu/users")
29 | self.report_node_stats_api = urljoin(base=self.endpoint, url=f"/mod_mu/nodes/{self.node_id}/info")
30 | self.report_user_online_ip_api = urljoin(base=self.endpoint, url=f"/mod_mu/users/aliveip")
31 | self.report_user_traffic_api = urljoin(base=self.endpoint, url=f"/mod_mu/users/traffic")
32 |
33 | async def fetch_node_info(
34 | self,
35 | ) -> Union[entities.SSNode, entities.VMessNode, entities.VLessNode, entities.TrojanNode]:
36 | req = await self.session.get(url=self.fetch_node_info_api, params={"key": self.mu_key})
37 | result = self.parse_resp(req=req)
38 | ret = result["ret"]
39 | if ret != 1:
40 | raise FetchNodeInfoError(msg=result["data"])
41 |
42 | node_data = result["data"]
43 | self.node_type = SSPANEL_NODE_TYPE.get(int(node_data["sort"]))
44 |
45 | if self.node_type == NodeTypeEnum.Shadowsocks:
46 | self.node = self.parse_ss(raw_data=node_data["server"])
47 | elif self.node_type in (NodeTypeEnum.VMess, NodeTypeEnum.VLess):
48 | self.node = self.parse_vmess(raw_data=node_data["server"])
49 | elif self.node_type == NodeTypeEnum.Trojan:
50 | self.node = self.parse_trojan(raw_data=node_data["server"])
51 | else:
52 |             raise UnsupportedNode(msg=node_data["sort"])
53 |
54 | self.handle_ss_multi_user()
55 | return self.node
56 |
57 | async def report_node_stats(self) -> bool:
58 | """
59 |         Report node status (uptime and load average).
60 | :return:
61 | """
62 | load = psutil.getloadavg()
63 | post_body = {"uptime": time.time() - psutil.boot_time(), "load": " ".join(("%.2f" % i for i in load))}
64 |
65 | req = await self.session.post(url=self.report_node_stats_api, params={"key": self.mu_key}, json=post_body)
66 | result = self.parse_resp(req=req)
67 | ret = result["ret"]
68 | if ret != 1:
69 | raise ReportNodeStatsError(msg=result["data"])
70 | else:
71 | return True
72 |
73 | def parse_ss(self, raw_data: str) -> entities.SSNode:
74 | """
75 |         Parse the Shadowsocks node info string.
76 | :param raw_data:
77 | :return:
78 | """
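  |         # Expected format (see tests/test_api.py): "host;port=<connection_port>#<listen_port>"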
79 | parts = raw_data.split(";")
80 | ip = parts[0]
81 | extra = parts[1].split("|") if len(parts) >= 2 else []
82 |
83 | conn_port, listen_port = 0, 0
84 | for item in extra:
85 | key, value = item.split("=", maxsplit=1)
86 | if key == "":
87 | continue
88 |
89 |             # This info is currently unavailable from the panel, so even if it is configured it has no effect
90 | if key == "port":
91 | conn_port, listen_port = value.split("#", maxsplit=1)
92 |
93 | node = entities.SSNode(
94 | node_id=self.node_id,
95 | panel_name=urlparse(self.endpoint).netloc,
96 | listen_port=int(listen_port),
97 | listen_host="0.0.0.0",
98 | )
99 | return node
100 |
101 | def parse_vmess(self, raw_data: str) -> Union[entities.VMessNode, entities.VLessNode]:
102 | """
103 |         Parse the VMess node info string.
104 | :return:
105 | """
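  |         # Expected format (see tests/test_api.py): "ip;port;alter_id;transport;tls;key=value|key=value|..."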
106 | is_vless = False
107 | parts = raw_data.split(";")
108 |
109 | ip, port, alter_id = parts[0:3]
110 |
111 | transport, tls = parts[3:5]
112 | if tls:
113 | tls_type = tls
114 | enable_tls = True
115 | else:
116 | tls_type = None
117 | enable_tls = False
118 |
119 | extra = parts[5].split("|")
120 | host, path = "", ""
121 | for item in extra:
122 | key, value = item.split("=", maxsplit=1)
123 | if key == "":
124 | continue
125 |
126 | if key == "path":
127 | path = value
128 | elif key == "host":
129 | host = value
130 | elif key == "enable_vless":
131 | if value == "true":
132 | is_vless = True
133 | else:
134 | is_vless = False
135 | elif key == "inside_port":
136 | port = int(value)
137 |
138 | if is_vless:
139 | node = entities.VLessNode(
140 | node_id=self.node_id,
141 | panel_name=urlparse(self.endpoint).netloc,
142 | listen_port=int(port),
143 | listen_host="0.0.0.0",
144 | alter_id=alter_id,
145 | transport=transport,
146 | enable_tls=enable_tls,
147 | tls_type=tls_type,
148 | path=path,
149 | host=host,
150 | )
151 | else:
152 | node = entities.VMessNode(
153 | node_id=self.node_id,
154 | panel_name=urlparse(self.endpoint).netloc,
155 | listen_port=int(port),
156 | listen_host="0.0.0.0",
157 | alter_id=alter_id,
158 | transport=transport,
159 | enable_tls=enable_tls,
160 | tls_type=tls_type,
161 | path=path,
162 | host=host,
163 | )
164 | return node
165 |
166 | def parse_trojan(self, raw_data: str) -> entities.TrojanNode:
167 | """
168 |         Parse the Trojan node info string.
169 | :param raw_data:
170 | :return:
171 | """
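  |         # Expected format (see tests/test_api.py): "host;port=<connection_port>#<listen_port>|host=...|enable_xtls=...|enable_vless=..."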
172 | parts = raw_data.split(";")
173 | ip = parts[0]
174 | extra = parts[1].split("|")
175 |
176 | host, conn_port, listen_port, enable_xtls, enable_vless = "", 0, 0, False, False
177 | for item in extra:
178 | key, value = item.split("=", maxsplit=1)
179 | if key == "":
180 | continue
181 |
182 | if key == "port":
183 | conn_port, listen_port = value.split("#", maxsplit=1)
184 | elif key == "host":
185 | host = value
186 | elif key == "enable_xtls":
187 | if value == "true":
188 | enable_xtls = True
189 | elif key == "enable_vless":
190 | if value == "true":
191 | enable_vless = True
192 |
193 | node = entities.TrojanNode(
194 | node_id=self.node_id,
195 | panel_name=urlparse(self.endpoint).netloc,
196 | listen_port=int(listen_port),
197 | listen_host="0.0.0.0",
198 | host=host,
199 | enable_xtls=enable_xtls,
200 | enable_vless=enable_vless,
201 | )
202 | return node
203 |
204 | async def fetch_user_list(self) -> List[entities.GenericUser]:
205 | req = await self.session.get(url=self.fetch_user_list_api, params={"key": self.mu_key, "node_id": self.node_id})
206 | result = self.parse_resp(req=req)
207 | ret = result["ret"]
208 | if ret != 1:
209 | raise FetchNodeInfoError(msg=result["data"])
210 |
211 | user_data = result["data"]
212 | if len(user_data) > 0:
213 |             logger.info(f"Fetched user info successfully, got {len(user_data)} users")
214 | users = [self.parse_user(data=u) for u in user_data]
215 | else:
216 |             logger.warning("No valid users fetched")
217 | users = []
218 |
219 | self.handle_ss_multi_user()
220 | return users
221 |
222 | def parse_user(self, data: dict) -> entities.GenericUser:
223 | """
224 |         Parse user info from the API data.
225 | :return:
226 | """
227 | uid = data.get("id", -1)
228 | email = data.get("email", f"{uid}@{urlparse(self.endpoint).netloc}")
229 | speed_limit = data.get("node_speedlimit", 0)
230 |
231 | if self.node_type is None:
232 |             raise Exception("Node info has not been fetched yet")
233 |
234 | if self.node_type == NodeTypeEnum.Shadowsocks:
235 | user = entities.SSUser(
236 | user_id=uid,
237 | panel_name=self.node.panel_name,
238 | node_id=self.node.node_id,
239 | email=email,
240 | speed_limit=speed_limit,
241 | password=data.get("passwd", ""),
242 | method=data.get("method"),
243 | is_multi_user=data.get("is_multi_user", 0),
244 | listen_port=data.get("port", 0),
245 | )
246 | if user.is_multi_user > 0 and self.multi_user is None:
247 | self.multi_user = user
248 |
249 | elif self.node_type == NodeTypeEnum.VMess:
250 | user = entities.VMessUser(
251 | user_id=uid,
252 | panel_name=self.node.panel_name,
253 | node_id=self.node.node_id,
254 | email=email,
255 | speed_limit=speed_limit,
256 | uuid=data.get("uuid", ""),
257 | )
258 | elif self.node_type == NodeTypeEnum.VLess:
259 | user = entities.VLessUser(
260 | user_id=uid,
261 | panel_name=self.node.panel_name,
262 | node_id=self.node.node_id,
263 | email=email,
264 | speed_limit=speed_limit,
265 | uuid=data.get("uuid", ""),
266 | )
267 | elif self.node_type == NodeTypeEnum.Trojan:
268 | user = entities.TrojanUser(
269 | user_id=uid,
270 | panel_name=self.node.panel_name,
271 | node_id=self.node.node_id,
272 | email=email,
273 | speed_limit=speed_limit,
274 | uuid=data.get("uuid", ""),
275 | )
276 | else:
277 |             raise UnsupportedNode(msg=self.node_type)
278 |
279 | return user
280 |
281 | def handle_ss_multi_user(self):
282 | """
283 |         In Shadowsocks single-port multi-user mode, the carrier user needs to be handled separately.
284 | :return:
285 | """
286 | if self.multi_user and self.node_type == NodeTypeEnum.Shadowsocks and self.node:
287 | self.node.listen_port = self.multi_user.listen_port
288 | self.node.method = self.multi_user.method
289 | else:
290 |             logger.debug("Conditions for merging the single-port carrier user info are not met, skipping")
291 |
292 | async def report_user_stats(self, stats_data: List[entities.SSPanelOnlineIPData]) -> bool:
293 | """
294 |         Report users' online IPs.
295 | :param stats_data:
296 | :return:
297 | """
298 | ds = []
299 | for d in stats_data:
300 | ds.extend([{"ip": ip, "user_id": d.user_id} for ip in d.ip])
301 | post_body = {"data": ds}
302 |
303 | req = await self.session.post(
304 | url=self.report_user_online_ip_api, params={"key": self.mu_key, "node_id": self.node_id}, json=post_body
305 | )
306 | result = self.parse_resp(req=req)
307 | ret = result["ret"]
308 | if ret != 1:
309 | raise ReportNodeStatsError(msg=result["data"])
310 | else:
311 | return True
312 |
313 | async def report_user_traffic(self, traffic_data: List[entities.SSPanelTrafficData]) -> bool:
314 | """
315 |         Report user traffic.
316 | :param traffic_data:
317 | :return:
318 | """
319 | post_body = {"data": [{"user_id": d.user_id, "u": d.upload, "d": d.download} for d in traffic_data]}
320 | req = await self.session.post(
321 | url=self.report_user_traffic_api, params={"key": self.mu_key, "node_id": self.node_id}, json=post_body
322 | )
323 | result = self.parse_resp(req=req)
324 | ret = result["ret"]
325 | if ret != 1:
326 | raise ReportUserTrafficError(msg=result["data"])
327 | else:
328 | return True
329 |
--------------------------------------------------------------------------------
/xray_node/api/v2board.py:
--------------------------------------------------------------------------------
1 | from typing import Union, List
2 | from urllib.parse import urljoin, urlparse
3 |
4 | from loguru import logger
5 |
6 | from xray_node.api import BaseAPI, entities
7 | from xray_node.exceptions import UnsupportedNode, ReportUserTrafficError
8 |
9 |
10 | class V2BoardAPI(BaseAPI):
11 | def __init__(self, endpoint: str, node_id: int, api_key: str, node_type: str):
12 | super(V2BoardAPI, self).__init__(endpoint=endpoint, node_id=node_id, api_key=api_key, node_type=node_type)
13 | self.token = self.api_key
14 |
15 | self.node = None
16 | self.multi_user: Union[None, entities.SSUser] = None
17 | self._prepare_api()
18 |
19 | def _prepare_api(self) -> None:
20 | # V2ray
21 | if self.node_type in ("vmess", "vless"):
22 | self.fetch_user_list_api = urljoin(self.endpoint, "/api/v1/server/Deepbwork/user")
23 | self.fetch_node_info_api = urljoin(self.endpoint, "/api/v1/server/Deepbwork/config")
24 | self.report_user_traffic_api = urljoin(self.endpoint, "/api/v1/server/Deepbwork/submit")
25 | # Shadowsocks
26 | elif self.node_type == "shadowsocks":
27 | self.fetch_user_list_api = urljoin(self.endpoint, "/api/v1/server/ShadowsocksTidalab/user")
28 | self.fetch_node_info_api = urljoin(self.endpoint, "")
29 | self.report_user_traffic_api = urljoin(self.endpoint, "/api/v1/server/ShadowsocksTidalab/submit")
30 | # Trojan
31 | elif self.node_type == "trojan":
32 | self.fetch_user_list_api = urljoin(self.endpoint, "/api/v1/server/TrojanTidalab/user")
33 | self.fetch_node_info_api = urljoin(self.endpoint, "/api/v1/server/TrojanTidalab/config")
34 | self.report_user_traffic_api = urljoin(self.endpoint, "/api/v1/server/TrojanTidalab/submit")
35 | else:
36 | raise UnsupportedNode(msg=self.node_type)
37 |
38 | async def fetch_node_info(self):
39 | req = await self.session.get(
40 | self.fetch_node_info_api, params={"node_id": self.node_id, "token": self.token, "local_port": 1}
41 | )
42 | result = self.parse_resp(req=req)
43 |
44 | if self.node_type in ("vmess", "vless"):
45 | self.node = self.parse_vmess(result)
46 | elif self.node_type == "shadowsocks":
47 | self.node = self.parse_ss(result)
48 | elif self.node_type == "trojan":
49 | self.node = self.parse_trojan(result)
50 | else:
51 | raise UnsupportedNode(msg=self.node_type)
52 |
53 | return self.node
54 |
55 | async def report_node_stats(self):
56 | return
57 |
58 | def handle_multi_user(self):
59 | """
60 |         Some v2board node info is carried in the user data.
61 | :return:
62 | """
63 | if self.multi_user and self.node_type == "shadowsocks" and self.node:
64 | self.node.listen_port = self.multi_user.listen_port
65 | self.node.method = self.multi_user.method
66 | else:
67 |             logger.debug("Conditions for merging the single-port carrier user info are not met, skipping")
68 |
69 | def parse_ss(self, raw_data: dict) -> entities.SSNode:
70 | node = entities.SSNode(
71 | node_id=self.node_id,
72 | panel_name=urlparse(self.endpoint).netloc,
73 | listen_port=0,
74 | listen_host="0.0.0.0",
75 | )
76 | return node
77 |
78 | def parse_vmess(self, raw_data: dict) -> entities.VMessNode:
79 | inbound_info = raw_data.get("inbound", {})
80 | port = inbound_info.get("port", 0)
81 | transport = inbound_info.get("streamSettings", {}).get("network", "")
82 | enable_tls = inbound_info.get("streamSettings", {}).get("security") == "tls"
83 |
84 | host, path = "", ""
85 | if transport == "ws":
86 |             host = inbound_info.get("streamSettings", {}).get("wsSettings", {}).get("headers", {}).get("Host", "")
87 | path = inbound_info.get("streamSettings", {}).get("wsSettings", {}).get("path", "")
88 |
89 | node = entities.VMessNode(
90 | node_id=self.node_id,
91 | panel_name=urlparse(self.endpoint).netloc,
92 | listen_host="0.0.0.0",
93 | listen_port=port,
94 | alter_id=0,
95 | transport=transport,
96 | enable_tls=enable_tls,
97 | tls_type="tls",
98 | host=host,
99 | path=path,
100 | )
101 | return node
102 |
103 | def parse_trojan(self, raw_data: dict) -> entities.TrojanNode:
104 | host = raw_data.get("ssl", {}).get("sni", "")
105 | port = raw_data.get("local_port", 0)
106 |
107 | node = entities.TrojanNode(
108 | node_id=self.node_id,
109 | panel_name=urlparse(self.endpoint).netloc,
110 | listen_port=port,
111 | listen_host="0.0.0.0",
112 | host=host,
113 | enable_xtls=False,
114 | enable_vless=False,
115 | )
116 | return node
117 |
118 | async def fetch_user_list(self) -> list:
119 | req = await self.session.get(
120 | self.fetch_user_list_api, params={"node_id": self.node_id, "token": self.token, "local_port": 1}
121 | )
122 | result = self.parse_resp(req=req)
123 |
124 | user_data = result["data"]
125 | if len(user_data) > 0:
126 |             logger.info(f"Fetched user info successfully, got {len(user_data)} users")
127 | users = [self.parse_user(data=u, idx=idx) for idx, u in enumerate(user_data)]
128 | else:
129 |             logger.warning("No valid users fetched")
130 | users = []
131 |
132 | self.handle_multi_user()
133 | return users
134 |
135 | async def report_user_stats(self, stats_data: list) -> bool:
136 | pass
137 |
138 | async def report_user_traffic(self, traffic_data: List[entities.V2BoardTrafficData]) -> bool:
139 | post_body = [{"user_id": d.user_id, "u": d.upload, "d": d.download} for d in traffic_data]
140 | req = await self.session.post(
141 | url=self.report_user_traffic_api, params={"node_id": self.node_id, "token": self.token}, json=post_body
142 | )
143 | result = self.parse_resp(req=req)
144 | ret = result["ret"]
145 | if ret != 1:
146 | raise ReportUserTrafficError(msg=result["data"])
147 | else:
148 | return True
149 |
150 | def parse_user(self, data: dict, idx: int = 0):
151 | uid = data.get("id", -1)
152 | email = data.get("email", f"{uid}@{urlparse(self.endpoint).netloc}")
153 | speed_limit = 0
154 |
155 | if self.node_type is None:
156 |             raise Exception("Node info has not been fetched yet")
157 |
158 | if self.node_type == "shadowsocks":
159 | user = entities.SSUser(
160 | user_id=uid,
161 | panel_name=self.node.panel_name,
162 | node_id=self.node.node_id,
163 | email=email,
164 | speed_limit=speed_limit,
165 | password=data.get("secret", ""),
166 | method=data.get("cipher"),
167 | is_multi_user=idx == 0,
168 | listen_port=data.get("port", 0),
169 | )
170 | if idx == 0 and self.multi_user is None:
171 | self.multi_user = user
172 |
173 | elif self.node_type in ("vmess", "vless"):
174 | user = entities.VMessUser(
175 | user_id=uid,
176 | panel_name=self.node.panel_name,
177 | node_id=self.node.node_id,
178 | email=email,
179 | speed_limit=speed_limit,
180 | uuid=data.get("trojan_user", {}).get("uuid", ""),
181 | )
182 | if idx == 0:
183 | self.node.alter_id = data.get("alter_id", 0)
184 | elif self.node_type == "trojan":
185 | user = entities.TrojanUser(
186 | user_id=uid,
187 | panel_name=self.node.panel_name,
188 | node_id=self.node.node_id,
189 | email=email,
190 | speed_limit=speed_limit,
191 | uuid=data.get("v2ray_user", {}).get("uuid", ""),
192 | )
193 | else:
194 |             raise UnsupportedNode(msg=self.node_type)
195 |
196 | return user
197 |
--------------------------------------------------------------------------------
/xray_node/config.py:
--------------------------------------------------------------------------------
1 | import threading
2 | from pathlib import Path
3 | from typing import List, Union
4 |
5 | import tomlkit
6 | from loguru import logger
7 |
8 | from xray_node.api import entities
9 | from xray_node.utils.consts import NodeTypeEnum
10 |
11 |
12 | def init_config(target: Path):
13 | """
14 |     Initialize the config file.
15 | :param target:
16 | :return:
17 | """
18 | template = """# xray-node
19 |
20 | [log]
21 | level = "info" # debug/info/warning/error
22 |
23 | [user]
24 | mode = "local" # local/remote
25 |
26 | [panel]
27 | type = "sspanel" # sspanel/v2board/django-sspanel
28 | protocol = "vmess"
29 | endpoint = "http://xxx.xxxx.com/"
30 | node_id = 1
31 | api_key = "key"
32 |
33 | [[user.clients]]
34 | type = "shadowsocks"
35 | password = "aabbccdd"
36 | speed_limit = 0
37 | method = "aes-256-gcm"
38 | node_id = 1
39 |
40 | [[user.clients]]
41 | type = "shadowsocks"
42 | password = "aabbccdd"
43 | speed_limit = 0
44 | method = "aes-256-gcm"
45 | node_id = 1
46 |
47 | [[user.clients]]
48 | type = "shadowsocks"
49 | password = "aabbccdd"
50 | speed_limit = 0
51 | method = "aes-256-gcm"
52 | node_id = 1
53 |
54 | [[user.clients]]
55 | type = "vmess"
56 | uuid = "595abb61-be40-4cee-afb4-d889dcd510cb"
57 | speed_limit = 0
58 | node_id = 2
59 |
60 | [xray.api]
61 | host = "127.0.0.1"
62 | port = 10085
63 |
64 | [[xray.inbounds]]
65 | node_id = 1
66 | listen = "0.0.0.0"
67 | port = 1234
68 | protocol = "shadowsocks"
69 |
70 | [[xray.inbounds]]
71 | node_id = 2
72 | listen = "0.0.0.0"
73 | port = 2345
74 | protocol = "vmess"
75 | transport = "ws"
76 | alter_id = 64
77 | path = "/ws"
78 | host = "a.com"
79 | enable_tls = true
80 | tls_type = "xtls"
81 | """
82 | target.parent.mkdir(mode=0o755, parents=True, exist_ok=True)
83 | with open(target, "w", encoding="utf-8") as f:
84 | f.write(template)
85 |
86 |
87 | class Config(object):
88 | _instance_lock = threading.Lock()
89 |
90 | def __init__(self, cfg: Path = None, *args, **kwargs):
91 | super().__init__(*args, **kwargs)
92 | if cfg:
93 | self.fn = cfg
94 | else:
95 | self.fn = Path.home() / "xnode.toml"
96 |
97 | if not self.fn.exists():
98 | init_config(target=self.fn)
99 |
100 | with open(self.fn, "r", encoding="utf-8") as f:
101 | self.content = tomlkit.parse(f.read())
102 |
103 | self.log_level = self.content["log"]["level"]
104 |
105 | self.user_mode = self.content["user"]["mode"]
106 | if self.user_mode == "remote":
107 | self.panel_type = self.content["panel"]["type"]
108 | self.node_type = self.content["panel"]["protocol"]
109 | self.endpoint = self.content["panel"]["endpoint"]
110 | self.api_key = self.content["panel"]["api_key"]
111 | self.node_id = self.content["panel"]["node_id"]
112 |
113 | self.clients = self.content["user"]["clients"]
114 | self.inbounds = self.content["xray"]["inbounds"]
115 |
116 | self.local_api_host = self.content["xray"]["api"]["host"]
117 | self.local_api_port = self.content["xray"]["api"]["port"]
118 |
119 | self.ss_cipher_type_set = set()
120 |
121 | def __new__(cls, cfg: Path = None, *args, **kwargs):
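  |         # Thread-safe singleton: double-checked locking so only the first caller creates the instance.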
122 | if not hasattr(Config, "_instance"):
123 | with Config._instance_lock:
124 | if not hasattr(Config, "_instance"):
125 | Config._instance = super(Config, cls).__new__(cls, *args, **kwargs)
126 | return Config._instance
127 |
128 | def load_local_nodes(
129 | self,
130 | ) -> List[Union[entities.SSNode, entities.VMessNode, entities.VLessNode, entities.TrojanNode]]:
131 | """
132 |         Load node info from the config file.
133 | :return:
134 | """
135 | nodes = []
136 | for idx, inbound in enumerate(self.inbounds):
137 | panel_name = f"local"
138 | listen_host = inbound.get("listen", "0.0.0.0")
139 | alter_id = inbound.get("alter_id", 16)
140 | try:
141 | if inbound["protocol"] in (NodeTypeEnum.Shadowsocks.value, NodeTypeEnum.ShadowsocksR.value):
142 | n = entities.SSNode(
143 | node_id=inbound.get("node_id", idx),
144 | panel_name=panel_name,
145 | listen_port=inbound["port"],
146 | listen_host=listen_host,
147 | )
148 | nodes.append(n)
149 | elif inbound["protocol"] == NodeTypeEnum.VMess.value:
150 | n = entities.VMessNode(
151 | node_id=inbound.get("node_id", idx),
152 | panel_name=panel_name,
153 | listen_port=inbound["port"],
154 | listen_host=listen_host,
155 | alter_id=alter_id,
156 | transport=inbound["transport"],
157 | enable_tls=inbound["enable_tls"],
158 | tls_type=inbound["tls_type"],
159 | path=inbound.get("path", "/ws"),
160 | host=inbound["host"],
161 | )
162 | nodes.append(n)
163 | elif inbound["protocol"] == NodeTypeEnum.VLess.value:
164 | n = entities.VMessNode(
165 | node_id=inbound.get("node_id", idx),
166 | panel_name=panel_name,
167 | listen_port=inbound["port"],
168 | listen_host=listen_host,
169 | alter_id=alter_id,
170 | transport=inbound["transport"],
171 | enable_tls=inbound["enable_tls"],
172 | tls_type=inbound["tls_type"],
173 | path=inbound.get("path", "/ws"),
174 | host=inbound["host"],
175 | )
176 | nodes.append(n)
177 | elif inbound["protocol"] == NodeTypeEnum.Trojan.value:
178 | n = entities.TrojanNode(
179 | node_id=inbound.get("node_id", idx),
180 | panel_name=panel_name,
181 | listen_port=inbound["port"],
182 | listen_host=listen_host,
183 | enable_xtls=inbound["enable_xtls"],
184 | enable_vless=inbound["enable_vless"],
185 | host=inbound["host"],
186 | )
187 | nodes.append(n)
188 | except KeyError as e:
189 |                 logger.error(f"Error loading node from config file: {e}")
190 | continue
191 |
192 | return nodes
193 |
194 | def load_local_users(
195 | self,
196 | ) -> List[Union[entities.SSUser, entities.VMessUser, entities.VLessUser, entities.TrojanUser]]:
197 | """
198 |         Load user info from the config file.
199 | :return:
200 | """
201 | clients = self.content["user"]["clients"]
202 |
203 | users = []
204 | for idx, c in enumerate(clients):
205 | try:
206 | if c["type"] in (NodeTypeEnum.Shadowsocks.value, NodeTypeEnum.ShadowsocksR.value):
207 | u = entities.SSUser(
208 | user_id=idx,
209 | panel_name="local",
210 | node_id=c.get("node_id", idx),
211 | email=f"{idx}@local",
212 | speed_limit=c.get("speed_limit", 0),
213 | password=c["password"],
214 | method=c["method"],
215 | )
216 | self.ss_cipher_type_set.add(c["method"])
217 | users.append(u)
218 | elif c["type"] == NodeTypeEnum.VMess.value:
219 | u = entities.VMessUser(
220 | user_id=idx,
221 | panel_name="local",
222 | node_id=c.get("node_id", idx),
223 | email=f"{idx}@local",
224 | speed_limit=c.get("speed_limit", 0),
225 | uuid=c["uuid"],
226 | )
227 | users.append(u)
228 | elif c["type"] == NodeTypeEnum.VLess.value:
229 | u = entities.VLessUser(
230 | user_id=idx,
231 | panel_name="local",
232 | node_id=c.get("node_id", idx),
233 | email=f"{idx}@local",
234 | speed_limit=c.get("speed_limit", 0),
235 | uuid=c["uuid"],
236 | )
237 | users.append(u)
238 | elif c["type"] == NodeTypeEnum.Trojan.value:
239 | u = entities.TrojanUser(
240 | user_id=idx,
241 | panel_name="local",
242 | node_id=c.get("node_id", idx),
243 | email=f"{idx}@local",
244 | speed_limit=c.get("speed_limit", 0),
245 | uuid=c["uuid"],
246 | )
247 | users.append(u)
248 | else:
249 |                     logger.error(f"Unsupported user type {c}")
250 | continue
251 | except KeyError as e:
252 |                 logger.error(f"Error loading user from config file: {e}")
253 | continue
254 |
255 | return users
256 |
--------------------------------------------------------------------------------
/xray_node/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/laoshan-tech/xray-node/b9874e191796b5c9326442bf803f671c6735acb8/xray_node/core/__init__.py
--------------------------------------------------------------------------------
/xray_node/core/cfg.py:
--------------------------------------------------------------------------------
1 | from xray_node.config import Config
2 |
3 | BASE_CFG = {"log": {}, "api": {}, "dns": {}, "stats": {}, "policy": {}, "transport": {}, "routing": {}, "inbounds": []}
4 |
5 | API_CFG = {"api": {"tag": "api", "services": ["HandlerService", "LoggerService", "StatsService"]}}
6 |
7 | POLICY_CFG = {
8 | "policy": {
9 | "levels": {
10 | "0": {
11 | "statsUserUplink": True,
12 | "statsUserDownlink": True,
13 | }
14 | },
15 | "system": {
16 | "statsInboundUplink": True,
17 | "statsInboundDownlink": True,
18 | "statsOutboundUplink": True,
19 | "statsOutboundDownlink": True,
20 | },
21 | }
22 | }
23 |
24 | ROUTING_CFG = {
25 | "routing": {
26 | "settings": {"rules": [{"inboundTag": ["api"], "outboundTag": "api", "type": "field"}]},
27 | "strategy": "rules",
28 | },
29 | }
30 |
31 |
32 | def get_inbound_cfg(cfg_cls: Config):
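  |     # Dokodemo-door inbound tagged "api": the listener that Xray.xray_client connects to; ROUTING_CFG routes it to the API services.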
33 | return {
34 | "inbounds": [
35 | {
36 | "listen": cfg_cls.local_api_host,
37 | "port": cfg_cls.local_api_port,
38 | "protocol": "dokodemo-door",
39 | "settings": {"address": cfg_cls.local_api_host},
40 | "tag": "api",
41 | },
42 | ]
43 | }
44 |
45 |
46 | OUTBOUNDS_CFG = {"outbounds": [{"protocol": "freedom"}]}
47 |
--------------------------------------------------------------------------------
/xray_node/core/xray.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import json
3 | from typing import Union
4 |
5 | import grpc
6 | import humanize
7 | import psutil
8 | from google.protobuf import message as _message
9 | from loguru import logger
10 | from xray_rpc.app.proxyman import config_pb2 as proxyman_config_pb2
11 | from xray_rpc.app.proxyman.command import (
12 | command_pb2_grpc as proxyman_command_pb2_grpc,
13 | command_pb2 as proxyman_command_pb2,
14 | )
15 | from xray_rpc.app.stats.command import command_pb2 as stats_command_pb2, command_pb2_grpc as stats_command_pb2_grpc
16 | from xray_rpc.common.net import port_pb2, address_pb2
17 | from xray_rpc.common.protocol import user_pb2
18 | from xray_rpc.common.serial import typed_message_pb2
19 | from xray_rpc.core import config_pb2 as core_config_pb2
20 | from xray_rpc.proxy.shadowsocks import config_pb2 as shadowsocks_config_pb2
21 | from xray_rpc.proxy.trojan import config_pb2 as trojan_config_pb2
22 | from xray_rpc.proxy.vless import account_pb2 as vless_account_pb2
23 | from xray_rpc.proxy.vless.inbound import config_pb2 as vless_inbound_config_pb2
24 | from xray_rpc.proxy.vmess import account_pb2 as vmess_account_pb2
25 | from xray_rpc.proxy.vmess.inbound import config_pb2 as vmess_inbound_config_pb2
26 |
27 | from xray_node.config import Config
28 | from xray_node.core import cfg
29 | from xray_node.exceptions import (
30 | EmailExistsError,
31 | InboundTagNotFound,
32 | XrayError,
33 | AddressAlreadyInUseError,
34 | InboundTagAlreadyExists,
35 | )
36 | from xray_node.mdb import models
37 | from xray_node.utils.consts import NETWORK_DICT, NodeTypeEnum, CIPHER_TYPE_DICT
38 | from xray_node.utils.install import XrayFile
39 |
40 |
41 | def to_typed_message(message: _message):
42 | return typed_message_pb2.TypedMessage(type=message.DESCRIPTOR.full_name, value=message.SerializeToString())
43 |
44 |
45 | def ip2bytes(ip: str):
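  |     # Converts a dotted-quad IPv4 string to 4 bytes; an IPv6 listen address would need separate handling.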
46 | return bytes([int(i) for i in ip.split(".")])
47 |
48 |
49 | class Protocol(object):
50 | def __init__(self):
51 | self.message = None
52 |
53 |
54 | class VMessInbound(Protocol):
55 | def __init__(self):
56 | """
57 | VMess
58 | """
59 | super(VMessInbound, self).__init__()
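  |         # Inbounds start with an empty user list; users are added later over gRPC via Xray.add_user (AlterInbound).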
60 | self.message = to_typed_message(vmess_inbound_config_pb2.Config(user=[]))
61 |
62 |
63 | class VLESSInbound(Protocol):
64 | def __init__(self):
65 | """
66 | VLESS
67 | """
68 | super(VLESSInbound, self).__init__()
69 | self.message = to_typed_message(
70 | vless_inbound_config_pb2.Config(
71 | clients=[],
72 | decryption="",
73 | fallbacks=[],
74 | )
75 | )
76 |
77 |
78 | class ShadowsocksInbound(Protocol):
79 | def __init__(self):
80 | """
81 | Shadowsocks
82 | """
83 | super(ShadowsocksInbound, self).__init__()
84 | self.message = to_typed_message(
85 | shadowsocks_config_pb2.ServerConfig(
86 | users=[],
87 | network=[NETWORK_DICT["tcp"], NETWORK_DICT["udp"]],
88 | )
89 | )
90 |
91 |
92 | class TrojanInbound(Protocol):
93 | def __init__(self):
94 | """
95 | Trojan
96 | """
97 | super(TrojanInbound, self).__init__()
98 | self.message = to_typed_message(
99 | trojan_config_pb2.ServerConfig(
100 | users=[],
101 | fallbacks=[],
102 | )
103 | )
104 |
105 |
106 | class Xray(object):
107 | def __init__(self, xray_f: XrayFile):
108 | self.xray_f = xray_f
109 |         self.xray_proc: Union[None, asyncio.subprocess.Process] = None
110 |
111 | self.config = Config(cfg=self.xray_f.xn_cfg_fn)
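  |         # gRPC channel to xray-core's API inbound (the dokodemo-door listener defined in core/cfg.py).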
112 | self.xray_client = grpc.insecure_channel(target=f"{self.config.local_api_host}:{self.config.local_api_port}")
113 |
114 | async def get_user_upload_traffic(self, email: str, reset: bool = False) -> Union[int, None]:
115 | """
116 |         Get the user's uplink traffic, in bytes.
117 |         :param email: email used to identify the user
118 |         :param reset: whether to reset the traffic counter
119 | :return:
120 | """
121 | stub = stats_command_pb2_grpc.StatsServiceStub(self.xray_client)
122 | try:
123 | resp = stub.GetStats(
124 | stats_command_pb2.GetStatsRequest(name=f"user>>>{email}>>>traffic>>>uplink", reset=reset)
125 | )
126 | return resp.stat.value
127 | except grpc.RpcError as rpc_err:
128 | detail = rpc_err.details()
129 | if detail.endswith("uplink not found."):
130 |                 logger.debug(f"{email} not found, the user may not exist or has not been used yet")
131 | return 0
132 | else:
133 | raise XrayError(detail)
134 |
135 | async def get_user_download_traffic(self, email: str, reset: bool = False) -> Union[int, None]:
136 | """
137 |         Get the user's downlink traffic, in bytes.
138 |         :param email: email used to identify the user
139 |         :param reset: whether to reset the traffic counter
140 | :return:
141 | """
142 | stub = stats_command_pb2_grpc.StatsServiceStub(self.xray_client)
143 | try:
144 | resp = stub.GetStats(
145 | stats_command_pb2.GetStatsRequest(name=f"user>>>{email}>>>traffic>>>downlink", reset=reset)
146 | )
147 | return resp.stat.value
148 | except grpc.RpcError as rpc_err:
149 | detail = rpc_err.details()
150 | if detail.endswith("downlink not found."):
151 |                 logger.debug(f"{email} not found, the user may not exist or has not been used yet")
152 | return 0
153 | else:
154 | raise XrayError(detail)
155 |
156 | async def get_inbound_upload_traffic(self, inbound_tag: str, reset: bool = False) -> Union[int, None]:
157 | """
158 |         Get the uplink traffic of a specific inbound, in bytes.
159 | :param inbound_tag:
160 | :return:
161 | """
162 | stub = stats_command_pb2_grpc.StatsServiceStub(self.xray_client)
163 | try:
164 | resp = stub.GetStats(
165 | stats_command_pb2.GetStatsRequest(name=f"inbound>>>{inbound_tag}>>>traffic>>>uplink", reset=reset)
166 | )
167 | return resp.stat.value
168 | except grpc.RpcError as rpc_err:
169 | detail = rpc_err.details()
170 | raise XrayError(detail)
171 |
172 | async def get_inbound_download_traffic(self, inbound_tag: str, reset: bool = False) -> Union[int, None]:
173 | """
174 |         Get the downlink traffic of a specific inbound, in bytes.
175 | :param inbound_tag:
176 | :return:
177 | """
178 | stub = stats_command_pb2_grpc.StatsServiceStub(self.xray_client)
179 | try:
180 | resp = stub.GetStats(
181 | stats_command_pb2.GetStatsRequest(name=f"inbound>>>{inbound_tag}>>>traffic>>>downlink", reset=reset)
182 | )
183 | return resp.stat.value
184 | except grpc.RpcError as rpc_err:
185 | detail = rpc_err.details()
186 | raise XrayError(detail)
187 |
188 | async def add_user(
189 | self,
190 | inbound_tag: str,
191 | email: str,
192 | level: int,
193 | type: str,
194 | password: str = "",
195 | cipher_type: int = 0,
196 | uuid: str = "",
197 | alter_id: int = 0,
198 | flow: str = "xtls-rprx-direct",
199 | ):
200 | """
201 |         Add a user to an inbound.
202 | :param inbound_tag:
203 | :param email:
204 | :param level:
205 | :param type:
206 | :param password:
207 | :param cipher_type:
208 | :param uuid:
209 | :param alter_id:
210 | :param flow:
211 | :return:
212 | """
213 | stub = proxyman_command_pb2_grpc.HandlerServiceStub(self.xray_client)
214 | try:
215 | if type == NodeTypeEnum.VMess.value:
216 | user = user_pb2.User(
217 | email=email,
218 | level=level,
219 | account=to_typed_message(vmess_account_pb2.Account(id=uuid, alter_id=alter_id)),
220 | )
221 | elif type == NodeTypeEnum.VLess.value:
222 | user = user_pb2.User(
223 | email=email,
224 | level=level,
225 | account=to_typed_message(vless_account_pb2.Account(id=uuid, alter_id=alter_id)),
226 | )
227 | elif type == NodeTypeEnum.Shadowsocks.value:
228 | user = user_pb2.User(
229 | email=email,
230 | level=level,
231 | account=to_typed_message(
232 | shadowsocks_config_pb2.Account(password=password, cipher_type=cipher_type)
233 | ),
234 | )
235 | elif type == NodeTypeEnum.Trojan.value:
236 | user = user_pb2.User(
237 | email=email,
238 | level=level,
239 | account=to_typed_message(trojan_config_pb2.Account(password=password, flow=flow)),
240 | )
241 | else:
242 |                 raise XrayError(f"Unsupported inbound type {type}")
243 |
244 | stub.AlterInbound(
245 | proxyman_command_pb2.AlterInboundRequest(
246 | tag=inbound_tag,
247 | operation=to_typed_message(proxyman_command_pb2.AddUserOperation(user=user)),
248 | )
249 | )
250 | except grpc.RpcError as rpc_err:
251 | detail = rpc_err.details()
252 | if detail.endswith(f"User {email} already exists."):
253 | raise EmailExistsError(detail, email)
254 | elif detail.endswith(f"handler not found: {inbound_tag}"):
255 | raise InboundTagNotFound(detail, inbound_tag)
256 | else:
257 | raise XrayError(detail)
258 |
259 | async def remove_user(self, inbound_tag: str, email: str):
260 | """
261 |         Remove a user from an inbound.
262 | :param inbound_tag:
263 | :param email:
264 | :return:
265 | """
266 | stub = proxyman_command_pb2_grpc.HandlerServiceStub(self.xray_client)
267 | try:
268 | stub.AlterInbound(
269 | proxyman_command_pb2.AlterInboundRequest(
270 | tag=inbound_tag, operation=to_typed_message(proxyman_command_pb2.RemoveUserOperation(email=email))
271 | )
272 | )
273 | except grpc.RpcError as rpc_err:
274 | detail = rpc_err.details()
275 | if detail.endswith(f"User {email} already exists."):
276 | raise EmailExistsError(detail, email)
277 | elif detail.endswith(f"handler not found: {inbound_tag}"):
278 | raise InboundTagNotFound(detail, inbound_tag)
279 | else:
280 | raise XrayError(detail)
281 |
282 | async def add_inbound(self, inbound_tag: str, address: str, port: int, protocol: Protocol) -> None:
283 | """
284 |         Add an inbound.
285 |         :param inbound_tag: tag identifying the inbound
286 |         :param address: listen address
287 |         :param port: listen port
288 |         :param protocol: proxy configuration
289 | """
290 | stub = proxyman_command_pb2_grpc.HandlerServiceStub(self.xray_client)
291 | try:
292 | resp = stub.AddInbound(
293 | proxyman_command_pb2.AddInboundRequest(
294 | inbound=core_config_pb2.InboundHandlerConfig(
295 | tag=inbound_tag,
296 | receiver_settings=to_typed_message(
297 | proxyman_config_pb2.ReceiverConfig(
298 | port_range=port_pb2.PortRange(
299 | From=port,
300 | To=port,
301 | ),
302 | listen=address_pb2.IPOrDomain(
303 |                                     ip=ip2bytes(address),  # 4 bytes (IPv4) or 16 bytes (IPv6)
304 | ),
305 | allocation_strategy=None,
306 | stream_settings=None,
307 | receive_original_destination=None,
308 | domain_override=None,
309 | sniffing_settings=None,
310 | )
311 | ),
312 | proxy_settings=protocol.message,
313 | )
314 | )
315 | )
316 | except grpc.RpcError as rpc_err:
317 | detail = rpc_err.details()
318 | if detail.endswith("address already in use"):
319 | raise AddressAlreadyInUseError(detail, port)
320 | elif detail.endswith(f"existing tag found: {inbound_tag}"):
321 | raise InboundTagAlreadyExists(detail, inbound_tag)
322 | else:
323 | raise XrayError(detail)
324 |
325 | async def remove_inbound(self, inbound_tag: str):
326 | """
327 |         Remove an inbound.
328 | :param inbound_tag:
329 | :return:
330 | """
331 | stub = proxyman_command_pb2_grpc.HandlerServiceStub(self.xray_client)
332 | try:
333 | stub.RemoveInbound(proxyman_command_pb2.RemoveInboundRequest(tag=inbound_tag))
334 | except grpc.RpcError as rpc_err:
335 | detail = rpc_err.details()
336 | if detail == "not enough information for making a decision":
337 | raise InboundTagNotFound(detail, inbound_tag)
338 | else:
339 | raise XrayError(detail)
340 |
341 | async def sync_data_from_db(self):
342 | """
343 |         Sync node and user data from the database.
344 | :return:
345 | """
346 | active_nodes = await models.Node.filter_active_nodes()
347 | for n in active_nodes:
348 | proto = Protocol()
349 | if n.type == NodeTypeEnum.Shadowsocks.value:
350 | proto = ShadowsocksInbound()
351 | elif n.type == NodeTypeEnum.VMess.value:
352 | proto = VMessInbound()
353 | elif n.type == NodeTypeEnum.VLess.value:
354 | proto = VLESSInbound()
355 | elif n.type == NodeTypeEnum.Trojan.value:
356 | proto = TrojanInbound()
357 |
358 | try:
359 | await self.add_inbound(
360 | inbound_tag=n.inbound_tag, address=n.listen_host, port=n.listen_port, protocol=proto
361 | )
362 |                 logger.info(f"Added inbound {n.inbound_tag} successfully")
363 |             except InboundTagAlreadyExists as e:
364 |                 logger.debug(f"Inbound {e.inbound_tag} already exists, skipping")
365 |             except XrayError as e:
366 |                 logger.exception(f"Error adding inbound {n.inbound_tag}: {e.detail}")
367 | continue
368 |
369 | deleted_nodes = await models.Node.filter_deleted_nodes()
370 | for n in deleted_nodes:
371 | try:
372 | await self.remove_inbound(inbound_tag=n.inbound_tag)
373 |                 logger.info(f"Removed inbound {n.inbound_tag} successfully")
374 |             except InboundTagNotFound as e:
375 |                 logger.info(f"Inbound {e.inbound_tag} does not exist, skipping")
376 |             except XrayError as e:
377 |                 logger.exception(f"Error removing inbound {n.inbound_tag}: {e.detail}")
378 | continue
379 |
380 | await models.Node.prune_nodes()
381 |
382 | active_users = await models.User.filter_active_users()
383 | for u in active_users:
384 | try:
385 | if u.node.cipher_type != "unknown":
386 | method = CIPHER_TYPE_DICT.get(u.node.cipher_type, 0)
387 | else:
388 | method = CIPHER_TYPE_DICT.get(u.method, 0)
389 |
390 | await self.add_user(
391 | inbound_tag=u.node.inbound_tag,
392 | email=u.email,
393 | level=0,
394 | type=u.node.type,
395 | password=u.password,
396 | cipher_type=method,
397 | uuid=u.uuid,
398 | alter_id=u.node.alter_id,
399 | flow=u.flow,
400 | )
401 | logger.info(f"添加用户 {u} 成功")
402 | except EmailExistsError as e:
403 | logger.debug(f"用户 {e.email} 已存在,跳过")
404 | except XrayError as e:
405 | logger.exception(f"添加用户 {u} 出错 {e.detail}")
406 |
407 | try:
408 | user_upload = await self.get_user_upload_traffic(email=u.email, reset=True)
409 | user_download = await self.get_user_download_traffic(email=u.email, reset=True)
410 | await u.sync_user_traffic(upload=user_upload, download=user_download)
411 | logger.debug(
412 | f"同步用户 {u} 流量成功,上行 {humanize.naturalsize(user_upload)} 下行 {humanize.naturalsize(user_download)}"
413 | )
414 | except XrayError as e:
415 | logger.error(e)
416 | continue
417 |
418 | deleted_users = await models.User.filter_deleted_users()
419 | for u in deleted_users:
420 | try:
421 | await self.remove_user(inbound_tag=u.node.inbound_tag, email=u.email)
422 | except XrayError as e:
423 | logger.exception(f"删除用户 {u} 出错 {e.detail}")
424 |
425 | async def gen_cfg(self) -> None:
426 | """
427 | Generate the base configuration files.
428 | :return:
429 | """
430 | default_cfgs = [
431 | ("00_base.json", cfg.BASE_CFG),
432 | ("01_api.json", cfg.API_CFG),
433 | ("02_policy.json", cfg.POLICY_CFG),
434 | ("03_routing.json", cfg.ROUTING_CFG),
435 | ("04_inbounds.json", cfg.get_inbound_cfg(cfg_cls=self.config)),
436 | ("05_outbounds.json", cfg.OUTBOUNDS_CFG),
437 | ]
438 |
439 | for fn, content in default_cfgs:
440 | p = self.xray_f.xray_conf_dir / fn
441 | if not p.exists():
442 | with open(p, "w") as f:
443 | json.dump(content, f, indent=2)
444 |
445 | async def is_running(self) -> bool:
446 | """
447 | Check whether xray-core is running.
448 | :return:
449 | """
450 | return self.xray_proc is not None and psutil.pid_exists(self.xray_proc.pid)
451 |
452 | async def start(self) -> None:
453 | """
454 | Start xray-core.
455 | :return:
456 | """
457 | await self.gen_cfg()
458 | self.xray_proc = await asyncio.create_subprocess_exec(
459 | self.xray_f.xray_exe_fn,
460 | "run",
461 | "-confdir",
462 | self.xray_f.xray_conf_dir,
463 | stdout=asyncio.subprocess.PIPE,
464 | stdin=asyncio.subprocess.PIPE,
465 | stderr=asyncio.subprocess.PIPE,
466 | )
467 | await asyncio.sleep(0.5)
468 | if await self.is_running():
469 | logger.info("xray-core started successfully")
470 | else:
471 | self.xray_proc = None
472 | logger.warning("xray-core failed to start")
473 |
474 | async def stop(self) -> None:
475 | """
476 | Stop xray-core.
477 | :return:
478 | """
479 | if self.xray_proc and await self.is_running():
480 | self.xray_proc.terminate()
481 | else:
482 | return
483 |
484 | await asyncio.sleep(0.5)
485 | if await self.is_running():
486 | logger.warning(f"xray-core 进程 {self.xray_proc.pid} 仍在运行,尝试 kill")
487 | self.xray_proc.kill()
488 | else:
489 | logger.info(f"xray-core 进程 {self.xray_proc.pid} 已停止运行")
490 | return
491 |
492 | await asyncio.sleep(0.5)
493 | if await self.is_running():
494 | logger.error(f"xray-core 进程 {self.xray_proc.pid} 仍在运行,kill 失败,需要手动处理")
495 | else:
496 | logger.info(f"xray-core 进程 {self.xray_proc.pid} 已停止运行")
497 | return
498 |
--------------------------------------------------------------------------------
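A minimal sketch of driving the Xray wrapper above on its own, assuming xray-core is already installed under the default path and that the constructor only needs an XrayFile (as it is used in main.py below); error handling is omitted.

import asyncio
from pathlib import Path

from xray_node.core.xray import Xray
from xray_node.mdb import init_db
from xray_node.utils.install import XrayFile


async def main() -> None:
    await init_db()  # the in-memory DB that sync_data_from_db() reads from
    xray = Xray(xray_f=XrayFile(install_path=Path.home() / "xray-node"))
    await xray.start()  # writes the base config files and spawns xray-core
    await xray.sync_data_from_db()  # pushes inbounds and users to the running core
    await xray.stop()


asyncio.run(main())
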
/xray_node/exceptions.py:
--------------------------------------------------------------------------------
1 | class XrayError(Exception):
2 | def __init__(self, detail):
3 | self.detail = detail
4 |
5 |
6 | class APIError(Exception):
7 | def __init__(self, msg):
8 | self.msg = msg
9 |
10 |
11 | class DataError(Exception):
12 | def __init__(self, msg):
13 | self.msg = msg
14 |
15 |
16 | class UnsupportedNode(DataError):
17 | def __init__(self, msg):
18 | super(UnsupportedNode, self).__init__(msg=msg)
19 |
20 |
21 | class UnsupportedUser(DataError):
22 | def __init__(self, msg):
23 | super(UnsupportedUser, self).__init__(msg=msg)
24 |
25 |
26 | class NodeDataNotFound(DataError):
27 | def __init__(self, msg):
28 | super(NodeDataNotFound, self).__init__(msg=msg)
29 |
30 |
31 | class EmailExistsError(XrayError):
32 | def __init__(self, detail, email: str):
33 | super(EmailExistsError, self).__init__(detail)
34 | self.email = email
35 |
36 |
37 | class EmailNotFoundError(XrayError):
38 | def __init__(self, detail, email: str):
39 | super(EmailNotFoundError, self).__init__(detail)
40 | self.email = email
41 |
42 |
43 | class InboundTagNotFound(XrayError):
44 | def __init__(self, detail, inbound_tag: str):
45 | super(InboundTagNotFound, self).__init__(detail)
46 | self.inbound_tag = inbound_tag
47 |
48 |
49 | class InboundTagAlreadyExists(XrayError):
50 | def __init__(self, detail, inbound_tag: str):
51 | super(InboundTagAlreadyExists, self).__init__(detail)
52 | self.inbound_tag = inbound_tag
53 |
54 |
55 | class AddressAlreadyInUseError(XrayError):
56 | def __init__(self, detail, port):
57 | super(AddressAlreadyInUseError, self).__init__(detail)
58 | self.port = port
59 |
60 |
61 | class APIStatusError(APIError):
62 | def __init__(self, msg):
63 | super(APIStatusError, self).__init__(msg=msg)
64 |
65 |
66 | class APIContentError(APIError):
67 | def __init__(self, msg):
68 | super(APIContentError, self).__init__(msg=msg)
69 |
70 |
71 | class UnsupportedAPI(APIError):
72 | def __init__(self, msg):
73 | super(UnsupportedAPI, self).__init__(msg=msg)
74 |
75 |
76 | class FetchNodeInfoError(APIError):
77 | def __init__(self, msg):
78 | super(FetchNodeInfoError, self).__init__(msg=msg)
79 |
80 |
81 | class ReportNodeStatsError(APIError):
82 | def __init__(self, msg):
83 | super(ReportNodeStatsError, self).__init__(msg=msg)
84 |
85 |
86 | class ReportUserTrafficError(APIError):
87 | def __init__(self, msg):
88 | super(ReportUserTrafficError, self).__init__(msg=msg)
89 |
--------------------------------------------------------------------------------
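A short sketch of how these exception classes are meant to be consumed: the Xray-side errors carry structured context (email, inbound_tag, port) on top of the raw gRPC detail, so callers branch on the specific subclasses first and fall back to XrayError. The safe_add_user helper below is hypothetical and only for illustration.

from xray_node.exceptions import EmailExistsError, InboundTagNotFound, XrayError


async def safe_add_user(xray, **kwargs) -> None:
    # `xray` is assumed to be the gRPC wrapper from xray_node.core.xray.
    try:
        await xray.add_user(**kwargs)
    except EmailExistsError as e:
        print(f"user {e.email} already present: {e.detail}")
    except InboundTagNotFound as e:
        print(f"no such inbound {e.inbound_tag}: {e.detail}")
    except XrayError as e:
        # Generic fallback: every Xray-side error exposes .detail
        print(f"xray error: {e.detail}")
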
/xray_node/main.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import logging
3 | import os
4 | import sys
5 | from pathlib import Path
6 | from typing import Union
7 |
8 | import click
9 | import httpx
10 | import psutil
11 | from loguru import logger
12 |
13 | from xray_node.api import get_api_cls_by_name, entities, sspanel, v2board
14 | from xray_node.config import Config
15 | from xray_node.core.xray import Xray
16 | from xray_node.exceptions import ReportNodeStatsError, APIStatusError
17 | from xray_node.mdb import init_db, models
18 | from xray_node.utils.http import client
19 | from xray_node.utils.install import XrayFile, install_xray, is_xray_installed
20 |
21 |
22 | class XrayNode(object):
23 | def __init__(self, install_path: Path = None, force_update: bool = False, use_cdn: bool = False):
24 | self.__prepared = False
25 | self.install_path = install_path
26 | self.force_update = force_update
27 | self.use_cdn = use_cdn
28 | self.xray_f = XrayFile(install_path=self.install_path)
29 | self.api_cls = None
30 |
31 | def __init_config(self) -> None:
32 | """
33 | Load the configuration file.
34 | :return:
35 | """
36 | self.config = Config(cfg=self.xray_f.xn_cfg_fn)
37 |
38 | def __prepare_logger(self) -> None:
39 | """
40 | Initialize the logger.
41 | :return:
42 | """
43 | log_levels = {
44 | "CRITICAL": logging.CRITICAL,
45 | "ERROR": logging.ERROR,
46 | "WARNING": logging.WARNING,
47 | "INFO": logging.INFO,
48 | "DEBUG": logging.DEBUG,
49 | }
50 | level = log_levels[self.config.log_level.upper()]
51 | logger.remove()
52 | logger.add(sys.stderr, level=level)
53 |
54 | def __init_loop(self) -> None:
55 | """
56 | Initialize the event loop.
57 | :return:
58 | """
59 | try:
60 | import uvloop
61 |
62 | logger.info("使用 uvloop 加速")
63 | uvloop.install()
64 | except ImportError:
65 | logger.info("使用原生 asyncio")
66 |
67 | self.loop = asyncio.get_event_loop()
68 |
69 | def __prepare(self) -> None:
70 | """
71 | Prepare runtime components.
72 | :return:
73 | """
74 | if self.__prepared:
75 | return
76 |
77 | self.__init_loop()
78 | self.__init_config()
79 | self.__prepare_logger()
80 |
81 | self.xray = Xray(xray_f=self.xray_f)
82 |
83 | self.__prepared = True
84 |
85 | async def __cleanup(self) -> None:
86 | """
87 | Clean up before exiting.
88 | :return:
89 | """
90 | logger.info("Shutting down the Xray service")
91 | await self.xray.stop()
92 | if not client.is_closed:
93 | await client.aclose()
94 |
95 | tasks = [t for t in asyncio.all_tasks() if t is not asyncio.current_task()]
96 | [task.cancel() for task in tasks]
97 | await asyncio.gather(*tasks, return_exceptions=True)
98 |
99 | async def __sync_user_from_local(self):
100 | """
101 | Sync users in local mode.
102 | :return:
103 | """
104 | nodes = self.config.load_local_nodes()
105 | users = self.config.load_local_users()
106 | await models.Node.create_or_update_from_data_list(node_data_list=nodes)
107 | await models.User.create_or_update_from_data_list(user_data_list=users)
108 |
109 | def __get_api_cls(self) -> Union[sspanel.SSPanelAPI, v2board.V2BoardAPI]:
110 | if self.api_cls is None:
111 | cls = get_api_cls_by_name(panel_type=self.config.panel_type)
112 | self.api_cls = cls(
113 | endpoint=self.config.endpoint,
114 | api_key=self.config.api_key,
115 | node_id=self.config.node_id,
116 | node_type=self.config.node_type,
117 | )
118 |
119 | return self.api_cls
120 |
121 | async def __sync_user_from_remote(self):
122 | """
123 | Sync users in remote mode.
124 | :return:
125 | """
126 | api_cls = self.__get_api_cls()
127 |
128 | node = await api_cls.fetch_node_info()
129 | users = await api_cls.fetch_user_list()
130 | await models.Node.create_or_update_from_data_list(node_data_list=[node])
131 | await models.User.create_or_update_from_data_list(user_data_list=users)
132 |
133 | async def __report_stats(self):
134 | """
135 | Report status data to the remote panel.
136 | :return:
137 | """
138 | api_cls = self.__get_api_cls()
139 |
140 | try:
141 | await api_cls.report_node_stats()
142 | except APIStatusError as e:
143 | logger.error(f"上报节点状态信息API状态码异常 {e.msg}")
144 | except ReportNodeStatsError as e:
145 | logger.error(f"上报节点状态信息错误 {e.msg}")
146 |
147 | active_users = await models.User.filter_active_users()
148 |
149 | try:
150 | await api_cls.report_user_stats(
151 | stats_data=[
152 | entities.SSPanelOnlineIPData(user_id=u.user_id, ip=list(u.conn_ip_set)) for u in active_users
153 | ]
154 | )
155 | except APIStatusError as e:
156 | logger.error(f"上报用户状态信息API状态码异常 {e.msg}")
157 | except ReportNodeStatsError as e:
158 | logger.error(f"上报用户状态信息错误 {e.msg}")
159 |
160 | try:
161 | await api_cls.report_user_traffic(
162 | traffic_data=[
163 | entities.SSPanelTrafficData(user_id=u.user_id, upload=u.upload_traffic, download=u.download_traffic)
164 | for u in active_users
165 | ]
166 | )
167 | await models.User.reset_user_traffic()
168 | except APIStatusError as e:
169 | logger.error(f"上报用户流量信息API状态码异常 {e.msg}")
170 | except ReportNodeStatsError as e:
171 | logger.error(f"上报用户流量信息错误 {e.msg}")
172 |
173 | async def __user_man_cron(self):
174 | """
175 | User management loop.
176 | :return:
177 | """
178 | while True:
179 | try:
180 | if self.config.user_mode == "local":
181 | logger.info(f"使用本地配置文件 {self.config.fn} 加载用户信息")
182 | await self.__sync_user_from_local()
183 | elif self.config.user_mode == "remote":
184 | logger.info(f"使用远程服务加载用户信息")
185 | await self.__report_stats()
186 | await self.__sync_user_from_remote()
187 |
188 | await self.xray.sync_data_from_db()
189 | except (httpx.ConnectError, httpx.ConnectTimeout) as e:
190 | logger.error(f"请求远程服务出错 {e}")
191 | except Exception as e:
192 | logger.exception(f"用户管理出错 {e}")
193 | finally:
194 | await asyncio.sleep(60)
195 |
196 | async def __run_xray(self):
197 | """
198 | Start the xray-core service and run user management.
199 | :return:
200 | """
201 | if not is_xray_installed(xray_f=self.xray_f):
202 | logger.error(f"xray-core 未成功安装在 {self.xray_f.xray_install_path} 下,退出")
203 | if self.loop.is_running():
204 | self.loop.stop()
205 |
206 | return
207 |
208 | await init_db()
209 | await self.xray.start()
210 | await self.__user_man_cron()
211 |
212 | def install(self) -> None:
213 | """
214 | Install xray-core.
215 | :return:
216 | """
217 | self.__prepare()
218 | self.loop.run_until_complete(
219 | install_xray(install_path=self.install_path, force_update=self.force_update, use_cdn=self.use_cdn)
220 | )
221 |
222 | def start(self) -> None:
223 | """
224 | Start the service.
225 | :return:
226 | """
227 | self.__prepare()
228 | self.loop.create_task(self.__run_xray())
229 | try:
230 | self.loop.run_forever()
231 | except KeyboardInterrupt:
232 | self.loop.run_until_complete(self.__cleanup())
233 | logger.info("正在退出......")
234 | finally:
235 | self.loop.stop()
236 | self.loop.close()
237 | p = psutil.Process(pid=os.getpid())
238 | p.terminate()
239 |
240 |
241 | @click.group()
242 | def cli():
243 | """
244 | xray-node is a nice management tool for ss/vmess/vless/trojan proxy nodes based on xray-core.
245 | """
246 | pass
247 |
248 |
249 | @cli.command()
250 | @click.option(
251 | "-p",
252 | "--path",
253 | default=Path.home() / "xray-node",
254 | type=click.Path(file_okay=False, dir_okay=True),
255 | help="xray-core installation path.",
256 | )
257 | def run(path):
258 | """
259 | Run xray-core.
260 | """
261 | xn = XrayNode(install_path=Path(path))
262 | xn.start()
263 |
264 |
265 | @cli.command()
266 | @click.option(
267 | "-p",
268 | "--path",
269 | default=Path.home() / "xray-node",
270 | type=click.Path(file_okay=False, dir_okay=True),
271 | help="xray-core installation path.",
272 | )
273 | @click.option("--force-update", default=False, is_flag=True, help="Force update xray-core.")
274 | @click.option("--use-cdn", default=False, is_flag=True, help="Install xray-core from CDN.")
275 | def install(path, force_update: bool, use_cdn: bool):
276 | """
277 | Install xray-core.
278 | """
279 | xn = XrayNode(install_path=Path(path), force_update=force_update, use_cdn=use_cdn)
280 | xn.install()
281 |
282 |
283 | if __name__ == "__main__":
284 | cli()
285 |
--------------------------------------------------------------------------------
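The click group above exposes `run` and `install`; used programmatically (rather than through a console-script entry point, which this file does not define), the equivalent calls might look like:

from pathlib import Path

from xray_node.main import XrayNode

# Equivalent to `install` followed by `run` from the CLI above.
xn = XrayNode(install_path=Path.home() / "xray-node", force_update=False, use_cdn=True)
xn.install()  # downloads and unpacks xray-core if needed
xn.start()    # blocks: runs the event loop, xray-core and the 60s user-sync cron
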
/xray_node/mdb/__init__.py:
--------------------------------------------------------------------------------
1 | from loguru import logger
2 | from tortoise import Tortoise
3 |
4 |
5 | async def init_db():
6 | logger.info("初始化内存数据库")
7 | await Tortoise.init(db_url="sqlite://:memory:", modules={"models": ["xray_node.mdb.models"]})
8 | await Tortoise.generate_schemas()
9 |
--------------------------------------------------------------------------------
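Since the schema lives in an in-memory SQLite database, init_db() must run once per process before any model query; a minimal sketch:

import asyncio

from xray_node.mdb import init_db, models


async def main() -> None:
    await init_db()  # creates the in-memory schema for Node and User
    print(await models.Node.all().count(), await models.User.all().count())


asyncio.run(main())
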
/xray_node/mdb/models.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import json
4 | from typing import Any, Union, Type, List, Set
5 |
6 | from loguru import logger
7 | from tortoise import fields
8 | from tortoise.models import Model
9 | from tortoise.transactions import atomic
10 |
11 | from xray_node.api import entities
12 | from xray_node.exceptions import UnsupportedUser, NodeDataNotFound, UnsupportedNode
13 | from xray_node.utils.consts import NodeTypeEnum
14 |
15 |
16 | class IPSetField(fields.CharField):
17 | def to_db_value(self, value: Any, instance: "Union[Type[Model], Model]") -> str:
18 | if type(value) is not set:
19 | value = []
20 | data = json.dumps(list(value))
21 | if len(data) > self.max_length:
22 | raise ValueError("Data too long.")
23 | return data
24 |
25 | def to_python_value(self, value: Any) -> Union[Set, None]:
26 | if value is None:
27 | return value
28 | _v = json.loads(value)
29 | return set(_v)
30 |
31 |
32 | class User(Model):
33 | id = fields.BigIntField(pk=True)
34 | node: fields.ForeignKeyRelation[Node] = fields.ForeignKeyField(
35 | description="节点ID", model_name="models.Node", related_name="users"
36 | )
37 | user_id = fields.BigIntField(description="面板系统内的ID")
38 | email = fields.CharField(description="邮箱", max_length=256)
39 | uuid = fields.CharField(description="UUID", default="", max_length=128)
40 | port = fields.IntField(description="端口", default=0, index=True)
41 | method = fields.CharField(description="加密方法", default="", max_length=64)
42 | password = fields.CharField(description="密码", default="", max_length=128)
43 | flow = fields.CharField(description="Xray流控策略", default="", max_length=64)
44 | upload_traffic = fields.BigIntField(description="上传流量", default=0)
45 | download_traffic = fields.BigIntField(description="下载流量", default=0)
46 | total_traffic = fields.BigIntField(description="总流量", default=0)
47 | last_use_time = fields.DatetimeField(description="上次使用时间", auto_now=True, null=True, index=True)
48 | conn_ip_set = IPSetField(description="连接IP", default=set(), max_length=65535)
49 | is_deleted = fields.BooleanField(description="是否删除", default=False, index=True)
50 |
51 | def __str__(self):
52 | return f"User-{self.user_id}-{self.email}"
53 |
54 | @classmethod
55 | async def _gen_obj_from_user(
56 | cls, u: Union[entities.SSUser, entities.VMessUser, entities.VLessUser, entities.TrojanUser]
57 | ) -> User:
58 | """
59 | Build an ORM object from user data.
60 | :param u:
61 | :return:
62 | """
63 | node_obj = await Node.filter(panel_name=u.panel_name, node_id=u.node_id).first()
64 |
65 | if not node_obj:
66 | raise NodeDataNotFound(msg=f"{u.panel_name}, {u.node_id}")
67 |
68 | if isinstance(u, entities.SSUser):
69 | user_obj = cls(
70 | node=node_obj,
71 | user_id=u.user_id,
72 | email=u.email,
73 | port=u.listen_port,
74 | method=u.method,
75 | password=u.password,
76 | )
77 | elif isinstance(u, (entities.VMessUser, entities.VLessUser)):
78 | user_obj = cls(node=node_obj, user_id=u.user_id, email=u.email, uuid=u.uuid)
79 | elif isinstance(u, entities.TrojanUser):
80 | user_obj = cls(node=node_obj, user_id=u.user_id, email=u.email, uuid=u.uuid, flow=u.flow)
81 | else:
82 | raise UnsupportedUser(msg=f"{type(u).__name__}")
83 |
84 | return user_obj
85 |
86 | @classmethod
87 | async def _create_or_update_from_data(
88 | cls,
89 | data: Union[entities.SSUser, entities.VMessUser, entities.VLessUser, entities.TrojanUser],
90 | ):
91 | """
92 | Create or update a user from data.
93 | :param data:
94 | :return:
95 | """
96 | await cls.get_or_create(user_id=data.user_id)
97 |
98 | @classmethod
99 | @atomic()
100 | async def create_or_update_from_data_list(
101 | cls,
102 | user_data_list: List[Union[entities.SSUser, entities.VMessUser, entities.VLessUser, entities.TrojanUser]],
103 | ):
104 | """
105 | Create or update users from a data list.
106 | :param user_data_list:
107 | :return:
108 | """
109 | if await cls.all().count() < 1:
110 | logger.info(f"User表内无数据,全量插入")
111 | new_users = [await cls._gen_obj_from_user(u=u) for u in user_data_list]
112 | await cls.bulk_create(objects=new_users)
113 | else:
114 | db_user_dict = {
115 | f"{u.node.panel_name}-{u.user_id}": u
116 | for u in await cls.filter(is_deleted=False).prefetch_related("node").all()
117 | }
118 | enabled_user_ids = []
119 | need_update_or_create_users = []
120 |
121 | for user_data in user_data_list:
122 | old_db_user = db_user_dict.get(f"{user_data.panel_name}-{user_data.user_id}")
123 | if (
124 | not old_db_user
125 | or (hasattr(user_data, "listen_port") and old_db_user.port != user_data.listen_port)
126 | or (hasattr(user_data, "password") and old_db_user.password != user_data.password)
127 | or (hasattr(user_data, "method") and old_db_user.method != user_data.method)
128 | or (hasattr(user_data, "uuid") and old_db_user.uuid != user_data.uuid)
129 | ):
130 | need_update_or_create_users.append(user_data)
131 |
132 | for u in need_update_or_create_users:
133 | await cls._create_or_update_from_data(data=u)
134 |
135 | @classmethod
136 | async def filter_active_users(cls) -> List[User]:
137 | """
138 | Return active users.
139 | :return:
140 | """
141 | active_users = await User.filter(is_deleted=False).select_related("node").all()
142 | return active_users
143 |
144 | @classmethod
145 | async def filter_deleted_users(cls) -> List[User]:
146 | """
147 | Return deleted users.
148 | :return:
149 | """
150 | deleted_users = await User.filter(is_deleted=True).prefetch_related("node").all()
151 | return deleted_users
152 |
153 | @classmethod
154 | async def prune_users(cls) -> None:
155 | """
156 | Prune deleted user records.
157 | :return:
158 | """
159 | await User.filter(is_deleted=True).delete()
160 |
161 | async def sync_user_traffic(self, upload: int, download: int):
162 | """
163 | Sync user traffic counters from Xray.
164 | :param upload:
165 | :param download:
166 | :return:
167 | """
168 | self.upload_traffic += upload
169 | self.download_traffic += download
170 | self.total_traffic += upload + download
171 | await self.save()
172 |
173 | @classmethod
174 | async def reset_user_traffic(cls) -> None:
175 | """
176 | Reset traffic counters.
177 | :return:
178 | """
179 | await User.filter(is_deleted=False).update(upload_traffic=0, download_traffic=0, total_traffic=0)
180 |
181 |
182 | class Node(Model):
183 | id = fields.BigIntField(pk=True)
184 | panel_name = fields.CharField(description="面板名称", max_length=256)
185 | node_id = fields.BigIntField(description="面板系统内的ID")
186 | type = fields.CharField(description="节点类型", max_length=128)
187 | tag = fields.CharField(description="Inbound tag", max_length=256)
188 | protocol = fields.CharField(description="协议", max_length=128)
189 | speed_limit = fields.BigIntField(description="限速", default=0)
190 | cipher_type = fields.CharField(description="加密方式", max_length=64)
191 | listen_host = fields.CharField(description="监听Host", max_length=64)
192 | listen_port = fields.IntField(description="监听端口")
193 | alter_id = fields.IntField(description="Alter ID", default=4)
194 | enable_tls = fields.BooleanField(description="是否开启TLS", default=False)
195 | enable_proxy_protocol = fields.BooleanField(description="", default=False)
196 | transport_mode = fields.CharField(description="Transport", max_length=128, default="tcp")
197 | path = fields.CharField(description="Path", max_length=256, default="/ws")
198 | host = fields.CharField(description="Host", max_length=256)
199 | cert_path = fields.CharField(description="证书", max_length=256)
200 | key_path = fields.CharField(description="Key", max_length=256)
201 | is_deleted = fields.BooleanField(description="是否删除", default=False, index=True)
202 |
203 | def __str__(self):
204 | return f"Node-{self.panel_name}-{self.node_id}-{self.type}"
205 |
206 | @property
207 | def inbound_tag(self):
208 | return f"{self.panel_name}-{self.node_id}-{self.type}-{self.listen_port}"
209 |
210 | @classmethod
211 | async def _gen_obj_from_node(
212 | cls, n: Union[entities.SSNode, entities.VMessNode, entities.VLessNode, entities.TrojanNode]
213 | ) -> Node:
214 | """
215 | Build an ORM object from node data.
216 | :param n:
217 | :return:
218 | """
219 | if isinstance(n, entities.SSNode):
220 | node_obj = cls(
221 | panel_name=n.panel_name,
222 | node_id=n.node_id,
223 | type=NodeTypeEnum.Shadowsocks.value,
224 | tag=f"{n.panel_name}-{NodeTypeEnum.Shadowsocks.value}-{n.node_id}",
225 | protocol=NodeTypeEnum.Shadowsocks.value,
226 | cipher_type=n.method,
227 | listen_host=n.listen_host,
228 | listen_port=n.listen_port,
229 | host="",
230 | cert_path="",
231 | key_path="",
232 | )
233 | elif isinstance(n, entities.VMessNode):
234 | node_obj = cls(
235 | panel_name=n.panel_name,
236 | node_id=n.node_id,
237 | type=NodeTypeEnum.VMess.value,
238 | tag=f"{n.panel_name}-{NodeTypeEnum.VMess.value}-{n.node_id}",
239 | protocol=NodeTypeEnum.VMess.value,
240 | cipher_type="",
241 | listen_host=n.listen_host,
242 | listen_port=n.listen_port,
243 | alter_id=n.alter_id,
244 | enable_tls=n.enable_tls,
245 | transport_mode=n.transport,
246 | path=n.path,
247 | host=n.host,
248 | cert_path="",
249 | key_path="",
250 | )
251 | elif isinstance(n, entities.VLessNode):
252 | node_obj = cls(
253 | panel_name=n.panel_name,
254 | node_id=n.node_id,
255 | type=NodeTypeEnum.VLess.value,
256 | tag=f"{n.panel_name}-{NodeTypeEnum.VLess.value}-{n.node_id}",
257 | protocol=NodeTypeEnum.VLess.value,
258 | cipher_type="",
259 | listen_host=n.listen_host,
260 | listen_port=n.listen_port,
261 | alter_id=n.alter_id,
262 | enable_tls=n.enable_tls,
263 | transport_mode=n.transport,
264 | path=n.path,
265 | host=n.host,
266 | cert_path="",
267 | key_path="",
268 | )
269 | elif isinstance(n, entities.TrojanNode):
270 | node_obj = cls(
271 | panel_name=n.panel_name,
272 | node_id=n.node_id,
273 | type=NodeTypeEnum.Trojan.value,
274 | tag=f"{n.panel_name}-{NodeTypeEnum.Trojan.value}-{n.node_id}",
275 | protocol=NodeTypeEnum.Trojan.value,
276 | cipher_type="",
277 | listen_host=n.listen_host,
278 | listen_port=n.listen_port,
279 | host=n.host,
280 | cert_path="",
281 | key_path="",
282 | )
283 | else:
284 | raise UnsupportedNode(msg=f"{type(n).__name__}")
285 |
286 | return node_obj
287 |
288 | @classmethod
289 | async def _create_or_update_from_data(
290 | cls,
291 | data: Union[entities.SSNode, entities.VMessNode, entities.VLessNode, entities.TrojanNode],
292 | ):
293 | """
294 | Create or update a node from data.
295 | :param data:
296 | :return:
297 | """
298 | await cls.get_or_create(node_id=data.node_id)
299 |
300 | @classmethod
301 | @atomic()
302 | async def create_or_update_from_data_list(
303 | cls, node_data_list: List[Union[entities.SSNode, entities.VMessNode, entities.VLessNode, entities.TrojanNode]]
304 | ):
305 | """
306 | Create or update nodes from a data list.
307 | :param node_data_list:
308 | :return:
309 | """
310 | if await cls.all().count() < 1:
311 | logger.info(f"Node表内无数据,全量插入")
312 | new_nodes = [await cls._gen_obj_from_node(n=n) for n in node_data_list]
313 | await cls.bulk_create(objects=new_nodes)
314 | else:
315 | db_node_dict = {f"{n.panel_name}-{n.node_id}": n for n in await cls.filter(is_deleted=False).all()}
316 | enabled_node_ids = []
317 | need_update_or_create_nodes = []
318 |
319 | for node_data in node_data_list:
320 | old_db_node = db_node_dict.get(f"{node_data.panel_name}-{node_data.node_id}")
321 | if not old_db_node:
322 | need_update_or_create_nodes.append(node_data)
323 |
324 | for u in need_update_or_create_nodes:
325 | await cls._create_or_update_from_data(data=u)
326 |
327 | @classmethod
328 | async def filter_active_nodes(cls) -> List[Node]:
329 | """
330 | Return active nodes.
331 | :return:
332 | """
333 | active_nodes = await Node.filter(is_deleted=False).all()
334 | return active_nodes
335 |
336 | @classmethod
337 | async def filter_deleted_nodes(cls) -> List[Node]:
338 | """
339 | Return deleted nodes.
340 | :return:
341 | """
342 | deleted_nodes = await Node.filter(is_deleted=True).all()
343 | return deleted_nodes
344 |
345 | @classmethod
346 | async def prune_nodes(cls) -> None:
347 | """
348 | Prune deleted node records.
349 | :return:
350 | """
351 | await Node.filter(is_deleted=True).delete()
352 |
--------------------------------------------------------------------------------
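IPSetField above stores the set of connection IPs as a JSON array inside a CharField; a quick round-trip sketch (the instance argument is unused by to_db_value, so None is passed here):

from xray_node.mdb.models import IPSetField

field = IPSetField(max_length=65535)
raw = field.to_db_value({"1.2.3.4", "5.6.7.8"}, None)  # e.g. '["1.2.3.4", "5.6.7.8"]'
assert field.to_python_value(raw) == {"1.2.3.4", "5.6.7.8"}
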
/xray_node/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/laoshan-tech/xray-node/b9874e191796b5c9326442bf803f671c6735acb8/xray_node/utils/__init__.py
--------------------------------------------------------------------------------
/xray_node/utils/consts.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 |
3 | from xray_rpc.common.net import network_pb2
4 | from xray_rpc.proxy.shadowsocks.config_pb2 import (
5 | NONE,
6 | UNKNOWN,
7 | AES_128_CFB,
8 | AES_256_CFB,
9 | CHACHA20,
10 | CHACHA20_IETF,
11 | CHACHA20_POLY1305,
12 | AES_128_GCM,
13 | AES_256_GCM,
14 | )
15 |
16 |
17 | class NodeTypeEnum(Enum):
18 | Shadowsocks = "shadowsocks"
19 | ShadowsocksR = "shadowsocksr"
20 | VMess = "vmess"
21 | VLess = "vless"
22 | Trojan = "trojan"
23 |
24 |
25 | XRAY_GITHUB_USER = "XTLS"
26 | XRAY_GITHUB_REPO = "Xray-core"
27 |
28 | CIPHER_TYPE_DICT = {
29 | "none": NONE,
30 | "unknown": UNKNOWN,
31 | "aes-128-gcm": AES_128_GCM,
32 | "aes-256-gcm": AES_256_GCM,
33 | "aes-128-cfb": AES_128_CFB,
34 | "aes-256-cfb": AES_256_CFB,
35 | "chacha20": CHACHA20,
36 | "chacha20-ietf": CHACHA20_IETF,
37 | "chacha20-poly1305": CHACHA20_POLY1305,
38 | }
39 |
40 | NETWORK_DICT = {"tcp": network_pb2.TCP, "udp": network_pb2.UDP, "raw-tcp": network_pb2.RawTCP}
41 |
42 | SSPANEL_NODE_TYPE = {
43 | 0: NodeTypeEnum.Shadowsocks,
44 | 10: NodeTypeEnum.Shadowsocks,
45 | 11: NodeTypeEnum.VMess,
46 | 12: NodeTypeEnum.VMess,
47 | 14: NodeTypeEnum.Trojan,
48 | }
49 |
--------------------------------------------------------------------------------
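These lookup tables are what core/xray.py uses to translate panel data into protobuf enum values; for example, resolving an SSPanel node sort value and a cipher name with safe fallbacks (the values 0 and "aes-128-gcm" are only illustrative):

from xray_node.utils.consts import CIPHER_TYPE_DICT, SSPANEL_NODE_TYPE, NodeTypeEnum

node_type = SSPANEL_NODE_TYPE.get(0, NodeTypeEnum.Shadowsocks)  # -> NodeTypeEnum.Shadowsocks
cipher = CIPHER_TYPE_DICT.get("aes-128-gcm", 0)                 # protobuf enum value, 0 if unknown
print(node_type.value, cipher)
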
/xray_node/utils/http.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 |
3 | import httpx
4 | from loguru import logger
5 |
6 | client = httpx.AsyncClient(timeout=httpx.Timeout(timeout=10, connect=15), headers={"User-Agent": "xray-node"})
7 |
8 |
9 | async def download(url: str, target: Path) -> bool:
10 | """
11 | Download a file.
12 | :param url:
13 | :param target: path to save the file to
14 | :return: whether the download succeeded
15 | """
16 | with open(target, "wb") as f:
17 | try:
18 | async with client.stream(method="GET", url=url) as resp:
19 | logger.info(f"下载 {url} 开始......")
20 | async for chunk in resp.aiter_bytes():
21 | f.write(chunk)
22 |
23 | logger.info(f"从 {url} 下载文件到 {target} 成功")
24 | return True
25 | except Exception as e:
26 | logger.error(f"从 {url} 下载文件到 {target} 失败,{e}")
27 | return False
28 |
--------------------------------------------------------------------------------
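Because download() reuses the module-level AsyncClient, it has to run inside an event loop; a minimal sketch (URL and target are placeholders):

import asyncio
from pathlib import Path

from xray_node.utils.http import client, download


async def main() -> None:
    ok = await download(url="https://example.com/file.zip", target=Path("/tmp/file.zip"))
    print("downloaded" if ok else "failed")
    await client.aclose()  # close the shared client once the process is done with it


asyncio.run(main())
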
/xray_node/utils/install.py:
--------------------------------------------------------------------------------
1 | import hashlib
2 | import platform
3 | import re
4 | import zipfile
5 | from pathlib import Path
6 |
7 | from loguru import logger
8 |
9 | from xray_node.utils import http, consts
10 |
11 |
12 | class XrayFile(object):
13 | def __init__(self, install_path: Path = None, use_cdn: bool = False):
14 | """
15 | Paths and files related to xray-core.
16 | :param install_path: installation directory
17 | :param use_cdn: whether to download via CDN
18 | """
19 | if install_path is None:
20 | self.path = Path.home() / "xray-node"
21 | else:
22 | self.path = install_path
23 |
24 | self.use_cdn = use_cdn
25 | self.platform = "macos" if platform.system().lower() == "darwin" else platform.system().lower()
26 | self.arch = 64 if platform.machine().endswith("64") else 32
27 |
28 | @property
29 | def xray_conf_dir(self) -> Path:
30 | """
31 | xray-core configuration directory.
32 | :return:
33 | """
34 | path = self.xray_install_path / "conf"
35 | if not path.exists():
36 | path.mkdir(mode=0o755)
37 |
38 | return path
39 |
40 | @property
41 | def xn_cfg_fn(self) -> Path:
42 | """
43 | Path to the xray-node configuration file.
44 | :return:
45 | """
46 | return self.path / "xnode.toml"
47 |
48 | @property
49 | def xray_install_path(self) -> Path:
50 | """
51 | xray-core installation directory.
52 | :return:
53 | """
54 | return self.path
55 |
56 | @property
57 | def xray_exe_fn(self) -> Path:
58 | """
59 | Path to the xray-core executable.
60 | :return:
61 | """
62 | if self.platform == "windows":
63 | return self.path / "xray.exe"
64 | else:
65 | return self.path / "xray"
66 |
67 | @property
68 | def xray_zip_fn(self) -> Path:
69 | """
70 | Path to the xray-core zip archive.
71 | :return:
72 | """
73 | return self.path / f"xray-{self.platform}-{self.arch}.zip"
74 |
75 | @property
76 | def xray_download_url_fmt(self) -> str:
77 | """
78 | xray-core download URL template; the tag placeholder must be filled in.
79 | :return:
80 | """
81 | if self.use_cdn:
82 | return f"https://download.fastgit.org/{consts.XRAY_GITHUB_USER}/{consts.XRAY_GITHUB_REPO}/releases/download/{{tag}}/Xray-{self.platform}-{self.arch}.zip"
83 | else:
84 | return f"https://github.com/{consts.XRAY_GITHUB_USER}/{consts.XRAY_GITHUB_REPO}/releases/download/{{tag}}/Xray-{self.platform}-{self.arch}.zip"
85 |
86 | @property
87 | def xray_download_hash_url_fmt(self) -> str:
88 | """
89 | Download URL template for the zip archive hash; the tag placeholder must be filled in.
90 | :return:
91 | """
92 | if self.use_cdn:
93 | return f"https://download.fastgit.org/{consts.XRAY_GITHUB_USER}/{consts.XRAY_GITHUB_REPO}/releases/download/{{tag}}/Xray-{self.platform}-{self.arch}.zip.dgst"
94 | else:
95 | return f"https://github.com/{consts.XRAY_GITHUB_USER}/{consts.XRAY_GITHUB_REPO}/releases/download/{{tag}}/Xray-{self.platform}-{self.arch}.zip.dgst"
96 |
97 |
98 | def _prepare_install(xray_f: XrayFile) -> bool:
99 | """
100 | Preparation before installation.
101 | :param xray_f: XrayFile object
102 | :return:
103 | """
104 |
105 | try:
106 | if not xray_f.xray_install_path.exists():
107 | xray_f.xray_install_path.mkdir(mode=0o755)
108 | return True
109 | except OSError as e:
110 | logger.exception(f"创建 xray-node 目录失败,{e}")
111 | return False
112 |
113 |
114 | def is_xray_installed(xray_f: XrayFile) -> bool:
115 | """
116 | Check whether xray-core is already installed.
117 | :param xray_f:
118 | :return:
119 | """
120 | if xray_f.xray_exe_fn.exists():
121 | xray_f.xray_exe_fn.chmod(mode=0o755)
122 | return True
123 | else:
124 | return False
125 |
126 |
127 | async def _get_xray_zip_hash(hash_url: str) -> str:
128 | """
129 | Fetch the hash of the zip archive.
130 | :param hash_url:
131 | :return:
132 | """
133 | req = await http.client.get(url=hash_url)
134 | if req.status_code != 200:
135 | xray_hash = ""
136 | else:
137 | xray_hash_match = re.match(r"^MD5=\s+\b(.*)\b$", req.text, re.MULTILINE)
138 | if xray_hash_match:
139 | xray_hash = xray_hash_match.group(1)
140 | else:
141 | xray_hash = ""
142 |
143 | return xray_hash
144 |
145 |
146 | def _get_file_md5(fn: Path) -> str:
147 | """
148 | Compute the MD5 of a file.
149 | :param fn: file path
150 | :return: MD5 checksum
151 | """
152 | m = hashlib.md5()  # create the md5 hasher
153 | with open(fn, "rb") as fobj:
154 | while True:
155 | data = fobj.read(4096)
156 | if not data:
157 | break
158 | m.update(data)  # feed the chunk into the hasher
159 |
160 | return m.hexdigest()  # return the hex digest
161 |
162 |
163 | async def _download_xray_zip(xray_f: XrayFile) -> bool:
164 | """
165 | Download xray-core.
166 | :param xray_f:
167 | :return:
168 | """
169 | try:
170 | req = await http.client.get(
171 | f"https://api.github.com/repos/{consts.XRAY_GITHUB_USER}/{consts.XRAY_GITHUB_REPO}/releases/latest"
172 | )
173 | if req.status_code != 200:
174 | logger.error(f"获取 xray-core 最新 release 版本失败,状态码 {req.status_code}")
175 | return False
176 |
177 | result = req.json()
178 | latest_tag = result["tag_name"]
179 |
180 | md5_hash = await _get_xray_zip_hash(hash_url=xray_f.xray_download_hash_url_fmt.format(tag=latest_tag))
181 |
182 | download_success = await http.download(
183 | url=xray_f.xray_download_url_fmt.format(tag=latest_tag), target=xray_f.xray_zip_fn
184 | )
185 | if download_success:
186 | if md5_hash == _get_file_md5(fn=xray_f.xray_zip_fn):
187 | logger.info(f"下载 xray-core 成功,md5 校验成功")
188 | return True
189 | else:
190 | logger.warning(f"下载 xray-core 成功,但 md5 校验失败")
191 | return False
192 | else:
193 | return False
194 | except Exception as e:
195 | logger.exception(f"下载 xray-core 失败,{e}")
196 | return False
197 |
198 |
199 | async def _unzip_xray_core(xray_f: XrayFile) -> bool:
200 | """
201 | Unzip xray-core.
202 | :param xray_f:
203 | :return:
204 | """
205 | if xray_f.xray_zip_fn.exists():
206 | zip_file = zipfile.ZipFile(xray_f.xray_zip_fn, "r")
207 | for f in zip_file.namelist():
208 | if f not in ("LICENSE", "README.md"):
209 | zip_file.extract(f, xray_f.xray_zip_fn.parent)
210 | zip_file.close()
211 | return True
212 | else:
213 | logger.warning(f"{xray_f.xray_zip_fn} 不存在")
214 | return False
215 |
216 |
217 | async def install_xray(install_path: Path = None, force_update: bool = False, use_cdn: bool = False) -> bool:
218 | """
219 | Install xray-core.
220 | :param install_path: installation directory
221 | :param force_update: force upgrade, defaults to False
222 | :param use_cdn: whether to download via CDN
223 | :return:
224 | """
225 | if install_path is None:
226 | path = Path.home() / "xray-node"
227 | else:
228 | path = install_path
229 |
230 | xray_file = XrayFile(install_path=path, use_cdn=use_cdn)
231 |
232 | if not _prepare_install(xray_f=xray_file):
233 | return False
234 |
235 | if force_update is False and is_xray_installed(xray_f=xray_file):
236 | logger.info(f"xray-core 已经安装在 {path} 目录下")
237 | return True
238 |
239 | if await _download_xray_zip(xray_f=xray_file) and await _unzip_xray_core(xray_f=xray_file):
240 | if is_xray_installed(xray_f=xray_file):
241 | logger.info(f"成功安装 xray-core 至 {xray_file.xray_install_path}")
242 | return True
243 | else:
244 | return False
245 | else:
246 | return False
247 |
--------------------------------------------------------------------------------
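install_xray() above is what the CLI's install command ultimately awaits; called directly it might look like this (the path is illustrative):

import asyncio
from pathlib import Path

from xray_node.utils.install import XrayFile, install_xray, is_xray_installed


async def main() -> None:
    path = Path.home() / "xray-node"
    if await install_xray(install_path=path, force_update=False, use_cdn=False):
        print("installed:", is_xray_installed(xray_f=XrayFile(install_path=path)))


asyncio.run(main())
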
/xray_node/utils/log.py:
--------------------------------------------------------------------------------
1 | import logging
2 |
3 | from loguru import logger
4 |
5 |
6 | class InterceptHandler(logging.Handler):
7 | def emit(self, record):
8 | # Get corresponding Loguru level if it exists
9 | try:
10 | level = logger.level(record.levelname).name
11 | except ValueError:
12 | level = record.levelno
13 |
14 | # Find the caller from which the logged message originated
15 | frame, depth = logging.currentframe(), 2
16 | while frame.f_code.co_filename == logging.__file__:
17 | frame = frame.f_back
18 | depth += 1
19 |
20 | logger.opt(depth=depth, exception=record.exc_info).log(level, record.getMessage())
21 |
--------------------------------------------------------------------------------
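InterceptHandler follows the usual loguru recipe for routing stdlib logging records into loguru. The wiring itself is not shown in this repository; it is typically done once at startup, for example:

import logging

from xray_node.utils.log import InterceptHandler

# Route every stdlib logging record (e.g. from third-party libraries) through loguru.
logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True)
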
/xray_node/utils/port.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 |
3 | from loguru import logger
4 |
5 |
6 | async def check_port_alive(host: str, port: int, timeout: float = 2.0) -> bool:
7 | """
8 | Check whether a port is open.
9 | :param host:
10 | :param port:
11 | :param timeout:
12 | :return:
13 | """
14 | try:
15 | future = asyncio.open_connection(host=host, port=port)
16 | reader, writer = await asyncio.wait_for(future, timeout=timeout)
17 | writer.close()
18 | return True
19 | except (ConnectionRefusedError, asyncio.TimeoutError):
20 | logger.warning(f"{host}:{port} 端口关闭或连接超时")
21 | return False
22 | except Exception as e:
23 | logger.error(f"{host}:{port} 连接异常,{e}")
24 | return False
25 |
--------------------------------------------------------------------------------
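A quick sketch of probing a port with the helper above (host and port are placeholders):

import asyncio

from xray_node.utils.port import check_port_alive

alive = asyncio.run(check_port_alive(host="127.0.0.1", port=443, timeout=1.0))
print("open" if alive else "closed")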