├── .dockerignore
├── .github
└── workflows
│ └── pypi.yml
├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── __init__.py
├── opssdk
├── __init__.py
└── utils
│ └── __init__.py
├── pyproject.toml
├── setup.cfg
├── setup.py
└── websdk2
├── __init__.py
├── api_set.py
├── apis
├── __init__.py
├── admin_apis.py
├── agent_apis.py
├── cmdb_apis.py
├── kerrigan_apis.py
├── mgv4_apis.py
├── notice_apis.py
└── task_apis.py
├── application.py
├── base_handler.py
├── cache.py
├── cache_context.py
├── client.py
├── cloud
├── __init__.py
├── qcloud_api.py
└── ucloud_api.py
├── cloud_utils.py
├── configs.py
├── consts.py
├── crud_utils.py
├── db_context.py
├── error.py
├── fetch_coroutine.py
├── jwt_token.py
├── ldap.py
├── logger.py
├── model_utils.py
├── mqhelper.py
├── program.py
├── salt_api.py
├── sqlalchemy_pagination.py
├── tools.py
├── utils
├── __init__.py
├── cc_crypto.py
├── date_format.py
└── pydantic_utils.py
└── web_logs.py
/.dockerignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | MANIFEST
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | .hypothesis/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 | local_settings.py
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 |
91 | # Spyder project settings
92 | .spyderproject
93 | .spyproject
94 |
95 | # Rope project settings
96 | .ropeproject
97 |
98 | # mkdocs documentation
99 | /site
100 |
101 | # mypy
102 | .mypy_cache/
103 |
104 | .idea
--------------------------------------------------------------------------------
/.github/workflows/pypi.yml:
--------------------------------------------------------------------------------
1 | name: Publish Python 🐍 distributions 📦 to PyPI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | jobs:
8 | build:
9 | name: Publish Python 🐍 distributions 📦 to PyPI
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v3
13 | with:
14 | fetch-depth: 1
15 |
16 | - name: Set up Python 3.9
17 | uses: actions/setup-python@v4
18 | with:
19 | python-version: 3.9
20 |
21 | - name: Install dependencies
22 | run: |
23 | python -m pip install --upgrade pip
24 | pip install setuptools wheel twine build
25 |
26 | - name: Build distributions
27 | run: |
28 | python -m build
29 |
30 | - name: Publish distribution 📦 to PyPI
31 | uses: pypa/gh-action-pypi-publish@v1.4.2
32 | with:
33 | user: __token__
34 | password: ${{ secrets.PYPI_PASSWORD2 }}
35 |          # repository_url: https://upload.pypi.org/legacy/
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Created by .ignore support plugin (hsz.mobi)
2 | ### Python template
3 | # Byte-compiled / optimized / DLL files
4 | __pycache__/
5 | *.py[cod]
6 | *$py.class
7 |
8 | # C extensions
9 | *.so
10 |
11 | # Distribution / packaging
12 | .Python
13 | build/
14 | develop-eggs/
15 | dist/
16 | downloads/
17 | eggs/
18 | .eggs/
19 | lib/
20 | lib64/
21 | parts/
22 | sdist/
23 | var/
24 | wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | MANIFEST
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | .hypothesis/
50 |
51 | # Translations
52 | *.mo
53 | *.pot
54 |
55 | # Django stuff:
56 | *.log
57 | local_settings.py
58 |
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 |
63 | # Scrapy stuff:
64 | .scrapy
65 |
66 | # Sphinx documentation
67 | docs/_build/
68 |
69 | # PyBuilder
70 | target/
71 |
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 |
75 | # pyenv
76 | .python-version
77 |
78 | # celery beat schedule file
79 | celerybeat-schedule
80 |
81 | # SageMath parsed files
82 | *.sage.py
83 |
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 |
91 | # Spyder project settings
92 | .spyderproject
93 | .spyproject
94 |
95 | # Rope project settings
96 | .ropeproject
97 |
98 | # mkdocs documentation
99 | /site
100 |
101 | # mypy
102 | .mypy_cache/
103 |
104 | .idea
105 | .github/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 |    Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 |     along with this program.  If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 |     <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | help: ## help
2 | @awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {sub("\\\\n",sprintf("\n%22c"," "), $$2);printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
3 |
4 | .PHONY: local
5 | local: ## 本地上传pypi
6 | command -v twine || pip install twine -i https://pypi.tuna.tsinghua.edu.cn/simple
7 | python setup.py sdist
8 | twine upload dist/*
9 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## 安装
2 |
3 | ### python3安装
4 |
5 | [python链接](https://www.python.org/)
6 |
7 | ##### python3.9以上
8 |
9 | ##### SDK 安装
10 |
11 | ```bash
12 | $ pip3 install -U git+https://github.com/ss1917/ops_sdk.git
13 | ```
14 |
15 | ## 结构
16 |
17 | ```
18 | ├── README.md 项目readme
19 | └── opssdk
20 | ├── logs 日志模块
21 | ├── install 安装模块
22 | ├── get_info 配置获取
23 | └── operate 运维操作
24 | ├── check 系统参数检查和获取
25 | ├── mysql mysql 操作
26 | ├── mail 发送邮件
27 | └── centralization 集中化管理工具 salt
28 | ├── websdk2 web开发使用
29 | ├── application.py tornado application
30 | ├── base_handler.py tornado 基类
31 | ├── cache.py 处理redis缓存
32 | ├── configs.py 配置文件管理
33 | ├── consts.py 常量
34 | ├── db_context.py MySQL 处理类
35 | ├── error.py 异常
36 | ├── crud_utils.py API CRUD类
37 | ├── model_utils.py 数据库模型处理类 sqlalchemy_pagination
38 | ├── sqlalchemy_pagination.py 分页
39 | ├── fetch_coroutine.py
40 | ├── jwt_token.py JWT处理
41 | ├── mqhelper.py MQ 处理类
42 | ├── program.py
43 | ├── salt_api.py salt 处理类 可以移到工具类
44 | ├── ldap.py LDAP 处理
45 | ├── sms.py 发送短信 可以移到工具类
46 | ├── tools.py 工具类
47 | ├── client.py API调用客户端封装
48 | └── apis API集合
49 | ├── mgv4_apis 后台API集合
50 | ├── cmdb_apis 配置平台API
51 | ├── agent_apis Agent
52 | ├── kerrigan_apis 配置文件管理
53 | └── notice_apis 待补充
54 | ```
55 |
56 | ## License
57 |
58 | Everything is [GPL v3.0](https://www.gnu.org/licenses/gpl-3.0.html).
--------------------------------------------------------------------------------
/__init__.py:
--------------------------------------------------------------------------------
# Version of the top-level package.
# NOTE(review): setup.py declares its own VERSION ('1.0.40') for the PyPI
# distribution — the two are not kept in sync automatically; confirm which
# one is authoritative.
__version__ = '0.0.23'
2 |
--------------------------------------------------------------------------------
/opssdk/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ss1917/ops_sdk/ea34a6e30630709b20623d08450c8b65c0c92d88/opssdk/__init__.py
--------------------------------------------------------------------------------
/opssdk/utils/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | '''
4 | author : shenshuo
5 | date : 2018-3-7
6 | role : 工具类
7 | '''
8 |
9 | import time
10 | from opssdk.logs import Log
11 |
# Module-level logger used by the ``timeit`` decorator below.
# NOTE(review): the log path is absolute and hard-coded — assumes
# /log/yunwei exists and is writable on every host; confirm for deployments.
log_path = '/log/yunwei/yunwei.log'
log_ins = Log('utils', log_path)
14 |
def timeit(func):
    """Decorator that logs the wall-clock execution time of *func*.

    The duration is written through the module-level ``log_ins`` logger at
    ``info`` level; the wrapped function's return value is passed through
    unchanged.

    Args:
        func: the callable to time.

    Returns:
        A wrapper callable with the same signature as ``func``.
    """
    # Local import keeps this module's import block untouched.
    from functools import wraps

    @wraps(func)  # fix: preserve func's __name__/__doc__ on the wrapper
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        duration = time.time() - start_time
        log_ins.write_log("info", '%s execute duration :%.3f second' % (str(func), duration))
        return result

    return wrapper
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=42", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [bdist_wheel]
2 | universal = True
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """"
4 | author : shenshuo
5 | date : 2024年12月12日
6 | desc : CODO SDK
7 | """
8 |
9 | import sys
10 | from setuptools import setup, find_packages
11 |
12 | VERSION = '1.0.40'
13 |
# Refuse to run on interpreters older than 2.7, and on 3.x releases before 3.6.
supported = sys.version_info >= (2, 7) and not ((3, 0) <= sys.version_info < (3, 6))
if not supported:
    print('This program requires at least Python 2.7 or 3.6 to run.')
    sys.exit(1)
17 |
18 |
def get_data_files():
    """Return the ``data_files`` list for setup(): install the README under share/doc."""
    return [('share/doc/codo_sdk', ['README.md'])]
24 |
25 |
def get_install_requires():
    """Return the pinned runtime dependency list for setup()."""
    return [
        'fire==0.5.0',
        'shortuuid==1.0.11',
        'pymysql==0.9.3',
        'sqlalchemy==1.3.23',
        'pika==1.3.1',
        'PyJWT==2.0.1',
        'requests==2.28.2',
        'redis==4.5.1',
        'tornado>=6.0',
        'loguru>=0.6.0',
        'cryptography==42.0.4',
        'ldap3==2.9',
        'pydantic>=1.10.5',
    ]
31 |
32 |
# Package metadata for the PyPI distribution.
setup(
    name='codosdk',
    version=VERSION,
    description="CODO项目的Python SDK",
    # packages=['opssdk', 'opssdk.utils', 'websdk2', 'websdk2.apis', 'websdk2.cloud', 'websdk2.utils'],
    packages=find_packages(),
    url='https://github.com/ss1917/codo_sdk/',
    license='GPLv3',
    keywords="ops, codo, devops",
    install_requires=get_install_requires(),
    author='shenshuo',
    author_email='191715030@qq.com',
    long_description='SDK of the operation and maintenance script logs operate',
    # long_description=open('README.md').read(),  # auto-read the README file
    # long_description_content_type='text/markdown',  # ensure Markdown rendering
    include_package_data=True,
    data_files=get_data_files(),
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console :: Curses',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3.9',  # fix: this classifier was listed twice
    ],
    platforms='any',
    # fix: PEP 440 comma-separated specifiers are ANDed, so the previous
    # '>=2.7, >=3.6' collapsed to just '>=3.6' and silently dropped 2.7.
    # "2.7 or 3.6+" is expressed by excluding the 3.0–3.5 series instead.
    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*',
)
64 |
--------------------------------------------------------------------------------
/websdk2/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ss1917/ops_sdk/ea34a6e30630709b20623d08450c8b65c0c92d88/websdk2/__init__.py
--------------------------------------------------------------------------------
/websdk2/api_set.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Version : 0.0.1
5 | Contact : 191715030@qq.com
6 | Author : shenshuo
7 | Date : 2025/02/12 17:56
8 | Desc : API集合
9 | """
10 |
11 | from .tools import singleton
12 | from .apis import AdminAPIS, TaskAPIS, KerriganAPIS, AdminV4APIS, CMDBAPIS, AgentAPIS, NoticeAPIS
13 |
14 |
@singleton
class ConstAPIS(AdminAPIS, TaskAPIS, KerriganAPIS, AdminV4APIS, CMDBAPIS, AgentAPIS, NoticeAPIS):
    """Singleton aggregating every commonly-used API definition.

    Inherits the endpoint constants from each API module and enforces a
    write-once, validated contract on any attribute assigned afterwards.
    """

    def __init__(self):
        pass

    def __setattr__(self, name: str, value: dict) -> None:
        """Validate and store an API constant.

        Rules enforced:
            1. A constant may never be rebound once set.
            2. The value must be a dict.
            3. The dict must carry both 'url' and 'description' keys.

        Args:
            name: attribute name of the constant.
            value: endpoint definition dict.

        Raises:
            TypeError: when any of the rules above is violated.
        """
        if name in self.__dict__:
            raise TypeError(f"Cannot rebind constant '{name}'.")

        if not isinstance(value, dict):
            raise TypeError(f"Value for '{name}' must be a dictionary.")

        # Both mandatory keys are checked in a fixed order so the first
        # missing one is reported, matching the original behaviour.
        for required in ('url', 'description'):
            if required not in value:
                raise TypeError(f"Value for '{name}' must contain '{required}'.")

        self.__dict__[name] = value


api_set = ConstAPIS()
58 |
--------------------------------------------------------------------------------
/websdk2/apis/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | from .admin_apis import AdminAPIS
5 | from .task_apis import TaskAPIS
6 | from .kerrigan_apis import KerriganAPIS
7 | from .mgv4_apis import AdminV4APIS
8 | from .cmdb_apis import CMDBAPIS
9 | from .agent_apis import AgentAPIS
10 | from .notice_apis import NoticeAPIS
11 |
--------------------------------------------------------------------------------
/websdk2/apis/admin_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc :记录API
8 | """
9 |
10 |
class AdminAPIS:
    """Endpoint constants for the admin service (``/api/mg``).

    Every attribute is a dict describing one HTTP call: ``method``, ``url``,
    an optional ``params``/``body`` template, ``field_help`` hints for the
    caller, and a human-readable ``description``.
    """

    route_prefix = "/api/mg"

    # Paged user listing.
    get_users = {
        'method': 'GET',
        'url': f'{route_prefix}/v3/accounts/user/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '获取用户信息',
    }

    # Create/update/delete users — adjust ``method`` and ``body`` before use.
    opt_users = {
        'method': 'POST',
        'url': f'{route_prefix}/v3/accounts/user/',
        'body': {
            'username': None,
            'nickname': None,
            'password': None,
            'department': None,
            'tel': None,
            'wechat': None,
            'no': None,
            'email': None,
            'user_state': '20',
        },
        'field_help': {'user_state': '20'},
        'description': '操作用户数据,支持增删改,请修改method和body数据',
    }

    # Resource groups/directories visible to a user.
    # nickname: defaults to the current user; expand: 'yes' expands into a
    # directory tree, 'no' acts as a global tag (absent = all).
    get_resource_info_by_user = {
        'method': 'GET',
        'url': '/mg/v2/overall/resource/user/',
        'params': {'nickname': None, 'expand': None},
        'field_help': {'nickname': None, 'expand': None},
        'description': '获取用户有权限的资源组/目录',
    }

    # Resource groups/directories filtered by key/value.
    get_resource_info = {
        'method': 'GET',
        'url': '/mg/v2/overall/resource/',
        'params': {'key': None, 'value': None},
        'field_help': {'key': None, 'value': None},
        'description': '获取资源组/目录',
    }

    # Role listing (paged, fuzzy search via 'value').
    get_role_list = {
        'method': 'GET',
        'url': f'{route_prefix}/v3/accounts/role/',
        'params': {},
        'field_help': {'page': '分页/第几页', 'limit': '分页/每页多少个', 'value': '模糊查询'},
        'description': '获取角色信息',
    }

    # All user groups with their members.
    get_all_role_user = {
        'method': 'GET',
        'url': f'{route_prefix}/v3/accounts/all_role_user/',
        'params': {},
        'field_help': {'page': '分页/第几页', 'limit': '分页/每页多少个', 'value': '模糊查询'},
        'description': '获取用户组和用户组内用户信息',
    }

    # Contact details (phone/email/DingTalk id) for users/groups/roles.
    get_send_addr = {
        'method': 'GET',
        'url': f'{route_prefix}/v1/notifications/send_addr/',
        'params': {},
        'field_help': {
            'users_str': '用户 半角逗号分隔',
            'notice_group_str': '通知组 半角逗号分隔',
            'roles_str': '角色 半角逗号分隔',
        },
        'description': '获取用户的联系方式,手机/邮箱/钉钉ID',
    }

    # Notification group listing.
    get_notice_group = {
        'method': 'GET',
        'url': f'{route_prefix}/v3/notifications/group/',
        'params': {'page': 1, 'limit': 201, 'value': ''},
        'field_help': {'page': '分页/第几页', 'limit': '分页/每页多少个', 'value': '模糊查询'},
        'description': '获取通知组',
    }

    # Notification group mutation.
    opt_notice_group = {
        'method': 'POST',
        'url': f'{route_prefix}/v3/notifications/group/',
        'body': {},
        'field_help': {},
        'description': '操作通知组',
    }

    # Send a notification through a named template.
    send_notice = {
        'method': 'POST',
        'url': f'{route_prefix}/v3/notifications/factory/',
        'body': {
            "name": 'default',
            "msg": {"msg": "这个即将发布的新版本,创始人xx称它为红树林。而在此之前,每当面临重大升级"},
        },
        'field_help': {
            "name": '模板名称',
            "msg": "发送的消息变量字典",
            "notice_conf": "随着通知一起传入的配置信息,会覆盖模板里面的配置信息",
        },
        'description': '根据通知模板发送通知',
    }

    # Update the status of an already-sent message (DingTalk OA only).
    send_update_notice = {
        'method': 'PUT',
        'url': f'{route_prefix}/v3/notifications/factory/',
        'body': {
            "agent_id": 27689,
            "status_value": "已同意",
            "status_bg": "0xFF78C06E",
            "task_id": 37491848,
        },
        'field_help': {
            "agent_id": '申请的应用id',
            "status_value": "状态值",
            "status_bg": "颜色",
            "task_id": "已经发送消息ID,发送通知的时候可以获取他的返回",
        },
        'description': '变更已发出通知消息的状态, 目前只有钉钉工作通知OA方式可用',
    }

    # Send a free-form (template-less) notification.
    send_custom_notice = {
        'method': 'POST',
        'url': f'{route_prefix}/v1/notifications/custom/',
        'body': {
            "send_addr": '',
            "userid_list": '',
            "msg": {"msg": "这个即将发布的新版本,创始人xx称它为红树林。而在此之前,每当面临重大升级"},
        },
        'field_help': {
            "send_addr": '需要的通知的用户信息',
            "userid_list": "",
            "msg": "发送的消息变量字典",
        },
        'description': '自定义的通知信息',
    }
--------------------------------------------------------------------------------
/websdk2/apis/agent_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2024年4月12日
7 | Desc :Agent常用API
8 | """
9 |
10 |
class AgentAPIS:
    """Endpoint constants for the Agent service (``/api/agent``)."""

    route_prefix = "/api/agent"

    # Bare URL constants reused by the endpoint dicts below.
    agent_list_url = f'{route_prefix}/v1/agent/info'
    get_agent_work_url = f'{route_prefix}/v1/manager/agent/get_running_task'
    batch_add_task_url = f'{route_prefix}/v1/agent/task/batch'
    sync_files_url = f'{route_prefix}/v1/manager/agent/share_file/register'
    cloud_native_url = f'{route_prefix}/v1/manager/agent/task/cloud_native'

    sync_files_register_url_v2 = f"{route_prefix}/v1/manager/agent/share_filev2/register"
    sync_files_cancel_url_v2 = f"{route_prefix}/v1/manager/agent/share_filev2/cancel"

    # List registered agents.
    get_agent_list = {
        'method': 'GET',
        'url': agent_list_url,
        'params': {},
        'field_help': {},
        'description': '查看Agent列表',
    }

    # Query agent running state.
    get_agent_work = {
        'method': 'GET',
        'url': get_agent_work_url,
        'params': {},
        'field_help': {},
        'description': '查询agent状态',
    }

    # Dispatch a batch of script tasks.
    batch_add_task = {
        'method': 'POST',
        'url': batch_add_task_url,
        'body': {},
        'field_help': {},
        'description': '批量脚本任务下发',
    }

    # Bulk file distribution (v1).
    sync_files_task = {
        'method': 'POST',
        'url': sync_files_url,
        'body': {},
        'field_help': {},
        'description': '批量分发文件下发',
    }

    # Bulk file distribution (v2): register and cancel.
    sync_files_register_v2 = {
        'method': 'POST',
        'url': sync_files_register_url_v2,
        'body': {},
        'field_help': {},
        'description': '批量分发文件下发V2',
    }

    sync_files_cancel_v2 = {
        'method': 'POST',
        'url': sync_files_cancel_url_v2,
        'body': {},
        'field_help': {},
        'description': '批量分发文件取消V2',
    }

    # Cloud-native task: dispatch/terminate/retry (POST) and query (GET).
    cloud_native_task = {
        'method': 'POST',
        'url': cloud_native_url,
        'body': {},
        'field_help': {},
        'description': '云原生任务下发/终止/重试',
    }

    get_cloud_native_task = {
        'method': 'GET',
        'url': cloud_native_url,
        'params': {},
        'field_help': {},
        'description': '云原生任务查询',
    }
--------------------------------------------------------------------------------
/websdk2/apis/cmdb_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc :记录 CMDB API
8 | """
9 |
10 |
class CMDBAPIS:
    """Endpoint constants for the CMDB service (``/api/cmdb``).

    NOTE(review): the URLs below contain a second ``/api/v2/cmdb`` segment
    after the prefix — presumably intentional routing; confirm with the
    gateway configuration before changing.
    """

    cmdb_prefix = "/api/cmdb"

    # Tag key/value listing (string-typed paging params).
    get_tag_list = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/tag/list/',
        'params': {
            'page_number': '1',   # page index
            'page_size': '200',   # items per page
            "tag_key": None,
        },
        'description': 'CMDB 获取标签key、value列表',
    }

    # Service tree for the current business.
    get_service_tree = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/tree/',
        'params': {"biz_id": "0"},
        'description': '获取当前业务服务树',
    }

    # Dynamic groups under a business.
    get_dynamic_groups = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/biz/dynamic_group/list/',
        'params': {"biz_id": ""},
        'description': '获取当前业务下动态分组',
    }

    # Preview a dynamic group's resolved members.
    get_dynamic_group_details = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/biz/dynamic_group/preview/',
        'params': {},
        'description': '获取动态分组详细数据',
    }

    # Server assets under a tree node.
    get_tree_asset_server = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/tree/asset/server',
        'params': {},
        'description': '根据业务获取资源信息',
    }

    # Legacy variant, slated for removal.
    get_tree_asset_server_old = {
        'method': 'GET',
        'url': f'{cmdb_prefix}/api/v2/cmdb/tree/asset/',
        'params': {},
        'description': '根据业务获取资源信息,待废弃',
    }
--------------------------------------------------------------------------------
/websdk2/apis/kerrigan_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc :记录API
8 | """
9 |
10 |
class KerriganAPIS:
    """Endpoint constants for the Kerrigan config service (``/api/kerrigan``)."""

    kerrigan_prefix = "/api/kerrigan"

    # Fetch a published configuration file for a project/environment/service.
    get_publish_config = {
        'method': 'GET',
        'url': f'{kerrigan_prefix}/v1/conf/publish/config/',
        'params': {
            'project_code': '',
            'environment': '',
            'service': 'service',
            'filename': 'filename',
        },
        'field_help': {},
        'description': '获取配置',
    }
--------------------------------------------------------------------------------
/websdk2/apis/mgv4_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc :记录API
8 | """
9 |
10 |
class AdminV4APIS:
    """Endpoint constants for the v4 PAAS/admin service (``/api/p``)."""

    route_prefix = "/api/p"

    # Paged user listing.
    get_user_list = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/user/list/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '查看用户列表',
    }

    # Contact details (phone/email/DingTalk/Feishu ids) for users/roles.
    get_user_contact_info = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/user/send_addr/',
        'params': {},
        'field_help': {
            'users_str': '用户id 用户名 昵称 半角逗号分隔',
            'roles_str': '角色id 半角逗号分隔',
        },
        'description': '获取用户的联系方式,手机/邮箱/钉钉ID/飞书ID',
    }

    # Admin user listing.
    get_users = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/user/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '管理-查看用户列表',
    }

    # Create/update/delete users — adjust ``method`` and ``body`` before use.
    opt_users = {
        'method': 'POST',
        'url': f'{route_prefix}/v4/user/',
        'body': {
            'username': None,
            'nickname': None,
            'password': None,
            'department': None,
            'tel': None,
            'email': None,
            'user_state': '20',
        },
        'field_help': {'user_state': '20'},
        'description': '操作用户数据,支持增删改,请修改method和body数据',
    }

    # Business listing (for the permission center's business sync).
    get_biz = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/biz/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '权限中心-业务管理-同步业务可以用',
    }

    # Business list used for viewing/switching.
    get_biz_list = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/biz/list/',
        'params': {},
        'field_help': {},
        'description': 'PAAS基础功能-查看业务列表和切换',
    }

    # Regular roles (paged, fuzzy search via 'searchVal').
    get_normal_role_list = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/role/list/',
        'params': {},
        'field_help': {'page': '分页/第几页', 'limit': '分页/每页多少个', 'searchVal': '模糊查询'},
        'description': '查看常规角色列表',
    }

    # All base roles.
    get_all_base_role_list = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/role/base_list/',
        'params': {},
        'field_help': {},
        'description': '查看所有基础角色',
    }

    # All roles with their members — deprecated variant.
    get_all_role_user_v4 = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/all_role_user/',
        'params': {},
        'field_help': {},
        'description': '获取所有角色和角色内内用户信息-待废弃',
    }

    # All roles with their members — current variant.
    get_all_roles_users = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/all_roles_users/',
        'params': {},
        'field_help': {},
        'description': '获取所有角色和角色内内用户信息',
    }

    # Shared favorites: read and write.
    get_favorites_v4 = {
        'method': 'GET',
        'url': f'{route_prefix}/v4/favorites/',
        'params': {},
        'field_help': {},
        'description': 'PAAS-基础功能-公用收藏接口-查看',
    }

    opt_favorites_v4 = {
        'method': 'POST',
        'url': f'{route_prefix}/v4/favorites/',
        'body': {
            "key": "",
            "app_code": "overall",
            "value": {},
        },
        'field_help': {},
        'description': 'PAAS-基础功能-公用收藏接口-添加修改',
    }
--------------------------------------------------------------------------------
/websdk2/apis/notice_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2025年2月12日13:37:54
7 | Desc :记录API
8 | """
9 |
10 |
class NoticeAPIS:
    """API registry for the notification-center (noc) service."""

    route_prefix = "/api/noc"

    # Send an alert through the alert-routing endpoint.
    send_router_alert = {
        'method': 'POST',
        'url': f'{route_prefix}/v1/router-alert',
        'params': {},
        'body': {},
        'field_help': {},
        'description': '通过告警路由发送告警',
    }
21 |
--------------------------------------------------------------------------------
/websdk2/apis/task_apis.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc :记录API
8 | """
9 |
10 |
class TaskAPIS:
    """API registry for the job/flow (task) service."""

    job_prefix = "/api/job"

    # Create a job order; the POST body drives the flow engine.
    create_jobs = {
        'method': 'POST',
        'url': f'{job_prefix}/v1/flow/accept/create/',
        'body': {
            "order_name": "标题",
            "creator": "发起人",
            "flow_version_name": "依赖的流程名称",
            "start_time": "2018-11-27 14:09:50",
            # must be JSON text that parses into a dict
            "global_params": "{}",
            "details": "这里是备注"
        },
        'field_help': {
            "order_name": "标题",
            "creator": "提交人",
            "flow_version_name": "依赖的流程名称",
            "start_node": "如果有多个开始节点,必须指定一个",
            "start_time": "在开始节点上设置时间,到这个时间会进行执行",
            "global_params": "这里是一个字典,里面的参数可以自行定义,如上,你模板参数里面用到了哪些你都可以在这里定义出来,当你的POST到这个接口时候,我们会自动接受此参数,并帮你运行脚本 解析你要传入的参数。",
            "details": "描述,备注信息",
        },
        'description': '基于此接口可以创建作业任务',
    }

    # List CMDB dynamic groups (proxied endpoint).
    get_proxy_dynamic_group_list = {
        'method': 'GET',
        'url': f'{job_prefix}/v1/proxy/cmdb/dynamic_group/list/',
        'params': {
            "biz_id": None,        # business / resource id
            'page_number': '1',    # pagination: page index
            'page_size': '200',    # pagination: page size
        },
        'field_help': {},
        'description': 'COCO动态分组',
    }

    # Fetch detail data for a dynamic group (proxied endpoint).
    get_proxy_dynamic_group_info = {
        'method': 'GET',
        'url': f'{job_prefix}/v1/proxy/cmdb/dynamic_group/info/',
        'params': {
            "exec_uuid": None,        # query by the dynamic group UUID
            'exec_uuid_list': '[]',   # batch query
        },
        'field_help': {},
        'description': '动态分组 获取动态分组详细数据',
    }

    # Current (in-flight) order list.
    get_current_order_list = {
        'method': 'GET',
        'url': f'{job_prefix}/v1/flow/current/list/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '查看订单列表',
    }

    # Historical order list.
    get_history_order_list = {
        'method': 'GET',
        'url': f'{job_prefix}/v1/flow/history/list/',
        'params': {'page': 1, 'limit': 201},
        'field_help': {},
        'description': '查看订单列表',
    }
81 |
--------------------------------------------------------------------------------
/websdk2/application.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : ming
5 | date : 2018年1月12日13:43:27
6 | role : 定制 Application
7 | """
8 |
9 | import logging
10 | from abc import ABC
11 | from shortuuid import uuid
12 | from tornado import httpserver, ioloop
13 | from tornado import options as tnd_options
14 | from tornado.options import options, define
15 | from tornado.web import Application as tornadoApp
16 | from tornado.web import RequestHandler
17 | from .configs import configs
18 | from .consts import const
19 | from .logger import init_logging
20 |
# options.log_file_prefix = "/tmp/codo.log"
# Command-line options shared by every service built on this Application.
define("addr", default='0.0.0.0', help="run on the given ip address", type=str)
define("port", default=8000, help="run on the given port", type=int)
define("progid", default=str(uuid()), help="tornado progress id", type=str)
init_logging()
# Metadata of every registered URL; served by the /v1/probe/meta/urls/ probe.
urls_meta_list = []
27 |
28 |
class Application(tornadoApp):
    """Customized Tornado Application.

    Adds, on top of ``tornado.web.Application``: command-line option parsing,
    config import, root-logger level syncing, URL metadata collection
    (exposed via ``MetaProbe``) and HTTP server bootstrap.
    """

    def __init__(self, handlers=None, default_host="", transforms=None, **settings):
        tnd_options.parse_command_line()
        handlers = handlers or []
        if configs.can_import:
            configs.import_dict(**settings)

        # Built-in probe endpoint reporting every registered URL.
        handlers.extend([(r"/v1/probe/meta/urls/", MetaProbe), ])

        self._generate_url_metadata(handlers)

        max_buffer_size = configs.get('max_buffer_size')
        max_body_size = configs.get('max_body_size')
        # Settings are passed through the (possibly just-imported) configs mapping.
        super(Application, self).__init__(handlers, default_host, transforms, **configs)

        if configs.get(const.LOG_LEVEL) in [10, 20, 30, 40]:
            log_level = configs.get(const.LOG_LEVEL)
            current_level = logging.getLogger().getEffectiveLevel()

            if log_level != current_level:  # only update when different from the current level
                logging.getLogger().setLevel(log_level)
                logging.info(f'[APP Logging] Log level configured to: {logging.getLevelName(log_level)} ({log_level})')

        http_server = httpserver.HTTPServer(self, max_buffer_size=max_buffer_size, max_body_size=max_body_size)
        http_server.listen(options.port, address=options.addr)
        self.io_loop = ioloop.IOLoop.instance()

    def start_server(self):
        """Start the tornado IOLoop; blocks until interrupted.

        :return: None
        """
        try:
            logging.info(f'server address: {options.addr}:{options.port}')
            # Fixed misspelled log message ("sucessfuled") from the original.
            logging.info('web server started successfully.')
            self.io_loop.start()
        except KeyboardInterrupt:
            self.io_loop.stop()
            logging.info("Server shut down gracefully.")
        except Exception as e:
            logging.error(f"Unexpected error: {e}", exc_info=True)

    @staticmethod
    def _generate_url_metadata(urls):
        """Append one metadata dict per registered URL to the module-level list."""
        for url in urls:
            # A URL spec may be (pattern, handler) or (pattern, handler, kwargs).
            has_kwargs = len(url) > 2
            meta = {
                "url": url[0],
                "name": url[2].get("handle_name", "暂无")[:30] if has_kwargs else "暂无",
                "method": url[2].get("method", []) if has_kwargs else [],
                "status": url[2].get("handle_status", "y")[:2] if has_kwargs else "y",
            }
            urls_meta_list.append(meta)
84 |
85 |
class MetaProbe(ABC, RequestHandler):
    """Probe endpoint returning metadata for every registered URL."""

    def head(self, *args, **kwargs):
        self._write_response()

    def get(self, *args, **kwargs):
        self._write_response()

    def _write_response(self):
        payload = {
            "code": 0,
            "msg": "Get success",
            "count": len(urls_meta_list),
            "data": urls_meta_list,
        }
        self.write(payload)
100 |
101 |
102 | if __name__ == '__main__':
103 | pass
104 |
--------------------------------------------------------------------------------
/websdk2/base_handler.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """"
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc : 处理API请求
8 | """
9 |
10 | import json
11 | import base64
12 | import hmac
13 | import logging
14 | import traceback
15 | from shortuuid import uuid
16 | # from .cache_context import cache_conn
17 | from tornado.escape import utf8, _unicode
18 | from tornado.web import RequestHandler, HTTPError
19 | from .jwt_token import AuthToken, jwt
20 |
21 |
class BaseHandler(RequestHandler):
    """Common base handler: collects query params and JSON body, enforces
    CSRF and auth_key login, and exposes user / tenant / business context."""

    def __init__(self, *args, **kwargs):
        # Fresh CSRF key per request object.
        self.new_csrf_key = str(uuid())
        self.business_id, self.resource_group = None, None
        self.user_id, self.username, self.nickname, self.email, self.is_super = None, None, None, None, False
        # NOTE(review): this bool attribute shadows the is_superuser() method defined below.
        self.is_superuser = self.is_super
        self.token_verify = False
        self.tenant_filter = False
        self.params = {}
        self.req_data = {}
        super(BaseHandler, self).__init__(*args, **kwargs)

    def initialize(self, *args, **kwargs):
        pass

    def get_params_dict(self):
        """Collect URL arguments into self.params; 'filter_map' is JSON-decoded
        and, when tenant filtering is on, merged with the tenant filter map."""
        self.params = {k: self.get_argument(k) for k in self.request.arguments}
        if "filter_map" in self.params:
            try:
                # NOTE(review): redundant — json is already imported at module level.
                import json
                filter_map = self.params.get('filter_map')
                filter_map = json.loads(filter_map)
            # NOTE(review): bare except silently maps any malformed filter_map to {}.
            except:
                filter_map = {}
        else:
            filter_map = {}
        self.params['filter_map'] = filter_map

        if self.tenant_filter and self.request_tenant_map and isinstance(self.request_tenant_map, dict):
            self.params['filter_map'] = {**filter_map, **self.request_tenant_map}

        if "auth_key" in self.params: self.params.pop('auth_key')

    def get_req_body_dict(self):
        """Parse the request body as JSON into self.req_data (write methods only);
        strips front-end bookkeeping keys. Returns None when parsing fails."""
        if self.request.method in ("POST", "PUT", "PATCH", "DELETE"):
            try:
                self.req_data = json.loads(self.request.body.decode("utf-8"))
                # Keys injected by the front-end table widget; never persisted.
                for key in ['_index', '_rowKey', 'update_time']:
                    self.req_data.pop(key, None)
                return self.req_data
            except json.JSONDecodeError as err:
                logging.error(f"Error parsing JSON data in request {self.request.method} at {self.request.path}: {err}")
            except Exception as err:
                logging.error(f"Unexpected error in request {self.request.method} at {self.request.path}: {err}")

    def codo_csrf(self):
        # Hook for project-specific CSRF handling; default no-op.
        pass

    def check_xsrf_cookie(self):
        """XSRF check: skipped for safe methods and SDK calls; raises HTTPError 402
        (project convention, not standard 403) on failure."""
        if not self.settings.get('xsrf_cookies'): return
        if self.request.method in ("GET", "HEAD", "OPTIONS") or self.request.headers.get('Sdk-Method'):
            pass
        else:
            token = (
                self.get_argument("_xsrf", None)
                or self.request.headers.get("X-Xsrftoken")
                or self.request.headers.get("X-Csrftoken")
            )
            if not token: raise HTTPError(402, "'_xsrf' argument missing from POST")
            _, token, _ = self._decode_xsrf_token(token)
            _, expected_token, _ = self._get_raw_xsrf_token()
            if not token:
                raise HTTPError(402, "'_xsrf' argument has invalid format")
            if not hmac.compare_digest(utf8(token), utf8(expected_token)):
                raise HTTPError(402, "XSRF cookie does not match POST argument")

    def codo_login(self):
        # Login verification: resolve auth_key from cookie, header, or query argument.
        auth_key = self.get_cookie('auth_key') if self.get_cookie("auth_key") else self.request.headers.get('auth-key')
        if not auth_key: auth_key = self.get_argument('auth_key', default=None, strip=True)

        if not auth_key: raise HTTPError(401, 'auth failed')

        if self.token_verify:
            auth_token = AuthToken()
            user_info = auth_token.decode_auth_token(auth_key)
        else:
            # Trust-the-gateway mode: decode the JWT without verifying its signature.
            user_info = jwt.decode(auth_key, options={"verify_signature": False}).get('data')

        if not user_info: raise HTTPError(401, 'auth failed')

        self.user_id = user_info.get('user_id', None)
        self.username = user_info.get('username', None)
        self.nickname = user_info.get('nickname', None)
        self.email = user_info.get('email', None)
        self.is_super = user_info.get('is_superuser', False)

        if not self.user_id: raise HTTPError(401, 'auth failed')

        self.user_id = str(self.user_id)
        self.set_secure_cookie("user_id", self.user_id)
        self.set_secure_cookie("nickname", self.nickname)
        self.set_secure_cookie("username", self.username)
        self.set_secure_cookie("email", str(self.email))
        self.is_superuser = self.is_super

    def prepare(self):
        # Collect URL arguments into a dict.
        self.get_params_dict()
        # Client-side CSRF verification (also forces xsrf cookie generation).
        self.codo_csrf()
        self.xsrf_token

        # Login verification.
        self.codo_login()

    def get_current_user(self):
        return self.username

    def get_current_id(self):
        return self.user_id

    def get_current_nickname(self):
        return self.nickname

    def get_current_email(self):
        return self.email

    def is_superuser(self):
        # NOTE(review): dead in practice — __init__ rebinds self.is_superuser to a bool.
        return self.is_superuser

    @property
    def request_resource_group(self):
        """Resource group from secure cookie or 'resource-group' header (str or None)."""
        if not self.resource_group:
            self.resource_group = self.get_secure_cookie("resource_group") if self.get_secure_cookie(
                "resource_group") else self.request.headers.get('resource-group')

            if not self.resource_group: return None
            if isinstance(self.resource_group, bytes): self.resource_group = bytes.decode(self.resource_group)
            return self.resource_group

        return self.resource_group

    @property
    def request_resource_map(self):
        """Filter-map fragment for the resource group ({} when unscoped)."""
        if self.request_resource_group in [None, 'all', '所有项目']:
            return dict()
        else:
            return dict(resource_group=self.request_resource_group)

    @property
    def request_business_id(self):
        """Business id from secure cookie or 'biz-id' header (str or None)."""
        if not self.business_id:
            self.business_id = self.get_secure_cookie("business_id") if self.get_secure_cookie("business_id") else \
                self.request.headers.get('biz-id')
            if not self.business_id: return None
            if isinstance(self.business_id, bytes): self.business_id = bytes.decode(self.business_id)
            return self.business_id

        return self.business_id

    # Newly added tenant accessors.
    @property
    def request_tenant(self):
        """Tenant name: base64 'tenant' header, else 'tenant'/'resource_group' secure cookie."""
        if self.request.headers.get('tenant'):
            return str(base64.b64decode(self.request.headers.get('tenant')), "utf-8")
        if self.get_secure_cookie('tenant'):
            tenant = self.get_secure_cookie('tenant')
            return bytes.decode(tenant) if isinstance(tenant, bytes) else tenant
        if self.get_secure_cookie('resource_group'):
            tenant = self.get_secure_cookie('resource_group')
            return bytes.decode(tenant) if isinstance(tenant, bytes) else tenant
        return None

    @property
    def request_tenantid(self):
        """Tenant id: 'tenantid' header, else 'tenantid'/'business_id' secure cookie."""
        if self.request.headers.get('tenantid'): return self.request.headers.get('tenantid')
        if self.get_secure_cookie('tenantid'):
            tenantid = self.get_secure_cookie('tenantid')
            return bytes.decode(tenantid) if isinstance(tenantid, bytes) else tenantid
        if self.get_secure_cookie('business_id'):
            tenantid = self.get_secure_cookie('business_id')
            return bytes.decode(tenantid) if isinstance(tenantid, bytes) else tenantid
        return None

    @property
    def request_tenant_map(self):
        """Filter-map fragment for the tenant ({} when unscoped; '500' presumably
        is the default/all tenant id — confirm against the auth service)."""
        if self.request_tenantid in [None, '500'] or self.request_tenant in [None, 'all', '所有项目']:
            return dict()
        else:
            return dict(tenantid=self.request_tenantid)

    @property
    def biz_info_map(self):
        """Business info dict cached in Redis under BIZ_INFO_STR; {} on any failure."""
        from .cache_context import cache_conn
        redis_conn = cache_conn()
        try:
            biz_info_str = redis_conn.get("BIZ_INFO_STR")
            biz_info_dict = json.loads(biz_info_str.decode())
        except Exception as err:
            return {}
        return biz_info_dict

    @property
    def request_username(self):
        return self.username

    @property
    def request_user_id(self):
        return self.user_id

    @property
    def request_nickname(self):
        return self.nickname

    @property
    def request_email(self):
        return self.email

    @property
    def request_is_superuser(self):
        return self.is_superuser

    def request_fullname(self):
        return f'{self.request_username}({self.request_nickname})'

    def write_error(self, status_code, **kwargs):
        """Map HTTP error codes to short fixed responses; 500 dumps the traceback."""
        error_trace_list = traceback.format_exception(*kwargs.get("exc_info"))
        if status_code == 404:
            self.set_status(status_code)
            return self.finish('找不到相关路径-404')

        elif status_code == 400:
            self.set_status(status_code)
            return self.finish('bad request...')

        elif status_code == 402:
            self.set_status(status_code)
            return self.finish('csrf error...')

        elif status_code == 403:
            self.set_status(status_code)
            return self.finish('Sorry, you have no permission. Please contact the administrator')

        if status_code == 500:
            self.set_status(status_code)
            for line in error_trace_list:
                self.write(str(line))
            self.finish()

        elif status_code == 401:
            self.set_status(status_code)
            return self.finish('你没有登录')

        else:
            self.set_status(status_code)
268 |
269 |
class LivenessProbe(RequestHandler):
    """Health-check endpoint: answers HEAD/GET with a static OK payload."""

    def initialize(self, *args, **kwargs):
        pass

    def head(self, *args, **kwargs):
        self.write({"code": 0, "msg": "I'm OK"})

    def get(self, *args, **kwargs):
        self.write({"code": 0, "msg": "I'm OK"})
279 |
--------------------------------------------------------------------------------
/websdk2/cache.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : ss
5 | date : 2018年4月11日
6 | role : 缓存
7 | """
8 |
9 | import base64
10 | import json
11 | # import pickle
12 | from .consts import const
13 | import redis
14 | from shortuuid import uuid
15 | from .configs import configs as my_configs
16 | from .tools import singleton, bytes_to_unicode, convert
17 |
18 |
@singleton
class Cache(object):
    """Singleton Redis cache wrapper.

    Builds one redis.Redis connection per configured instance and namespaces
    'private' keys with a per-process random salt, so private entries cannot
    collide across processes (and are unreachable after a restart).
    """

    def __init__(self):
        self.__redis_connections = {}
        redis_configs = my_configs[const.REDIS_CONFIG_ITEM]
        for config_key, redis_config in redis_configs.items():
            auth = redis_config[const.RD_AUTH_KEY]
            host = redis_config[const.RD_HOST_KEY]
            port = redis_config[const.RD_PORT_KEY]
            db = redis_config[const.RD_DB_KEY]
            return_utf8 = False
            if const.RD_DECODE_RESPONSES in redis_config:
                return_utf8 = redis_config[const.RD_DECODE_RESPONSES]
            password = redis_config[const.RD_PASSWORD_KEY]

            if auth:
                redis_conn = redis.Redis(host=host, port=port, db=db, password=password, decode_responses=return_utf8)
            else:
                redis_conn = redis.Redis(host=host, port=port, db=db, decode_responses=return_utf8)
            self.__redis_connections[config_key] = redis_conn

        # Per-process salt used to namespace private keys.
        self.__salt = str(uuid())

    def set(self, key, value, expire=-1, conn_key=const.DEFAULT_RD_KEY, private=True, pipeline=None):
        """SET a value; expire <= 0 means no TTL."""
        real_key = self.__get_key(key, private)
        execute_main = self.__get_execute_main(conn_key, pipeline)
        if expire > 0:
            execute_main.set(real_key, value, ex=expire)
        else:
            execute_main.set(real_key, value)

    def set_json(self, key, value, expire=-1, conn_key=const.DEFAULT_RD_KEY, private=True, pipeline=None):
        """Store a JSON-serializable value (persisted as base64-encoded JSON)."""
        value = json.dumps(value)
        value = base64.b64encode(value.encode('utf-8'))
        self.set(key, value, expire, conn_key, private, pipeline)

    def get(self, key, default='', conn_key=const.DEFAULT_RD_KEY, private=True, pipeline=None):
        """GET a value as unicode; returns `default` when the key is absent."""
        real_key = self.__get_key(key, private)
        execute_main = self.__get_execute_main(conn_key, pipeline)
        if execute_main.exists(real_key):
            result = execute_main.get(real_key)
            return bytes_to_unicode(result)
        return default

    def incr(self, key, private=True,
             conn_key=const.DEFAULT_RD_KEY, amount=1):
        """INCR an existing key and return its new value; None when the key is absent."""
        real_key = self.__get_key(key, private)
        execute_main = self.__get_execute_main(conn_key, None)
        if execute_main.exists(real_key):
            execute_main.incr(real_key, amount=amount)
            return self.get(key, default='0',
                            private=private, conn_key=conn_key)
        return None

    def get_json(self, key, default='',
                 conn_key=const.DEFAULT_RD_KEY, private=True):
        """Fetch and decode a value previously stored via set_json."""
        result = self.get(key, default, conn_key, private)
        result = base64.b64decode(result)
        result = bytes_to_unicode(result)
        if result:
            result = json.loads(result)
        return result

    def delete(self, *keys, conn_key=const.DEFAULT_RD_KEY, private=True, pipeline=None):
        """DEL one or more keys; returns the number of keys removed."""
        execute_main = self.__get_execute_main(conn_key, pipeline)
        _keys = [self.__get_key(key, private) for key in keys]
        return execute_main.delete(*_keys)

    def clear(self, conn_key=const.DEFAULT_RD_KEY):
        """FLUSHDB on the selected connection — wipes the entire database."""
        execute_main = self.__get_execute_main(conn_key, None)
        execute_main.flushdb()

    def get_pipeline(self, conn_key=const.DEFAULT_RD_KEY):
        """Return a fresh pipeline for the selected connection."""
        return self.__redis_connections[conn_key].pipeline()

    def execute_pipeline(self, pipeline):
        """Execute a pipeline built by get_pipeline (no-op on falsy input)."""
        if pipeline:
            return pipeline.execute()

    def get_conn(self, conn_key=const.DEFAULT_RD_KEY):
        """Return the underlying redis connection object."""
        return self.__get_execute_main(conn_key)

    def hgetall(self, key, default='', conn_key=const.DEFAULT_RD_KEY, private=True):
        """HGETALL as a unicode dict; returns `default` when the key is absent."""
        real_key = self.__get_key(key, private)
        execute_main = self.__get_execute_main(conn_key, None)
        if execute_main.exists(real_key):
            result = execute_main.hgetall(real_key)
            result = convert(result)
        else:
            return default
        return result

    @property
    def redis(self):
        # Shortcut to the default connection.
        return self.__get_execute_main()

    def __get_key(self, key, private=True):
        # Private keys are prefixed with the per-process salt.
        if private:
            return '%s%s' % (self.__salt, key)
        else:
            return key

    def __get_execute_main(self, conn_key=const.DEFAULT_RD_KEY, pipeline=None):
        # A pipeline, when supplied, takes precedence over the named connection.
        if pipeline:
            return pipeline
        return self.__redis_connections[conn_key]
125 |
126 |
def get_cache():
    """Return the (singleton) Cache instance."""
    return Cache()
129 |
--------------------------------------------------------------------------------
/websdk2/cache_context.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018/11/26
7 | Desc :
8 | """
9 | import redis
10 | from .consts import const
11 | from .configs import configs
12 |
13 | cache_conns = {}
14 |
def cache_conn(key=None, db=None):
    """Build Redis connections for every configured instance and return one.

    :param key: which configured connection to return; defaults to
                const.DEFAULT_RD_KEY.
    :param db: optional database index overriding each config's own db.
    :return: redis.StrictRedis for the requested key.
    :raises KeyError: when `key` names a connection that is not configured.
    """
    redis_configs = configs[const.REDIS_CONFIG_ITEM]
    if not key:
        key = const.DEFAULT_RD_KEY
    for config_key, redis_config in redis_configs.items():
        auth = redis_config[const.RD_AUTH_KEY]
        host = redis_config[const.RD_HOST_KEY]
        port = redis_config[const.RD_PORT_KEY]
        password = redis_config[const.RD_PASSWORD_KEY]
        # BUG FIX: the original rebound the `db` parameter inside the loop, so
        # the first config's db leaked into every subsequent config — and an
        # explicit db=0 was ignored because 0 is falsy. Resolve per iteration.
        conn_db = db if db is not None else redis_config[const.RD_DB_KEY]
        return_utf8 = False
        if const.RD_DECODE_RESPONSES in redis_config:
            return_utf8 = redis_config[const.RD_DECODE_RESPONSES]

        if auth:
            redis_pool = redis.ConnectionPool(host=host, port=port, db=conn_db, password=password,
                                              decode_responses=return_utf8)
        else:
            redis_pool = redis.ConnectionPool(host=host, port=port, db=conn_db, decode_responses=return_utf8)
        cache_conns[config_key] = redis.StrictRedis(connection_pool=redis_pool)
    return cache_conns[key]
39 |
--------------------------------------------------------------------------------
/websdk2/client.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """"
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018年2月5日13:37:54
7 | Desc : 处理API请求
8 | """
9 |
10 | import json
11 | import requests
12 | from urllib.parse import urlencode
13 | from typing import Union, Optional
14 | import logging
15 | from .consts import const
16 | from .configs import configs
17 | from tornado.httpclient import AsyncHTTPClient
18 |
19 | logger = logging.getLogger(__name__)
20 |
21 |
class AcsClient:
    """Lightweight client for the CODO API gateway.

    The auth header is resolved in priority order: an incoming tornado
    request's headers, explicit `headers`, an explicit `auth_key`, or the
    configured const.API_AUTH_KEY. Offers synchronous calls via `requests`
    and an async variant via tornado's AsyncHTTPClient.
    """

    def __init__(self, request=None, auth_key=None, headers=None, endpoint='http://gw.opendevops.cn',
                 request_timeout=10):
        if request:
            self.headers = request.headers
        elif headers:
            self.headers = headers
        elif auth_key:
            self.headers = {"Cookie": 'auth_key={}'.format(auth_key)}
        else:
            self.headers = {"Cookie": 'auth_key={}'.format(configs.get(const.API_AUTH_KEY))}

        # Drop the conditional-request header so the gateway always returns a full body.
        if 'If-None-Match' in self.headers: del self.headers['If-None-Match']
        self.endpoint = endpoint
        # Prefer the configured gateway URL when the caller kept the default endpoint.
        if configs.get(const.WEBSITE_API_GW_URL) and endpoint == 'http://gw.opendevops.cn':
            self.endpoint = configs.get(const.WEBSITE_API_GW_URL)
        # Marker header identifying SDK traffic to the gateway.
        self.headers['Sdk-Method'] = 'zQtY4sw7sqYspVLrqV'
        self.request_timeout = request_timeout

    # Synchronous call; returns the response body as text (typically JSON).
    def do_action(self, **kwargs):
        kwargs = self.with_params_data_url(**kwargs)
        response = requests.request(kwargs.get('method'), kwargs.get('url'), headers=self.headers,
                                    data=kwargs.get('body'), timeout=self.request_timeout)

        return response.text

    # Synchronous call; returns the full requests.Response object.
    def do_action_v2(self, **kwargs):
        kwargs = self.with_params_data_url(**kwargs)
        response = requests.request(kwargs.get('method'), kwargs.get('url'), headers=self.headers,
                                    data=kwargs.get('body'), timeout=self.request_timeout)
        return response

    def do_action_v3(self, **kwargs):
        """Like do_action_v2, but sends `json=` when provided, else raw `data=`."""
        kwargs = self.with_params_data_url(**kwargs)

        request_params = {
            'method': kwargs.get('method'),
            'url': kwargs.get('url'),
            'headers': self.headers,
            'timeout': self.request_timeout
        }

        if kwargs.get('json'):
            request_params['json'] = kwargs['json']
        else:
            request_params['data'] = kwargs.get('body')

        response = requests.request(**request_params)
        return response

    async def do_action_with_async(self, **kwargs):
        """Async variant; returns the raw response body (bytes)."""
        body = await self._implementation_of_do_action(**kwargs)
        return body

    async def _implementation_of_do_action(self, **kwargs):
        http_client = AsyncHTTPClient()
        request = self.with_params_data_url(**kwargs)
        # json=kwargs.get('json')
        response = await http_client.fetch(request.get('url'), method=request.get('method'), raise_error=False,
                                           body=request.get('body'), headers=self.headers,
                                           request_timeout=self.request_timeout)

        return response.body

    # Alternative aiohttp-based implementation kept for reference:
    # import aiohttp
    # async def do_action_with_async_v2(self, **kwargs):
    #     body = await self._implementation_of_do_aiohttp(**kwargs)
    #     return body
    #
    # async def _implementation_of_do_aiohttp(self, **kwargs):
    #     async with aiohttp.ClientSession() as session:
    #         request = self.with_params_data_url(**kwargs)
    #         async with session.request(method=request['method'], url=request['url'],
    #                                    headers=self.headers, data=request.get('body'),
    #                                    timeout=self.request_timeout) as response:
    #             return await response.read()

    # (Removed a superseded, commented-out copy of with_params_data_url here.)

    def with_params_data_url(self, **kwargs) -> dict:
        """Normalize a request spec in place.

        Prefixes the endpoint onto the URL, upper-cases the method (default
        GET), JSON-encodes a dict body, appends query params, and defaults
        Content-Type for non-GET requests.

        :raises TypeError: for a write method without body/json, or with a
                           string body that is not valid JSON.
        """
        endpoint = self.endpoint.strip("'").strip('"')
        kwargs['url'] = f"{endpoint}{kwargs.get('url', '')}"
        kwargs['method'] = kwargs.get('method', 'GET').upper()

        body: Union[dict, str] = kwargs.get('body', {})
        req_json: Optional[dict] = kwargs.get('json')

        if kwargs['method'] in {'POST', 'PATCH', 'PUT'}:
            if not (body or req_json):
                raise TypeError(f"Method {kwargs['method']} requires a non-empty body or JSON payload.")
            if body and not isinstance(body, dict):
                try:
                    body = json.loads(body)
                except json.JSONDecodeError as e:
                    raise TypeError(f"Invalid JSON body: {e}")

        if body and isinstance(body, dict):
            kwargs['body'] = json.dumps(body)

        params: Optional[dict] = kwargs.get('params')
        if params:
            kwargs['url'] = f"{kwargs['url']}?{urlencode(params)}"

        kwargs['headers'] = kwargs.get('headers', self.headers or {})

        if kwargs['method'] != 'GET':
            kwargs['headers'].setdefault('Content-Type', 'application/json')

        return kwargs

    @staticmethod
    def help():
        help_info = """
        headers = {"Cookie": 'auth_key={}'.format(auth_key)}
        ### 三种实例化方式
        1. client = AcsClient(endpoint=endpoint, headers=headers)
        2. client = AcsClient(endpoint=endpoint, request=self.request)
        3. client = AcsClient(endpoint=endpoint, auth_key=auth_key)

        调用: 传入api 的参数,可以参考下面示例

        同步
        response = client.do_action(**api_set.get_users)
        print(json.loads(response))

        异步
        # import asyncio
        # loop = asyncio.get_event_loop()
        # ### 使用gather或者wait可以同时注册多个任务,实现并发
        # # task1 = asyncio.ensure_future(coroutine1)
        # # task2 = asyncio.ensure_future(coroutine2)
        # # tasks = asyncio.gather(*[task1, task2])
        # # loop.run_until_complete(tasks)
        # ### 单个使用
        # response = loop.run_until_complete(client.do_action_with_async(**api_set.get_users))
        # response = json.loads(response)
        # print(response)
        # loop.close()

        tornado 项目内必须使用异步,不过可以直接使用
        from websdk2.client import AcsClient
        from websdk2.api_set import api_set
        async def get(self):
            endpoint = ''
            client = AcsClient(endpoint=endpoint, headers=self.request.headers)
            response = await client.do_action_with_async(**api_set.get_users)
            return self.write(response)

        """
        return help_info
202 |
203 |
204 | if __name__ == '__main__':
205 | pass
206 |
--------------------------------------------------------------------------------
/websdk2/cloud/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ss1917/ops_sdk/ea34a6e30630709b20623d08450c8b65c0c92d88/websdk2/cloud/__init__.py
--------------------------------------------------------------------------------
/websdk2/cloud/qcloud_api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """"
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2019年2月5日13:37:54
7 | Desc : 云厂商的一些方法
8 | """
9 |
10 | import hmac
11 | import hashlib
12 | import base64
13 | from urllib import parse
14 |
15 |
16 | class QCloudApiOpt:
17 |
18 | @staticmethod
19 | def sort_dic(keydict):
20 | return sorted(zip(keydict.keys(), keydict.values()))
21 |
22 | @staticmethod
23 | def get_str_sign(sortlist, api_url):
24 | sign_str_init = ''
25 | for value in sortlist:
26 | sign_str_init += value[0] + '=' + value[1] + '&'
27 | sign_str = 'GET' + api_url + sign_str_init[:-1]
28 | return sign_str, sign_str_init
29 |
30 | @staticmethod
31 | def get_signature(sign_str, secret_key):
32 | secretkey = secret_key
33 | signature = bytes(sign_str, encoding='utf-8')
34 | secretkey = bytes(secretkey, encoding='utf-8')
35 | my_sign = hmac.new(secretkey, signature, hashlib.sha1).digest()
36 | return base64.b64encode(my_sign)
37 |
38 | @staticmethod
39 | def encode_signature(my_sign):
40 | return parse.quote(my_sign)
41 |
42 | @staticmethod
43 | def get_result_url(sign_str, result_sign, api_url):
44 | return 'https://' + api_url + sign_str + 'Signature=' + result_sign
45 |
46 | @staticmethod
47 | def run(keydict, api_url, secret_key):
48 | sortlist = QCloudApiOpt.sort_dic(keydict)
49 | # 获取拼接后的sign字符串
50 | sign_str, sign_str_int = QCloudApiOpt.get_str_sign(sortlist, api_url)
51 | # 获取签名
52 | my_sign = QCloudApiOpt.get_signature(sign_str, secret_key)
53 | # 对签名串进行编码
54 | result_sign = QCloudApiOpt.encode_signature(my_sign)
55 | # 获取最终请求url
56 | result_url = QCloudApiOpt.get_result_url(sign_str_int, result_sign, api_url)
57 | return result_url
58 |
--------------------------------------------------------------------------------
/websdk2/cloud/ucloud_api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2019年2月5日13:37:54
7 | Desc : 云厂商的一些方法
8 | """
9 |
10 | import requests
11 | import hashlib
12 | import logging
13 |
# Module-level logging setup. NOTE(review): basicConfig() at import time
# configures the root logger for the whole embedding application — confirm
# this side effect is intended for a library module.
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%Y %H:%M:%S')

# Dedicated "ucloud" logger, capped at WARN to suppress request noise.
logger = logging.getLogger("ucloud")
logger.setLevel(logging.WARN)
19 |
20 |
class UCloudApi:
    """Minimal UCloud API client.

    Signs every request with the account's private key (UCloud's SHA1
    scheme) and issues plain GET calls against the public API endpoint.
    """

    def __init__(self, access_id, access_key, url='https://api.ucloud.cn/', timeout=10):
        """
        :param access_id:  UCloud PublicKey, sent as the `PublicKey` parameter
        :param access_key: UCloud PrivateKey, used only to sign requests
        :param url:        API endpoint base URL
        :param timeout:    per-request timeout in seconds (new, default 10 —
                           previously requests could hang indefinitely)
        """
        self.url = url
        self.access_id = access_id
        self.access_key = access_key
        self.timeout = timeout

    def _api_get(self, action):
        """Issue a signed GET for *action* and return the parsed JSON body."""
        params = self.add_signature({
            'Action': action,
            'PublicKey': self.access_id
        })
        # timeout keeps a dead endpoint from blocking the caller forever
        req = requests.get(url=self.url, params=params, timeout=self.timeout)
        return req.json()

    def get_region_list(self):
        """Return the deduplicated list of region names (GetRegion)."""
        return list(set(r['Region'] for r in self._api_get('GetRegion')['Regions']))

    def get_project_list(self):
        """Return the deduplicated list of project ids (GetProjectList)."""
        return list(set(r['ProjectId'] for r in self._api_get('GetProjectList')['ProjectSet']))

    def get_project_info(self):
        """Return the raw project entries (dict items only) from GetProjectList."""
        return [i for i in self._api_get('GetProjectList')['ProjectSet'] if isinstance(i, dict)]

    def add_signature(self, params):
        """Append UCloud's signature to *params* and return it.

        Signature = sha1(concat(key+value for sorted keys) + private_key),
        hex-encoded, per UCloud's public API signing rules.
        """
        params_data = ''.join(str(k) + str(v) for k, v in sorted(params.items()))
        params_data += self.access_key
        params['Signature'] = hashlib.sha1(params_data.encode('utf8')).hexdigest()
        return params
74 |
--------------------------------------------------------------------------------
/websdk2/cloud_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """"
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2019年2月5日13:37:54
7 | Desc : 云厂商的一些方法
8 | """
9 | from .cloud.qcloud_api import QCloudApiOpt
10 | from .cloud.ucloud_api import UCloudApi
11 |
12 |
def cloud_factory(cloud, **kwargs):
    """Instantiate the API helper for the given cloud vendor.

    :param cloud:  vendor key: 'aliyun' | 'qcloud' | 'ucloud' | 'aws'
    :param kwargs: constructor arguments forwarded to the vendor client
                   (e.g. access_id/access_key for ucloud). New, optional —
                   existing single-argument callers are unaffected.
    :return: a client instance, or None for unknown/unimplemented vendors.
    """
    if cloud == 'qcloud':
        return QCloudApiOpt(**kwargs)

    if cloud == 'ucloud':
        # UCloudApi requires credentials; the previous version called
        # UCloudApi() with no arguments, which always raised TypeError.
        return UCloudApi(**kwargs)

    # 'aliyun' and 'aws' are not implemented yet.
    return None
27 |
--------------------------------------------------------------------------------
/websdk2/configs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018/9/5
7 | Desc : 配置文件
8 | """
9 |
10 | from .consts import const
11 | from .tools import singleton
12 |
13 |
@singleton
class Config(dict):
    """Process-wide configuration store (singleton dict).

    Pre-seeded with blank database/redis/mq sections; the application fills
    it exactly once via import_dict(). Missing attribute lookups resolve to
    the empty string instead of raising AttributeError.
    """

    def __init__(self):
        self.__can_import = True
        self.__init_default()
        dict.__init__(self)

    def __getattr__(self, item, default=""):
        # Attribute access falls back to "" for unknown keys.
        return self.get(item, "")

    @property
    def can_import(self):
        """True until import_dict() has been called once."""
        return self.__can_import

    def import_dict(self, **kwargs):
        """Load settings; a second call raises."""
        if not self.__can_import:
            raise Exception('ConfigImportError')
        self.update(kwargs)
        self.__can_import = False

    def __init_default(self):
        # Seed the well-known sections with placeholder values.
        db_stub = {
            const.DBHOST_KEY: '',
            const.DBPORT_KEY: 3306,
            const.DBUSER_KEY: '',
            const.DBPWD_KEY: '',
            const.DBNAME_KEY: '',
        }
        self['debug'] = False
        self['autoreload'] = True
        self[const.DB_CONFIG_ITEM] = {
            const.DEFAULT_DB_KEY: dict(db_stub),
            const.READONLY_DB_KEY: dict(db_stub),
        }
        self[const.REDIS_CONFIG_ITEM] = {
            const.DEFAULT_RD_KEY: {
                const.RD_HOST_KEY: '',
                const.RD_PORT_KEY: 6379,
                const.RD_DB_KEY: -1,
                const.RD_AUTH_KEY: True,
                const.RD_CHARSET_KEY: 'utf-8',
                const.RD_PASSWORD_KEY: '',
            }
        }
        self[const.MQ_CONFIG_ITEM] = {
            const.DEFAULT_MQ_KEY: {
                const.MQ_ADDR: '',
                const.MQ_PORT: 5672,
                const.MQ_VHOST: '/',
                const.MQ_USER: '',
                const.MQ_PWD: '',
            }
        }

    def has_item(self, item):
        """Membership check, kept for backward compatibility with callers."""
        return item in self

    def clear(self):
        """Reset to pristine defaults and re-allow import_dict()."""
        self.__can_import = True
        dict.clear(self)
        self.__init_default()

    @staticmethod
    def __get_key_dict(sub_set, key):
        # Return the existing sub-dict for *key*, installing {} when absent.
        return sub_set.setdefault(key, {})
95 |
96 |
configs = Config()  # shared singleton instance; import this, don't construct Config()
98 |
--------------------------------------------------------------------------------
/websdk2/consts.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : ming
5 | date : 2017/4/11 下午1:54
6 | role : 常量管理
7 | """
8 |
9 | from enum import IntEnum as Enum
10 |
11 |
class ConstError(TypeError):
    """Raised when a constant is rebound or its name is not upper-case."""
    pass
14 |
15 |
class IntEnum(Enum):
    """IntEnum base with a reverse lookup that returns None instead of raising."""

    @staticmethod
    def find_enum(cls, value):
        """Return the member of *cls* whose value equals *value*, else None."""
        return next((m for k, m in cls._value2member_map_.items() if k == value), None)
23 |
24 |
class ErrorCode(IntEnum):
    """Error code enum: HTTP status codes plus project-specific codes (1xxx)."""

    not_found = 404
    bad_request = 400
    unauthorized = 401
    forbidden = 403
    not_allowed = 405
    not_acceptable = 406
    conflict = 409
    gone = 410
    precondition_failed = 412
    request_entity_too_large = 413
    unsupport_media_type = 415
    internal_server_error = 500
    service_unavailable = 503
    service_not_implemented = 501
    handler_uncatched_exception = 504
    # 1xxx: configuration subsystem errors
    config_import_error = 1001
    config_item_notfound_error = 1002
45 |
46 |
47 | class _const(object):
48 | def __setattr__(self, name, value):
49 | if name in self.__dict__:
50 | raise ConstError("Can't rebind const (%s)" % name)
51 | if not name.isupper():
52 | raise ConstError("Const must be upper.")
53 | self.__dict__[name] = value
54 |
55 |
const = _const()  # the shared write-once constant namespace

# database config keys
const.DB_CONFIG_ITEM = 'databases'
const.DBHOST_KEY = 'host'
const.DBPWD_KEY = 'pwd'
const.DBUSER_KEY = 'user'
const.DBNAME_KEY = 'name'
const.DBPORT_KEY = 'port'
const.SF_DB_KEY = 'vmobel'
const.DEFAULT_DB_KEY = 'default'
const.READONLY_DB_KEY = 'readonly'

# redis config keys
const.REDIS_CONFIG_ITEM = 'redises'
const.RD_HOST_KEY = 'host'
const.RD_PORT_KEY = 'port'
const.RD_DB_KEY = 'db'
const.RD_AUTH_KEY = 'auth'
const.RD_CHARSET_KEY = 'charset'
const.RD_DECODE_RESPONSES = 'decode_responses'
const.RD_PASSWORD_KEY = 'password'
const.DEFAULT_RD_KEY = 'default'

# ETCD
const.DEFAULT_ETCD_KEY = "default"
const.BACKUP_ETCD_KEY = "backup"
const.DEFAULT_ETCD_HOST = "host"
const.DEFAULT_ETCD_PORT = "port"
const.DEFAULT_ETCD_PROTOCOL = "protocol"
const.DEFAULT_ETCD_USER = "user"
const.DEFAULT_ETCD_PWD = "pwd"

# MQ
const.MQ_CONFIG_ITEM = 'mqs'
const.MQ_ADDR = 'MQ_ADDR'
const.MQ_PORT = 'MQ_PORT'
const.MQ_VHOST = 'MQ_VHOST'
const.MQ_USER = 'MQ_USER'
const.MQ_PWD = 'MQ_PWD'
const.DEFAULT_MQ_KEY = 'default'
const.AGENT_MQ_KEY = 'agent'

# stdlib logging levels, for reference:
# CRITICAL = 50
# FATAL = CRITICAL
# ERROR = 40
# WARNING = 30
# WARN = WARNING
# INFO = 20
# DEBUG = 10
# NOTSET = 0
const.LOG_LEVEL = "log_level"
# JMS
const.JMS_CONFIG_ITEM = 'jmss'
const.DEFAULT_JMS_KEY = 'default'
const.JMS_API_BASE_URL = "jms_url"
const.JMS_API_KEY_ID = "jms_key_id"
const.JMS_API_KEY_SECRET = "jms_key_secret"

# consul
const.CONSUL_CONFIG_ITEM = 'consuls'
const.DEFAULT_CS_KEY = 'default'
const.CONSUL_HOST_KEY = 'cs_host'
const.CONSUL_PORT_KEY = 'cs_port'
const.CONSUL_TOKEN_KEY = 'cs_token'
const.CONSUL_SCHEME_KEY = 'cs_scheme'

# kafka
const.KAFKA_BOOTSTRAP_SERVERS = 'kafka_bootstrap_servers'
const.KAFKA_CLIENT_ID = 'kafka_client_id'
const.KAFKA_TOPIC = 'kafka_topic'

const.APP_NAME = 'app_name'
const.LOG_PATH = 'log_path'
const.LOG_BACKUP_COUNT = 'log_backup_count'
const.LOG_MAX_FILE_SIZE = 'log_max_filesize'

const.REQUEST_START_SIGNAL = 'request_start'
const.REQUEST_FINISHED_SIGNAL = 'request_finished'

# salt-api endpoints per site
const.NW_SALT = 'nw'
const.ALY_SALT = 'aly'
const.TX_SALT = 'tx'
const.SG_SALT = 'sg'
const.DEFAULT_SALT = 'default'
const.SALT_API = 'salt_api'
const.SALT_USER = 'salt_username'
const.SALT_PW = 'salt_password'
const.SALT_OUT = 'salt_timeout'

const.NW_INCEPTION = 'nw'
const.ALY_INCEPTION = 'aly'
const.TX_INCEPTION = 'tx'
const.DEFAULT_INCEPTION = 'default'

# Aliyun SMS service defaults
const.REGION = "cn-hangzhou"
const.PRODUCT_NAME = "Dysmsapi"
const.DOMAIN = "dysmsapi.aliyuncs.com"

# crypto
const.AES_CRYPTO_KEY = 'aes_crypto_key'
### app settings
const.APP_SETTINGS = 'APP_SETTINGS'
### all user info
const.USERS_INFO = 'USERS_INFO'

# API GW
const.CODO_API_GW = 'api_gw'
const.WEBSITE_API_GW_URL = 'api_gw'
const.CODO_OUTER_API_GW = 'outer_api_gw'
const.API_AUTH_KEY = 'settings_auth_key'
const.EMAILLOGIN_DOMAIN = 'EMAILLOGIN_DOMAIN'
const.EMAILLOGIN_SERVER = 'EMAILLOGIN_SERVER'

# email
const.EMAIL_SUBJECT_PREFIX = "EMAIL_SUBJECT_PREFIX"
const.EMAIL_HOST = "EMAIL_HOST"
const.EMAIL_PORT = "EMAIL_PORT"
const.EMAIL_HOST_USER = "EMAIL_HOST_USER"
const.EMAIL_HOST_PASSWORD = "EMAIL_HOST_PASSWORD"
const.EMAIL_USE_SSL = "EMAIL_USE_SSL"
const.EMAIL_USE_TLS = "EMAIL_USE_TLS"

# SMS settings
const.SMS_REGION = "SMS_REGION"
const.SMS_PRODUCT_NAME = "SMS_PRODUCT_NAME"
const.SMS_DOMAIN = "SMS_DOMAIN"

const.SMS_ACCESS_KEY_ID = 'SMS_ACCESS_KEY_ID'
const.SMS_ACCESS_KEY_SECRET = 'SMS_ACCESS_KEY_SECRET'

# DingTalk
const.DINGTALK_CLIENT_ID = "dingtalk_client_id"
const.DINGTALK_CLIENT_SECRET = "dingtalk_client_secret"
const.DINGTALK_AGENT_ID = "dingtalk_agent_id"
const.DINGTALK_AUTH = "dingtalk_auth"

# Feishu (Lark)
const.FEISHU_CLIENT_ID = "feishu_client_id"
const.FEISHU_CLIENT_SECRET = "feishu_client_secret"
const.FEISHU_AUTH = "feishu_auth"

# WeChat Work
const.WECHATWORK_CORP_ID = "wechatwork_corp_id"
const.WECHATWORK_AGENT_ID = "wechatwork_agent_id"
const.WECHATWORK_AUTH = "wechatwork_auth"

# object storage
const.STORAGE_REGION = "STORAGE_REGION"
const.STORAGE_NAME = "STORAGE_NAME"
const.STORAGE_PATH = "STORAGE_PATH"
const.STORAGE_KEY_ID = "STORAGE_KEY_ID"
const.STORAGE_KEY_SECRET = "STORAGE_KEY_SECRET"

### LDAP
const.LDAP_SERVER_HOST = "LDAP_SERVER_HOST"
const.LDAP_SERVER_PORT = "LDAP_SERVER_PORT"
const.LDAP_ADMIN_DN = "LDAP_ADMIN_DN"
const.LDAP_ADMIN_PASSWORD = "LDAP_ADMIN_PASSWORD"
const.LDAP_SEARCH_BASE = "LDAP_SEARCH_BASE"
const.LDAP_SEARCH_FILTER = "LDAP_SEARCH_FILTER"
const.LDAP_ATTRIBUTES = "LDAP_ATTRIBUTES"
const.LDAP_USE_SSL = "LDAP_USE_SSL"
const.LDAP_ENABLE = "LDAP_ENABLE"

# token expiry time
const.TOKEN_EXP_TIME = "TOKEN_EXP_TIME"

# two-factor authentication (MFA)
const.MFA_GLOBAL = 'MFA_GLOBAL'  # enabled globally
const.MFA_ADMIN_ONLY = 'MFA_ADMIN_ONLY'  # enabled for administrators only

# task event states
const.STATE_NEW = '0'  # newly created task
const.STATE_WAIT = '1'  # waiting to run
const.STATE_RUNNING = '2'  # running
const.STATE_SUCCESS = '3'  # finished successfully
const.STATE_ERROR = '4'  # error occurred
const.STATE_MANUAL = '5'  # waiting for manual action
const.STATE_BREAK = '6'  # aborted // manual or automatic
const.STATE_TIMING = '7'  # scheduled
const.STATE_UNKNOWN = '8'  # unknown state // debug
const.STATE_FAIL = '9'  # failed // debug
const.STATE_IGNORE = '10'  # skipped
const.STATE_QUEUE = '11'  # queued // shared by orders and task nodes
# order states
const.ORDER_STATE_WAITING = '31'  # order waiting
const.ORDER_STATE_RUNNING = '32'  # order running
const.ORDER_STATE_SUCCESS = '33'  # order succeeded
const.ORDER_STATE_FAIL = '34'  # order failed
const.ORDER_STATE_WAITING_APPROVAL = '35'  # order pending approval
const.ORDER_STATE_TERMINATED = '39'  # order terminated
const.ORDER_STATE_QUEUE = '11'  # order queued
const.EXEC_TIMEOUT = 1800

# node addresses
const.NODE_ADDRESS = 'NODE_ADDRESS'
const.EXEC_NODE_MAP_KEY = 'EXEC_NODE_MAP_KEY'
const.AGENT_USED_KEY = "agent_is_used_map_mark_key"

# otel (OpenTelemetry / Jaeger tracing)
const.JAEGER_EXPORTER_HOST = "jaeger_exporter_host"
const.JAEGER_EXPORTER_PORT = "jaeger_exporter_port"
const.OTEL_ENABLED = "otel_enabled"
258 |
--------------------------------------------------------------------------------
/websdk2/crud_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : shenshuo
5 | Date : 2025年02月08日
6 | Desc : 存储类
7 | """
8 | import json
9 | import time
10 | import logging
11 | from sqlalchemy import true
12 | from sqlalchemy.exc import IntegrityError
13 | from typing import List, Union, Optional
14 | from .db_context import DBContextV2 as DBContext
15 | from .utils.pydantic_utils import sqlalchemy_to_pydantic, ValidationError, PydanticDelList
16 | from .sqlalchemy_pagination import paginate
17 | from .model_utils import model_to_dict
18 |
19 |
def get_millisecond_timestamp() -> int:
    """Current Unix time as an integer number of milliseconds."""
    now_seconds = time.time()
    return int(now_seconds * 1000)
26 |
27 |
class ModelCRUDView:
    """Generic CRUD handlers for a single SQLAlchemy model.

    Builds pydantic validators from the model (one including ``id`` for
    updates, one excluding it for inserts) and returns uniform response
    dicts of the shape {code, msg, reason, timestamp, data}.
    """

    def __init__(self, model, **kwargs):
        # The SQLAlchemy model class plus generated pydantic validators.
        self.model = model
        self.pydantic_model_base = sqlalchemy_to_pydantic(model)  # includes id (for updates)
        self.pydantic_model = sqlalchemy_to_pydantic(model, exclude=['id'])  # no id (for inserts)

    def prepare(self):
        # Hook executed at the start of every handler; subclasses may override.
        pass

    @staticmethod
    def parse_id_list(id_list: Union[str, List[int]]) -> Optional[List[int]]:
        """
        Parse and validate the id_list parameter.

        Accepts a JSON-encoded string or a list; returns a list of ints,
        or None (with an error log) when the input cannot be parsed.
        """
        if isinstance(id_list, str):
            try:
                id_list = json.loads(id_list)
            except json.JSONDecodeError as e:
                logging.error(f"Failed to parse id_list: {e}, input: {id_list}")
                return None
        if isinstance(id_list, list):
            try:
                # Coerce every element to int
                id_list = [int(i) for i in id_list]
                return id_list
            except Exception as e:
                logging.error(f"Invalid id_list element: {e}, input: {id_list}")
                return None
        logging.error(f"Invalid id_list format: {id_list}")
        return None

    @staticmethod
    def del_data(data) -> dict:
        # Strip UI/bookkeeping fields that must never reach the model.
        for key in ['_index', '_rowKey', 'update_time', 'updated_at']:
            data.pop(key, None)
        return data

    def handle_get(self, data: dict) -> dict:
        """Fetch a single row by ``data['id']`` and return it as a dict."""
        self.prepare()
        data_id = data.get('id')
        if not data_id:
            return dict(code=1, msg="缺少必要的 'id' 参数", data=None, reason="", timestamp=get_millisecond_timestamp())

        try:
            with DBContext('r') as session:
                _info = session.query(self.model).filter(self.model.id == data_id).first()
                if not _info:
                    return dict(code=1, msg='数据未找到', data=None, reason="", timestamp=get_millisecond_timestamp())
                return dict(code=0,
                            msg='获取成功',
                            reason="",
                            timestamp=get_millisecond_timestamp(),
                            data={'item': model_to_dict(_info)}
                            )
        except Exception as e:
            logging.error(f"Database query failed: {e}")
            return dict(code=2, msg='查询失败', data=None, reason=str(e), timestamp=get_millisecond_timestamp())

    def handle_list(self, params: dict, get_by_val_func=None) -> dict:
        """Paginated list query.

        :param params: query params (searchValue/searchVal, id_list,
                       filter_map, pagination options) — mutated in place.
        :param get_by_val_func: callable mapping the search value to a
               SQLAlchemy filter condition; defaults to no filtering.
        """
        self.prepare()

        value = params.get('searchValue', params.get('searchVal'))
        id_list = params.get('id_list', [])
        filter_map = params.pop('filter_map', {})
        params.setdefault('page_size', 300)  # apply the default page size uniformly

        # Fall back to a no-op filter when none is supplied
        if get_by_val_func is None:
            def default_get_by_val(value: str):
                """Default: no filtering."""
                return true()

            get_by_val_func = default_get_by_val

        if not callable(get_by_val_func):
            raise ValueError("The `get_by_val_func` parameter must be a callable function.")

        try:
            # Build the filter condition from the search value
            filter_condition = get_by_val_func(value)
            # if not isinstance(filter_condition, (bool, type(true()))):
            #     raise ValueError("The `get_by_val_func` must return a SQLAlchemy filter condition or a boolean.")

        except Exception as e:
            raise ValueError(f"Error while executing `get_by_val_func`: {e}")

        try:
            with DBContext('r') as session:
                query = session.query(self.model).filter(filter_condition).filter_by(**filter_map)
                id_list = self.parse_id_list(id_list)
                if id_list:
                    query = query.filter(self.model.id.in_(id_list))

                page = paginate(query, **params)
        except Exception as e:
            return dict(code=2, msg='查询失败', data=None, reason=str(e), timestamp=get_millisecond_timestamp())

        return dict(
            code=0,
            msg='获取成功',
            reason="",
            timestamp=get_millisecond_timestamp(),
            data={
                'items': page.items,
                'total_pages': page.pages,  # total number of pages
                'count': page.total  # total number of rows
            }
        )

    def handle_add(self, data: dict) -> dict:
        """Validate *data* (id excluded) and insert a new row; returns its id."""
        self.prepare()
        data = self.del_data(data)
        try:
            self.pydantic_model(**data)
        except ValidationError as e:
            return dict(code=-1, msg='数据格式出错', reason=str(e), data=None, timestamp=get_millisecond_timestamp())

        data.pop("id", None)

        try:
            with DBContext('w', None, True) as db:
                __record = self.model(**data)
                db.add(__record)
                # flush so the autoincrement id is assigned before commit
                db.flush()
                new_id = __record.id
                return dict(code=0, msg="创建成功", data={"new_id": new_id}, reason="",
                            timestamp=get_millisecond_timestamp())
        except IntegrityError as e:
            return dict(code=-2, msg='不要重复添加', data=None, reason=str(e), timestamp=get_millisecond_timestamp())

        except Exception as e:
            return dict(code=-3, msg='创建失败', data=None, reason=str(e), timestamp=get_millisecond_timestamp())

    def handle_update(self, data: dict) -> dict:
        """Validate *data* (id required) and update the matching row.

        NOTE(review): the raw *data* dict, not the validated model, is
        passed to update() — confirm extra keys cannot slip through.
        """
        self.prepare()
        data = self.del_data(data)
        try:
            valid_data = self.pydantic_model_base(**data)
        except ValidationError as e:
            return dict(code=-1, msg="数据格式校验失败", reason=str(e), timestamp=get_millisecond_timestamp())

        try:
            with DBContext('w', None, True) as db:
                db.query(self.model).filter(self.model.id == valid_data.id).update(data)

        except IntegrityError as e:
            return dict(code=-2, msg=f'修改失败,已存在', reason=str(e), timestamp=get_millisecond_timestamp())

        except Exception as e:
            return dict(code=-3, msg=f'修改失败, {e}', reason=str(e), timestamp=get_millisecond_timestamp())

        return dict(code=0, msg="修改成功", reason='', timestamp=get_millisecond_timestamp())

    def handle_update_no_validation(self, data: dict) -> dict:
        """Update without pydantic validation (caller is trusted)."""
        self.prepare()
        data_id = data.get('id')
        with DBContext('w', None, True) as db:
            db.query(self.model).filter(self.model.id == data_id).update(data)
        return dict(code=0, msg='更新成功', reason='', timestamp=get_millisecond_timestamp())

    def handle_delete(self, data: dict) -> dict:
        """Bulk delete by validated id_list."""
        self.prepare()
        try:
            valid_data = PydanticDelList(**data)
        except ValidationError as e:
            return dict(code=-1, msg="数据格式校验失败", reason=str(e), timestamp=get_millisecond_timestamp())

        with DBContext('w', None, True) as session:
            session.query(self.model).filter(self.model.id.in_(valid_data.id_list)).delete(synchronize_session=False)
        return dict(code=0, msg=f"删除成功", reason='', timestamp=get_millisecond_timestamp())
199 |
--------------------------------------------------------------------------------
/websdk2/db_context.py:
--------------------------------------------------------------------------------
1 | # -*-coding:utf-8-*-
2 | """
3 | Author : shenshuo
4 | date : 2017年10月17日17:23:19
5 | role : 数据库连接
6 | """
7 |
8 | import pymysql
9 | from urllib.parse import quote_plus
10 | from sqlalchemy import create_engine
11 | from sqlalchemy.pool import NullPool
12 | from sqlalchemy.orm import sessionmaker
13 | from .consts import const
14 | from .configs import configs
15 |
engines = {}  # process-wide cache of SQLAlchemy engines, keyed by db config name
17 |
18 |
def init_engine(**settings):
    """Create one NullPool engine per configured database and cache it.

    Reads the databases section from *settings* when given, otherwise from
    the global configs; results are stored in the module-level `engines`.
    """
    databases = settings[const.DB_CONFIG_ITEM] if settings else configs[const.DB_CONFIG_ITEM]
    for dbkey, db_conf in databases.items():
        dsn = 'mysql+pymysql://{user}:{pwd}@{host}:{port}/{dbname}?charset=utf8mb4'.format(
            user=db_conf[const.DBUSER_KEY],
            pwd=quote_plus(db_conf[const.DBPWD_KEY]),
            host=db_conf[const.DBHOST_KEY],
            port=db_conf[const.DBPORT_KEY],
            dbname=db_conf[const.DBNAME_KEY],
        )
        engines[dbkey] = create_engine(dsn, logging_name=dbkey, poolclass=NullPool)
34 | engines[dbkey] = engine
35 |
36 |
def get_db_url(dbkey):
    """Build the SQLAlchemy MySQL URL for the configured database *dbkey*.

    :param dbkey: key into the databases section of the global configs.
    :return: mysql+pymysql connection URL string.

    Fix: the previous version passed ``poolclass=NullPool`` as an extra
    keyword to str.format(), where it was silently ignored — pool options
    do not belong in the URL, so the dead argument has been removed.
    """
    db_conf = configs[const.DB_CONFIG_ITEM][dbkey]
    return 'mysql+pymysql://{user}:{pwd}@{host}:{port}/{dbname}?charset=utf8mb4'.format(
        user=db_conf['user'],
        pwd=quote_plus(db_conf['pwd']),
        host=db_conf['host'],
        port=db_conf.get('port', 3306),
        dbname=db_conf['name'],
    )
49 |
50 |
class DBContext(object):
    """Context manager yielding a SQLAlchemy session for a configured engine.

    rw='w' selects the default (writable) database, rw='r' the readonly
    one; an explicit db_key overrides both. With need_commit=True the
    session is committed on clean exit and rolled back on exception.
    """

    def __init__(self, rw='r', db_key=None, need_commit=False, **settings):
        key = db_key
        if not key:
            if rw == 'w':
                key = const.DEFAULT_DB_KEY
            elif rw == 'r':
                key = const.READONLY_DB_KEY
        self.__db_key = key
        self.__engine = self.__get_db_engine(self.__db_key, **settings)
        self.need_commit = need_commit

    @staticmethod
    def __get_db_engine(db_key, **settings):
        # Lazily populate the shared engine cache on first use.
        if not engines:
            init_engine(**settings)
        return engines[db_key]

    @property
    def session(self):
        """The live session (valid only inside the `with` block)."""
        return self.__session

    def __enter__(self):
        self.__session = sessionmaker(bind=self.__engine)()
        return self.__session

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit/rollback only when requested; always close afterwards.
        if self.need_commit:
            self.__session.rollback() if exc_type else self.__session.commit()
        self.__session.close()

    def get_session(self):
        """Alias for the session property."""
        return self.__session
91 |
92 |
def init_engine_v2(**settings):
    """Like init_engine, but with a real connection pool.

    Engines get pool_pre_ping/pool_recycle so stale MySQL connections are
    detected and refreshed; results are cached in the module-level `engines`.
    """
    databases = settings[const.DB_CONFIG_ITEM] if settings else configs[const.DB_CONFIG_ITEM]
    for dbkey, db_conf in databases.items():
        dsn = 'mysql+pymysql://{user}:{pwd}@{host}:{port}/{dbname}?charset=utf8mb4'.format(
            user=db_conf[const.DBUSER_KEY],
            pwd=quote_plus(db_conf[const.DBPWD_KEY]),
            host=db_conf[const.DBHOST_KEY],
            port=db_conf[const.DBPORT_KEY],
            dbname=db_conf[const.DBNAME_KEY],
        )
        engines[dbkey] = create_engine(dsn, logging_name=dbkey, poolclass=None, pool_size=10,
                                       max_overflow=50, pool_recycle=3600, pool_pre_ping=True,
                                       pool_timeout=60)
109 |
110 |
class DBContextV2(object):
    """Session context manager backed by the pooled engines (init_engine_v2).

    Same contract as DBContext, but lazily-created engines use a real
    connection pool with pre-ping/recycle instead of NullPool.
    """

    def __init__(self, rw='r', db_key=None, need_commit=False, **settings):
        # An explicit db_key wins; otherwise rw selects default (w) / readonly (r).
        self.__db_key = db_key
        if not self.__db_key:
            if rw == 'w':
                self.__db_key = const.DEFAULT_DB_KEY
            elif rw == 'r':
                self.__db_key = const.READONLY_DB_KEY
        engine = self.__get_db_engine(self.__db_key, **settings)
        self.__engine = engine
        self.need_commit = need_commit

    # @property
    # def db_key(self):
    #     return self.__db_key

    @staticmethod
    def __get_db_engine(db_key, **settings):
        # Initialise the shared engine cache on first use only.
        # NOTE(review): if init_engine() already populated `engines`, those
        # NullPool engines are reused here — confirm this mixing is intended.
        if len(engines) == 0:
            init_engine_v2(**settings)
        return engines[db_key]

    @property
    def session(self):
        # The live session; valid only inside the `with` block.
        return self.__session

    def __enter__(self):
        self.__session = sessionmaker(bind=self.__engine)()
        return self.__session

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Commit on clean exit / roll back on exception when need_commit is
        # set; the session is always closed.
        if self.need_commit:
            if exc_type:
                self.__session.rollback()
            else:
                self.__session.commit()
        self.__session.close()

    def get_session(self):
        # Alias for the session property.
        return self.__session
151 |
--------------------------------------------------------------------------------
/websdk2/error.py:
--------------------------------------------------------------------------------
1 | from enum import IntEnum
2 | from .consts import ErrorCode
class BaseError(Exception):
    """Root of the error hierarchy; wraps an IntEnum error code."""

    def __init__(self, errorcode, *args, **kwargs):
        """
        :param errorcode: an IntEnum error code member
        :raises TypeError: when *errorcode* is not an IntEnum member
        """
        # Guard clause: reject anything that is not an IntEnum member.
        if not isinstance(errorcode, IntEnum):
            raise TypeError(
                'Error code must be vmbsdk.constants.enums.ErrorCode type.')
        self._errorcode = errorcode
        self.kwargs = kwargs
        super(BaseError, self).__init__(*args)

    @property
    def errorcode(self):
        """The wrapped error code enum member."""
        return self._errorcode
class BizError(BaseError):
    """Business-level error carrying an optional numeric subcode (args[1])."""

    def __init__(self, errorcode, *args, **kwargs):
        # args[1], when present, is the sub error code; 0 otherwise.
        raw_subcode = args[1] if len(args) > 1 else 0
        self.__subcode = int(raw_subcode)
        super(BizError, self).__init__(errorcode, *args, **kwargs)

    @property
    def subcode(self):
        """The sub error code (0 when not supplied)."""
        return self.__subcode
38 |
class BadRequestError(BizError):
    """Business error pre-bound to ErrorCode.bad_request (HTTP 400)."""

    def __init__(self, *args, **kwargs):
        super(BadRequestError, self).__init__(ErrorCode.bad_request, *args, **kwargs)
45 |
46 |
class ConfigError(Exception):
    """Raised for problems with a specific configuration key."""

    def __init__(self, config_key, *args, **kwargs):
        # Remember which configuration key triggered the error.
        self.config_key = config_key
        super().__init__(*args, **kwargs)
--------------------------------------------------------------------------------
/websdk2/fetch_coroutine.py:
--------------------------------------------------------------------------------
1 | import json
2 | from tornado.httpclient import HTTPRequest, AsyncHTTPClient
3 | from tornado.gen import coroutine
4 |
5 |
@coroutine
def fetch_coroutine(url, method='GET', body=None, **kwargs):
    """Fetch *url* asynchronously and return the JSON-decoded response body.

    :param url:    target URL
    :param method: HTTP method; *body* is only meaningful for POST/PUT
    :param kwargs: forwarded to AsyncHTTPClient
    :return: parsed JSON object
    :raises tornado.httpclient.HTTPError: on non-2xx responses

    NOTE(review): tornado.gen.coroutine is the legacy generator style;
    native async/await is the modern equivalent — kept for compatibility.
    """
    # Hard-coded 5s connect / 10s total request timeouts.
    request = HTTPRequest(url, method=method, body=body, connect_timeout=5, request_timeout=10)
    http_client = AsyncHTTPClient(**kwargs)
    response = yield http_client.fetch(request)
    body = json.loads(response.body)
    return body
--------------------------------------------------------------------------------
/websdk2/jwt_token.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding:utf-8 -*-
3 | import jwt, datetime, hashlib
4 | from .configs import configs as my_configs
5 |
6 |
class AuthToken:
    """Issue and validate JWT auth tokens (HS256) for this platform.

    NOTE(review): the encode_* methods catch every exception and RETURN the
    exception (or its string) instead of raising — callers must check the
    result type before treating it as a token.
    """

    def __init__(self):
        # Secret comes from configs when present, else a hard-coded default.
        self.token_secret = my_configs.get('token_secret', '3AIiOq18i~H=WWTIGq4ODQyMzcsIdfghs')

    def encode_auth_token(self, **kwargs):
        """
        Generate an auth token.
        :param user_id: string
        :param username: string
        :param nickname: string
        :return: string (or the caught exception object on failure)
        """
        try:
            exp_time = kwargs.get('exp_time', 1)  # expiry in days
            payload = {
                'exp': datetime.datetime.utcnow() + datetime.timedelta(days=int(exp_time), seconds=10),
                'nbf': datetime.datetime.utcnow() - datetime.timedelta(seconds=10),
                'iat': datetime.datetime.utcnow(),
                'iss': 'auth: ss',
                'sub': 'my token',
                'id': '15618718060',
                'data': {
                    'user_id': kwargs.get('user_id', ''),
                    'username': kwargs.get('username', ''),
                    'nickname': kwargs.get('nickname', ''),
                    'email': kwargs.get('email', ''),
                    'is_superuser': kwargs.get('is_superuser', False)
                }
            }
            return jwt.encode(
                payload,
                self.token_secret,
                algorithm='HS256'
            )

        except Exception as e:
            # NOTE(review): returns the exception object, not a token.
            return e

    def encode_auth_token_v2(self, **kwargs):
        # supports hour-granularity expiry in addition to whole days
        """
        Generate an auth token.
        :param user_id: string
        :param username: string
        :param nickname: string
        :param exp_days: int, expiry in days (default 1)
        :param exp_hours: int, expiry in hours (only honoured when exp_days
                          keeps its default of 1)
        :return: string (or the caught exception object on failure)
        """
        try:
            exp_days = kwargs.get('exp_days', 1)
            exp_hours = kwargs.get('exp_hours')
            if exp_hours and isinstance(exp_hours, int) and exp_days == 1:
                exp_time = datetime.datetime.utcnow() + datetime.timedelta(hours=int(exp_hours), seconds=30)
            else:
                exp_time = datetime.datetime.utcnow() + datetime.timedelta(days=int(exp_days), seconds=30)

            payload = {
                'exp': exp_time,
                'nbf': datetime.datetime.utcnow() - datetime.timedelta(seconds=10),
                'iat': datetime.datetime.utcnow(),
                'iss': 'auth: ss',
                'sub': 'my token',
                'id': '15618718060',
                'data': {
                    'user_id': kwargs.get('user_id', ''),
                    'username': kwargs.get('username', ''),
                    'nickname': kwargs.get('nickname', ''),
                    'email': kwargs.get('email', ''),
                    'is_superuser': kwargs.get('is_superuser', False)
                }
            }
            return jwt.encode(
                payload,
                self.token_secret,
                algorithm='HS256'
            )

        except Exception as e:
            # NOTE(review): returns the exception object, not a token.
            return e

    def encode_mfa_token(self, **kwargs):
        """Generate a short-lived token used during MFA verification.

        Payload carries only user_id and email; returns str(exception) on
        failure instead of raising.
        """
        try:
            exp_days = kwargs.get('exp_days', 1)
            exp_hours = kwargs.get('exp_hours')

            current_time = datetime.datetime.utcnow()
            # exp_hours (when given) takes precedence over exp_days.
            exp_time = (datetime.datetime.utcnow() + datetime.timedelta(hours=exp_hours) if exp_hours
                        else datetime.datetime.utcnow() + datetime.timedelta(days=exp_days)) + datetime.timedelta(
                seconds=30)

            payload = {
                'exp': exp_time,
                'nbf': current_time - datetime.timedelta(seconds=10),
                'iat': current_time,
                'data': {
                    'user_id': kwargs.get('user_id', ''),
                    'email': kwargs.get('email', '')
                }
            }

            return jwt.encode(payload, self.token_secret, algorithm='HS256')

        except Exception as e:
            return str(e)

    def decode_auth_token(self, auth_token):
        """
        Validate a token and return its 'data' payload.
        :param auth_token: encoded JWT string
        :return: dict — the token's data section on success, or
                 {status: -1|-2, msg: ...} on expiry / invalid token
        """
        try:
            payload = jwt.decode(auth_token, self.token_secret, algorithms=['HS256'],
                                 leeway=datetime.timedelta(seconds=10))
            # A well-formed token must carry data.user_id.
            if 'data' in payload and 'user_id' in payload['data']:
                return payload['data']
            else:
                raise jwt.InvalidTokenError
        except jwt.ExpiredSignatureError:
            return dict(status=-1, msg='Token过期')
        except jwt.InvalidTokenError:
            return dict(status=-2, msg='无效Token')
130 |
131 |
def gen_md5(pd):
    """Hex MD5 digest of *pd* (UTF-8 encoded). NOTE: MD5 is not suitable
    for password storage; kept for legacy compatibility."""
    return hashlib.md5(pd.encode("utf-8")).hexdigest()
136 |
137 |
def generate_otp_secret():
    """Return a random Base32 secret (padding stripped), e.g. for TOTP."""
    import base64
    import os
    random_bytes = os.urandom(32)  # 256 bits of entropy
    return base64.b32encode(random_bytes).decode('utf-8').rstrip('=')
142 |
--------------------------------------------------------------------------------
/websdk2/ldap.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2023/3/17
7 | Desc : 对接LDAP登录认证
8 | """
9 |
10 | import logging
11 | import json
12 | from typing import Dict, Optional, Tuple, Union
13 | from ldap3 import Server, Connection, SUBTREE, ALL_ATTRIBUTES
14 |
15 |
class LdapApi:
    """Authenticate users against LDAP / Active Directory.

    A privileged (admin) bind is used to locate the user's entry; the user's
    own DN plus the supplied password is then bound to verify the credentials.
    """

    def __init__(self, ldap_server_host, ldap_admin_dn, ldap_admin_password, ldap_server_port=389, ldap_use_ssl=False):
        self._ldap_admin_dn = ldap_admin_dn
        self._ldap_admin_password = ldap_admin_password
        # ldap_server_pool = ServerPool(["172.16.0.102",'172.16.0.103'])
        # BUG FIX: the normalized flag is now actually passed to Server();
        # previously the raw ldap_use_ssl value was passed and the computed
        # `use_ssl` variable was ignored.
        use_ssl = True if ldap_use_ssl in ['y', 'yes', True] else False
        self.ldap_server = Server(ldap_server_host, port=ldap_server_port, use_ssl=use_ssl)

    def ldap_server_test(self):
        """Return True when an admin bind against the server succeeds."""
        try:
            conn = Connection(self.ldap_server, user=self._ldap_admin_dn, password=self._ldap_admin_password,
                              check_names=True, lazy=False, raise_exceptions=False)
            conn.open()
            conn.bind()
            return True
        except Exception as e:
            print("auth fail {}".format(e))
            return False

    @staticmethod
    def _extract_email(attr_dict):
        """Pick the first usable address from 'email' or 'mail' (list or scalar)."""
        try:
            for key in ('email', 'mail'):
                value = attr_dict.get(key)
                if not value:
                    continue
                return value[0] if isinstance(value, list) else value
            return None
        except Exception as err:
            print(f"email fail, {err}")
            return None

    def ldap_auth_v2(self, username, password, search_base, search_filter='cn'):
        """Authenticate *username*/*password*.

        :param search_base: base DN to search under.
        :param search_filter: attribute matched against *username* (e.g. 'cn').
        :return: (ok, display_name, email); (False, None, None) on any failure.
        """
        if not self.ldap_server_test():
            return False, None, None

        conn = Connection(self.ldap_server, user=self._ldap_admin_dn, password=self._ldap_admin_password,
                          check_names=True, lazy=False, raise_exceptions=False)
        conn.open()
        conn.bind()
        res = conn.search(search_base=search_base, search_filter=f'({search_filter}={username})',
                          search_scope=SUBTREE, attributes=[search_filter, 'cn', 'sAMAccountName'], paged_size=5)
        if not res:
            return False, None, None
        entry = conn.response[0]
        dn = entry['dn']
        attr_dict = entry['attributes']

        # Verify the password by re-binding as the user's own DN.
        try:
            conn2 = Connection(self.ldap_server, user=dn, password=password, check_names=True, lazy=False,
                               raise_exceptions=False)
            conn2.bind()
            if conn2.result["description"] != "success":
                print("auth fail")
                return False, None, None
            email = self._extract_email(attr_dict)
            return True, attr_dict[search_filter], email
        except Exception as e:
            print("auth fail {}".format(e))
            return False, None, None

    def ldap_auth_v3(self, username, password, search_base, conf_attr_dict, search_filter='cn'):
        """Authenticate and project entry attributes through *conf_attr_dict*.

        :param conf_attr_dict: {output_key: ldap_attribute} mapping — a dict or
                               its JSON string.
        :param search_filter: attribute matched against *username*; should
                              correspond to the login account.
        :return: (ok, mapped_attrs); (False, None) on any failure.
        """
        if not self.ldap_server_test():
            return False, None

        conn = Connection(self.ldap_server, user=self._ldap_admin_dn, password=self._ldap_admin_password,
                          check_names=True, lazy=False, raise_exceptions=False)
        conn.open()
        conn.bind()
        try:
            if isinstance(conf_attr_dict, str):
                conf_attr_dict = json.loads(conf_attr_dict)
            attr_list = list(conf_attr_dict.values())
        except Exception:
            # Unusable mapping — fall back to a minimal attribute set.
            attr_list = ['cn', 'sAMAccountName']
        res = conn.search(search_base=search_base, search_filter=f'({search_filter}={username})',
                          search_scope=SUBTREE, attributes=attr_list, paged_size=1000)

        if not res:
            return False, None
        entry = conn.response[0]
        dn = entry['dn']
        attr_dict = entry['attributes']

        # Verify the password by re-binding as the user's own DN.
        try:
            conn2 = Connection(self.ldap_server, user=dn, password=password, check_names=True, lazy=False,
                               raise_exceptions=False)
            conn2.bind()
            if conn2.result["description"] == "success":
                return True, {k: attr_dict.get(v) for k, v in conf_attr_dict.items()}
            print("auth fail 2")
            return False, None
        except Exception as e:
            print(f"auth fail 3 {e}")
            return False, None
114 |
115 |
class LdapApiV4:
    """LDAP authentication client (v4): admin bind to search, user bind to verify."""

    def __init__(self, ldap_server_host: str, ldap_admin_dn: str, ldap_admin_password: str,
                 ldap_server_port: int = 389, ldap_use_ssl: Union[bool, str] = False):
        self._ldap_admin_dn = ldap_admin_dn
        self._ldap_admin_password = ldap_admin_password
        # ldap_use_ssl accepts bools or strings; 'y'/'yes'/'true' (any case) enable SSL.
        self.ldap_server = Server(ldap_server_host, port=ldap_server_port,
                                  use_ssl=str(ldap_use_ssl).lower() in {'y', 'yes', 'true'})

    def test_server_connection(self) -> bool:
        """Return True when an admin bind against the server succeeds."""
        try:
            with Connection(self.ldap_server, self._ldap_admin_dn, self._ldap_admin_password, auto_bind=True) as conn:
                return conn.bound
        except Exception as e:
            logging.error(f"Server connection failed: {e}")
            return False

    def ldap_auth(self, username: str, password: str, search_base: str,
                  attribute_map: Union[Dict[str, str], str, None] = None,
                  search_filter: str = 'cn') -> Tuple[bool, Optional[Dict]]:
        """Authenticate *username*/*password* under *search_base*.

        attribute_map: optional {output_key: ldap_attr} dict (or its JSON string)
        used to project the entry's attributes; when absent or unparsable, all
        attributes are returned unchanged.
        Returns (ok, attrs) — (False, None) on any failure.
        """
        if not self.test_server_connection():
            return False, None
        try:
            with Connection(self.ldap_server, self._ldap_admin_dn, self._ldap_admin_password) as conn:
                if not conn.search(search_base, f'({search_filter}={username})', SUBTREE, attributes=ALL_ATTRIBUTES):
                    return False, None
                user_attrs, user_dn = conn.entries[0].entry_attributes_as_dict, conn.entries[0].entry_dn

                # Verify the password by binding as the user's own DN.
                try:
                    with Connection(self.ldap_server, user=user_dn, password=password, auto_bind=True) as uc:
                        if not uc.bound:
                            return False, None
                except Exception as e:
                    logging.error(f"User auth failed: {e}")
                    return False, None

                final_attrs = user_attrs
                if attribute_map:
                    if isinstance(attribute_map, str):
                        try:
                            attribute_map = json.loads(attribute_map)
                        except json.JSONDecodeError:
                            # Unparsable JSON mapping: fall back to all attributes.
                            attribute_map = None
                    if isinstance(attribute_map, dict):
                        # Multi-valued attributes are collapsed to their first element.
                        final_attrs = {k: (v[0] if isinstance(v := user_attrs.get(mv), list) else v)
                                       for k, mv in attribute_map.items()}
                return True, final_attrs
        except Exception as e:
            logging.error(f"Auth error: {e}")
            return False, None
165 |
166 |
# Manual smoke test — run this module directly against a reachable LDAP server.
if __name__ == "__main__":
    # NOTE(review): credentials below look like placeholder/demo values; do not ship real ones.
    obj = LdapApiV4('172.16.0.102', 'cn=Manager,DC=sz,DC=com', '070068')
    print(obj.test_server_connection())
    print('____________')
    # NOTE(review): the 4th positional arg of ldap_auth is attribute_map, not
    # search_filter — passing 'cn' here is ignored as a mapping; confirm intent.
    print(obj.ldap_auth("yanghongfei", "123456", 'ou=opendevops,dc=sz,dc=com', 'cn'))
172 |
--------------------------------------------------------------------------------
/websdk2/logger.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 |
4 | import logging
5 | import sys
6 | import datetime
7 | import tornado.log
8 |
9 | #
10 | # options.log_file_prefix = os.path.join(os.path.dirname(os.path.dirname(__file__)), f'/tmp/codo.log')
11 |
12 |
13 | # class LogFormatter(tornado.log.LogFormatter):
14 | # default_msec_format = '%s.%03d'
15 | #
16 | # def __init__(self):
17 | # super(LogFormatter, self).__init__(
18 | # fmt='%(color)s%(asctime)s | %(levelname)s%(end_color)s | %(filename)s:%(funcName)s:%(lineno)s - %(message)s',
19 | # datefmt='%Y-%m-%d %H:%M:%S.%f'
20 | # )
21 | #
22 | # def formatTime(self, record, datefmt=None):
23 | # ct = datetime.datetime.now()
24 | # t = ct.strftime(self.default_time_format)
25 | # s = self.default_msec_format % (t, record.msecs)
26 | # return s
27 | #
28 | #
29 | # def init_logging():
30 | # # write file
31 | # [
32 | # i.setFormatter(LogFormatter())
33 | # for i in logging.getLogger().handlers
34 | # ]
35 | # logging.getLogger().setLevel(logging.INFO)
36 | # # write stdout
37 | # stdout_handler = logging.StreamHandler(sys.stdout)
38 | # stdout_handler.setFormatter(LogFormatter())
39 | # logging.getLogger().addHandler(stdout_handler)
40 | # logging.info('[APP Logging Init] logging has been started')
41 |
42 |
class LogFormatter(tornado.log.LogFormatter):
    """
    Colored formatter adding file:function:line context and millisecond timestamps.
    """
    default_msec_format = '%s.%03d'

    def __init__(self):
        super().__init__(
            fmt='%(color)s%(asctime)s | %(levelname)s%(end_color)s | '
                '%(filename)s:%(funcName)s:%(lineno)d - %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S.%f'
        )

    def formatTime(self, record, datefmt=None):
        """Render the record time as default_time_format plus milliseconds."""
        stamp = datetime.datetime.fromtimestamp(record.created)
        base = stamp.strftime(self.default_time_format)
        return self.default_msec_format % (base, record.msecs)
64 |
65 |
def init_logging(log_level=logging.INFO):
    """
    Initialize the logging system with the custom formatter and a stdout handler.

    :param log_level: Logging level, e.g., logging.DEBUG, logging.INFO, etc.
    """
    root = logging.getLogger()

    # Apply the custom formatter to existing handlers (e.g. tornado's file handler).
    for handler in root.handlers:
        handler.setFormatter(LogFormatter())

    root.setLevel(log_level)

    # FIX: only add the stdout handler once — the original appended a new
    # StreamHandler on every call, duplicating every log line after re-init.
    has_stdout = any(isinstance(h, logging.StreamHandler) and getattr(h, 'stream', None) is sys.stdout
                     for h in root.handlers)
    if not has_stdout:
        stdout_handler = logging.StreamHandler(sys.stdout)
        stdout_handler.setFormatter(LogFormatter())
        root.addHandler(stdout_handler)

    logging.info(f'[APP Logging Init] Logging initialized with level: {logging.getLevelName(log_level)}')
85 |
86 |
# Example usage
if __name__ == "__main__":
    # Running this module directly does nothing; the commented lines below show
    # how a consumer would initialize and exercise the logger.
    # log_level = logging.DEBUG  # You can adjust this to logging.INFO, logging.WARNING, etc.
    # init_logging(log_level)
    #
    # # Test logging at different levels
    # logging.debug("This is a debug message")
    # logging.info("This is an info message")
    # logging.warning("This is a warning message")
    # logging.error("This is an error message")
    # logging.critical("This is a critical message")
    pass
100 |
--------------------------------------------------------------------------------
/websdk2/model_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : shenshuo
5 | Date : 2025年02月08日
6 | Desc : models类
7 | """
8 |
9 | from typing import Type, Union
10 | from datetime import datetime
11 | from sqlalchemy.orm import class_mapper
12 | from sqlalchemy.ext.declarative import DeclarativeMeta
13 | from sqlalchemy import text
14 | from sqlalchemy.exc import IntegrityError
15 | from .utils import get_contain_dict
16 | from .db_context import DBContextV2 as DBContext
17 | from .utils.pydantic_utils import sqlalchemy_to_pydantic, ValidationError, PydanticDelList
18 |
19 |
def model_to_dict(model):
    """Serialize a SQLAlchemy model instance into a plain dict.

    datetime values are stringified; when the instance exposes a dict-valued
    'custom_extend_column_dict' attribute, it is merged into the result.
    """
    result = {}
    for key, column in class_mapper(model.__class__).c.items():
        value = getattr(model, key, None)
        result[column.name] = str(value) if isinstance(value, datetime) else value

    extra = getattr(model, "custom_extend_column_dict", None)
    if isinstance(extra, dict):
        result.update(**extra)
    return result
31 |
32 |
def queryset_to_list(queryset, **kwargs) -> list:
    """Convert an iterable of model instances into a list of dicts.

    When filter kwargs are given, only rows whose dict form contains those
    key/value pairs (per get_contain_dict) are kept.
    """
    rows = [model_to_dict(q) for q in queryset]
    if kwargs:
        return [row for row in rows if get_contain_dict(kwargs, row)]
    return rows
36 |
37 |
def GetInsertOrUpdateObj(cls: Type, str_filter: str, **kw):
    """
    Fetch the first row of *cls* matching *str_filter*, or build a new instance,
    then apply **kw onto it (only attributes the model actually has).

    cls: model class
    str_filter: raw SQL filter, e.g. "name='name-14'" — must identify a unique
                row; supports and/or
    **kw: attribute/value pairs used to create or update the record
    Example:
        session.add(GetInsertOrUpdateObj(TableTest, "name='name-114'", age=33114, height=123.14, name='name-114'))

    Note: the original annotated the return as ``classmethod``, which was wrong —
    a model instance is returned. The identical setattr loop that was duplicated
    in both branches is now shared.
    """
    with DBContext('r') as session:
        res = session.query(cls).filter(text(str_filter)).first()
        if res is None:
            res = cls()
        for k, v in kw.items():
            if hasattr(res, k):
                setattr(res, k, v)
        return res
60 |
61 |
def insert_or_update(cls: Type[DeclarativeMeta], str_filter: str, **kwargs) -> Union[None, DeclarativeMeta]:
    """
    Insert a new record or update an existing one.

    Args:
        cls (Type[DeclarativeMeta]): Model class.
        str_filter (str): Filter parameters, e.g. "name='name-14'". Must be
            unique. Supports 'and' and 'or'.
        **kwargs: Attribute/value pairs used to construct a new instance or
            modify an existing record.

    Returns:
        The new (unpersisted) instance, or the updated existing instance.
    """
    with DBContext('r') as session:
        record = session.query(cls).filter(text(str_filter)).first()
        if record is None:
            # Not found: construct a fresh instance from kwargs...
            record = cls(**kwargs)
        # ...then (re)apply every attribute the model knows about, exactly as
        # the update path does.
        for k, v in kwargs.items():
            if hasattr(record, k):
                setattr(record, k, v)
        return record
89 |
90 |
class CommonOptView:
    """Generic create/update/delete operations for a SQLAlchemy model.

    Payloads are validated with pydantic schemas generated from the model;
    every handler returns a {'code': ..., 'msg': ...} dict.
    """

    def __init__(self, model, **kwargs):
        self.model = model
        # Full schema (with id) for updates; id-less schema for inserts.
        self.pydantic_model_base = sqlalchemy_to_pydantic(model)
        self.pydantic_model = sqlalchemy_to_pydantic(model, exclude=['id'])

    def prepare(self):
        """Subclass hook invoked before each handler."""
        pass

    @staticmethod
    def del_data(data) -> dict:
        """Drop front-end bookkeeping keys that are not model columns."""
        for key in ['_index', '_rowKey', 'update_time']:
            data.pop(key, None)
        return data

    def handle_add(self, data: dict) -> dict:
        """Validate *data* and insert one new row."""
        self.prepare()
        data = self.del_data(data)
        try:
            self.pydantic_model(**data)
        except ValidationError as e:
            return {'code': -1, 'msg': str(e)}

        try:
            with DBContext('w', None, True) as db:
                db.add(self.model(**data))
        except IntegrityError:
            return {'code': -2, 'msg': '不要重复添加'}
        except Exception as e:
            return {'code': -3, 'msg': f'{e}'}

        return {'code': 0, 'msg': "创建成功"}

    def handle_update(self, data: dict) -> dict:
        """Validate *data* (must carry id) and update the matching row."""
        self.prepare()
        data = self.del_data(data)
        try:
            valid_data = self.pydantic_model_base(**data)
        except ValidationError as e:
            return {'code': -1, 'msg': str(e)}

        try:
            with DBContext('w', None, True) as db:
                db.query(self.model).filter(self.model.id == valid_data.id).update(data)
        except IntegrityError:
            return {'code': -2, 'msg': '修改失败,已存在'}
        except Exception as err:
            return {'code': -3, 'msg': f'修改失败, {err}'}

        return {'code': 0, 'msg': "修改成功"}

    def handle_delete(self, data: dict) -> dict:
        """Delete every row whose id appears in data's id_list."""
        self.prepare()
        try:
            valid_data = PydanticDelList(**data)
        except ValidationError as e:
            return {'code': -1, 'msg': str(e)}

        with DBContext('w', None, True) as session:
            session.query(self.model).filter(self.model.id.in_(valid_data.id_list)).delete(synchronize_session=False)
        return {'code': 0, 'msg': "删除成功"}
155 |
--------------------------------------------------------------------------------
/websdk2/mqhelper.py:
--------------------------------------------------------------------------------
1 | # !/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : ming
5 | date : 2017/3/3 下午9:31
6 | role : rabbitMQ 操作类
7 | """
8 |
9 | import logging
10 | import traceback
11 | import pika
12 | from .consts import const
13 | from .configs import configs
14 | from .error import ConfigError
15 |
16 | logger = logging.getLogger('pika')
17 | logger.setLevel(logging.WARNING)
18 |
19 |
class MessageQueueBase(object):
    """Base helper wrapping RabbitMQ (pika) declare/bind/consume/publish plumbing.

    Subclasses override ``on_message(body)``. Connection settings are read from
    configs[const.MQ_CONFIG_ITEM][mq_key] and validated up front in __init__.
    """

    def __init__(self, exchange, exchange_type, routing_key='', routing_keys=None, queue_name='', no_ack=False,
                 mq_key=const.DEFAULT_MQ_KEY):
        # Fail fast with ConfigError when any required MQ setting is missing.
        mq_config = configs[const.MQ_CONFIG_ITEM][mq_key]
        if const.MQ_ADDR not in mq_config:
            raise ConfigError(const.MQ_ADDR)
        if const.MQ_PORT not in mq_config:
            raise ConfigError(const.MQ_PORT)
        if const.MQ_VHOST not in mq_config:
            raise ConfigError(const.MQ_VHOST)
        if const.MQ_USER not in mq_config:
            raise ConfigError(const.MQ_USER)
        if const.MQ_PWD not in mq_config:
            raise ConfigError(const.MQ_PWD)
        self.addr = mq_config[const.MQ_ADDR]
        self.port = int(mq_config[const.MQ_PORT])
        self.vhost = mq_config[const.MQ_VHOST]
        self.user = mq_config[const.MQ_USER]
        self.pwd = mq_config[const.MQ_PWD]
        self.__exchange = exchange
        self.__exchange_type = exchange_type
        self.__routing_key = routing_key
        self.__routing_keys = routing_keys  # optional list; overrides routing_key for binds
        self.__queue_name = queue_name  # '' -> server-generated exclusive queue
        self.__no_ack = no_ack  # True -> consume without manual ack/nack
        self.__channel = None
        self.__connection = None

    def start_consuming(self, exchange_durable=False):
        """Declare exchange and queue, bind them, then block consuming (prefetch=1)."""
        channel = self.create_channel()

        channel.exchange_declare(exchange=self.__exchange, exchange_type=self.__exchange_type, durable=exchange_durable)
        if self.__queue_name:
            result = channel.queue_declare(queue=self.__queue_name, durable=True)
        else:
            # Anonymous exclusive queue, removed when the connection closes.
            result = channel.queue_declare('', exclusive=True)
        if self.__routing_keys and isinstance(self.__routing_keys, list):
            # Bind the same queue under every routing key in the list.
            for binding_key in self.__routing_keys:
                channel.queue_bind(exchange=self.__exchange, queue=result.method.queue, routing_key=binding_key)
        else:
            channel.queue_bind(exchange=self.__exchange, queue=result.method.queue, routing_key=self.__routing_key)

        channel.basic_qos(prefetch_count=1)
        channel.basic_consume(result.method.queue, self.call_back, self.__no_ack)
        logging.info('[*]Queue %s started.' % (result.method.queue))

        channel.start_consuming()  # blocks until the channel/connection closes

    def __enter__(self):
        # Context-manager entry: open a fresh channel for publishing.
        self.__channel = self.create_channel()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.__connection.close()

    def close_channel(self):
        """Close the underlying connection (and with it the channel)."""
        self.__connection.close()

    def create_channel(self):
        """Open a new blocking connection and return a channel on it."""
        credentials = pika.PlainCredentials(self.user, self.pwd)
        self.__connection = pika.BlockingConnection(
            pika.ConnectionParameters(self.addr, self.port, self.vhost, credentials=credentials))
        channel = self.__connection.channel()
        return channel

    def new_channel(self):
        """Like __enter__, for callers not using the with-statement."""
        self.__channel = self.create_channel()
        return self

    def call_back(self, ch, method, properties, body):
        """pika consume callback: dispatch to on_message, then ack/nack."""
        try:
            logging.info('get message')
            self.on_message(body)

            if not self.__no_ack:
                ch.basic_ack(delivery_tag=method.delivery_tag)
        except:
            # NOTE(review): bare except keeps the consumer alive on any handler
            # error; the failed delivery is nack'd (and may be redelivered).
            logging.error(traceback.format_exc())
            if not self.__no_ack:
                ch.basic_nack(delivery_tag=method.delivery_tag)

    def on_message(self, body):
        """Override in subclasses to process a consumed message body."""
        pass

    def publish_message(self, body, durable=True, exchange_durable=False):
        """Publish *body* on the exchange; durable=True marks the message persistent."""
        self.__channel.exchange_declare(exchange=self.__exchange, exchange_type=self.__exchange_type,
                                        durable=exchange_durable)
        if self.__queue_name:
            result = self.__channel.queue_declare(queue=self.__queue_name)
        else:
            result = self.__channel.queue_declare("", exclusive=True, auto_delete=True)

        self.__channel.queue_bind(exchange=self.__exchange, queue=result.method.queue)

        if durable:
            # delivery_mode=2 -> persistent message.
            properties = pika.BasicProperties(delivery_mode=2)
            self.__channel.basic_publish(exchange=self.__exchange, routing_key=self.__routing_key, body=body,
                                         properties=properties)
        else:
            self.__channel.basic_publish(exchange=self.__exchange, routing_key=self.__routing_key, body=body)
        logging.info('Publish message %s sucessfuled.' % body)
121 |
--------------------------------------------------------------------------------
/websdk2/program.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | '''
4 | Author : ming
5 | date : 2017/4/11 下午3:21
6 | desc :
7 | '''
8 | import fire
9 |
10 |
class MainProgram(object):
    """Base class for python-fire driven command-line programs."""

    def __init__(self, progressid=''):
        # Echo the progress/process id so launch wrappers can log it.
        print(progressid)

    @staticmethod
    def run(cls_inst):
        """Hand *cls_inst* (a MainProgram subclass) over to python-fire.

        Raises:
            TypeError: if cls_inst is not a MainProgram subclass.
                (The original raised a bare ``Exception('')`` with no message;
                TypeError is still an Exception subclass, so existing broad
                handlers keep working.)
        """
        if issubclass(cls_inst, MainProgram):
            fire.Fire(cls_inst)
        else:
            raise TypeError('cls_inst must be a subclass of MainProgram')
--------------------------------------------------------------------------------
/websdk2/salt_api.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | '''
4 | Author : SS
5 | date : 2017年12月29日14:43:24
6 | role : 集中化管理工具的使用
7 | '''
8 |
9 | import requests
10 | import json
11 | import time
12 |
13 | try:
14 | import cookielib
15 | except:
16 | import http.cookiejar as cookielib
17 |
18 | import ssl
19 |
20 | context = ssl._create_unverified_context()
21 | import urllib3
22 |
23 | urllib3.disable_warnings()
24 |
25 |
class SaltApi:
    """
    Wrapper around the salt-api HTTP endpoint.
    A login token is obtained on construction and attached to every
    subsequent request via the X-Auth-Token header.
    """

    def __init__(self, url='https://127.0.0.1:8001/', username="saltapi", password="shenshuo"):
        self.__url = url
        self.__username = username
        self.__password = password
        self.headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/50.0.2661.102 Safari/537.36",
            "Content-type": "application/json"
            # "Content-type": "application/x-yaml"
        }
        self.params = {'client': 'local', 'fun': '', 'tgt': ''}
        self.login_url = self.__url + "login"
        self.login_params = {'username': self.__username, 'password': self.__password, 'eauth': 'pam'}
        # NOTE(review): network I/O in the constructor — raises if salt-api is unreachable.
        self.token = self.get_data(self.login_url, self.login_params)['token']
        self.headers['X-Auth-Token'] = self.token

    def get_data(self, url, params):
        """POST *params* as JSON to *url* and return result['return'][0].

        NOTE(review): verify=False disables TLS certificate validation.
        """
        send_data = json.dumps(params)
        request = requests.post(url, data=send_data, headers=self.headers, verify=False)
        response = request.json()
        result = dict(response)
        return result['return'][0]

    def salt_command(self, tgt, method, arg=None):
        """Run a command remotely — equivalent to: salt 'client1' cmd.run 'free -m'."""
        if arg:
            params = {'client': 'local', 'fun': method, 'tgt': tgt, 'arg': arg}
        else:
            params = {'client': 'local', 'fun': method, 'tgt': tgt}
        result = self.get_data(self.__url, params)
        return result

    def salt_async_command(self, tgt, method, arg=None):  # run asynchronously; inspect the outcome later by jid

        """Run a command remotely and asynchronously; returns the job id (jid) or None."""
        if arg:
            params = {'client': 'local_async', 'fun': method, 'tgt': tgt, 'arg': arg}
        else:
            params = {'client': 'local_async', 'fun': method, 'tgt': tgt}
        jid = self.get_data(self.__url, params).get('jid', None)
        return jid

    def look_jid(self, jid):  # look up the result of an async job by its jid
        """Return the runner result for job *jid* (empty dict while still running)."""
        params = {'client': 'runner', 'fun': 'jobs.lookup_jid', 'jid': jid}
        result = self.get_data(self.__url, params)
        return result

    def run(self, salt_client='*', salt_method='cmd.run_all', salt_params='w', timeout=1800):
        """Ping the target, launch *salt_method* asynchronously, then poll every
        5 seconds (up to *timeout* seconds) and return the first minion's
        (retcode, stdout, stderr). Negative codes signal ping/launch failures.
        """
        try:
            if not self.salt_command(salt_client, 'test.ping')[salt_client]:
                return -98, 'test.ping error 98', ''
        except Exception as e:
            return -99, 'test.ping error 99', str(e)

        t = 0
        jid = self.salt_async_command(salt_client, salt_method, salt_params)
        if not jid:
            return -100, '连接失败', '连接失败或主机不存在'

        while True:
            time.sleep(5)
            if t == timeout:
                print('exec timeout!')
                break
            else:
                t += 5
                result = self.look_jid(jid)
                # Returns on the first minion key present in the job result.
                for i in result.keys():
                    return result[i]['retcode'], result[i]['stdout'], result[i]['stderr']
100 |
101 |
102 | if __name__ == '__main__':
103 | pass
104 | # salt1 = SaltApi()
105 | # req = salt1.run('*', 'cmd.run_all', 'w')
106 | # status, stdout, stderr = req[0], req[1], req[2]
107 | # print(status, stdout, stderr)
108 |
--------------------------------------------------------------------------------
/websdk2/sqlalchemy_pagination.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Version : 0.0.1
5 | Contact : 191715030@qq.com
6 | Author : shenshuo
7 | Date : 2023/3/2 18:23
8 | Desc : 分页
9 | """
10 |
11 | import math
12 | from sqlalchemy import desc
13 | from .model_utils import queryset_to_list
14 |
15 |
class Page(object):
    """A single page of results plus pagination metadata."""

    def __init__(self, items, page, page_size, total):
        self.items = items
        self.total = total
        self.pages = int(math.ceil(total / float(page_size)))

        # A previous page exists whenever we are past page 1.
        self.has_previous = page > 1
        self.previous_page = page - 1 if self.has_previous else None

        # A next page exists while the rows seen so far do not cover the total.
        seen = (page - 1) * page_size + len(items)
        self.has_next = seen < total
        self.next_page = page + 1 if self.has_next else None
29 |
30 |
def paginate(query, order_by: str = None, **query_params):
    """Paginate a SQLAlchemy query and return a Page.

    Recognized query_params: page/page_number, limit/page_size, order_by,
    order ('ascend'/'descend'), items_not_to_list (truthy skips serialization,
    mainly for joined queries).
    """
    if 'page' in query_params:
        page = int(query_params.get('page', 1))
    else:
        page = int(query_params.get('page_number', 1))
    if 'limit' in query_params:
        page_size = int(query_params.get('limit'))
    else:
        page_size = int(query_params.get('page_size', 10))

    if 'order_by' in query_params:
        order_by = query_params.get('order_by')  # sort column name
    order = query_params.get('order', 'ascend')  # 'ascend' or 'descend'
    items_not_to_list = query_params.get('items_not_to_list')

    if page <= 0:
        raise AttributeError('page needs to be >= 1')
    if page_size <= 0:
        raise AttributeError('page_size needs to be >= 1')

    ordered = query
    if order_by:
        sort_key = desc(order_by) if order == 'descend' else order_by
        ordered = query.order_by(sort_key)

    # A page_size of 200 or more is treated as "return everything".
    if page_size >= 200:
        items = ordered.all()
    else:
        items = ordered.limit(page_size).offset((page - 1) * page_size).all()

    total = query.count()
    if not items_not_to_list:
        items = queryset_to_list(items)
    return Page(items, page, page_size, total)
54 |
--------------------------------------------------------------------------------
/websdk2/tools.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """
4 | Author : ss
5 | date : 2018年4月12日
6 | role : 工具类
7 | """
8 |
9 | import sys
10 | import re
11 | import time
12 | import redis
13 | import logging
14 | from shortuuid import uuid
15 | import subprocess
16 | from concurrent.futures import ThreadPoolExecutor
17 | from abc import ABC, abstractmethod
18 | from .consts import const
19 |
20 |
def singleton(class_):
    """Class decorator: every call returns one shared instance per class."""
    instances = {}

    def wrapper(*args, **kwargs):
        # Construct lazily, on the first call only.
        if class_ not in instances:
            instances[class_] = class_(*args, **kwargs)
        return instances[class_]

    return wrapper
30 |
31 |
def bytes_to_unicode(input_bytes):
    """Decode UTF-8 *input_bytes* into str.

    The original kept a Python 2 fallback branch; this package targets
    Python 3 only, so the dead branch is removed (behavior unchanged on 3.x).
    """
    return str(input_bytes, encoding='utf-8')
37 |
38 |
def convert(data):
    """Recursively decode bytes to str inside dicts, tuples, and lists.

    Fixes: the tuple branch used to return a lazy ``map`` object instead of a
    tuple (only dict() consumption made it appear to work); lists are now
    converted too — previously they passed through untouched.
    """
    if isinstance(data, bytes):
        return data.decode('utf8')
    if isinstance(data, dict):
        return {convert(k): convert(v) for k, v in data.items()}
    if isinstance(data, tuple):
        return tuple(convert(item) for item in data)
    if isinstance(data, list):
        return [convert(item) for item in data]
    return data
44 |
45 |
def check_password(data):
    """Password policy: at least 8 chars with one digit, one lowercase and one uppercase letter."""
    # Raw string avoids invalid-escape warnings; bool() replaces the True/False ternary.
    return bool(re.search(r"^(?=.*\d)(?=.*[a-z])(?=.*[A-Z]).*$", data) and len(data) >= 8)
48 |
49 |
def is_mail(text, login_mail=None):
    """Loosely validate an e-mail address.

    When *login_mail* is given, the address must look like
    <up-to-19 word chars>@<login_mail>; otherwise any "x@y.z" shape passes.
    """
    if login_mail:
        return bool(re.match(r'[0-9a-zA-Z_]{0,19}@%s' % login_mail, text))
    # if re.match(r'^[0-9a-zA-Z_]{0,19}@[0-9a-zA-Z]{1,13}\.[com,cn,net]{1,3}$', text):
    generic = re.compile(r"[^@]+@[^@]+\.[^@]+")
    return bool(generic.match(text))
62 |
63 |
def is_tel(tel):
    """Check whether *tel* is an 11-digit mainland-China mobile number.

    Generalized from ``1[35678]`` to ``1[3-9]`` so currently issued prefixes
    (14x, 19x, ...) are accepted as well; every previously valid number still
    passes.
    """
    return bool(re.match(r"^1[3-9]\d{9}$", tel))
71 |
72 |
def check_contain_chinese(check_str):
    """Return True when *check_str* contains any CJK unified ideograph.

    :param check_str: string to scan
    :return: bool
    """
    return any(u'\u4e00' <= ch <= u'\u9fff' for ch in check_str)
83 |
84 |
class Executor(ThreadPoolExecutor):
    """Process-wide shared thread pool (singleton, 10 workers).

    Note: the shared object is a plain ThreadPoolExecutor created once; every
    ``Executor()`` call returns that same instance.
    """
    _instance = None

    def __new__(cls, *args, **kwargs):
        if cls._instance is None:
            cls._instance = ThreadPoolExecutor(max_workers=10)
        return cls._instance
93 |
94 |
def exec_shell(cmd):
    """Run *cmd* through the shell, capturing stdout and stderr together.

    Returns (0, list_of_output_lines) on success, or
    (returncode, single_line_string) on failure.
    NOTE(review): shell=True — only pass trusted command strings.
    """
    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    code = proc.returncode
    text = output.decode('utf-8')
    if code == 0:
        return code, text.split('\n')
    return code, text.replace('\n', '')
104 |
105 |
class RunningProcess:
    """Thin wrapper around a subprocess.Popen with timeout bookkeeping."""

    def __init__(self, process):
        self.process = process
        self.start_time = time.time()  # reference point for is_timeout

    def is_running(self):
        """True while the child has not exited yet."""
        return self.process.poll() is None

    def read_line(self):
        """Read one line from the child's stdout (blocking)."""
        return self.process.stdout.readline()

    @property
    def unread_lines(self):
        """Drain and close stdout, returning all remaining lines."""
        remaining = self.process.stdout.readlines()
        self.process.stdout.close()
        return remaining

    @property
    def run_state(self):
        """True when the child has exited with return code 0."""
        return self.process.poll() == 0

    def is_timeout(self, exec_time=600):
        """Kill the child and return True once it has run longer than *exec_time* seconds."""
        elapsed = time.time() - self.start_time
        if elapsed <= exec_time:
            return False
        self.process.terminate()
        self.process.wait()
        self.process.communicate()
        # print("execute timeout, execute time {}, it's killed.".format(elapsed))
        return True
136 |
137 |
class RedisLock(object):
    """Legacy SETNX-based Redis spin lock.

    NOTE(review): get_lock/release are @staticmethod yet expect the lock
    instance as their explicit first argument — callers invoke them as
    ``lock.get_lock(lock)`` (see ``deco`` below for the intended usage).
    """

    def __init__(self, key, **conf):
        # When no conf is passed, fall back to the project's Redis settings.
        if not conf:
            from .configs import configs
            __redis_info = configs.get(const.REDIS_CONFIG_ITEM, None).get(const.DEFAULT_RD_KEY, None)
            conf = dict(host=__redis_info.get(const.RD_HOST_KEY), port=__redis_info.get(const.RD_PORT_KEY, 6379),
                        db=__redis_info.get(const.RD_DB_KEY, 0), password=__redis_info.get(const.RD_PASSWORD_KEY, None))

        self.rdcon = redis.Redis(host=conf.get('host'), port=conf.get('port'), password=conf.get('password'),
                                 db=conf.get('db', 0))
        self._lock = 0  # becomes 1 once SETNX succeeds
        self.lock_key = "{}_dynamic_test".format(key)

    @staticmethod
    def get_lock(cls, key_timeout=59, func_timeout=59):
        # The stored value is a deadline timestamp (now + key_timeout + 1);
        # give up and return False after func_timeout seconds of retrying.
        start_time = time.time()
        while cls._lock != 1:
            timestamp = time.time() + key_timeout + 1
            cls._lock = cls.rdcon.setnx(cls.lock_key, timestamp)
            lock_key = cls.rdcon.get(cls.lock_key)

            if time.time() - start_time > func_timeout:
                return False
            # Acquired directly, or the stored deadline has passed and GETSET
            # confirms we took over the expired lock.
            if cls._lock == 1 or (
                    time.time() > float(lock_key) and time.time() > float(cls.rdcon.getset(cls.lock_key, timestamp))):
                return True
            else:
                time.sleep(1)

    @staticmethod
    def release(cls):
        # Release the lock, but only while the stored deadline is still in the future.
        lock_key = cls.rdcon.get(cls.lock_key)
        if lock_key and time.time() < float(lock_key): cls.rdcon.delete(cls.lock_key)
173 |
174 |
def deco(cls, release=False):
    """Decorator factory serializing calls through a RedisLock-style object.

    Example:
        @deco(RedisLock("redis_lock_key", **dict(host='127.0.0.1', port=6379, password="", db=1)))
        def do_func():
            print("the func called.")
            time.sleep(50)
            print("the func end")


        do_func()

    The wrapped function returns False when the lock cannot be acquired.
    With release=True the key is deleted right after the call; by default it
    is left to expire on its own.
    """

    def decorator(func):
        def wrapped(*args, **kwargs):
            if not cls.get_lock(cls):
                return False
            try:
                return func(*args, **kwargs)
            finally:
                if release:
                    cls.release(cls)

        return wrapped

    return decorator
199 |
200 |
def now_timestamp() -> int:
    """Current Unix time in milliseconds."""
    millis = time.time() * 1000
    return int(round(millis))
203 |
204 |
class LockClientV2(ABC):
    """Interface implemented by V2 lock clients (e.g. a Redis-backed lock)."""

    @abstractmethod
    def get_lock(self, key_timeout=59, func_timeout=5):
        """Try to acquire the lock; True on success, False on timeout."""

    @abstractmethod
    def release(self):
        """Release the lock if this client still holds it."""
213 |
214 |
class RedisLockV2(LockClientV2):
    """Redis-backed distributed lock, V2.

    Each instance owns a random uuid; the lock key only ever stores the
    owner's uuid, so release() cannot delete a lock held by someone else.
    """

    def __init__(self, key, **conf):
        # Note: configs.import_dict(**settings) must have been called when
        # no explicit connection kwargs are supplied.
        if not conf:
            from .configs import configs
            rd_conf = configs.get(const.REDIS_CONFIG_ITEM, {}).get(const.DEFAULT_RD_KEY, {})
            conf = dict(
                host=rd_conf.get(const.RD_HOST_KEY, 'localhost'),
                port=rd_conf.get(const.RD_PORT_KEY, 6379),
                db=rd_conf.get(const.RD_DB_KEY, 0),
                password=rd_conf.get(const.RD_PASSWORD_KEY),
            )
        self.redis_client: redis.Redis = redis.Redis(**conf)
        self._lock = 0
        self.lock_key = f"{key}_dynamic"
        self.uuid = str(uuid())

    def get_lock(self, key_timeout=59, func_timeout=5):
        """Retry once per second until the lock is acquired or func_timeout passes."""
        deadline = time.time() + func_timeout
        client: redis.Redis = self.redis_client

        while time.time() < deadline:
            # SET NX EX: atomically create the key with a TTL.
            acquired = client.set(self.lock_key, self.uuid, nx=True, ex=key_timeout)
            owner = convert(client.get(self.lock_key))
            # Success only when the SET went through AND the stored owner is us.
            if owner == self.uuid and acquired:
                return True
            time.sleep(1)
        return False

    def release(self):
        """Delete the lock key, but only while this instance is still the owner."""
        client: redis.Redis = self.redis_client
        if convert(client.get(self.lock_key)) == self.uuid:
            client.delete(self.lock_key)
258 |
259 |
def deco_v2(cls: "LockClientV2", release=False, key_timeout=59, func_timeout=5):
    """Decorator factory guarding a function with a V2 distributed lock.

    Example:
        @deco_v2(RedisLockV2("codo:xxxx:v2:xxxx"), release=True, key_timeout=30)
        def do_func():
            print("the func called.")
            time.sleep(50)
            print("the func end")

        do_func()

    :param cls: a LockClientV2 instance (e.g. RedisLockV2)
    :param release: release the key after the call finishes (default: keep it)
    :param key_timeout: lock key TTL in seconds
    :param func_timeout: how long to keep retrying for the lock
    :return: decorator; the wrapped call returns False when the lock is not obtained
    """
    from functools import wraps  # local import: keeps module-level deps unchanged

    def _deco(func):
        @wraps(func)  # preserve the wrapped function's name/docstring
        def __deco(*args, **kwargs):
            try:
                if not cls.get_lock(key_timeout=key_timeout, func_timeout=func_timeout):
                    return False
            except Exception as e:
                # Lock backend failure is treated the same as "lock not obtained".
                logging.error(f"[deco_v2] get lock func={func.__name__} error={str(e)}")
                return False
            try:
                return func(*args, **kwargs)
            finally:
                # Release the key right after execution; off by default.
                if release:
                    cls.release()

        return __deco

    return _deco
298 |
--------------------------------------------------------------------------------
/websdk2/utils/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Contact : 191715030@qq.com
5 | Author : shenshuo
6 | Date : 2018/12/11
7 | Desc :
8 | """
9 |
10 | import json
11 | import logging
12 | import os
13 | import smtplib
14 | import socket
15 | import time
16 | import uuid
17 | from datetime import datetime
18 | from typing import List, Union
19 | from email.mime.text import MIMEText
20 | from email.mime.multipart import MIMEMultipart
21 |
22 | from ..consts import const
23 |
24 |
class SendMail:
    def __init__(self, mail_host: str, mail_port: int, mail_user: str, mail_password: str, mail_ssl: bool = False,
                 mail_tls: bool = False):
        """
        Initialize the mail sending client.

        :param mail_host: SMTP host
        :param mail_port: SMTP port
        :param mail_user: SMTP account
        :param mail_password: SMTP password
        :param mail_ssl: enable SSL; usually required when the SMTP port is 465
        :param mail_tls: enable STARTTLS; usually required when the SMTP port is 587
        """
        self.mail_host = mail_host
        self.mail_port = mail_port
        self.mail_user = mail_user
        self.__mail_password = mail_password  # name-mangled: not part of the public surface
        self.mail_ssl = mail_ssl
        self.mail_tls = mail_tls

    def send_mail(self, to_list: Union[str, List[str]], subject: str, content: str, subtype: str = 'plain',
                  att: Union[str, None] = None):
        """
        Send an email.

        :param to_list: recipient address, or a list of addresses
        :param subject: mail subject, required
        :param content: mail body, required
        :param subtype: body subtype, 'plain' (default) or 'html'
        :param att: path of a single attachment, optional
        :raises FileNotFoundError: when att is given but the file does not exist
        :return: True on success, False on failure
        """
        if isinstance(to_list, list):
            to_list = ','.join(to_list)  # header wants a comma-separated string

        msg = MIMEMultipart()
        msg['Subject'] = subject
        msg['From'] = self.mail_user
        msg['To'] = to_list

        msg.attach(MIMEText(content, subtype, 'utf-8'))

        if att:
            if not os.path.isfile(att):
                raise FileNotFoundError(f'{att} 文件不存在')
            try:
                # Local import so the module-level dependency block is unchanged.
                from email.mime.application import MIMEApplication
                with open(att, 'rb') as f:
                    file_data = f.read()

                filename = os.path.basename(att)
                # BUG FIX: MIMEApplication base64-encodes arbitrary binary safely
                # (the old MIMEText + .decode('utf-8') path failed on real binary),
                # and the Content-Disposition filename was hard-coded instead of
                # using the actual attachment name.
                attachment = MIMEApplication(file_data)  # application/octet-stream
                attachment['Content-Disposition'] = f'attachment; filename="{filename}"'
                msg.attach(attachment)
            except Exception as err:
                # Best-effort: a broken attachment is logged but does not abort the mail.
                logging.error(f"发送附件出错 {err}")

        try:
            with self._get_server() as server:
                server.sendmail(self.mail_user, to_list.split(','), msg.as_string())
            return True
        except Exception as e:
            logging.error(f'邮件发送失败: {e}')
            return False

    def _get_server(self):
        # Build the SMTP connection according to the configured security mode.
        if self.mail_ssl:
            server = smtplib.SMTP_SSL(self.mail_host, self.mail_port)
        elif self.mail_tls:
            server = smtplib.SMTP(self.mail_host, self.mail_port)
            server.starttls()
        else:
            server = smtplib.SMTP(self.mail_host, self.mail_port)

        server.login(self.mail_user, self.__mail_password)
        return server
101 |
102 |
103 | # class SendMail(object):
104 | # def __init__(self, mail_host, mail_port, mail_user, mail_password, mail_ssl=False, mail_tls=False):
105 | # """
106 | # :param mail_host: SMTP主机
107 | # :param mail_port: SMTP端口
108 | # :param mail_user: SMTP账号
109 | # :param mail_password: SMTP密码
110 | # :param mail_ssl: SSL=True, 如果SMTP端口是465,通常需要启用SSL, 如果SMTP端口是587,通常需要启用TLS
111 | # """
112 | # self.mail_host = mail_host
113 | # self.mail_port = mail_port
114 | # self.__mail_user = mail_user
115 | # self.__mail_password = mail_password
116 | # self.mail_ssl = mail_ssl
117 | # self.mail_tls = mail_tls
118 | #
119 | # def send_mail(self, to_list, subject, content, subtype='plain', att=None):
120 | # """
121 | # :param to_list: 收件人,多收件人半角逗号分割, 必填
122 | # :param subject: 标题, 必填
123 | # :param content: 内容, 必填
124 | # :param subtype: 格式,默认:plain, 可选html
125 | # :param att: 附件,支持单附件,选填
126 | # """
127 | # msg = MIMEMultipart()
128 | # msg['Subject'] = subject ## 标题
129 | # msg['From'] = self.__mail_user ## 发件人
130 | # msg['To'] = to_list # 收件人,必须是一个字符串
131 | # # 邮件正文内容
132 | # msg.attach(MIMEText(content, subtype, 'utf-8'))
133 | # if att:
134 | # if not os.path.isfile(att):
135 | # raise FileNotFoundError('{0} file does not exist'.format(att))
136 | #
137 | # dirname, filename = os.path.split(att)
138 | # # 构造附件1,传送当前目录下的 test.txt 文件
139 | # att1 = MIMEText(open(att, 'rb').read(), 'base64', 'utf-8')
140 | # att1["Content-Type"] = 'application/octet-stream'
141 | # # 这里的filename可以任意写,写什么名字,邮件中显示什么名字
142 | # att1["Content-Disposition"] = 'attachment; filename="{0}"'.format(filename)
143 | # msg.attach(att1)
144 | #
145 | # try:
146 | # if self.mail_ssl:
147 | # '''SSL加密方式,通信过程加密,邮件数据安全, 使用端口465'''
148 | # # print('Use SSL SendMail')
149 | # server = smtplib.SMTP_SSL(host=self.mail_host)
150 | # server.connect(self.mail_host, self.mail_port) # 连接服务器
151 | # server.login(self.__mail_user, self.__mail_password) # 登录操作
152 | # server.sendmail(self.__mail_user, to_list.split(','), msg.as_string())
153 | # server.close()
154 | # elif self.mail_tls:
155 | # # print('Use TLS SendMail')
156 | # '''使用TLS模式'''
157 | # server = smtplib.SMTP(host=self.mail_host)
158 | # server.connect(self.mail_host, self.mail_port) # 连接服务器
159 | # server.starttls()
160 | # server.login(self.__mail_user, self.__mail_password) # 登录操作
161 | # server.sendmail(self.__mail_user, to_list.split(','), msg.as_string())
162 | # server.close()
163 | # return True
164 | # else:
165 | # '''使用普通模式'''
166 | # server = smtplib.SMTP()
167 | # server.connect(self.mail_host, self.mail_port) # 连接服务器
168 | # server.login(self.__mail_user, self.__mail_password) # 登录操作
169 | # server.sendmail(self.__mail_user, to_list.split(','), msg.as_string())
170 | # server.close()
171 | # return True
172 | # except Exception as e:
173 | # print(str(e))
174 | # return False
175 | #
176 |
def mail_login(user, password, mail_server='smtp.exmail.qq.com'):
    """Verify mailbox credentials by performing a real SMTP login.

    Returns True when the login succeeds, False on any failure.
    """
    try:
        smtp = smtplib.SMTP()
        smtp.connect(mail_server)
        smtp.login(user, password)
    except Exception as e:
        logging.error(f"{user}邮箱认证出错:{e}")
        return False
    return True
187 |
188 |
189 | # def get_contain_dict(src_data: dict, dst_data: dict) -> bool:
190 | # if not isinstance(src_data, dict):
191 | # try:
192 | # src_data = json.loads(src_data)
193 | # except Exception as err:
194 | # return False
195 | #
196 | # if not isinstance(dst_data, dict):
197 | # try:
198 | # dst_data = json.loads(dst_data)
199 | # except Exception as err:
200 | # return False
201 | #
202 | # # src_key = list(src_data.keys())
203 | # # dst_key = list(dst_data.keys())
204 | # pd = [False for c in src_data.keys() if c not in dst_data]
205 | # if pd:
206 | # return False
207 | # else:
208 | # src_val = list(src_data.values())
209 | # dst_val = list(dst_data.values())
210 | # pds = [False for c in src_val if c not in dst_val]
211 | # if pds:
212 | # return False
213 | # else:
214 | # return True
215 |
def get_contain_dict(src_data: dict, dst_data: dict) -> bool:
    """Loosely check that ``src_data`` is "contained" in ``dst_data``.

    Either argument may be a dict or a JSON string (unparseable input
    returns False). Containment means: every key of src_data exists in
    dst_data AND every src value appears among dst_data's values. When
    some value is missing, a partial match is attempted by comparing the
    FIRST src value (if it is a dict) key-by-key against each dict-typed
    value of dst_data; one matching key/value pair counts as containment.
    """
    if not isinstance(src_data, dict):
        try:
            src_data = json.loads(src_data)
        except Exception as err:
            return False

    if not isinstance(dst_data, dict):
        try:
            dst_data = json.loads(dst_data)
        except Exception as err:
            return False

    ### Check that every source key exists in the destination.
    pd = [False for c in src_data.keys() if c not in dst_data]
    if pd:
        return False
    else:
        src_val = list(src_data.values())
        dst_val = list(dst_data.values())
        # Non-empty pds means at least one src value is absent from dst values.
        pds = [False for c in src_val if c not in dst_val]
        if pds:
            # NOTE(review): only src_val[0] is ever compared here — values past
            # the first do not participate in the partial-match fallback.
            try:
                for d in dst_val:
                    if isinstance(d, dict) and src_val and isinstance(src_val[0], dict):
                        pds1 = [True for sv in src_val[0].keys() if sv in d and d.get(sv) == src_val[0].get(sv)]
                        if True in pds1: return True
            except:
                # NOTE(review): bare except — any comparison error is treated as "not contained".
                return False
            return False
        else:
            return True
248 |
249 |
def now_time_stamp() -> int:
    """
    Unix timestamp in whole seconds.
    :return: int
    """
    seconds = time.time()
    return int(seconds)
256 |
257 |
### This address is unique per node.
def get_node_address():
    """Unique node address: NODE_ADDRESS env var (or hostname) plus a MAC suffix."""
    node_name = os.getenv(const.NODE_ADDRESS) or socket.gethostname()
    mac = uuid.UUID(int=uuid.getnode()).hex[-12:]
    return f'{node_name}--mac-{mac}'
263 |
264 |
### This address form is wildcard-matchable by default.
def get_node_topic(node=False):
    """Node topic string; ends with '#' (wildcard) unless node=True."""
    address = os.getenv(const.NODE_ADDRESS)
    if not address:
        mac = uuid.UUID(int=uuid.getnode()).hex[-12:]
        address = f'{socket.gethostname()}--mac-{mac}'
    return address if node else f'{address}#'
275 |
276 |
def echo_datetime_now_f():
    """Current local time formatted with microseconds ('YYYY-MM-DD HH:MM:SS.ffffff')."""
    now = datetime.now()
    return now.strftime('%Y-%m-%d %H:%M:%S.%f')
279 |
280 |
def echo_execute_time(func):
    """Decorator that logs the wrapped function's wall-clock run time."""
    import logging
    from functools import wraps  # preserve the wrapped function's metadata
    from time import time

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Sandwich the call between two clock reads and log the difference.
        start = time()
        func_return = func(*args, **kwargs)
        end = time()
        logging.warning(f'{func.__name__}() execute time: {end - start}s')
        return func_return

    return wrapper
296 |
297 |
298 | ### 令牌桶限流
299 | '''
300 | 示例
301 | import time
302 | from settings import settings
303 | from websdk2.configs import configs
304 | from websdk2.cache_context import cache_conn
305 |
306 | if configs.can_import: configs.import_dict(**settings)
307 |
308 | redis_conn = cache_conn()
309 | obj = TokenBucket(redis_conn, 'ss', 5, 60)
310 | for i in range(120):
311 | time.sleep(0.5)
312 | status = obj.can_access('tuanzi')
313 | print(status)
314 | '''
315 |
316 |
317 | class TokenBucket:
318 | """令牌桶限流"""
319 |
320 | # bucket_key 用来标记令牌
321 | # func_name 第二段标记
322 | # capacity = 5 # 桶容量
323 | # rate = 1 # 速率 每分增加一个令牌
324 |
325 | def __init__(self, cache, bucket_key, capacity: int = 5, rate: int = 1):
326 | self.bucket_key = bucket_key # 用来标记令牌桶
327 | self.capacity = capacity # 桶容量
328 | self.rate = rate # 速率 每分钟增加的令牌
329 | self.cache = cache
330 | if not isinstance(rate, int): raise Exception('Rate must be int')
331 |
332 | def can_access(self, func_name) -> bool:
333 | """令牌桶限流"""
334 | redis_key = self.bucket_key + func_name
335 |
336 | now = int(time.time())
337 | current_tokens = self.cache.hget(redis_key, 'current_tokens')
338 | last_time = self.cache.hget(redis_key, 'last_time')
339 |
340 | current_tokens = current_tokens if current_tokens else self.capacity
341 | try:
342 | current_tokens = int(current_tokens)
343 | except:
344 | current_tokens = float(current_tokens)
345 | self.capacity = float(self.capacity)
346 |
347 | last_time = int(last_time) if last_time else now
348 |
349 | increase_tokens = (now - last_time) * self.rate / 60 # 增加的令牌桶 按分钟计算
350 | current_tokens = min(self.capacity, current_tokens + increase_tokens)
351 | if current_tokens > 0:
352 | self.cache.hset(redis_key, 'current_tokens', current_tokens - 1)
353 | self.cache.hset(redis_key, 'last_time', int(time.time()))
354 | return True
355 | else:
356 | return False
357 |
358 |
# Import-only module; nothing to execute when run directly.
if __name__ == '__main__':
    pass
361 |
--------------------------------------------------------------------------------
/websdk2/utils/cc_crypto.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Version : 0.0.1
5 | Contact : 191715030@qq.com
6 | Author : shenshuo
7 | Date : 2023/4/17 17:07
8 | Desc : 加密
9 | """
10 |
11 | import base64
12 | from cryptography.fernet import Fernet
13 | from ..consts import const
14 | from ..configs import configs
15 |
16 |
class AESCryptoV3:
    """Fernet-based symmetric crypto helper with a fixed default key.

    usage: mc = AESCryptoV3()   instantiate
           mc.my_encrypt('ceshi')   encrypt the string 'ceshi'
           mc.my_decrypt('')        decrypt a token
    """

    def __init__(self, key: str = 'W1zFCF-pnUXi1zRtfgNkHmM3qv_3zvCkVSx68vXqks4='):
        # Fernet wants exactly 32 raw bytes before urlsafe-b64 encoding:
        # long keys are truncated, short ones left-padded with b'0'.
        raw = key if isinstance(key, bytes) else key.encode('utf-8')
        raw = raw[:32] if len(raw) > 32 else raw.rjust(32, b'0')

        self.key = base64.urlsafe_b64encode(raw)
        self.f = Fernet(self.key)

    @property
    def create_key(self):
        """A brand-new random Fernet key (fresh value on every access)."""
        return Fernet.generate_key()

    def my_encrypt(self, text: str):
        """Encrypt str/bytes; returns the token as str."""
        payload = text.encode('utf-8') if isinstance(text, str) else text
        return self.f.encrypt(payload).decode('utf-8')

    def my_decrypt(self, text: str):
        """Decrypt a token (str/bytes); returns the plaintext as str."""
        payload = text.encode('utf-8') if isinstance(text, str) else text
        return self.f.decrypt(payload).decode('utf-8')
46 |
47 |
class AESCryptoV4:
    """Fernet crypto helper whose default key comes from global configs.

    usage: mc = AESCryptoV4()   instantiate
           mc.my_encrypt('ceshi')   encrypt the string 'ceshi'
           mc.my_decrypt('')        decrypt a token
    """

    def __init__(self, key: str = None):
        if key is None:
            # No key supplied: fall back to the configured key, or the built-in default.
            key = configs.get(const.AES_CRYPTO_KEY, 'W1zFCF-pnUXi1zRtfgNkHmM3qv_3zvCkVSx68vXqks4=')

        # Fernet wants exactly 32 raw bytes before urlsafe-b64 encoding:
        # long keys are truncated, short ones left-padded with b'0'.
        raw = key if isinstance(key, bytes) else key.encode('utf-8')
        raw = raw[:32] if len(raw) > 32 else raw.rjust(32, b'0')

        self.f = Fernet(base64.urlsafe_b64encode(raw))

    @property
    def create_key(self):
        """A brand-new random Fernet key (fresh value on every access)."""
        return Fernet.generate_key()

    def my_encrypt(self, text: str):
        """Encrypt str/bytes; returns the token as str."""
        payload = text.encode('utf-8') if isinstance(text, str) else text
        return self.f.encrypt(payload).decode('utf-8')

    def my_decrypt(self, text: str):
        """Decrypt a token (str/bytes); returns the plaintext as str."""
        payload = text.encode('utf-8') if isinstance(text, str) else text
        return self.f.decrypt(payload).decode('utf-8')
83 |
84 |
# Module-level shared instance built from the configured (or default) key.
mcv4 = AESCryptoV4()
86 |
--------------------------------------------------------------------------------
/websdk2/utils/date_format.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | """"
4 | Author : shenshuo
5 | Date : 2023年2月5日13:37:54
6 | Desc : 日期时间格式化处理
7 | """
8 |
9 | from datetime import datetime, timedelta
10 |
11 |
def date_format_to8(start_date: str = None, end_date: str = None) -> tuple:
    """
    Parse an iview front-end date range and shift it to UTC+8.

    # Typical use in a query:
    start_time_tuple, end_time_tuple = date_format_to8(start_date, end_date)
    session.query(dbA).filter(dbA.create_time.between(start_time_tuple, end_time_tuple).all()

    :param start_date: ISO-with-Z or YYYY-MM-DD string; defaults to 30 days ago
    :param end_date: ISO-with-Z or YYYY-MM-DD string; defaults to tomorrow
    :return: (start_datetime, end_datetime), each shifted by +8 hours
    :raises ValueError: when a date matches neither supported format
    """
    date_format_1 = "%Y-%m-%dT%H:%M:%S.%fZ"
    date_format_2 = "%Y-%m-%d"

    def _parse(value: str) -> datetime:
        # Each date is parsed independently, so mixed formats
        # (e.g. ISO start + plain end) now work too.
        for date_format in (date_format_1, date_format_2):
            try:
                return datetime.strptime(value, date_format) + timedelta(hours=8)
            except ValueError:
                continue
        raise ValueError(f"Unable to parse the dates. Expected formats are {date_format_1} and {date_format_2}.")

    if not start_date:
        start_date = (datetime.now() - timedelta(days=30)).strftime(date_format_2)
    if not end_date:
        end_date = (datetime.now() + timedelta(days=1)).strftime(date_format_2)

    return _parse(start_date), _parse(end_date)
36 |
--------------------------------------------------------------------------------
/websdk2/utils/pydantic_utils.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | """
4 | Version : 0.0.8
5 | Contact : 191715030@qq.com
6 | Author : shenshuo
7 | Date : 2021/1/26 20:28
8 | Desc : https://github.com/tiangolo/pydantic-sqlalchemy
9 | """
10 |
11 | ####
12 | from typing import Container, Optional, Type
13 | from pydantic import BaseConfig, BaseModel, create_model, ValidationError
14 | from sqlalchemy.inspection import inspect
15 | from sqlalchemy.orm.properties import ColumnProperty
16 |
17 |
18 | ### 删除的时候一般只有id
class PydanticDel(BaseModel):
    """Deletion payload: normally only the primary-key id is supplied."""
    id: int
21 |
class PydanticDelList(BaseModel):
    """Batch-deletion payload: a list of primary-key ids."""
    id_list: list[int]
24 |
25 |
class OrmConfig(BaseConfig):
    """Pydantic config enabling orm_mode so generated models read attributes from ORM objects."""
    orm_mode = True
28 |
29 |
def sqlalchemy_to_pydantic(db_model: Type, *, config: Type = OrmConfig, exclude: Container[str] = ()) -> Type[
    BaseModel]:
    """Build a pydantic model mirroring a SQLAlchemy model's columns.

    :param db_model: SQLAlchemy declarative model class
    :param config: pydantic config class (default enables orm_mode)
    :param exclude: column names to leave out of the generated model
        (default is an immutable empty tuple — was a mutable ``[]``)
    :return: dynamically created pydantic model named after db_model
    """
    mapper = inspect(db_model)
    fields = {}
    for attr in mapper.attrs:
        # Only plain column attributes; relationships etc. are skipped.
        if isinstance(attr, ColumnProperty) and attr.columns:
            name = attr.key
            if name in exclude:
                continue
            column = attr.columns[0]
            python_type: Optional[type] = None
            # TypeDecorator columns expose the real python type via .impl.
            if hasattr(column.type, "impl"):
                if hasattr(column.type.impl, "python_type"):
                    python_type = column.type.impl.python_type
            elif hasattr(column.type, "python_type"):
                python_type = column.type.python_type
            assert python_type, f"Could not infer python_type for {column}"
            # Required (...) only when the column is NOT NULL and has no default.
            default = ... if (column.default is None and not column.nullable) else None
            fields[name] = (python_type, default)
    pydantic_model = create_model(
        db_model.__name__, __config__=config, **fields  # type: ignore
    )
    return pydantic_model
56 |
--------------------------------------------------------------------------------
/websdk2/web_logs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*-coding:utf-8-*-
3 | '''
4 | Author : ss
5 | date : 2018-3-19
6 | role : web log
7 | '''
8 |
9 | import logging
10 | import os
11 | import sys
12 | import time
13 | import tornado.log
14 | from shortuuid import uuid
15 |
16 | log_fmt = ''.join(('PROGRESS:%(progress_id) -5s %(levelname) ', '-10s %(asctime)s %(name) -25s %(funcName) '
17 | '-30s LINE.NO:%(lineno) -5d : %(message)s'))
18 | log_key = 'logger_key'
19 |
20 |
def singleton(class_):
    """Class decorator caching one instance per decorated class.

    Constructor arguments are honored only on the very first call; later
    calls return the cached instance regardless of arguments.
    """
    _cache = {}

    def getinstance(*args, **kwargs):
        inst = _cache.get(class_)
        if inst is None:
            inst = class_(*args, **kwargs)
            _cache[class_] = inst
        return inst

    return getinstance
30 |
31 |
class ProgressLogFilter(logging.Filter):
    """Injects the singleton Logger's progress_id into every log record."""
    def filter(self, record):
        # log_fmt renders %(progress_id)s, which only exists because this filter set it.
        record.progress_id = Logger().progress_id
        return True
35 | return True
36 |
37 |
38 | @singleton
39 | class Logger(object):
40 | def __init__(self, progress_id='', log_file='/tmp/xxx.log'):
41 | self.__log_key = log_key
42 | self.progress_id = progress_id
43 | self.log_file = log_file
44 |
45 | def read_log(self, log_level, log_message):
46 | ###创建一个logger
47 | if self.progress_id == '':
48 | Logger().progress_id = str(uuid())
49 | else:
50 | Logger().progress_id = self.progress_id
51 | logger = logging.getLogger(self.__log_key)
52 | logger.addFilter(ProgressLogFilter())
53 | logger.setLevel(logging.DEBUG)
54 |
55 | ###创建一个handler用于输出到终端
56 | th = logging.StreamHandler()
57 | th.setLevel(logging.DEBUG)
58 |
59 | ###定义handler的输出格式
60 | formatter = logging.Formatter(log_fmt)
61 | th.setFormatter(formatter)
62 |
63 | ###给logger添加handler
64 | logger.addHandler(th)
65 |
66 | ###记录日志
67 | level_dic = {'debug': logger.debug, 'info': logger.info, 'warning': logger.warning, 'error': logger.error,
68 | 'critical': logger.critical}
69 | level_dic[log_level](log_message)
70 |
71 | th.flush()
72 | logger.removeHandler(th)
73 |
74 | def write_log(self, log_level, log_message):
75 | ###创建一个logger
76 | ###创建一个logger
77 | if self.progress_id == '':
78 | Logger().progress_id = str(uuid())
79 | else:
80 | Logger().progress_id = self.progress_id
81 | logger = logging.getLogger(self.__log_key)
82 | logger.addFilter(ProgressLogFilter())
83 | logger.setLevel(logging.DEBUG)
84 |
85 | ###建立日志目录
86 | log_dir = os.path.dirname(self.log_file)
87 | if not os.path.isdir(log_dir):
88 | os.makedirs(log_dir)
89 |
90 | ###创建一个handler用于写入日志文件
91 | fh = logging.FileHandler(self.log_file)
92 | fh.setLevel(logging.DEBUG)
93 |
94 | ###定义handler的输出格式
95 | formatter = logging.Formatter(log_fmt)
96 | fh.setFormatter(formatter)
97 |
98 | ###给logger添加handler
99 | logger.addHandler(fh)
100 |
101 | ###记录日志
102 | level_dic = {'debug': logger.debug, 'info': logger.info, 'warning': logger.warning, 'error': logger.error,
103 | 'critical': logger.critical}
104 | level_dic[log_level](log_message)
105 |
106 | ###删除重复记录
107 | fh.flush()
108 | logger.removeHandler(fh)
109 |
110 |
# Module-level singleton logger shared by the helpers below.
ins_log = Logger()
112 |
113 |
def timeit(func):
    """Decorator logging each call's wall-clock duration via ins_log."""
    from functools import wraps  # preserve the wrapped function's metadata

    @wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        duration = time.time() - start_time
        ins_log.read_log('info', '%s execute duration :%.3f second' % (str(func), duration))
        return result

    return wrapper
124 |
125 |
# These smoke-test calls previously ran at import time, writing /tmp/xxx.log
# as a side effect of merely importing this module; keep them as a manual test.
if __name__ == '__main__':
    ins_log.write_log('info', 'xxxx')
    ins_log.read_log('info', 'xxxx')
--------------------------------------------------------------------------------