├── .github ├── FUNDING.yml └── workflows │ └── ci.yml ├── .gitignore ├── LICENCE ├── README.md ├── benchmark.md ├── example.py ├── pdm.lock ├── pyproject.toml ├── src └── zibai │ ├── __init__.py │ ├── __main__.py │ ├── cli.py │ ├── const.py │ ├── core.py │ ├── h11.py │ ├── logger.py │ ├── middlewares │ ├── __init__.py │ └── limit_request_count.py │ ├── multiprocess.py │ ├── reloader.py │ ├── utils.py │ └── wsgi_typing.py └── tests ├── __init__.py ├── conftest.py ├── test___main__.py ├── test_core.py ├── test_multiprocess.py └── utils.py /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 4 | - "https://donate.aber.sh/" 5 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | tags: 8 | - "*" 9 | paths: 10 | - ".github/workflows/ci.yml" 11 | - "**.py" 12 | - "pdm.lock" 13 | - "pyproject.toml" 14 | - "!zibai/__version__.py" 15 | pull_request: 16 | branches: 17 | - main 18 | paths: 19 | - ".github/workflows/ci.yml" 20 | - "**.py" 21 | - "pdm.lock" 22 | - "pyproject.toml" 23 | - "!zibai/__version__.py" 24 | 25 | jobs: 26 | tests: 27 | name: "Python ${{ matrix.python-version }} ${{ matrix.os }}" 28 | runs-on: "${{ matrix.os }}" 29 | strategy: 30 | matrix: 31 | python-version: ["3.10", "3.11", "3.12"] 32 | os: [ubuntu-latest, windows-latest, macos-13] 33 | steps: 34 | - uses: actions/checkout@v3 35 | 36 | - uses: pdm-project/setup-pdm@v3 37 | name: Setup Python and PDM 38 | with: 39 | python-version: ${{ matrix.python-version }} 40 | architecture: x64 41 | version: 2.10.4 42 | 43 | - name: Install dependencies 44 | run: | 45 | pdm sync -v -dG test 46 | 47 | - name: Tests 48 | run: pdm run pytest tests -o log_cli=true -o log_cli_level=DEBUG 49 | 50 | publish: 51 | needs: tests 52 | if: startsWith(github.ref, 'refs/tags/') 53 | 54 | name: Publish to PyPI 55 | runs-on: ubuntu-latest 56 | environment: release 57 | permissions: 58 | id-token: write 59 | steps: 60 | - uses: actions/checkout@v3 61 | 62 | - uses: pdm-project/setup-pdm@v3 63 | name: Setup Python and PDM 64 | with: 65 | python-version: "3.10" 66 | architecture: x64 67 | version: 2.10.4 68 | 69 | - name: Build package distributions 70 | run: | 71 | pdm build 72 | 73 | - name: Publish package distributions to PyPI 74 | uses: pypa/gh-action-pypi-publish@release/v1 75 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # pdm 105 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 106 | #pdm.lock 107 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 108 | # in version control. 109 | # https://pdm-project.org/#use-with-ide 110 | .pdm.toml 111 | .pdm-python 112 | .pdm-build/ 113 | 114 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 115 | __pypackages__/ 116 | 117 | # Celery stuff 118 | celerybeat-schedule 119 | celerybeat.pid 120 | 121 | # SageMath parsed files 122 | *.sage.py 123 | 124 | # Environments 125 | .env 126 | .venv 127 | env/ 128 | venv/ 129 | ENV/ 130 | env.bak/ 131 | venv.bak/ 132 | 133 | # Spyder project settings 134 | .spyderproject 135 | .spyproject 136 | 137 | # Rope project settings 138 | .ropeproject 139 | 140 | # mkdocs documentation 141 | /site 142 | 143 | # mypy 144 | .mypy_cache/ 145 | .dmypy.json 146 | dmypy.json 147 | 148 | # Pyre type checker 149 | .pyre/ 150 | 151 | # pytype static type analyzer 152 | .pytype/ 153 | 154 | # Cython debug symbols 155 | cython_debug/ 156 | 157 | # PyCharm 158 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 159 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 160 | # and can be added to the global gitignore or merged into this file. For a more nuclear 161 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
162 | #.idea/ 163 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 
61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 
179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2023 abersheeran 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Zī Bái
2 | 
3 | > On the mountain of Zhongqu there lives a beast. It has the form of a horse, with a white body and a black tail, a single horn, and the teeth and claws of a tiger; its cry is like the sound of a drum. Its name is Bo (駮). It devours tigers and leopards, and it can ward off weapons.
4 | 
5 | A modern, high-performance, pure-Python WSGI server. It can be launched from the command line or programmatically.
6 | 
7 | Correct handling of the HTTP protocol is ensured by [h11](https://github.com/python-hyper/h11). [gevent](https://github.com/gevent/gevent) support is **optional**.
8 | 
9 | - **Cross-platform multi-process management**. (You no longer have to worry about gunicorn not being available on Windows😀)
10 | - Supports IPv4, IPv6, and Unix sockets.
11 | - Graceful restart. If code or configuration is updated, new workers will pick up the changes.
12 | - Server event hooks. (If you want to do something extra at specific times 🙂)
13 | - Clean and pure programming style. Can be used any way you want.
14 | 
15 | Inspired by [Uvicorn](https://github.com/encode/uvicorn), [Gunicorn](https://github.com/benoitc/gunicorn), [Waitress](https://github.com/Pylons/waitress), and [runweb](https://github.com/abersheeran/runweb).
16 | 
17 | ## Quick start
18 | 
19 | ```bash
20 | pip install zibai-server[gevent,reload]
21 | 
22 | # Then run your WSGI application, e.g. one built with kui, django, flask, etc.
23 | zibai example:app
24 | ```
25 | 
26 | Multiple processes:
27 | 
28 | ```bash
29 | zibai example:app -p 4
30 | ```
31 | 
32 | Auto-reload in development:
33 | 
34 | ```bash
35 | zibai example:app --watchfiles "*.py;.env"
36 | ```
37 | 
38 | Use an app factory:
39 | 
40 | ```bash
41 | zibai example:create_app --call
42 | ```
43 | 
44 | Use `--help` to see all available options.
45 | 
46 | ```
47 | usage: zibai [-h] [--call] [--listen LISTEN [LISTEN ...]] [--subprocess SUBPROCESS] [--no-gevent]
48 |              [--max-workers MAX_WORKERS] [--watchfiles WATCHFILES] [--backlog BACKLOG] [--socket-timeout SOCKET_TIMEOUT]
49 |              [--dualstack-ipv6] [--unix-socket-perms UNIX_SOCKET_PERMS]
50 |              [--h11-max-incomplete-event-size H11_MAX_INCOMPLETE_EVENT_SIZE]
51 |              [--max-request-pre-process MAX_REQUEST_PRE_PROCESS] [--graceful-exit-timeout GRACEFUL_EXIT_TIMEOUT]
52 |              [--url-scheme URL_SCHEME] [--url-prefix URL_PREFIX] [--before-serve BEFORE_SERVE]
53 |              [--before-graceful-exit BEFORE_GRACEFUL_EXIT] [--before-died BEFORE_DIED] [--no-access-log]
54 |              [--logging-config-filepath LOGGING_CONFIG_FILEPATH]
55 |              app
56 | 
57 | positional arguments:
58 |   app                   WSGI app
59 | 
60 | options:
61 |   -h, --help            show this help message and exit
62 |   --call                use WSGI factory (default: False)
63 |   --listen LISTEN [LISTEN ...], -l LISTEN [LISTEN ...]
64 |                         listen address, HOST:PORT, unix:PATH (default: ['127.0.0.1:8000'])
65 |   --subprocess SUBPROCESS, -p SUBPROCESS
66 |                         number of subprocesses (default: 0)
67 |   --no-gevent           do not use gevent (default: False)
68 |   --max-workers MAX_WORKERS, -w MAX_WORKERS
69 |                         maximum number of threads or greenlets to use for handling requests (default: 10)
70 |   --watchfiles WATCHFILES
71 |                         watch files for changes and restart workers (default: None)
72 |   --backlog BACKLOG     listen backlog (default: None)
73 |   --socket-timeout SOCKET_TIMEOUT
74 |                         socket timeout (other means keepalive timeout) (default: None)
75 |   --dualstack-ipv6      enable dualstack ipv6 (default: False)
76 |   --unix-socket-perms UNIX_SOCKET_PERMS
77 |                         unix socket permissions (default: 600)
78 |   --h11-max-incomplete-event-size H11_MAX_INCOMPLETE_EVENT_SIZE
79 |                         maximum number of bytes in an incomplete HTTP event (default: None)
80 |   --max-request-pre-process MAX_REQUEST_PRE_PROCESS
81 |                         maximum number of requests to process before killing the worker (default: None)
82 |   --graceful-exit-timeout GRACEFUL_EXIT_TIMEOUT
83 |                         graceful exit timeout (default: 10)
84 |   --url-scheme URL_SCHEME
85 |                         url scheme; will be passed to WSGI app as wsgi.url_scheme (default: http)
86 |   --url-prefix URL_PREFIX
87 |                         url prefix; will be passed to WSGI app as SCRIPT_NAME, if not specified, use environment variable SCRIPT_NAME (default: None)
88 |   --before-serve BEFORE_SERVE
89 |                         callback to run before serving requests (default: None)
90 |   --before-graceful-exit BEFORE_GRACEFUL_EXIT
91 |                         callback to run before graceful exit (default: None)
92 |   --before-died BEFORE_DIED
93 |                         callback to run before exiting (default: None)
94 |   --no-access-log       disable access log (default: False)
95 |   --logging-config-filepath LOGGING_CONFIG_FILEPATH
96 |                         logging config file path (default: None)
97 | ```
98 | 
99 | ## Use programmatically
100 | 
101 | ```python
102 | import logging
103 | 
104 | logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
105 | 
106 | 
107 | def app(environ, start_response):
108 |     status = "200 OK"
109 |     headers = [("Content-type", "text/plain; charset=utf-8"), ("Content-Length", "12")]
110 |     start_response(status, headers)
111 |     return [b"Hello World!"]
112 | 
113 | 
114 | if __name__ == "__main__":
115 |     import sys
116 |     from zibai import parse_args, main
117 | 
118 |     options = parse_args(["example:app"] + sys.argv[1:])
119 |     main(options)
120 | ```
121 | 
122 | `Options` consists of easily serializable types such as string, number, or None. So if you don't want to read and parse the configuration from the command line, you can also create `Options` yourself.
123 | 
124 | ```python
125 | from zibai import Options, main
126 | 
127 | options = Options(app="example:app")
128 | main(options)
129 | ```
130 | 
131 | ### Advanced usage
132 | 
133 | If `Options` cannot meet your customization needs, you can use the `serve` function directly.
134 | 
135 | ```python
136 | def app(environ, start_response):
137 |     status = "200 OK"
138 |     headers = [("Content-type", "text/plain; charset=utf-8"), ("Content-Length", "12")]
139 |     start_response(status, headers)
140 |     return [b"Hello World!"]
141 | 
142 | 
143 | if __name__ == "__main__":
144 |     import threading
145 | 
146 |     from zibai import create_bind_socket
147 |     from zibai.core import serve
148 | 
149 |     exit_event = threading.Event()
150 |     sock = create_bind_socket("127.0.0.1:8000")
151 | 
152 |     serve(
153 |         app=app,
154 |         bind_socket=sock,
155 |         max_workers=10,
156 |         graceful_exit=exit_event,
157 |         before_serve_hook=your_hook,
158 |         before_graceful_exit_hook=your_hook,
159 |         before_died_hook=your_hook,
160 |     )
161 | ```
162 | 
163 | ## Event hooks
164 | 
165 | The following hooks will be executed in each worker process:
166 | 
167 | - `before_serve` is called before serving requests.
168 | - `before_graceful_exit` is called before graceful exit.
169 | - `before_died` is called before exiting.
170 | 
171 | ## Logging
172 | 
173 | Zī Bái uses the standard Python logging module. You can configure it as you like.
174 | 
175 | ```python
176 | # Process management, service startup or termination logs.
177 | logger = logging.getLogger("zibai")
178 | # Used for debugging HTTP protocol errors; you generally do not need to enable it.
179 | debug_logger = logging.getLogger("zibai.debug")
180 | # Access logs. Non-5xx request logs use this.
181 | access_logger = logging.getLogger("zibai.access")
182 | # Error logs. 5xx request logs use this.
183 | error_logger = logging.getLogger("zibai.error")
184 | ```
185 | 
186 | You can configure the output format of `access_logger` and `error_logger` to access values from the WSGI environ.
187 | 
188 | ```python
189 | from zibai.logger import access_logger
190 | 
191 | formatter = logging.Formatter(
192 |     "%(asctime)s [%(REMOTE_ADDR)s] %(levelname)s %(message)s", "%Y-%m-%d %H:%M:%S"
193 | )
194 | for handler in access_logger.handlers:
195 |     handler.setFormatter(formatter)
196 | ```
197 | 
198 | ## Signals
199 | 
200 | Zī Bái handles the following signals:
201 | 
202 | - `SIGINT`: Trigger quick exit (forcefully close all connections). If subprocesses are enabled, the main process will wait for the subprocesses to exit quickly.
203 | - `SIGTERM`: Trigger graceful exit. If subprocesses are enabled, the main process will wait for the subprocesses to exit gracefully.
204 | 
205 | There are also some signals that are processed only by the main process, and only when subprocesses are enabled.
206 | 
207 | - `SIGBREAK`: Only available on Windows. Trigger graceful exit.
208 | - `SIGHUP`: Worker processes are gracefully restarted one after another. If you have updated the code, the new worker processes will use the new code.
209 | - `SIGTTIN`: Increase the number of worker processes by one.
210 | - `SIGTTOU`: Decrease the number of worker processes by one.
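For example, on a Unix-like system you can drive a running server with the standard `kill` command. This is a minimal sketch: the PID `12345` is a placeholder for the actual PID of the Zī Bái main process, and the `SIGHUP`/`SIGTTIN`/`SIGTTOU` signals only take effect when subprocesses are enabled (e.g. started with `-p 4`).

```bash
# Rolling restart: workers are gracefully restarted one after another,
# so updated code and configuration are picked up.
kill -HUP 12345

# Scale the worker pool up or down by one process.
kill -TTIN 12345
kill -TTOU 12345

# Ask the whole server to exit gracefully.
kill -TERM 12345
```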
211 | 
--------------------------------------------------------------------------------
/benchmark.md:
--------------------------------------------------------------------------------
1 | # Benchmark
2 | 
3 | All tests were conducted on my M1 Air using Python 3.11.
4 | 
5 | - `waitress`: `waitress-serve example:app`
6 | - `gunicorn`: `gunicorn example:app`
7 | - `gunicorn-gevent`: `gunicorn example:app -k gevent`
8 | - `zibai`: `python -m zibai example:app --no-gevent`
9 | - `zibai-gevent`: `python -m zibai example:app`
10 | 
11 | ## `waitress`
12 | 
13 | ```bash
14 | wrk -t 8 -c 40 -d 10 http://127.0.0.1:8080
15 | Running 10s test @ http://127.0.0.1:8080
16 |   8 threads and 40 connections
17 |   Thread Stats   Avg      Stdev     Max   +/- Stdev
18 |     Latency     2.56ms    2.10ms   81.23ms   97.78%
19 |     Req/Sec     2.04k    345.45     2.52k    70.00%
20 |   162591 requests in 10.01s, 22.79MB read
21 | Requests/sec:  16245.71
22 | Transfer/sec:      2.28MB
23 | ```
24 | 
25 | ## `gunicorn`
26 | 
27 | ```bash
28 | wrk -t 8 -c 40 -d 10 http://127.0.0.1:8000
29 | Running 10s test @ http://127.0.0.1:8000
30 |   8 threads and 40 connections
31 |   Thread Stats   Avg      Stdev     Max   +/- Stdev
32 |     Latency    13.27ms    2.50ms   46.98ms   97.39%
33 |     Req/Sec    370.89     47.08    404.00     89.55%
34 |   16297 requests in 10.07s, 2.58MB read
35 |   Socket errors: connect 1, read 0, write 0, timeout 0
36 | Requests/sec:   1619.17
37 | Transfer/sec:    262.48KB
38 | ```
39 | 
40 | ## `gunicorn-gevent`
41 | 
42 | ```bash
43 | wrk -t 8 -c 40 -d 10 http://127.0.0.1:8000
44 | Running 10s test @ http://127.0.0.1:8000
45 |   8 threads and 40 connections
46 |   Thread Stats   Avg      Stdev     Max   +/- Stdev
47 |     Latency    69.41ms  259.15ms    1.97s    92.93%
48 |     Req/Sec     1.38k     1.27k     3.26k    50.52%
49 |   32226 requests in 10.08s, 5.26MB read
50 |   Socket errors: connect 0, read 0, write 0, timeout 94
51 | Requests/sec:   3196.38
52 | Transfer/sec:    533.77KB
53 | ```
54 | 
55 | ## `zibai`
56 | 
57 | ```bash
58 | wrk -t 8 -c 40 -d 10 http://127.0.0.1:9000
59 | Running 10s test @ http://127.0.0.1:9000
60 |   8 threads and 40 connections
61 |   Thread Stats   Avg      Stdev     Max   +/- Stdev
62 |     Latency   669.50us  781.06us   21.98ms   86.43%
63 |     Req/Sec     5.03k     1.93k    10.21k    45.36%
64 |   150994 requests in 10.10s, 12.96MB read
65 | Requests/sec:  14950.50
66 | Transfer/sec:      1.28MB
67 | ```
68 | 
69 | ## `zibai-gevent`
70 | 
71 | ```bash
72 | wrk -t 8 -c 40 -d 10 http://127.0.0.1:9000
73 | Running 10s test @ http://127.0.0.1:9000
74 |   8 threads and 40 connections
75 |   Thread Stats   Avg      Stdev     Max   +/- Stdev
76 |     Latency   579.62us  315.64us   11.52ms   76.51%
77 |     Req/Sec     4.35k     3.56k    12.09k    74.88%
78 |   173980 requests in 10.10s, 14.93MB read
79 | Requests/sec:  17226.59
80 | Transfer/sec:      1.48MB
81 | ```
82 | 
--------------------------------------------------------------------------------
/example.py:
--------------------------------------------------------------------------------
1 | import logging
2 | 
3 | logger = logging.getLogger(__name__)
4 | 
5 | 
6 | def app(environ, start_response):
7 |     match environ["REQUEST_METHOD"]:
8 |         case "POST":
9 |             status = "200 OK"
10 |             headers = []
11 |             start_response(status, headers)
12 |             for chunk in environ["wsgi.input"]:
13 |                 if chunk:
14 |                     yield chunk
15 |                 else:
16 |                     return
17 |         case _:
18 |             status = "200 OK"
19 |             headers = [
20 |                 ("Content-type", "text/plain; charset=utf-8"),
21 |                 ("Content-Length", "12"),
22 |             ]
23 |             start_response(status, headers)
24 |             yield b"Hello World!"
25 | 26 | 27 | def before_serve_hook(): 28 | logger.info("Starting server...") 29 | 30 | 31 | def before_graceful_exit_hook(): 32 | logger.info("Graceful exit...") 33 | 34 | 35 | def before_died_hook(): 36 | logger.info("Died...") 37 | 38 | 39 | if __name__ == "__main__": 40 | import sys 41 | 42 | from zibai.cli import main, parse_args 43 | 44 | options = parse_args(["example:app"] + sys.argv[1:]) 45 | main(options) 46 | -------------------------------------------------------------------------------- /pdm.lock: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # It is not intended for manual editing. 3 | 4 | [metadata] 5 | groups = ["default", "gevent", "reload", "benchmark", "test"] 6 | strategy = ["cross_platform"] 7 | lock_version = "4.4.1" 8 | content_hash = "sha256:4c090a355ff8642a98f103026b94ba19e0cc0755623bbc8740782e6d2a154bfc" 9 | 10 | [[package]] 11 | name = "cffi" 12 | version = "1.16.0" 13 | requires_python = ">=3.8" 14 | summary = "Foreign Function Interface for Python calling C code." 15 | dependencies = [ 16 | "pycparser", 17 | ] 18 | files = [ 19 | {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, 20 | {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, 21 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, 22 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, 23 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, 24 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, 25 | {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, 26 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, 27 | {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, 28 | {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, 29 | {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, 30 | {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, 31 | {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, 32 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, 33 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, 34 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, 35 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, 36 | {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, 37 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, 38 | {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, 39 | {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, 40 | {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, 41 | {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, 42 | {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, 43 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, 44 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, 45 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, 46 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, 47 | {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, 48 | {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, 49 | {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, 50 | {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, 51 | {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, 52 | ] 53 | 54 | [[package]] 55 | name = "colorama" 56 | version = "0.4.6" 57 | requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 58 | summary = "Cross-platform colored terminal text." 
59 | files = [ 60 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 61 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 62 | ] 63 | 64 | [[package]] 65 | name = "coverage" 66 | version = "7.3.2" 67 | requires_python = ">=3.8" 68 | summary = "Code coverage measurement for Python" 69 | files = [ 70 | {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, 71 | {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, 72 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, 73 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, 74 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, 75 | {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, 76 | {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, 77 | {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, 78 | {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, 79 | {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, 80 | {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, 81 | {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, 82 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, 83 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, 84 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, 85 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, 86 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, 87 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, 88 | {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, 89 | {file = 
"coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, 90 | {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, 91 | {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, 92 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, 93 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, 94 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, 95 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, 96 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, 97 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, 98 | {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, 99 | {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, 100 | {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, 101 | {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, 102 | ] 103 | 104 | [[package]] 105 | name = "coverage" 106 | version = "7.3.2" 107 | extras = ["toml"] 108 | requires_python = ">=3.8" 109 | summary = "Code coverage measurement for Python" 110 | dependencies = [ 111 | "coverage==7.3.2", 112 | "tomli; python_full_version <= \"3.11.0a6\"", 113 | ] 114 | files = [ 115 | {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"}, 116 | {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"}, 117 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"}, 118 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"}, 119 | {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"}, 120 | {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"}, 121 | {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"}, 122 | {file = 
"coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"}, 123 | {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"}, 124 | {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"}, 125 | {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"}, 126 | {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"}, 127 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"}, 128 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"}, 129 | {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"}, 130 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"}, 131 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"}, 132 | {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"}, 133 | {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"}, 134 | {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"}, 135 | {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"}, 136 | {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"}, 137 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"}, 138 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"}, 139 | {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"}, 140 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"}, 141 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"}, 142 | {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"}, 143 | {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"}, 144 | {file = 
"coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"}, 145 | {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"}, 146 | {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"}, 147 | ] 148 | 149 | [[package]] 150 | name = "exceptiongroup" 151 | version = "1.2.0" 152 | requires_python = ">=3.7" 153 | summary = "Backport of PEP 654 (exception groups)" 154 | files = [ 155 | {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, 156 | {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, 157 | ] 158 | 159 | [[package]] 160 | name = "gevent" 161 | version = "24.2.1" 162 | requires_python = ">=3.8" 163 | summary = "Coroutine-based network library" 164 | dependencies = [ 165 | "cffi>=1.12.2; platform_python_implementation == \"CPython\" and sys_platform == \"win32\"", 166 | "greenlet>=2.0.0; platform_python_implementation == \"CPython\" and python_version < \"3.11\"", 167 | "greenlet>=3.0rc3; platform_python_implementation == \"CPython\" and python_version >= \"3.11\"", 168 | "zope-event", 169 | "zope-interface", 170 | ] 171 | files = [ 172 | {file = "gevent-24.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07"}, 173 | {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3"}, 174 | {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a1df555431f5cd5cc189a6ee3544d24f8c52f2529134685f1e878c4972ab026"}, 175 | {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:14532a67f7cb29fb055a0e9b39f16b88ed22c66b96641df8c04bdc38c26b9ea5"}, 176 | {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd23df885318391856415e20acfd51a985cba6919f0be78ed89f5db9ff3a31cb"}, 177 | {file = "gevent-24.2.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ca80b121bbec76d7794fcb45e65a7eca660a76cc1a104ed439cdbd7df5f0b060"}, 178 | {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9913c45d1be52d7a5db0c63977eebb51f68a2d5e6fd922d1d9b5e5fd758cc98"}, 179 | {file = "gevent-24.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:918cdf8751b24986f915d743225ad6b702f83e1106e08a63b736e3a4c6ead789"}, 180 | {file = "gevent-24.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:3d5325ccfadfd3dcf72ff88a92fb8fc0b56cacc7225f0f4b6dcf186c1a6eeabc"}, 181 | {file = "gevent-24.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:03aa5879acd6b7076f6a2a307410fb1e0d288b84b03cdfd8c74db8b4bc882fc5"}, 182 | {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8bb35ce57a63c9a6896c71a285818a3922d8ca05d150fd1fe49a7f57287b836"}, 183 | {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d7f87c2c02e03d99b95cfa6f7a776409083a9e4d468912e18c7680437b29222c"}, 184 | {file = "gevent-24.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968581d1717bbcf170758580f5f97a2925854943c45a19be4d47299507db2eb7"}, 185 | {file = 
"gevent-24.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7899a38d0ae7e817e99adb217f586d0a4620e315e4de577444ebeeed2c5729be"}, 186 | {file = "gevent-24.2.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f5e8e8d60e18d5f7fd49983f0c4696deeddaf6e608fbab33397671e2fcc6cc91"}, 187 | {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fbfdce91239fe306772faab57597186710d5699213f4df099d1612da7320d682"}, 188 | {file = "gevent-24.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cdf66977a976d6a3cfb006afdf825d1482f84f7b81179db33941f2fc9673bb1d"}, 189 | {file = "gevent-24.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:1dffb395e500613e0452b9503153f8f7ba587c67dd4a85fc7cd7aa7430cb02cc"}, 190 | {file = "gevent-24.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:6c47ae7d1174617b3509f5d884935e788f325eb8f1a7efc95d295c68d83cce40"}, 191 | {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7cac622e11b4253ac4536a654fe221249065d9a69feb6cdcd4d9af3503602e0"}, 192 | {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bf5b9c72b884c6f0c4ed26ef204ee1f768b9437330422492c319470954bc4cc7"}, 193 | {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5de3c676e57177b38857f6e3cdfbe8f38d1cd754b63200c0615eaa31f514b4f"}, 194 | {file = "gevent-24.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4faf846ed132fd7ebfbbf4fde588a62d21faa0faa06e6f468b7faa6f436b661"}, 195 | {file = "gevent-24.2.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:368a277bd9278ddb0fde308e6a43f544222d76ed0c4166e0d9f6b036586819d9"}, 196 | {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f8a04cf0c5b7139bc6368b461257d4a757ea2fe89b3773e494d235b7dd51119f"}, 197 | {file = "gevent-24.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d8d0642c63d453179058abc4143e30718b19a85cbf58c2744c9a63f06a1d388"}, 198 | {file = "gevent-24.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:94138682e68ec197db42ad7442d3cf9b328069c3ad8e4e5022e6b5cd3e7ffae5"}, 199 | {file = "gevent-24.2.1-pp310-pypy310_pp73-macosx_11_0_universal2.whl", hash = "sha256:7b00f8c9065de3ad226f7979154a7b27f3b9151c8055c162332369262fc025d8"}, 200 | {file = "gevent-24.2.1.tar.gz", hash = "sha256:432fc76f680acf7cf188c2ee0f5d3ab73b63c1f03114c7cd8a34cebbe5aa2056"}, 201 | ] 202 | 203 | [[package]] 204 | name = "greenlet" 205 | version = "3.0.1" 206 | requires_python = ">=3.7" 207 | summary = "Lightweight in-process concurrent programming" 208 | files = [ 209 | {file = "greenlet-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f89e21afe925fcfa655965ca8ea10f24773a1791400989ff32f467badfe4a064"}, 210 | {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28e89e232c7593d33cac35425b58950789962011cc274aa43ef8865f2e11f46d"}, 211 | {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8ba29306c5de7717b5761b9ea74f9c72b9e2b834e24aa984da99cbfc70157fd"}, 212 | {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19bbdf1cce0346ef7341705d71e2ecf6f41a35c311137f29b8a2dc2341374565"}, 213 | {file = "greenlet-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599daf06ea59bfedbec564b1692b0166a0045f32b6f0933b0dd4df59a854caf2"}, 214 | {file = 
"greenlet-3.0.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b641161c302efbb860ae6b081f406839a8b7d5573f20a455539823802c655f63"}, 215 | {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d57e20ba591727da0c230ab2c3f200ac9d6d333860d85348816e1dca4cc4792e"}, 216 | {file = "greenlet-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5805e71e5b570d490938d55552f5a9e10f477c19400c38bf1d5190d760691846"}, 217 | {file = "greenlet-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:52e93b28db27ae7d208748f45d2db8a7b6a380e0d703f099c949d0f0d80b70e9"}, 218 | {file = "greenlet-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f7bfb769f7efa0eefcd039dd19d843a4fbfbac52f1878b1da2ed5793ec9b1a65"}, 219 | {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91e6c7db42638dc45cf2e13c73be16bf83179f7859b07cfc139518941320be96"}, 220 | {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1757936efea16e3f03db20efd0cd50a1c86b06734f9f7338a90c4ba85ec2ad5a"}, 221 | {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19075157a10055759066854a973b3d1325d964d498a805bb68a1f9af4aaef8ec"}, 222 | {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9d21aaa84557d64209af04ff48e0ad5e28c5cca67ce43444e939579d085da72"}, 223 | {file = "greenlet-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2847e5d7beedb8d614186962c3d774d40d3374d580d2cbdab7f184580a39d234"}, 224 | {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97e7ac860d64e2dcba5c5944cfc8fa9ea185cd84061c623536154d5a89237884"}, 225 | {file = "greenlet-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b2c02d2ad98116e914d4f3155ffc905fd0c025d901ead3f6ed07385e19122c94"}, 226 | {file = "greenlet-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:22f79120a24aeeae2b4471c711dcf4f8c736a2bb2fabad2a67ac9a55ea72523c"}, 227 | {file = "greenlet-3.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:100f78a29707ca1525ea47388cec8a049405147719f47ebf3895e7509c6446aa"}, 228 | {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60d5772e8195f4e9ebf74046a9121bbb90090f6550f81d8956a05387ba139353"}, 229 | {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:daa7197b43c707462f06d2c693ffdbb5991cbb8b80b5b984007de431493a319c"}, 230 | {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea6b8aa9e08eea388c5f7a276fabb1d4b6b9d6e4ceb12cc477c3d352001768a9"}, 231 | {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d11ebbd679e927593978aa44c10fc2092bc454b7d13fdc958d3e9d508aba7d0"}, 232 | {file = "greenlet-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd4c177afb8a8d9ba348d925b0b67246147af806f0b104af4d24f144d461cd5"}, 233 | {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20107edf7c2c3644c67c12205dc60b1bb11d26b2610b276f97d666110d1b511d"}, 234 | {file = "greenlet-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8bef097455dea90ffe855286926ae02d8faa335ed8e4067326257cb571fc1445"}, 235 | {file = "greenlet-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:b2d3337dcfaa99698aa2377c81c9ca72fcd89c07e7eb62ece3f23a3fe89b2ce4"}, 
236 | {file = "greenlet-3.0.1.tar.gz", hash = "sha256:816bd9488a94cba78d93e1abb58000e8266fa9cc2aa9ccdd6eb0696acb24005b"}, 237 | ] 238 | 239 | [[package]] 240 | name = "gunicorn" 241 | version = "21.2.0" 242 | requires_python = ">=3.5" 243 | summary = "WSGI HTTP Server for UNIX" 244 | dependencies = [ 245 | "packaging", 246 | ] 247 | files = [ 248 | {file = "gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0"}, 249 | {file = "gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033"}, 250 | ] 251 | 252 | [[package]] 253 | name = "h11" 254 | version = "0.14.0" 255 | requires_python = ">=3.7" 256 | summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" 257 | files = [ 258 | {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, 259 | {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, 260 | ] 261 | 262 | [[package]] 263 | name = "iniconfig" 264 | version = "2.0.0" 265 | requires_python = ">=3.7" 266 | summary = "brain-dead simple config-ini parsing" 267 | files = [ 268 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 269 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 270 | ] 271 | 272 | [[package]] 273 | name = "packaging" 274 | version = "23.2" 275 | requires_python = ">=3.7" 276 | summary = "Core utilities for Python packages" 277 | files = [ 278 | {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, 279 | {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, 280 | ] 281 | 282 | [[package]] 283 | name = "pluggy" 284 | version = "1.3.0" 285 | requires_python = ">=3.8" 286 | summary = "plugin and hook calling mechanisms for python" 287 | files = [ 288 | {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, 289 | {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, 290 | ] 291 | 292 | [[package]] 293 | name = "pycparser" 294 | version = "2.21" 295 | requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" 296 | summary = "C parser in Python" 297 | files = [ 298 | {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, 299 | {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, 300 | ] 301 | 302 | [[package]] 303 | name = "pytest" 304 | version = "8.0.2" 305 | requires_python = ">=3.8" 306 | summary = "pytest: simple powerful testing with Python" 307 | dependencies = [ 308 | "colorama; sys_platform == \"win32\"", 309 | "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", 310 | "iniconfig", 311 | "packaging", 312 | "pluggy<2.0,>=1.3.0", 313 | "tomli>=1.0.0; python_version < \"3.11\"", 314 | ] 315 | files = [ 316 | {file = "pytest-8.0.2-py3-none-any.whl", hash = "sha256:edfaaef32ce5172d5466b5127b42e0d6d35ebbe4453f0e3505d96afd93f6b096"}, 317 | {file = "pytest-8.0.2.tar.gz", hash = "sha256:d4051d623a2e0b7e51960ba963193b09ce6daeb9759a451844a21e4ddedfc1bd"}, 318 | ] 319 | 320 | [[package]] 321 | 
name = "pytest-cov" 322 | version = "4.1.0" 323 | requires_python = ">=3.7" 324 | summary = "Pytest plugin for measuring coverage." 325 | dependencies = [ 326 | "coverage[toml]>=5.2.1", 327 | "pytest>=4.6", 328 | ] 329 | files = [ 330 | {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, 331 | {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, 332 | ] 333 | 334 | [[package]] 335 | name = "setuptools" 336 | version = "69.0.2" 337 | requires_python = ">=3.8" 338 | summary = "Easily download, build, install, upgrade, and uninstall Python packages" 339 | files = [ 340 | {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, 341 | {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, 342 | ] 343 | 344 | [[package]] 345 | name = "tomli" 346 | version = "2.0.1" 347 | requires_python = ">=3.7" 348 | summary = "A lil' TOML parser" 349 | files = [ 350 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 351 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 352 | ] 353 | 354 | [[package]] 355 | name = "waitress" 356 | version = "3.0.0" 357 | requires_python = ">=3.8.0" 358 | summary = "Waitress WSGI server" 359 | files = [ 360 | {file = "waitress-3.0.0-py3-none-any.whl", hash = "sha256:2a06f242f4ba0cc563444ca3d1998959447477363a2d7e9b8b4d75d35cfd1669"}, 361 | {file = "waitress-3.0.0.tar.gz", hash = "sha256:005da479b04134cdd9dd602d1ee7c49d79de0537610d653674cc6cbde222b8a1"}, 362 | ] 363 | 364 | [[package]] 365 | name = "watchdog" 366 | version = "4.0.0" 367 | requires_python = ">=3.8" 368 | summary = "Filesystem events monitoring" 369 | files = [ 370 | {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:39cb34b1f1afbf23e9562501673e7146777efe95da24fab5707b88f7fb11649b"}, 371 | {file = "watchdog-4.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c522392acc5e962bcac3b22b9592493ffd06d1fc5d755954e6be9f4990de932b"}, 372 | {file = "watchdog-4.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6c47bdd680009b11c9ac382163e05ca43baf4127954c5f6d0250e7d772d2b80c"}, 373 | {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8350d4055505412a426b6ad8c521bc7d367d1637a762c70fdd93a3a0d595990b"}, 374 | {file = "watchdog-4.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c17d98799f32e3f55f181f19dd2021d762eb38fdd381b4a748b9f5a36738e935"}, 375 | {file = "watchdog-4.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4986db5e8880b0e6b7cd52ba36255d4793bf5cdc95bd6264806c233173b1ec0b"}, 376 | {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:11e12fafb13372e18ca1bbf12d50f593e7280646687463dd47730fd4f4d5d257"}, 377 | {file = "watchdog-4.0.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5369136a6474678e02426bd984466343924d1df8e2fd94a9b443cb7e3aa20d19"}, 378 | {file = "watchdog-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76ad8484379695f3fe46228962017a7e1337e9acadafed67eb20aabb175df98b"}, 379 | {file = "watchdog-4.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6e949a8a94186bced05b6508faa61b7adacc911115664ccb1923b9ad1f1ccf7b"}, 380 | {file = 
"watchdog-4.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6a4db54edea37d1058b08947c789a2354ee02972ed5d1e0dca9b0b820f4c7f92"}, 381 | {file = "watchdog-4.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d31481ccf4694a8416b681544c23bd271f5a123162ab603c7d7d2dd7dd901a07"}, 382 | {file = "watchdog-4.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8fec441f5adcf81dd240a5fe78e3d83767999771630b5ddfc5867827a34fa3d3"}, 383 | {file = "watchdog-4.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:6a9c71a0b02985b4b0b6d14b875a6c86ddea2fdbebd0c9a720a806a8bbffc69f"}, 384 | {file = "watchdog-4.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:557ba04c816d23ce98a06e70af6abaa0485f6d94994ec78a42b05d1c03dcbd50"}, 385 | {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0f9bd1fd919134d459d8abf954f63886745f4660ef66480b9d753a7c9d40927"}, 386 | {file = "watchdog-4.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f9b2fdca47dc855516b2d66eef3c39f2672cbf7e7a42e7e67ad2cbfcd6ba107d"}, 387 | {file = "watchdog-4.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:73c7a935e62033bd5e8f0da33a4dcb763da2361921a69a5a95aaf6c93aa03a87"}, 388 | {file = "watchdog-4.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:6a80d5cae8c265842c7419c560b9961561556c4361b297b4c431903f8c33b269"}, 389 | {file = "watchdog-4.0.0-py3-none-win32.whl", hash = "sha256:8f9a542c979df62098ae9c58b19e03ad3df1c9d8c6895d96c0d51da17b243b1c"}, 390 | {file = "watchdog-4.0.0-py3-none-win_amd64.whl", hash = "sha256:f970663fa4f7e80401a7b0cbeec00fa801bf0287d93d48368fc3e6fa32716245"}, 391 | {file = "watchdog-4.0.0-py3-none-win_ia64.whl", hash = "sha256:9a03e16e55465177d416699331b0f3564138f1807ecc5f2de9d55d8f188d08c7"}, 392 | {file = "watchdog-4.0.0.tar.gz", hash = "sha256:e3e7065cbdabe6183ab82199d7a4f6b3ba0a438c5a512a68559846ccb76a78ec"}, 393 | ] 394 | 395 | [[package]] 396 | name = "zope-event" 397 | version = "5.0" 398 | requires_python = ">=3.7" 399 | summary = "Very basic event publishing system" 400 | dependencies = [ 401 | "setuptools", 402 | ] 403 | files = [ 404 | {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, 405 | {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, 406 | ] 407 | 408 | [[package]] 409 | name = "zope-interface" 410 | version = "6.1" 411 | requires_python = ">=3.7" 412 | summary = "Interfaces for Python" 413 | dependencies = [ 414 | "setuptools", 415 | ] 416 | files = [ 417 | {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, 418 | {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, 419 | {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, 420 | {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, 421 | {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, 422 | {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", 
hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, 423 | {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, 424 | {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, 425 | {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, 426 | {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, 427 | {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, 428 | {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, 429 | {file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, 430 | {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, 431 | {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, 432 | {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, 433 | {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, 434 | {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, 435 | {file = "zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, 436 | ] 437 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "zibai-server" 3 | version = "0.13.0" 4 | description = "A modern high-performance Python WSGI server." 5 | authors = [ 6 | {name = "abersheeran", email = "me@abersheeran.com"}, 7 | ] 8 | dependencies = [ 9 | "h11>=0.14.0", 10 | ] 11 | requires-python = ">=3.10" 12 | readme = "README.md" 13 | license = {text = "Apache-2.0"} 14 | 15 | [project.urls] 16 | repository = "https://github.com/abersheeran/zibai" 17 | 18 | [project.optional-dependencies] 19 | gevent = [ 20 | "gevent>=23.9.1", 21 | ] 22 | reload = [ 23 | "watchdog>=3.0.0", 24 | ] 25 | 26 | [project.scripts] 27 | zibai = "zibai.__main__:command_line" 28 | 29 | [build-system] 30 | requires = ["pdm-backend"] 31 | build-backend = "pdm.backend" 32 | 33 | [tool.pdm.dev-dependencies] 34 | benchmark = [ 35 | "waitress>=2.1.2", 36 | "gunicorn>=21.2.0", 37 | ] 38 | test = [ 39 | "pytest>=7.4.3", 40 | "pytest-cov>=4.1.0", 41 | ] 42 | 43 | [tool.pdm.scripts] 44 | format = "ruff format ." 45 | lint = "ruff check ." 
46 | test = "pytest --cov=src/zibai" 47 | 48 | [tool.ruff.lint] 49 | ignore = ["E731"] 50 | extend-select = ["I"] 51 | 52 | [tool.coverage] 53 | ignores = [ 54 | "raise NotImplementedError" 55 | ] 56 | 57 | [tool.coverage.run] 58 | omit = ["*/.venv/*", "*/tests/*"] 59 | 60 | [tool.coverage.report] 61 | exclude_lines = [ 62 | "pragma: no cover", 63 | "raise NotImplementedError", 64 | "if False:", 65 | "assert False", 66 | "if typing.TYPE_CHECKING:", 67 | "if TYPE_CHECKING:", 68 | "pass", 69 | "\\.\\.\\.", 70 | ] 71 | show_missing = true 72 | skip_covered = true 73 | -------------------------------------------------------------------------------- /src/zibai/__init__.py: -------------------------------------------------------------------------------- 1 | from .cli import ( 2 | Options, 3 | create_bind_socket, 4 | import_from_string, 5 | main, 6 | parse_args, 7 | ) 8 | 9 | __all__ = [ 10 | "parse_args", 11 | "Options", 12 | "main", 13 | "create_bind_socket", 14 | "import_from_string", 15 | ] 16 | -------------------------------------------------------------------------------- /src/zibai/__main__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | from .cli import main, parse_args 5 | 6 | 7 | def command_line() -> None: 8 | sys.path.insert(0, os.getcwd()) 9 | main(parse_args(sys.argv[1:])) 10 | 11 | 12 | if __name__ == "__main__": 13 | command_line() 14 | -------------------------------------------------------------------------------- /src/zibai/cli.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import dataclasses 3 | import importlib 4 | import ipaddress 5 | import json 6 | import logging 7 | import logging.config 8 | import socket 9 | from pathlib import Path 10 | from typing import Any, Callable, Sequence 11 | 12 | from .logger import load_config, logger 13 | from .wsgi_typing import WSGIApp 14 | 15 | 16 | @dataclasses.dataclass 17 | class Options: 18 | """ 19 | Keep `Options` can be passed between processes. 
20 | """ 21 | 22 | app: str 23 | call: bool = False 24 | listen: list[str] = dataclasses.field(default_factory=lambda: ["127.0.0.1:8000"]) 25 | subprocess: int = 0 26 | no_gevent: bool = False 27 | max_workers: int = 10 28 | watchfiles: str | None = None 29 | 30 | backlog: int | None = None 31 | socket_timeout: float = 5 32 | dualstack_ipv6: bool = False 33 | unix_socket_perms: int = 0o600 34 | h11_max_incomplete_event_size: int | None = None 35 | max_request_pre_process: int | None = None 36 | graceful_exit_timeout: float = 10 37 | 38 | # WSGI environment settings 39 | url_scheme: str = "http" 40 | url_prefix: str | None = None 41 | 42 | # Server callback hooks 43 | before_serve: str | None = None 44 | before_graceful_exit: str | None = None 45 | before_died: str | None = None 46 | 47 | # Logging 48 | no_access_log: bool = False 49 | logging_config_filepath: Path | None = None 50 | 51 | # After __post_init__ 52 | sockets: list[socket.socket] = dataclasses.field(init=False) 53 | logging_config: dict = dataclasses.field(init=False, default_factory=dict) 54 | 55 | def __post_init__(self) -> None: 56 | if self.watchfiles is not None and self.subprocess <= 0: 57 | raise ValueError("Cannot watch files without subprocesses") 58 | 59 | if self.dualstack_ipv6 and not socket.has_dualstack_ipv6(): 60 | raise ValueError("Dualstack ipv6 is not supported on this platform") 61 | 62 | self.init_sockets() 63 | self.load_logging_config() 64 | 65 | @classmethod 66 | def default_value(cls, field_name: str) -> Any: 67 | fields = {field.name: field for field in dataclasses.fields(Options)} 68 | default = fields[field_name].default 69 | default_factory = fields[field_name].default_factory 70 | if default is dataclasses.MISSING and default_factory is dataclasses.MISSING: 71 | raise ValueError(f"Field {field_name} has no default value") 72 | if default_factory is not dataclasses.MISSING: 73 | return default_factory() 74 | return default 75 | 76 | def init_sockets(self) -> None: 77 | self.sockets = [] 78 | 79 | for listen in self.listen: 80 | sock = create_bind_socket( 81 | listen, 82 | uds_perms=self.unix_socket_perms, 83 | dualstack_ipv6=self.dualstack_ipv6, 84 | ) 85 | if self.backlog is not None: 86 | sock.listen(self.backlog) 87 | else: 88 | sock.listen() 89 | self.sockets.append(sock) 90 | 91 | def get_application(self) -> WSGIApp: 92 | app = import_from_string(self.app) 93 | if self.call: 94 | app = app() 95 | return app 96 | 97 | def get_before_serve_hook(self) -> Callable[[], None]: 98 | if self.before_serve is not None: 99 | return import_from_string(self.before_serve) 100 | else: 101 | return lambda: None 102 | 103 | def get_before_graceful_exit_hook(self) -> Callable[[], None]: 104 | if self.before_graceful_exit is not None: 105 | return import_from_string(self.before_graceful_exit) 106 | else: 107 | return lambda: None 108 | 109 | def get_before_died_hook(self) -> Callable[[], None]: 110 | if self.before_died is not None: 111 | return import_from_string(self.before_died) 112 | else: 113 | return lambda: None 114 | 115 | def load_logging_config(self) -> None: 116 | if self.logging_config_filepath is None: 117 | self.logging_config = {} 118 | return 119 | 120 | if self.logging_config_filepath.name.endswith(".json"): 121 | self.logging_config = json.loads( 122 | self.logging_config_filepath.read_text(encoding="utf8") 123 | ) 124 | return 125 | 126 | def configure_logging(self) -> None: 127 | if self.no_access_log: 128 | logging.getLogger("zibai.access").setLevel(logging.WARNING) 129 | 130 | 
logging.config.dictConfig(load_config(self.logging_config)) 131 | 132 | 133 | def import_from_string(import_str: str) -> Any: 134 | from functools import reduce 135 | 136 | module_str, _, attrs_str = import_str.partition(":") 137 | if not module_str or not attrs_str: 138 | raise ValueError( 139 | f'Import string "{import_str}" must be in format ":".' 140 | ) 141 | 142 | return reduce(getattr, attrs_str.split("."), importlib.import_module(module_str)) 143 | 144 | 145 | def create_bind_socket( 146 | value: str, 147 | *, 148 | uds_perms: int = Options.default_value("unix_socket_perms"), 149 | dualstack_ipv6: bool = Options.default_value("dualstack_ipv6"), 150 | socket_type: int = socket.SOCK_STREAM, 151 | ) -> socket.socket: 152 | import os 153 | 154 | if value.startswith("unix:"): 155 | if not hasattr(socket, "AF_UNIX"): 156 | raise ValueError("UNIX sockets are not supported on this platform") 157 | 158 | path = value[5:] 159 | sock = socket.socket(socket.AF_UNIX, socket_type) # type: ignore 160 | if os.path.exists(path): 161 | os.unlink(path) 162 | sock.bind(path) 163 | 164 | os.chmod(path, uds_perms) 165 | return sock 166 | 167 | if ":" not in value: 168 | raise ValueError("Bind must be of the form: HOST:PORT") 169 | 170 | host, port = value.rsplit(":", 1) 171 | 172 | try: 173 | port = int(port) 174 | except ValueError: 175 | raise ValueError("Bind port must be an integer") 176 | 177 | if not 0 < port < 65536: 178 | raise ValueError("Bind port must be between 0 and 65536") 179 | 180 | if host == "": 181 | if dualstack_ipv6: 182 | host = "::" 183 | else: 184 | host = "0.0.0.0" 185 | 186 | address = ipaddress.ip_address(host) 187 | sock = socket.socket( 188 | socket.AF_INET if address.version == 4 else socket.AF_INET6, 189 | socket_type, 190 | ) 191 | 192 | # Set socket options 193 | if dualstack_ipv6: 194 | sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0) 195 | if os.name != "nt": 196 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) 197 | else: # In windows, SO_REUSEPORT is not available 198 | sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) 199 | 200 | sock.bind((str(address), port)) 201 | 202 | return sock 203 | 204 | 205 | def parse_args(args: Sequence[str]) -> Options: 206 | parser = argparse.ArgumentParser( 207 | formatter_class=argparse.ArgumentDefaultsHelpFormatter 208 | ) 209 | # Please keep the order of arguments like `Options`. 
210 | parser.add_argument("app", help="WSGI app") 211 | parser.add_argument( 212 | "--call", 213 | help="use WSGI factory", 214 | default=Options.default_value("call"), 215 | action="store_true", 216 | ) 217 | parser.add_argument( 218 | "--listen", 219 | "-l", 220 | default=Options.default_value("listen"), 221 | nargs="+", 222 | help="listen address, HOST:PORT, unix:PATH", 223 | ) 224 | parser.add_argument( 225 | "--subprocess", 226 | "-p", 227 | default=Options.default_value("subprocess"), 228 | type=int, 229 | help="number of subprocesses", 230 | ) 231 | parser.add_argument( 232 | "--no-gevent", 233 | default=Options.default_value("no_gevent"), 234 | action="store_true", 235 | help="do not use gevent", 236 | ) 237 | parser.add_argument( 238 | "--max-workers", 239 | "-w", 240 | default=Options.default_value("max_workers"), 241 | type=int, 242 | help="maximum number of threads or greenlets to use for handling requests", 243 | ) 244 | parser.add_argument( 245 | "--watchfiles", 246 | help="watch files for changes and restart workers", 247 | required=False, 248 | ) 249 | parser.add_argument( 250 | "--backlog", 251 | type=int, 252 | help="listen backlog", 253 | required=False, 254 | ) 255 | parser.add_argument( 256 | "--socket-timeout", 257 | type=float, 258 | help="socket timeout (other means keepalive timeout)", 259 | required=False, 260 | ) 261 | parser.add_argument( 262 | "--dualstack-ipv6", 263 | default=Options.default_value("dualstack_ipv6"), 264 | action="store_true", 265 | help="enable dualstack ipv6", 266 | ) 267 | parser.add_argument( 268 | "--unix-socket-perms", 269 | default="600", 270 | help="unix socket permissions", 271 | ) 272 | parser.add_argument( 273 | "--h11-max-incomplete-event-size", 274 | type=int, 275 | help="maximum number of bytes in an incomplete HTTP event", 276 | required=False, 277 | ) 278 | parser.add_argument( 279 | "--max-request-pre-process", 280 | type=int, 281 | help="maximum number of requests to process before killing the worker", 282 | required=False, 283 | ) 284 | parser.add_argument( 285 | "--graceful-exit-timeout", 286 | default=Options.default_value("graceful_exit_timeout"), 287 | type=float, 288 | help="graceful exit timeout", 289 | ) 290 | parser.add_argument( 291 | "--url-scheme", 292 | default=Options.default_value("url_scheme"), 293 | help="url scheme; will be passed to WSGI app as wsgi.url_scheme", 294 | ) 295 | parser.add_argument( 296 | "--url-prefix", 297 | help="url prefix; will be passed to WSGI app as SCRIPT_NAME, " 298 | "if not specified, use environment variable SCRIPT_NAME", 299 | required=False, 300 | ) 301 | parser.add_argument( 302 | "--before-serve", 303 | help="callback to run before serving requests", 304 | required=False, 305 | ) 306 | parser.add_argument( 307 | "--before-graceful-exit", 308 | help="callback to run before graceful exit", 309 | required=False, 310 | ) 311 | parser.add_argument( 312 | "--before-died", 313 | help="callback to run before exiting", 314 | required=False, 315 | ) 316 | parser.add_argument( 317 | "--no-access-log", 318 | default=Options.default_value("no_access_log"), 319 | action="store_true", 320 | help="disable access log", 321 | ) 322 | parser.add_argument( 323 | "--logging-config-filepath", 324 | help="logging config file path", 325 | type=Path, 326 | required=False, 327 | ) 328 | options = parser.parse_args(args) 329 | 330 | # Parse unix_socket_perms as an octal integer. 
331 | options.unix_socket_perms = int(options.unix_socket_perms, base=8) 332 | 333 | # When watchfiles is specified, subprocess must be greater than 0. 334 | if options.watchfiles is not None: 335 | options.subprocess = max(options.subprocess, 1) 336 | 337 | return Options(**options.__dict__) 338 | 339 | 340 | def main(options: Options, *, is_main: bool = True) -> None: 341 | """ 342 | Main entrypoint for running Zī Bái. 343 | """ 344 | options.configure_logging() 345 | 346 | if is_main: 347 | for sock in options.sockets: 348 | sockname = sock.getsockname() 349 | if isinstance(sockname, str): 350 | logger.info("Listening on %s", sockname) 351 | else: 352 | logger.info("Listening on %s:%d", *sockname[:2]) 353 | 354 | if not options.no_gevent and (options.subprocess == 0 or not is_main): 355 | # Single process mode or worker process with gevent. 356 | try: 357 | import gevent 358 | except ImportError: 359 | logger.warning("gevent not found, using threading instead") 360 | else: 361 | import gevent.monkey 362 | 363 | gevent.monkey.patch_all() 364 | logger.info("Using gevent for worker pool") 365 | 366 | # Before use multiprocessing, we need to call `get_application` to make sure 367 | # the application can be imported correctly. 368 | application = options.get_application() 369 | 370 | if is_main and options.subprocess > 0: 371 | from .multiprocess import ProcessParameters, multiprocess 372 | 373 | multiprocess( 374 | options.subprocess, 375 | ProcessParameters(main, options, is_main=False), 376 | options.watchfiles, 377 | options.graceful_exit_timeout, 378 | ) 379 | return 380 | 381 | if options.h11_max_incomplete_event_size is not None: 382 | # Set max_incomplete_event_size 383 | from . import h11 384 | 385 | h11.MAX_INCOMPLETE_EVENT_SIZE = options.h11_max_incomplete_event_size 386 | 387 | if options.max_request_pre_process is not None: 388 | from .middlewares.limit_request_count import LimitRequestCountMiddleware 389 | 390 | application = LimitRequestCountMiddleware( 391 | application, options.max_request_pre_process 392 | ) 393 | 394 | import os 395 | import signal 396 | import threading 397 | 398 | graceful_exit = threading.Event() 399 | quickly_exit = threading.Event() 400 | 401 | def handle_int(sig, frame) -> None: 402 | logger.info("Received SIGINT, quickly exiting") 403 | graceful_exit.set() 404 | quickly_exit.set() 405 | 406 | def handle_term(sig, frame) -> None: 407 | logger.info("Received SIGTERM, gracefully exiting") 408 | graceful_exit.set() 409 | 410 | signal.signal(signal.SIGINT, handle_int) 411 | signal.signal(signal.SIGTERM, handle_term) 412 | 413 | if is_main: 414 | logger.info("Run in single process mode [%d]", os.getpid()) 415 | 416 | from .core import serve 417 | 418 | serve( 419 | app=application, 420 | bind_sockets=options.sockets, 421 | max_workers=options.max_workers, 422 | graceful_exit=graceful_exit, 423 | graceful_exit_timeout=options.graceful_exit_timeout, 424 | quickly_exit=quickly_exit, 425 | url_scheme=options.url_scheme, 426 | script_name=options.url_prefix, 427 | before_serve_hook=options.get_before_serve_hook(), 428 | before_graceful_exit_hook=options.get_before_graceful_exit_hook(), 429 | before_died_hook=options.get_before_died_hook(), 430 | socket_timeout=options.socket_timeout, 431 | ) 432 | -------------------------------------------------------------------------------- /src/zibai/const.py: -------------------------------------------------------------------------------- 1 | SERVER_NAME = "Zî Bái".encode("latin1") 2 | 
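The CLI layer above (cli.py) is also the supported programmatic entry point: `parse_args` turns argv-style strings into an `Options` instance and `main` runs the server with it. The following is a hypothetical usage sketch, not part of the repository; it assumes `example:app` resolves to a WSGI callable (the repository tree ships an example.py for exactly that) and mirrors the console command `zibai example:app --listen 127.0.0.1:8000`.

```python
# Hypothetical sketch: starting Zibai programmatically through the API
# re-exported in src/zibai/__init__.py. "example:app" is assumed to resolve
# to a WSGI callable importable from the current working directory.
from zibai import main, parse_args

if __name__ == "__main__":
    options = parse_args(
        [
            "example:app",                 # import string in "<module>:<attribute>" form
            "--listen", "127.0.0.1:8000",  # HOST:PORT, or "unix:/path/to.sock"
            "--max-workers", "10",         # size of the thread/greenlet pool
            "--no-gevent",                 # keep this sketch on plain threads
        ]
    )
    main(options)  # serves until SIGINT/SIGTERM triggers the exit events
```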
-------------------------------------------------------------------------------- /src/zibai/core.py: -------------------------------------------------------------------------------- 1 | import os 2 | import queue 3 | import selectors 4 | import socket 5 | import sys 6 | import threading 7 | from concurrent.futures import ThreadPoolExecutor as _ThreadPoolExecutor 8 | from contextlib import contextmanager 9 | from types import TracebackType 10 | from typing import Any, Callable, Generator, Protocol 11 | from typing import cast as typing_cast 12 | 13 | from .h11 import http11_protocol 14 | from .logger import debug_logger, logger 15 | from .utils import unicode_to_wsgi 16 | from .wsgi_typing import WSGIApp 17 | 18 | 19 | class ThreadPoolExecutor(_ThreadPoolExecutor): 20 | def __init__( 21 | self, 22 | max_workers: int | None = None, 23 | thread_name_prefix: str = "", 24 | initializer: Callable[..., object] | None = None, 25 | initargs: tuple[Any, ...] = (), 26 | *, 27 | join_timeout: float = 5, 28 | ) -> None: 29 | super().__init__(max_workers, thread_name_prefix, initializer, initargs) 30 | self._join_timeout = join_timeout 31 | 32 | def shutdown(self, wait: bool = True, *, cancel_futures: bool = False) -> None: 33 | with self._shutdown_lock: 34 | self._shutdown = True 35 | # Drain all work items from the queue, and then cancel their 36 | # associated futures. 37 | while True: 38 | try: 39 | work_item = self._work_queue.get_nowait() 40 | except queue.Empty: 41 | break 42 | if work_item is not None: 43 | work_item.future.cancel() 44 | 45 | # Send a wake-up to prevent threads calling 46 | # _work_queue.get(block=True) from permanently blocking. 47 | self._work_queue.put(None) # type: ignore 48 | 49 | for t in self._threads: 50 | t.join(self._join_timeout) 51 | 52 | 53 | @contextmanager 54 | def lifespan_hooks_context( 55 | before_serve_hook: Callable[[], None] = lambda: None, 56 | before_died_hook: Callable[[], None] = lambda: None, 57 | ) -> Generator[None, None, None]: 58 | """ 59 | Context manager for lifespan hooks. 60 | """ 61 | before_serve_hook() 62 | try: 63 | yield 64 | finally: 65 | before_died_hook() 66 | 67 | 68 | class ContextManager(Protocol): 69 | def __enter__(self) -> Any: ... 70 | 71 | def __exit__( 72 | self, 73 | exc_type: type[BaseException] | None, 74 | exc_value: BaseException | None, 75 | traceback: TracebackType | None, 76 | /, 77 | ) -> bool | None: ... 78 | 79 | 80 | @contextmanager 81 | def quickly_exit_manager( 82 | *contextmangers: ContextManager, quickly_exit: threading.Event 83 | ) -> Generator[None, None, None]: 84 | for cm in contextmangers: 85 | cm.__enter__() 86 | try: 87 | yield 88 | finally: 89 | if not quickly_exit.is_set(): 90 | for cm in contextmangers: 91 | cm.__exit__(*sys.exc_info()) 92 | 93 | 94 | def serve( 95 | *, 96 | app: Any, 97 | bind_sockets: list[socket.socket], 98 | max_workers: int, 99 | graceful_exit: threading.Event, 100 | graceful_exit_timeout: float = 10, 101 | quickly_exit: threading.Event = threading.Event(), 102 | url_scheme: str = "http", 103 | script_name: str | None = None, 104 | before_serve_hook: Callable[[], None] = lambda: None, 105 | before_graceful_exit_hook: Callable[[], None] = lambda: None, 106 | before_died_hook: Callable[[], None] = lambda: None, 107 | socket_timeout: float = 5, 108 | ) -> None: 109 | """ 110 | Serve a WSGI application. 111 | """ 112 | if script_name is None: 113 | # If script_name is not specified, use the environment variable. 
114 | script_name = unicode_to_wsgi(os.environ.get("SCRIPT_NAME", "")) 115 | 116 | def _handle_exit_event() -> None: 117 | graceful_exit.wait() 118 | try: 119 | before_graceful_exit_hook() 120 | except Exception: # pragma: no cover 121 | logger.exception("Exception in `before_graceful_exit` callback") 122 | 123 | threading.Thread(target=_handle_exit_event, daemon=True).start() 124 | 125 | lifespan_hooks = lifespan_hooks_context( 126 | before_serve_hook=before_serve_hook, before_died_hook=before_died_hook 127 | ) 128 | executor = ThreadPoolExecutor( 129 | max_workers=max_workers, 130 | thread_name_prefix="zibai_worker", 131 | join_timeout=graceful_exit_timeout, 132 | ) 133 | selector = selectors.DefaultSelector() 134 | 135 | connections: set[socket.socket] = set() 136 | 137 | with lifespan_hooks, selector, executor: 138 | for sock in bind_sockets: 139 | selector.register(sock, selectors.EVENT_READ) 140 | 141 | while not graceful_exit.is_set(): 142 | events = selector.select(timeout=0.1) 143 | if not events: 144 | continue 145 | 146 | for key, _ in events: 147 | try: 148 | sock: socket.socket = typing_cast(socket.socket, key.fileobj) 149 | connection, address = sock.accept() 150 | debug_logger.debug("Accepted connection from %s:%d", *address[:2]) 151 | except (BlockingIOError, ConnectionError): 152 | continue 153 | else: 154 | executor.submit( 155 | handle_connection, 156 | app, 157 | connection, 158 | address, 159 | graceful_exit, 160 | socket_timeout, 161 | connections, 162 | url_scheme=url_scheme, 163 | script_name=script_name, 164 | ) 165 | 166 | if quickly_exit.is_set(): 167 | # Close all connections, forcefully 168 | debug_logger.debug("Closing all connections") 169 | for connection in tuple(connections): 170 | connection.shutdown(socket.SHUT_RDWR) 171 | connection.close() 172 | debug_logger.debug("Closed all connections") 173 | 174 | 175 | def handle_connection( 176 | app: WSGIApp, 177 | connection: socket.socket, 178 | address: tuple[str, int], 179 | graceful_exit: threading.Event, 180 | socket_timeout: float, 181 | connections: set[socket.socket], 182 | *, 183 | url_scheme: str = "http", 184 | script_name: str = "", 185 | ) -> None: 186 | connection.settimeout(socket_timeout) 187 | debug_logger.debug("Handling connection from %s:%d", *address[:2]) 188 | with connection: 189 | try: 190 | connections.add(connection) 191 | 192 | http11_protocol( 193 | app, 194 | connection, 195 | graceful_exit, 196 | url_scheme=url_scheme, 197 | script_name=script_name, 198 | ) 199 | except ConnectionError: 200 | pass # client closed connection, nothing to do 201 | finally: 202 | connections.discard(connection) 203 | -------------------------------------------------------------------------------- /src/zibai/h11.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import socket 3 | import sys 4 | import threading 5 | from typing import Any, Callable 6 | 7 | import h11 8 | 9 | from .const import SERVER_NAME 10 | from .logger import debug_logger, error_logger, log_http 11 | from .utils import Input 12 | from .wsgi_typing import Environ, ExceptionInfo, WSGIApp 13 | 14 | 15 | class ConnectionClosed(Exception): 16 | """ 17 | Received a ConnectionClosed event from h11. 
18 | """ 19 | 20 | 21 | @dataclasses.dataclass 22 | class H11Protocol: 23 | c: h11.Connection 24 | s: socket.socket 25 | peername: tuple[str, int] 26 | sockname: tuple[str, int] 27 | 28 | graceful_exit: threading.Event 29 | 30 | # WSGI variables 31 | url_scheme: str = "http" 32 | script_name: str = "" 33 | 34 | # State variables 35 | response_buffer: tuple[int, list[tuple[bytes, bytes]]] | None = None 36 | 37 | @property 38 | def header_sent(self) -> bool: 39 | return self.c.our_state is not h11.SEND_RESPONSE 40 | 41 | def get_next_event(self): 42 | if self.c.their_state is h11.DONE: 43 | return h11.PAUSED 44 | 45 | while True: 46 | if self.c.their_state is h11.IDLE and self.graceful_exit.is_set(): 47 | raise ConnectionClosed 48 | 49 | event = self.c.next_event() 50 | debug_logger.debug("Received event from %s:%d: %r", *self.peername, event) 51 | 52 | match event: 53 | case h11.NEED_DATA: 54 | if self.c.they_are_waiting_for_100_continue: 55 | self.send_with_event( 56 | h11.InformationalResponse(headers=[], status_code=100) 57 | ) 58 | try: 59 | self.c.receive_data(self.s.recv(MAX_INCOMPLETE_EVENT_SIZE)) 60 | except socket.timeout: 61 | pass 62 | case h11.ConnectionClosed(): 63 | raise ConnectionClosed 64 | case _: 65 | return event 66 | 67 | def send_with_event(self, event) -> None: 68 | data = self.c.send(event) 69 | assert data is not None 70 | self.s.sendall(data) 71 | debug_logger.debug("Sent event to %s:%d: %r", *self.peername, event) 72 | 73 | def read_request_body(self): 74 | event = self.get_next_event() 75 | match event: 76 | case h11.Data(data): 77 | return data 78 | case h11.EndOfMessage(): 79 | return b"" 80 | case _: 81 | return b"" 82 | 83 | def start_response( 84 | self, 85 | status: str, 86 | headers: list[tuple[str, str]], 87 | exc_info: ExceptionInfo | None = None, 88 | ) -> Callable[[bytes], Any]: 89 | if exc_info is not None: 90 | try: 91 | if self.header_sent: 92 | raise exc_info[1].with_traceback(exc_info[2]) 93 | finally: 94 | del exc_info 95 | elif self.response_buffer is not None: 96 | raise RuntimeError("start_response() was already called") 97 | 98 | status_code, _ = status.split(" ", 1) 99 | if status_code.isdigit(): 100 | status_code = int(status_code) 101 | else: 102 | raise RuntimeError(f"Invalid status: {status}") 103 | 104 | self.response_buffer = ( 105 | status_code, 106 | [ 107 | *( 108 | (name.encode("latin1"), value.encode("latin1")) 109 | for name, value in headers 110 | ), 111 | (b"Server", SERVER_NAME), 112 | ], 113 | ) 114 | 115 | return self.s.sendall 116 | 117 | def init_environ(self) -> Environ: 118 | event = self.get_next_event() 119 | match event: 120 | case h11.Request(method, headers, target, http_version): 121 | request_uri = target.decode("latin-1") 122 | if "?" 
in request_uri: 123 | path, query = request_uri.split("?", 1) 124 | else: 125 | path, query = request_uri, "" 126 | 127 | server_name, server_port = self.sockname 128 | remote_name, remote_port = self.peername 129 | 130 | script_name = self.script_name 131 | if path == script_name: 132 | path = "" 133 | else: 134 | url_prefix_with_trailing_slash = script_name + "/" 135 | if path.startswith(url_prefix_with_trailing_slash): 136 | path = path[len(script_name) :] 137 | 138 | environ: Environ = { 139 | "REQUEST_METHOD": method.decode("ascii"), 140 | "SCRIPT_NAME": script_name, 141 | "SERVER_NAME": server_name, 142 | "SERVER_PORT": str(server_port), 143 | "REMOTE_ADDR": remote_name, 144 | "REMOTE_PORT": str(remote_port), 145 | "REQUEST_URI": request_uri, 146 | "PATH_INFO": path, 147 | "QUERY_STRING": query, 148 | "SERVER_PROTOCOL": f"HTTP/{http_version.decode('ascii')}", 149 | "wsgi.version": (1, 0), 150 | "wsgi.url_scheme": self.url_scheme, 151 | "wsgi.input": Input(self.read_request_body), 152 | "wsgi.errors": sys.stderr, 153 | "wsgi.multithread": True, 154 | "wsgi.multiprocess": True, 155 | "wsgi.run_once": False, 156 | } 157 | 158 | for name, value in headers: 159 | name = name.decode("latin1") 160 | value = value.decode("latin1") 161 | if name == "content-type": 162 | environ["CONTENT_TYPE"] = value 163 | elif name == "content-length": 164 | environ["CONTENT_LENGTH"] = value 165 | else: 166 | http_name = "HTTP_" + name.upper().replace("-", "_") 167 | if http_name not in environ: 168 | environ[http_name] = value 169 | else: 170 | environ[http_name] += "," + value 171 | 172 | return environ # type: ignore 173 | case _: 174 | raise RuntimeError(f"Unexpected event: {event}") 175 | 176 | def call_wsgi(self, wsgi_app: WSGIApp) -> None: 177 | environ = self.init_environ() 178 | iterable = None # Just for finally block 179 | 180 | try: 181 | iterable = wsgi_app(environ, self.start_response) 182 | iterator = iter(iterable) 183 | 184 | try: 185 | chunk = next(iterator) 186 | except StopIteration: 187 | # WSGI app return a empty generator 188 | chunk = b"" 189 | 190 | if self.response_buffer is None: 191 | raise RuntimeError("start_response() was not called") 192 | 193 | status_code, headers = self.response_buffer 194 | self.send_with_event(h11.Response(status_code=status_code, headers=headers)) 195 | 196 | log_http(environ, status_code) 197 | 198 | self.send_with_event(h11.Data(data=chunk)) 199 | 200 | for chunk in iterator: 201 | self.send_with_event(h11.Data(data=chunk)) 202 | 203 | self.send_with_event(h11.EndOfMessage()) 204 | except BaseException: 205 | error_logger.exception( 206 | "Error while calling WSGI application", exc_info=sys.exc_info() 207 | ) 208 | if self.header_sent: 209 | raise 210 | 211 | self.send_with_event( 212 | h11.Response( 213 | status_code=500, 214 | headers=[ 215 | (b"Content-Type", b"text/plain; charset=utf-8"), 216 | (b"Content-Length", b"21"), 217 | (b"Server", SERVER_NAME), 218 | ], 219 | ) 220 | ) 221 | self.send_with_event(h11.Data(data=b"Internal Server Error")) 222 | self.send_with_event(h11.EndOfMessage()) 223 | 224 | log_http(environ, 500) 225 | raise 226 | finally: 227 | # Close the iterable if it has a close() method, per PEP 3333. 
228 | close = getattr(iterable, "close", None) 229 | if callable(close): 230 | close() 231 | 232 | 233 | MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 234 | 235 | 236 | def http11_protocol( 237 | app: WSGIApp, 238 | sock: socket.socket, 239 | graceful_exit: threading.Event, 240 | *, 241 | url_scheme: str = "http", 242 | script_name: str = "", 243 | ) -> None: 244 | peername = sock.getpeername() 245 | if isinstance(peername, str): 246 | peername = (peername, 0) 247 | else: 248 | peername = peername[:2] 249 | sockname = sock.getsockname() 250 | if isinstance(sockname, str): 251 | sockname = (sockname, 0) 252 | else: 253 | sockname = sockname[:2] 254 | 255 | h11_connection = h11.Connection( 256 | our_role=h11.SERVER, max_incomplete_event_size=MAX_INCOMPLETE_EVENT_SIZE 257 | ) 258 | h = H11Protocol( 259 | c=h11_connection, 260 | s=sock, 261 | peername=peername, 262 | sockname=sockname, 263 | graceful_exit=graceful_exit, 264 | url_scheme=url_scheme, 265 | script_name=script_name, 266 | ) 267 | while not graceful_exit.is_set(): 268 | try: 269 | h.call_wsgi(app) 270 | 271 | while True: 272 | event = h.get_next_event() 273 | match event: 274 | case h11.EndOfMessage() | h11.PAUSED: 275 | try: 276 | h.c.start_next_cycle() 277 | except h11.LocalProtocolError: 278 | raise ConnectionClosed 279 | h.response_buffer = None 280 | debug_logger.debug("Start next cycle in %s:%d", *h.peername) 281 | break 282 | case h11.Data(): # unread request body 283 | pass 284 | case _: 285 | raise RuntimeError(f"Unexpected event: {event}") 286 | except ConnectionClosed: 287 | debug_logger.debug("Connection closed by %s:%d", *h.peername) 288 | break 289 | -------------------------------------------------------------------------------- /src/zibai/logger.py: -------------------------------------------------------------------------------- 1 | import copy 2 | import logging 3 | 4 | logger = logging.getLogger("zibai") 5 | 6 | debug_logger = logging.getLogger("zibai.debug") 7 | 8 | access_logger = logging.getLogger("zibai.access") 9 | 10 | error_logger = logging.getLogger("zibai.error") 11 | 12 | 13 | def log_http(environ, status_code) -> None: 14 | if status_code >= 500: 15 | error_logger.error( 16 | '"%s %s %s" %s', 17 | environ["REQUEST_METHOD"], 18 | environ["PATH_INFO"], 19 | environ["SERVER_PROTOCOL"], 20 | status_code, 21 | extra=environ, 22 | exc_info=True, 23 | ) 24 | else: 25 | access_logger.info( 26 | '"%s %s %s" %s', 27 | environ["REQUEST_METHOD"], 28 | environ["PATH_INFO"], 29 | environ["SERVER_PROTOCOL"], 30 | status_code, 31 | extra=environ, 32 | ) 33 | 34 | 35 | LOGGING_CONFIG: dict = { 36 | "version": 1, 37 | "disable_existing_loggers": False, 38 | "formatters": { 39 | "default": { 40 | "format": "%(asctime)s %(levelname)s %(message)s", 41 | }, 42 | }, 43 | "handlers": { 44 | "default": { 45 | "formatter": "default", 46 | "class": "logging.StreamHandler", 47 | "stream": "ext://sys.stdout", 48 | }, 49 | "error": { 50 | "formatter": "default", 51 | "class": "logging.StreamHandler", 52 | "stream": "ext://sys.stderr", 53 | }, 54 | }, 55 | "loggers": { 56 | "zibai": {"handlers": ["default"], "level": "INFO"}, 57 | "zibai.debug": {"handlers": ["default"], "level": "INFO", "propagate": False}, 58 | "zibai.access": {"handlers": ["default"], "level": "INFO", "propagate": False}, 59 | "zibai.error": {"handlers": ["error"], "level": "ERROR", "propagate": False}, 60 | }, 61 | } 62 | 63 | 64 | def _merge_dict(base: dict, config: dict) -> dict: 65 | base = copy.deepcopy(base) # deep copy 66 | 67 | for key, value in config.items(): 68 
| if key not in base: 69 | base[key] = value 70 | else: 71 | if isinstance(value, dict): 72 | base[key] = _merge_dict(base[key], value) 73 | else: 74 | base[key] = value 75 | return base 76 | 77 | 78 | def load_config(config: dict) -> dict: 79 | return _merge_dict(LOGGING_CONFIG, config) 80 | -------------------------------------------------------------------------------- /src/zibai/middlewares/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abersheeran/zibai/c0bb54f3dbfcd4d434ad7ef6bc55b2ffec17ad48/src/zibai/middlewares/__init__.py -------------------------------------------------------------------------------- /src/zibai/middlewares/limit_request_count.py: -------------------------------------------------------------------------------- 1 | import dataclasses 2 | import signal 3 | 4 | from ..wsgi_typing import Environ, IterableChunks, StartResponse, WSGIApp 5 | 6 | 7 | @dataclasses.dataclass 8 | class LimitRequestCountMiddleware: 9 | app: WSGIApp 10 | max_request_pre_process: int 11 | request_count: int = 0 12 | 13 | def __call__( 14 | self, environ: Environ, start_response: StartResponse 15 | ) -> IterableChunks: 16 | yield from self.app(environ, start_response) 17 | 18 | self.request_count += 1 19 | if self.request_count >= self.max_request_pre_process: 20 | signal.raise_signal(signal.SIGTERM) 21 | -------------------------------------------------------------------------------- /src/zibai/multiprocess.py: -------------------------------------------------------------------------------- 1 | import os 2 | import signal 3 | import threading 4 | import time 5 | from contextlib import nullcontext 6 | from multiprocessing import Pipe, get_context 7 | from multiprocessing.context import SpawnProcess 8 | from typing import Any, Callable, ParamSpec 9 | 10 | from .logger import logger 11 | 12 | get_context("spawn").allow_connection_pickling() 13 | 14 | UNIX_SIGNALS = { 15 | getattr(signal, f"SIG{x}"): x 16 | for x in "HUP QUIT TTIN TTOU USR1 USR2 WINCH".split() 17 | if hasattr(signal, f"SIG{x}") 18 | } 19 | 20 | P = ParamSpec("P") 21 | 22 | 23 | class ProcessParameters: 24 | def __init__( 25 | self, 26 | f: Callable[P, Any], 27 | *args: P.args, 28 | **kwargs: P.kwargs, 29 | ) -> None: 30 | self.f = f 31 | self.args = args 32 | self.kwargs = kwargs 33 | 34 | 35 | class Process: 36 | def __init__(self, parameters: ProcessParameters) -> None: 37 | self.parameters = parameters 38 | self.parent_conn, self.child_conn = Pipe() 39 | self.process = SpawnProcess(target=self.target) 40 | 41 | def ping(self, timeout: float = 5) -> bool: 42 | try: 43 | self.parent_conn.send(b"ping") 44 | if self.parent_conn.poll(timeout): 45 | self.parent_conn.recv() 46 | return True 47 | except IOError: 48 | pass 49 | return False 50 | 51 | def pong(self) -> None: 52 | self.child_conn.recv() 53 | self.child_conn.send(b"pong") 54 | 55 | def always_pong(self) -> None: 56 | while True: 57 | self.pong() 58 | 59 | def target(self) -> Any: 60 | if os.name == "nt": 61 | # Windows doesn't support SIGTERM, so we use SIGBREAK instead. 62 | # And then we raise SIGTERM when SIGBREAK is received. 
63 | # https://learn.microsoft.com/zh-cn/cpp/c-runtime-library/reference/signal?view=msvc-170 64 | signal.signal( 65 | signal.SIGBREAK, lambda sig, frame: signal.raise_signal(signal.SIGTERM) 66 | ) 67 | 68 | threading.Thread(target=self.always_pong, daemon=True).start() 69 | return self.parameters.f(*self.parameters.args, **self.parameters.kwargs) 70 | 71 | def is_alive(self, timeout: float = 5) -> bool: 72 | return self.process.is_alive() and self.ping(timeout) 73 | 74 | def start(self) -> None: 75 | self.process.start() 76 | logger.info("Started child process [{}]".format(self.process.pid)) 77 | 78 | def terminate(self) -> None: 79 | if self.process.exitcode is not None: 80 | return 81 | assert self.process.pid is not None 82 | if os.name == "nt": 83 | # Windows doesn't support SIGTERM. 84 | # So send SIGBREAK, and then in process raise SIGTERM. 85 | os.kill(self.process.pid, signal.CTRL_BREAK_EVENT) 86 | else: 87 | os.kill(self.process.pid, signal.SIGTERM) 88 | logger.info("Terminated child process [{}]".format(self.process.pid)) 89 | 90 | self.parent_conn.close() 91 | self.child_conn.close() 92 | 93 | def terminate_quickly(self) -> None: 94 | if self.process.exitcode is not None: 95 | return 96 | assert self.process.pid is not None 97 | if os.name == "nt": 98 | os.kill(self.process.pid, signal.CTRL_C_EVENT) 99 | else: 100 | os.kill(self.process.pid, signal.SIGINT) 101 | logger.info("Terminated quickly child process [{}]".format(self.process.pid)) 102 | 103 | self.parent_conn.close() 104 | self.child_conn.close() 105 | 106 | def kill(self) -> None: 107 | # In Windows, the method will call `TerminateProcess` to kill the process. 108 | # In Unix, the method will send SIGKILL to the process. 109 | self.process.kill() 110 | 111 | def join(self, timeout: float | None = None) -> None: 112 | logger.info("Waiting for child process [{}]".format(self.process.pid)) 113 | self.process.join(timeout) 114 | # Timeout, kill the process 115 | while self.process.exitcode is None: 116 | self.process.kill() 117 | self.process.join(1) 118 | 119 | @property 120 | def pid(self) -> int | None: 121 | return self.process.pid 122 | 123 | 124 | class MultiProcessManager: 125 | def __init__( 126 | self, 127 | processes_num: int, 128 | process_parameters: ProcessParameters, 129 | join_timeout: float | None = None, 130 | ): 131 | self.join_timeout = join_timeout 132 | self.processes_num = processes_num 133 | self.process_parameters = process_parameters 134 | self.processes: list[Process] = [] 135 | 136 | self.should_exit = threading.Event() 137 | self.reload_lock = threading.Lock() 138 | 139 | self.signal_queue: list[int] = [] 140 | for sig in UNIX_SIGNALS: 141 | signal.signal(sig, lambda sig, frame: self.signal_queue.append(sig)) 142 | 143 | # Sent by Ctrl+C. 144 | signal.signal(signal.SIGINT, lambda sig, frame: self.handle_int()) 145 | # Sent by `kill `. Not sent on Windows. 146 | signal.signal(signal.SIGTERM, lambda sig, frame: self.handle_term()) 147 | if os.name == "nt": 148 | # Sent by `Ctrl+Break` on Windows. 
149 | signal.signal(signal.SIGBREAK, lambda sig, frame: self.handle_break()) 150 | 151 | def init_processes(self) -> None: 152 | for _ in range(self.processes_num): 153 | process = Process(self.process_parameters) 154 | process.start() 155 | self.processes.append(process) 156 | 157 | def terminate_all(self) -> None: 158 | for process in self.processes: 159 | process.terminate() 160 | 161 | def terminate_all_quickly(self) -> None: 162 | for process in self.processes: 163 | process.terminate_quickly() 164 | 165 | def join_all(self) -> None: 166 | for process in self.processes: 167 | process.join(self.join_timeout) 168 | 169 | def restart_all(self) -> None: 170 | for idx, process in enumerate(tuple(self.processes)): 171 | process.terminate() 172 | process.join() 173 | new_process = Process(self.process_parameters) 174 | new_process.start() 175 | self.processes[idx] = new_process 176 | 177 | def on_watchfiles_reload(self) -> None: 178 | with self.reload_lock: 179 | self.terminate_all_quickly() 180 | self.join_all() 181 | time.sleep(1) # Wait for the Ctrl+C signal to be handled 182 | # Because in Windows, the Ctrl+C signal always send to main process. 183 | 184 | def mainloop(self) -> None: 185 | logger.info("Started parent process [{}]".format(os.getpid())) 186 | 187 | self.init_processes() 188 | 189 | while not self.should_exit.wait(0.5): 190 | self.handle_signals() 191 | self.keep_subprocess_alive() 192 | 193 | self.join_all() 194 | 195 | logger.info("Stopped parent process [{}]".format(os.getpid())) 196 | 197 | def keep_subprocess_alive(self) -> None: 198 | if self.should_exit.is_set(): 199 | return # parent process is exiting, no need to keep subprocess alive 200 | 201 | for idx, process in enumerate(tuple(self.processes)): 202 | if process.is_alive(): 203 | continue 204 | 205 | process.kill() # process is hung, kill it 206 | process.join(1) 207 | 208 | if self.should_exit.is_set(): 209 | return 210 | 211 | logger.info("Child process [{}] died".format(process.pid)) 212 | del self.processes[idx] 213 | process = Process(self.process_parameters) 214 | process.start() 215 | self.processes.append(process) 216 | 217 | def handle_signals(self) -> None: 218 | for sig in tuple(self.signal_queue): 219 | self.signal_queue.remove(sig) 220 | sig_name = UNIX_SIGNALS[sig] 221 | sig_handler = getattr(self, f"handle_{sig_name.lower()}", None) 222 | if sig_handler is not None: 223 | sig_handler() 224 | else: 225 | logger.info(f"Received signal [{sig_name}], but nothing to do") 226 | 227 | def handle_int(self) -> None: 228 | if self.reload_lock.locked(): 229 | return 230 | logger.info("Received SIGINT, quickly exiting") 231 | self.should_exit.set() 232 | # On Windows Ctrl+C is automatically sent to all child processes. 233 | if os.name != "nt": 234 | self.terminate_all_quickly() 235 | 236 | def handle_term(self) -> None: 237 | logger.info("Received SIGTERM, gracefully exiting") 238 | self.should_exit.set() 239 | self.terminate_all() 240 | 241 | def handle_break(self) -> None: 242 | logger.info("Received SIGBREAK, gracefully exiting") 243 | self.should_exit.set() 244 | # On Windows, Ctrl+Break is automatically sent to all child processes. 245 | # So, we don't need to terminate all child processes here. 
246 | 247 | def handle_hup(self) -> None: 248 | logger.info("Received SIGHUP, restarting processes") 249 | self.restart_all() 250 | 251 | def handle_ttin(self) -> None: 252 | logger.info("Received SIGTTIN, increasing processes") 253 | self.processes_num += 1 254 | process = Process(self.process_parameters) 255 | process.start() 256 | self.processes.append(process) 257 | 258 | def handle_ttou(self) -> None: 259 | logger.info("Received SIGTTOU, decreasing processes") 260 | if self.processes_num <= 1: 261 | logger.info("Cannot decrease processes any more") 262 | return 263 | self.processes_num -= 1 264 | process = self.processes.pop() 265 | process.terminate() 266 | process.join() 267 | 268 | 269 | def multiprocess( 270 | processes_num: int, 271 | process_parameters: ProcessParameters, 272 | watchfiles: str | None, 273 | join_timeout: float | None = None, 274 | ) -> None: 275 | processes_manager = MultiProcessManager( 276 | processes_num, process_parameters, join_timeout 277 | ) 278 | 279 | if watchfiles is not None: 280 | from .reloader import listen_for_changes 281 | 282 | contextmanager = listen_for_changes( 283 | watchfiles, processes_manager.on_watchfiles_reload 284 | ) 285 | else: 286 | contextmanager = nullcontext() 287 | 288 | with contextmanager: 289 | processes_manager.mainloop() 290 | -------------------------------------------------------------------------------- /src/zibai/reloader.py: -------------------------------------------------------------------------------- 1 | import os 2 | import threading 3 | from contextlib import contextmanager 4 | from typing import Any, Callable, Generator 5 | 6 | import watchdog.events 7 | from watchdog.observers import Observer 8 | 9 | from .logger import logger 10 | 11 | 12 | @contextmanager 13 | def listen_for_changes( 14 | watchfiles: str, callback: Callable[[], Any] 15 | ) -> Generator[None, None, None]: 16 | """ 17 | When any of the files in `watchfiles` matches a change, call `callback`. 
18 | """ 19 | reloading_event = threading.Event() 20 | 21 | def run_callback(event: watchdog.events.FileSystemEvent) -> None: 22 | if not reloading_event.is_set(): 23 | logger.info("Detected file change, reloading...") 24 | reloading_event.set() 25 | callback() 26 | reloading_event.clear() 27 | 28 | def on_any_event(event: watchdog.events.FileSystemEvent) -> None: 29 | if event.event_type not in ( 30 | watchdog.events.EVENT_TYPE_MOVED, 31 | watchdog.events.EVENT_TYPE_DELETED, 32 | watchdog.events.EVENT_TYPE_CREATED, 33 | watchdog.events.EVENT_TYPE_MODIFIED, 34 | ): 35 | return 36 | 37 | threading.Thread(target=run_callback, args=(event,), daemon=True).start() 38 | 39 | event_handler = watchdog.events.PatternMatchingEventHandler( 40 | patterns=[pattern.strip() for pattern in watchfiles.split(";")], 41 | ) 42 | 43 | setattr(event_handler, "on_any_event", on_any_event) 44 | 45 | observer = Observer() 46 | 47 | path = os.getcwd() 48 | logger.info("Watching files in {}".format(path)) 49 | observer.schedule(event_handler, path, recursive=True) 50 | 51 | observer.start() 52 | try: 53 | yield 54 | finally: 55 | observer.stop() 56 | observer.join() 57 | -------------------------------------------------------------------------------- /src/zibai/utils.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from typing import Callable, Generator 3 | 4 | ENC, ESC = sys.getfilesystemencoding(), "surrogateescape" 5 | 6 | 7 | def unicode_to_wsgi(u: str) -> str: 8 | """Convert an environment variable to a WSGI "bytes-as-unicode" string""" 9 | return u.encode(ENC, ESC).decode("iso-8859-1") 10 | 11 | 12 | class Input: 13 | def __init__(self, receive: Callable[[], bytes]) -> None: 14 | self.buffer = bytearray() 15 | self.receive = receive 16 | self._has_more = True 17 | 18 | @property 19 | def has_more(self) -> bool: 20 | if self._has_more or self.buffer: 21 | return True 22 | return False 23 | 24 | def _receive_more_data(self) -> bytes: 25 | if not self._has_more: 26 | return b"" 27 | data = self.receive() 28 | self._has_more = data != b"" 29 | return data 30 | 31 | def read(self, size: int = -1) -> bytes: 32 | while size == -1 or size > len(self.buffer): 33 | self.buffer.extend(self._receive_more_data()) 34 | if not self._has_more: 35 | break 36 | if size == -1: 37 | result = bytes(self.buffer) 38 | self.buffer.clear() 39 | else: 40 | result = bytes(self.buffer[:size]) 41 | del self.buffer[:size] 42 | return result 43 | 44 | def readline(self, limit: int = -1) -> bytes: 45 | while True: 46 | lf_index = self.buffer.find(b"\n", 0, limit if limit > -1 else None) 47 | if lf_index != -1: 48 | result = bytes(self.buffer[: lf_index + 1]) 49 | del self.buffer[: lf_index + 1] 50 | return result 51 | elif limit != -1: 52 | result = bytes(self.buffer[:limit]) 53 | del self.buffer[:limit] 54 | return result 55 | if not self._has_more: 56 | break 57 | self.buffer.extend(self._receive_more_data()) 58 | 59 | result = bytes(self.buffer) 60 | self.buffer.clear() 61 | return result 62 | 63 | def readlines(self, hint: int = -1) -> list[bytes]: 64 | if not self.has_more: 65 | return [] 66 | if hint == -1: 67 | raw_data = self.read(-1) 68 | bytelist = raw_data.split(b"\n") 69 | if raw_data[-1] == 10: # 10 -> b"\n" 70 | bytelist.pop(len(bytelist) - 1) 71 | return [line + b"\n" for line in bytelist] 72 | return [self.readline() for _ in range(hint)] 73 | 74 | def __iter__(self) -> Generator[bytes, None, None]: 75 | while self.has_more: 76 | yield self.readline() 77 | 
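utils.py above provides the `wsgi.input` implementation: `Input` wraps a zero-argument `receive()` callable and treats an empty `b""` return as the end of the request body, which is how h11.py feeds it via `read_request_body`. Below is a minimal test-style sketch of that contract, using a hypothetical in-memory body instead of a real socket.

```python
# Minimal sketch (not from the repository): exercising zibai.utils.Input with
# an in-memory body. An empty bytes return from receive() marks end-of-body.
from zibai.utils import Input

chunks = [b"hello\n", b"world", b""]
stream = Input(lambda: chunks.pop(0))

assert stream.readline() == b"hello\n"  # buffers until the first LF is seen
assert stream.read() == b"world"        # drains the remainder of the body
assert stream.read() == b""             # exhausted stream keeps returning b""
```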
-------------------------------------------------------------------------------- /src/zibai/wsgi_typing.py: -------------------------------------------------------------------------------- 1 | """ 2 | https://peps.python.org/pep-3333/ 3 | """ 4 | 5 | from types import TracebackType 6 | from typing import ( 7 | Any, 8 | Callable, 9 | Iterable, 10 | List, 11 | Optional, 12 | Protocol, 13 | Tuple, 14 | Type, 15 | TypedDict, 16 | ) 17 | 18 | CGIRequiredDefined = TypedDict( 19 | "CGIRequiredDefined", 20 | { 21 | # The HTTP request method, such as GET or POST. This cannot ever be an 22 | # empty string, and so is always required. 23 | "REQUEST_METHOD": str, 24 | # When HTTP_HOST is not set, these variables can be combined to determine 25 | # a default. 26 | # SERVER_NAME and SERVER_PORT are required strings and must never be empty. 27 | "SERVER_NAME": str, 28 | "SERVER_PORT": str, 29 | # The version of the protocol the client used to send the request. 30 | # Typically this will be something like "HTTP/1.0" or "HTTP/1.1" and 31 | # may be used by the application to determine how to treat any HTTP 32 | # request headers. (This variable should probably be called REQUEST_PROTOCOL, 33 | # since it denotes the protocol used in the request, and is not necessarily 34 | # the protocol that will be used in the server's response. However, for 35 | # compatibility with CGI we have to keep the existing name.) 36 | "SERVER_PROTOCOL": str, 37 | }, 38 | ) 39 | 40 | CGIOptionalDefined = TypedDict( 41 | "CGIOptionalDefined", 42 | { 43 | "REQUEST_URI": str, 44 | "REMOTE_ADDR": str, 45 | "REMOTE_PORT": str, 46 | # The initial portion of the request URL’s “path” that corresponds to the 47 | # application object, so that the application knows its virtual “location”. 48 | # This may be an empty string, if the application corresponds to the “root” 49 | # of the server. 50 | "SCRIPT_NAME": str, 51 | # The remainder of the request URL’s “path”, designating the virtual 52 | # “location” of the request’s target within the application. This may be an 53 | # empty string, if the request URL targets the application root and does 54 | # not have a trailing slash. 55 | "PATH_INFO": str, 56 | # The portion of the request URL that follows the “?”, if any. May be empty 57 | # or absent. 58 | "QUERY_STRING": str, 59 | # The contents of any Content-Type fields in the HTTP request. May be empty 60 | # or absent. 61 | "CONTENT_TYPE": str, 62 | # The contents of any Content-Length fields in the HTTP request. May be empty 63 | # or absent. 64 | "CONTENT_LENGTH": str, 65 | }, 66 | total=False, 67 | ) 68 | 69 | 70 | class InputStream(Protocol): 71 | """ 72 | An input stream (file-like object) from which the HTTP request body bytes can be 73 | read. (The server or gateway may perform reads on-demand as requested by the 74 | application, or it may pre- read the client's request body and buffer it in-memory 75 | or on disk, or use any other technique for providing such an input stream, according 76 | to its preference.) 77 | """ 78 | 79 | def read(self, size: int = -1, /) -> bytes: 80 | """ 81 | The server is not required to read past the client's specified Content-Length, 82 | and should simulate an end-of-file condition if the application attempts to read 83 | past that point. The application should not attempt to read more data than is 84 | specified by the CONTENT_LENGTH variable. 85 | A server should allow read() to be called without an argument, and return the 86 | remainder of the client's input stream. 
87 | A server should return empty bytestrings from any attempt to read from an empty 88 | or exhausted input stream. 89 | """ 90 | raise NotImplementedError 91 | 92 | def readline(self, limit: int = -1, /) -> bytes: 93 | """ 94 | Servers should support the optional "size" argument to readline(), but as in 95 | WSGI 1.0, they are allowed to omit support for it. 96 | (In WSGI 1.0, the size argument was not supported, on the grounds that it might 97 | have been complex to implement, and was not often used in practice... but then 98 | the cgi module started using it, and so practical servers had to start 99 | supporting it anyway!) 100 | """ 101 | raise NotImplementedError 102 | 103 | def readlines(self, hint: int = -1, /) -> List[bytes]: 104 | """ 105 | Note that the hint argument to readlines() is optional for both caller and 106 | implementer. The application is free not to supply it, and the server or gateway 107 | is free to ignore it. 108 | """ 109 | raise NotImplementedError 110 | 111 | 112 | class ErrorStream(Protocol): 113 | """ 114 | An output stream (file-like object) to which error output can be written, 115 | for the purpose of recording program or other errors in a standardized and 116 | possibly centralized location. This should be a "text mode" stream; 117 | i.e., applications should use "\n" as a line ending, and assume that it will 118 | be converted to the correct line ending by the server/gateway. 119 | (On platforms where the str type is unicode, the error stream should accept 120 | and log arbitrary unicode without raising an error; it is allowed, however, 121 | to substitute characters that cannot be rendered in the stream's encoding.) 122 | For many servers, wsgi.errors will be the server's main error log. Alternatively, 123 | this may be sys.stderr, or a log file of some sort. The server's documentation 124 | should include an explanation of how to configure this or where to find the 125 | recorded output. A server or gateway may supply different error streams to 126 | different applications, if this is desired. 127 | """ 128 | 129 | def flush(self) -> None: 130 | """ 131 | Since the errors stream may not be rewound, servers and gateways are free to 132 | forward write operations immediately, without buffering. In this case, the 133 | flush() method may be a no-op. Portable applications, however, cannot assume 134 | that output is unbuffered or that flush() is a no-op. They must call flush() 135 | if they need to ensure that output has in fact been written. 136 | (For example, to minimize intermingling of data from multiple processes writing 137 | to the same error log.) 138 | """ 139 | raise NotImplementedError 140 | 141 | def write(self, s: str, /) -> Any: 142 | raise NotImplementedError 143 | 144 | def writelines(self, seq: List[str], /) -> Any: 145 | raise NotImplementedError 146 | 147 | 148 | WSGIDefined = TypedDict( 149 | "WSGIDefined", 150 | { 151 | "wsgi.version": Tuple[int, int], # e.g. (1, 0) 152 | "wsgi.url_scheme": str, # e.g. "http" or "https" 153 | "wsgi.input": InputStream, 154 | "wsgi.errors": ErrorStream, 155 | # This value should evaluate true if the application object may be simultaneously 156 | # invoked by another thread in the same process, and should evaluate false otherwise. 157 | "wsgi.multithread": bool, 158 | # This value should evaluate true if an equivalent application object may be 159 | # simultaneously invoked by another process, and should evaluate false otherwise. 
160 | "wsgi.multiprocess": bool, 161 | # This value should evaluate true if the server or gateway expects (but does 162 | # not guarantee!) that the application will only be invoked this one time during 163 | # the life of its containing process. Normally, this will only be true for a 164 | # gateway based on CGI (or something similar). 165 | "wsgi.run_once": bool, 166 | }, 167 | ) 168 | 169 | 170 | class Environ(CGIRequiredDefined, CGIOptionalDefined, WSGIDefined): 171 | """ 172 | WSGI Environ 173 | """ 174 | 175 | 176 | ExceptionInfo = Tuple[Type[BaseException], BaseException, Optional[TracebackType]] 177 | 178 | # https://peps.python.org/pep-3333/#the-write-callable 179 | WriteCallable = Callable[[bytes], None] 180 | 181 | 182 | class StartResponse(Protocol): 183 | def __call__( 184 | self, 185 | status: str, 186 | response_headers: List[Tuple[str, str]], 187 | exc_info: ExceptionInfo | None = None, 188 | /, 189 | ) -> WriteCallable: 190 | raise NotImplementedError 191 | 192 | 193 | IterableChunks = Iterable[bytes] 194 | 195 | WSGIApp = Callable[[Environ, StartResponse], IterableChunks] 196 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/abersheeran/zibai/c0bb54f3dbfcd4d434ad7ef6bc55b2ffec17ad48/tests/__init__.py -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import random 3 | import socket 4 | import threading 5 | import time 6 | 7 | import pytest 8 | from zibai import create_bind_socket 9 | 10 | 11 | def create_ipv4_socket(): 12 | for _ in range(10): 13 | port = random.randint(20000, 60000) 14 | try: 15 | return create_bind_socket(f"127.0.0.1:{port}") 16 | except IOError: 17 | continue 18 | 19 | 20 | def create_ipv6_socket(): 21 | for _ in range(10): 22 | port = random.randint(20000, 60000) 23 | try: 24 | return create_bind_socket(f"::1:{port}") 25 | except IOError: 26 | continue 27 | 28 | 29 | def create_unix_socket(): 30 | path = "/tmp/test-zibai-server.sock" 31 | if os.path.exists(path): 32 | os.remove(path) 33 | return create_bind_socket(f"unix:{path}") 34 | 35 | 36 | @pytest.fixture(params=[create_ipv4_socket, create_ipv6_socket, create_unix_socket]) 37 | def bind_socket(request): 38 | if request.param is create_unix_socket: 39 | if not hasattr(socket, "AF_UNIX"): 40 | pytest.skip("AF_UNIX is not supported") 41 | 42 | with request.param() as sock: 43 | yield sock 44 | 45 | 46 | @pytest.fixture 47 | def socket_and_event(bind_socket): 48 | exit_event = threading.Event() 49 | try: 50 | yield bind_socket, exit_event 51 | finally: 52 | exit_event.set() 53 | time.sleep(0.15) 54 | -------------------------------------------------------------------------------- /tests/test___main__.py: -------------------------------------------------------------------------------- 1 | import subprocess 2 | import sys 3 | 4 | 5 | def test_main(): 6 | subprocess.check_call([sys.executable, "-m", "zibai", "--help"]) 7 | -------------------------------------------------------------------------------- /tests/test_core.py: -------------------------------------------------------------------------------- 1 | import socket 2 | import threading 3 | import time 4 | 5 | import h11 6 | import pytest 7 | from zibai.core import serve 8 | 9 | 10 | def hello_world_app(environ, start_response): 11 | 
start_response( 12 | "200 OK", 13 | [ 14 | ("Content-type", "text/plain; charset=utf-8"), 15 | ("Content-Length", "12"), 16 | ], 17 | ) 18 | return [b"Hello World!"] 19 | 20 | 21 | @pytest.mark.parametrize("backlog", [10, None]) 22 | def test_hello_world_app( 23 | socket_and_event: tuple[socket.socket, threading.Event], 24 | backlog: int | None, 25 | ) -> None: 26 | bind_socket, exit_event = socket_and_event 27 | if backlog is None: 28 | bind_socket.listen() 29 | else: 30 | bind_socket.listen(backlog) 31 | 32 | server_thread = threading.Thread( 33 | target=serve, 34 | kwargs=dict( 35 | app=hello_world_app, 36 | bind_sockets=[bind_socket], 37 | max_workers=10, 38 | graceful_exit=exit_event, 39 | ), 40 | daemon=True, 41 | ) 42 | server_thread.start() 43 | 44 | time.sleep(1) 45 | 46 | client_socket = socket.socket( 47 | bind_socket.family, bind_socket.type, bind_socket.proto 48 | ) 49 | client_socket.connect(bind_socket.getsockname()) 50 | with client_socket: 51 | client_connection = h11.Connection(h11.CLIENT) 52 | data = client_connection.send( 53 | h11.Request(method="GET", target="/", headers=[("Host", "example.com")]) 54 | ) 55 | assert data is not None 56 | client_socket.sendall(data) 57 | data = client_socket.recv(4096) 58 | client_connection.receive_data(data) 59 | event = client_connection.next_event() 60 | assert isinstance(event, h11.Response) 61 | assert event.status_code == 200 62 | assert event.headers == [ 63 | (b"content-type", b"text/plain; charset=utf-8"), 64 | (b"content-length", b"12"), 65 | (b"server", "Zî Bái".encode("latin-1")), 66 | ] 67 | -------------------------------------------------------------------------------- /tests/test_multiprocess.py: -------------------------------------------------------------------------------- 1 | import signal 2 | import sys 3 | import time 4 | from concurrent.futures import ThreadPoolExecutor 5 | 6 | import pytest 7 | from zibai.multiprocess import MultiProcessManager, ProcessParameters 8 | 9 | from .utils import new_console_in_windows 10 | 11 | 12 | def while_true(): 13 | signal.signal(signal.SIGINT, lambda sig, frame: sys.exit(0)) 14 | signal.signal(signal.SIGTERM, lambda sig, frame: sys.exit(0)) 15 | while True: 16 | time.sleep(1) 17 | 18 | 19 | @new_console_in_windows 20 | def test_multiprocess() -> None: 21 | """ 22 | Ensure that the MultiProcessManager works as expected. 23 | """ 24 | multi_process_manager = MultiProcessManager( 25 | 2, ProcessParameters(while_true), join_timeout=5 26 | ) 27 | executor = ThreadPoolExecutor(max_workers=1) 28 | future = executor.submit(multi_process_manager.mainloop) 29 | time.sleep(1) 30 | multi_process_manager.should_exit.set() 31 | multi_process_manager.terminate_all_quickly() 32 | future.result() 33 | 34 | 35 | @new_console_in_windows 36 | def test_multiprocess_sigbreak() -> None: 37 | """ 38 | Ensure that the SIGBREAK signal is handled as expected. 
39 |     """
40 |     multi_process_manager = MultiProcessManager(
41 |         2, ProcessParameters(while_true), join_timeout=5
42 |     )
43 |     executor = ThreadPoolExecutor(max_workers=1)
44 |     future = executor.submit(multi_process_manager.mainloop)
45 |     time.sleep(1)
46 |     multi_process_manager.should_exit.set()
47 |     multi_process_manager.terminate_all()
48 |     future.result()
49 | 
50 | 
51 | @pytest.fixture
52 | def multi_process_manager():
53 |     multi_process_manager = MultiProcessManager(
54 |         2, ProcessParameters(while_true), join_timeout=5
55 |     )
56 |     executor = ThreadPoolExecutor(max_workers=1)
57 |     future = executor.submit(multi_process_manager.mainloop)
58 |     time.sleep(1)
59 |     yield multi_process_manager
60 |     multi_process_manager.should_exit.set()
61 |     multi_process_manager.terminate_all_quickly()
62 |     future.result()
63 | 
64 | 
65 | @pytest.mark.skipif(not hasattr(signal, "SIGHUP"), reason="platform does not support SIGHUP")
66 | def test_multiprocess_sighup(multi_process_manager: MultiProcessManager) -> None:
67 |     """
68 |     Ensure that the SIGHUP signal is handled as expected.
69 |     """
70 |     pids = [p.pid for p in multi_process_manager.processes]
71 |     multi_process_manager.signal_queue.append(signal.SIGHUP)
72 |     time.sleep(1)
73 |     assert pids != [p.pid for p in multi_process_manager.processes]
74 | 
75 | 
76 | @pytest.mark.skipif(
77 |     not hasattr(signal, "SIGTTIN"), reason="platform does not support SIGTTIN"
78 | )
79 | def test_multiprocess_sigttin(multi_process_manager: MultiProcessManager) -> None:
80 |     """
81 |     Ensure that the SIGTTIN signal is handled as expected.
82 |     """
83 |     multi_process_manager.signal_queue.append(signal.SIGTTIN)
84 |     time.sleep(1)
85 |     assert len(multi_process_manager.processes) == 3
86 | 
87 | 
88 | @pytest.mark.skipif(
89 |     not hasattr(signal, "SIGTTOU"), reason="platform does not support SIGTTOU"
90 | )
91 | def test_multiprocess_sigttou(multi_process_manager: MultiProcessManager) -> None:
92 |     """
93 |     Ensure that the SIGTTOU signal is handled as expected.
94 |     """
95 |     multi_process_manager.signal_queue.append(signal.SIGTTOU)
96 |     time.sleep(1)
97 |     assert len(multi_process_manager.processes) == 1
98 |     multi_process_manager.signal_queue.append(signal.SIGTTOU)
99 |     time.sleep(1)
100 |     assert len(multi_process_manager.processes) == 1
101 | 
--------------------------------------------------------------------------------
/tests/utils.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import os
3 | from typing import Any, Callable
4 | 
5 | 
6 | def new_console_in_windows(test_function: Callable[[], Any]) -> Callable[[], Any]:
7 |     if os.name != "nt":
8 |         return test_function
9 | 
10 |     @functools.wraps(test_function)
11 |     def new_function():
12 |         import subprocess
13 |         import sys
14 | 
15 |         subprocess.check_call(
16 |             [
17 |                 sys.executable,
18 |                 "-c",
19 |                 f"from {test_function.__module__} import {test_function.__name__}; {test_function.__name__}.__wrapped__()",
20 |             ],
21 |             creationflags=subprocess.CREATE_NO_WINDOW,
22 |         )
23 | 
24 |     return new_function
25 | 
--------------------------------------------------------------------------------
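The signal tests above drive MultiProcessManager by appending to its signal_queue directly, because delivering real signals is awkward inside pytest. On a POSIX host the same handlers respond to signals sent to the master process; assuming the server wires OS-level handlers into signal_queue (which the handle_hup/handle_ttin/handle_ttou methods and their log messages suggest), worker scaling can be driven from outside, as in this sketch with a hypothetical master PID:

# Sketch only: nudging a running master process, assuming SIGTTIN/SIGTTOU/SIGHUP
# reach the handlers shown in src/zibai/multiprocess.py.
import os
import signal

MASTER_PID = 12345  # hypothetical; read it from your process supervisor

os.kill(MASTER_PID, signal.SIGTTIN)  # start one more worker process
os.kill(MASTER_PID, signal.SIGTTOU)  # stop one worker process (never below one)
os.kill(MASTER_PID, signal.SIGHUP)   # restart every worker process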
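The reloader in src/zibai/reloader.py is an ordinary context manager, so it can also be exercised on its own: the pattern string is split on ";", and any change under the current working directory that matches a pattern triggers the callback. A sketch in which the patterns and the callback are illustrative rather than the server's own wiring:

# Sketch only: watch the current working directory and react to file changes.
import time

from zibai.reloader import listen_for_changes


def on_change() -> None:
    # In the real server this callback is MultiProcessManager.on_watchfiles_reload.
    print("watched files changed, would restart workers here")


with listen_for_changes("*.py;*.toml", on_change):
    time.sleep(60)  # stand-in for the server main loop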
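Finally, the Protocol and TypedDict definitions in src/zibai/wsgi_typing.py double as annotations for application code, so an app can be type-checked against the same PEP 3333 contract the server implements. A sketch of a hello-world app written against those types (the app itself is illustrative, not part of the package):

# Sketch only: a WSGI app annotated with zibai's PEP 3333 typing helpers.
from zibai.wsgi_typing import Environ, IterableChunks, StartResponse


def app(environ: Environ, start_response: StartResponse) -> IterableChunks:
    body = "Handled a {} request\n".format(environ["REQUEST_METHOD"]).encode("utf-8")
    start_response(
        "200 OK",
        [
            ("Content-Type", "text/plain; charset=utf-8"),
            ("Content-Length", str(len(body))),
        ],
    )
    return [body]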