├── .coveragerc ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .readthedocs.yml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── LICENSE.APACHE2 ├── LICENSE.MIT ├── MANIFEST.in ├── README.rst ├── check.sh ├── ci.sh ├── docs-requirements.in ├── docs-requirements.txt ├── docs ├── Makefile ├── make.bat └── source │ ├── _static │ └── .gitkeep │ ├── conf.py │ ├── history.rst │ ├── index.rst │ └── reference.rst ├── newsfragments ├── .gitkeep └── README.rst ├── pyproject.toml ├── setup.py ├── test-requirements.in ├── test-requirements.txt └── tricycle ├── __init__.py ├── _meta.py ├── _multi_cancel.py ├── _rwlock.py ├── _service_nursery.py ├── _streams.py ├── _tests ├── __init__.py ├── conftest.py ├── test_meta.py ├── test_multi_cancel.py ├── test_rwlock.py ├── test_service_nursery.py ├── test_streams.py └── test_tree_var.py ├── _tree_var.py ├── _version.py └── py.typed /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch=True 3 | source=tricycle 4 | 5 | [report] 6 | precision = 1 7 | exclude_lines = 8 | pragma: no cover 9 | abc.abstractmethod 10 | if TYPE_CHECKING: 11 | @overload 12 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | Windows: 11 | name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' 12 | timeout-minutes: 20 13 | runs-on: 'windows-latest' 14 | strategy: 15 | fail-fast: false 16 | matrix: 17 | python: ['3.8', '3.9', '3.10', '3.11', '3.12'] 18 | arch: ['x86', 'x64'] 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v2 22 | - name: Setup python 23 | uses: actions/setup-python@v2 24 | with: 25 | # This allows the matrix to specify just the major.minor version while still 26 | # expanding it to get the latest patch version including alpha releases. 27 | # This avoids the need to update for each new alpha, beta, release candidate, 28 | # and then finally an actual release version. actions/setup-python doesn't 29 | # support this for PyPy presently so we get no help there. 
30 | # 31 | # CPython -> 3.9.0-alpha - 3.9.X 32 | # PyPy -> pypy-3.7 33 | python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} 34 | architecture: '${{ matrix.arch }}' 35 | cache: pip 36 | cache-dependency-path: test-requirements.txt 37 | - name: Run tests 38 | run: ./ci.sh 39 | shell: bash 40 | env: 41 | # Should match 'name:' up above 42 | JOB_NAME: 'Windows (${{ matrix.python }}, ${{ matrix.arch }}${{ matrix.extra_name }})' 43 | - uses: codecov/codecov-action@v3 44 | with: 45 | directory: empty 46 | name: 'Windows (${{ matrix.python }}, ${{ matrix.arch }})' 47 | flags: Windows,${{ matrix.python }} 48 | 49 | Ubuntu: 50 | name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' 51 | timeout-minutes: 10 52 | runs-on: 'ubuntu-latest' 53 | strategy: 54 | fail-fast: false 55 | matrix: 56 | python: ['pypy-3.8', 'pypy-3.9', '3.8', '3.9', '3.10', '3.11', '3.12'] 57 | check_lint: ['0'] 58 | extra_name: [''] 59 | include: 60 | - python: '3.8' 61 | check_lint: '1' 62 | extra_name: ', formatting and linting' 63 | continue-on-error: >- 64 | ${{ 65 | ( 66 | matrix.check_formatting == '1' 67 | || endsWith(matrix.python, '-dev') 68 | ) 69 | && true 70 | || false 71 | }} 72 | steps: 73 | - name: Checkout 74 | uses: actions/checkout@v2 75 | - name: Setup python 76 | uses: actions/setup-python@v2 77 | if: "!endsWith(matrix.python, '-dev')" 78 | with: 79 | python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} 80 | cache: pip 81 | cache-dependency-path: test-requirements.txt 82 | - name: Setup python (dev) 83 | uses: deadsnakes/action@v2.0.2 84 | if: endsWith(matrix.python, '-dev') 85 | with: 86 | python-version: '${{ matrix.python }}' 87 | - name: Run tests 88 | run: ./ci.sh 89 | env: 90 | CHECK_LINT: '${{ matrix.check_lint }}' 91 | # Should match 'name:' up above 92 | JOB_NAME: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' 93 | - uses: codecov/codecov-action@v3 94 | with: 95 | directory: empty 96 | name: 'Ubuntu (${{ matrix.python }}${{ matrix.extra_name }})' 97 | flags: Ubuntu,${{ matrix.python }} 98 | 99 | macOS: 100 | name: 'macOS (${{ matrix.python }})' 101 | timeout-minutes: 10 102 | runs-on: 'macos-latest' 103 | strategy: 104 | fail-fast: false 105 | matrix: 106 | python: ['3.8', '3.9', '3.10', '3.11', '3.12'] 107 | include: 108 | - python: '3.8' # <- not actually used 109 | arch: 'x64' 110 | pypy_nightly_branch: 'py3.8' 111 | extra_name: ', pypy 3.8 nightly' 112 | steps: 113 | - name: Checkout 114 | uses: actions/checkout@v2 115 | - name: Setup python 116 | uses: actions/setup-python@v2 117 | with: 118 | python-version: ${{ fromJSON(format('["{0}", "{1}"]', format('{0}.0-alpha - {0}.X', matrix.python), matrix.python))[startsWith(matrix.python, 'pypy')] }} 119 | cache: pip 120 | cache-dependency-path: test-requirements.txt 121 | - name: Run tests 122 | run: ./ci.sh 123 | env: 124 | # Should match 'name:' up above 125 | JOB_NAME: 'macOS (${{ matrix.python }})' 126 | - uses: codecov/codecov-action@v3 127 | with: 128 | directory: empty 129 | name: 'macOS (${{ matrix.python }})' 130 | flags: macOS,${{ matrix.python }} 131 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Sphinx docs 2 | docs/build/ 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 
6 | *.py[cod] 7 | *~ 8 | \#* 9 | .#* 10 | 11 | # C extensions 12 | *.so 13 | 14 | # Distribution / packaging 15 | .Python 16 | /build/ 17 | /develop-eggs/ 18 | /dist/ 19 | /eggs/ 20 | /lib/ 21 | /lib64/ 22 | /parts/ 23 | /sdist/ 24 | /var/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | 29 | # Installer logs 30 | pip-log.txt 31 | 32 | # Unit test / coverage reports 33 | htmlcov/ 34 | .tox/ 35 | .coverage 36 | .coverage.* 37 | .cache 38 | .pytest_cache 39 | nosetests.xml 40 | coverage.xml 41 | 42 | # Translations 43 | *.mo 44 | 45 | # Mr Developer 46 | .mr.developer.cfg 47 | .project 48 | .pydevproject 49 | 50 | # Rope 51 | .ropeproject 52 | 53 | # Django stuff: 54 | *.log 55 | *.pot 56 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | # https://docs.readthedocs.io/en/latest/config-file/index.html 2 | version: 2 3 | 4 | formats: 5 | - htmlzip 6 | - epub 7 | 8 | build: 9 | os: "ubuntu-22.04" 10 | tools: 11 | python: "3.11" 12 | 13 | python: 14 | install: 15 | - requirements: docs-requirements.txt 16 | 17 | sphinx: 18 | fail_on_warning: true 19 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | For the Trio code of conduct, see: 2 | https://trio.readthedocs.io/en/latest/code-of-conduct.html 3 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | At least at this early stage in its life, `tricycle` is pretty much "oremanj's bikeshed 2 | of interesting-but-maybe-not-fully-proven Trio ideas". PRs fixing bugs, extending 3 | existing code to be more robust or complete, and/or improving documentation are very welcome, 4 | but if you want to contribute a largish new feature, please raise it on the issue tracker 5 | first so I have a chance to think about whether I want to take on the maintenance 6 | burden or not. 7 | 8 | We mostly follow the Trio contributing guide: 9 | https://trio.readthedocs.io/en/latest/contributing.html 10 | but (this being largely a personal project) contributors will not automatically get commit 11 | bits, and we're trying out some alternative tooling (code formatting with `black` and 12 | enforced static type checking to pass `mypy --strict`) compared to the mainline Trio 13 | projects. `tricycle` anticipates moving to live under the python-trio Github organization 14 | once its scope has stabilized a bit, at which point it will stop being so much of a special 15 | snowflake. 16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | This software is made available under the terms of *either* of the 2 | licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to are 3 | made under the terms of *both* these licenses. 4 | -------------------------------------------------------------------------------- /LICENSE.APACHE2: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 
180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /LICENSE.MIT: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 21 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.rst LICENSE* CODE_OF_CONDUCT* CONTRIBUTING* 2 | include .coveragerc 3 | include test-requirements.txt 4 | include tricycle/py.typed 5 | recursive-include docs * 6 | prune docs/build 7 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | tricycle: miscellaneous extensions for Trio 2 | =========================================== 3 | 4 | .. image:: https://img.shields.io/pypi/v/tricycle.svg 5 | :target: https://pypi.org/project/tricycle 6 | :alt: Latest PyPI version 7 | 8 | .. image:: https://github.com/oremanj/tricycle/actions/workflows/ci.yml/badge.svg 9 | :target: https://github.com/oremanj/tricycle/actions/workflows/ci.yml 10 | :alt: Automated test status 11 | 12 | .. 
image:: https://img.shields.io/badge/docs-read%20now-blue.svg 13 | :target: https://tricycle.readthedocs.io/en/latest/?badge=latest 14 | :alt: Documentation status 15 | 16 | .. image:: https://codecov.io/gh/oremanj/tricycle/branch/master/graph/badge.svg 17 | :target: https://codecov.io/gh/oremanj/tricycle 18 | :alt: Test coverage 19 | 20 | .. image:: https://img.shields.io/badge/code%20style-black-000000.svg 21 | :target: https://github.com/ambv/black 22 | :alt: Code style: black 23 | 24 | .. image:: http://www.mypy-lang.org/static/mypy_badge.svg 25 | :target: http://www.mypy-lang.org/ 26 | :alt: Checked with mypy 27 | 28 | 29 | This is a library of extensions to `Trio 30 | `__, the friendly Python library 31 | for async concurrency and I/O. 32 | 33 | Currently we have: 34 | 35 | * a readers-writer lock (``tricycle.RWLock``) 36 | * slightly higher-level stream wrappers (``tricycle.BufferedReceiveStream`` 37 | and ``tricycle.TextReceiveStream``) 38 | * some tools for managing cancellation (``tricycle.open_service_nursery()`` 39 | and ``tricycle.MultiCancelScope``) 40 | * a way to make objects that want to keep background tasks running during the 41 | object's lifetime (``tricycle.BackgroundObject`` and the more general 42 | ``tricycle.ScopedObject``) 43 | * an analog of ``ContextVar`` that is inherited through the task tree rather 44 | than across ``start_soon()`` calls, and thus provides more safety for 45 | accessing a resource that is being managed by a parent task 46 | (``tricycle.TreeVar``) 47 | 48 | While we won't release known-broken code, and we strive for 49 | cleanliness and good test coverage, please be advised that 50 | ``tricycle`` is mostly one person's box of tools and has not necessarily 51 | been reviewed or tested to Trio's standards. It *is* being used in 52 | production, and API churn has been minimal thus far although we're not 53 | making any firm stability guarantees. If you find that it doesn't meet 54 | your needs, feel free to `let us know 55 | `__ and we'll endeavor to 56 | improve things. 57 | 58 | tricycle is tested on Linux, Windows, and macOS, on CPython versions 3.8 59 | through 3.12. It will probably work on PyPy as well. 60 | 61 | 62 | License and history 63 | ~~~~~~~~~~~~~~~~~~~ 64 | 65 | ``tricycle`` is licensed under your choice of the MIT or Apache 2.0 license. 66 | See `LICENSE `__ 67 | for details. 68 | 69 | This library has its origins in a package of utilities that the author 70 | wrote at `Hudson River Trading `__ 71 | while building things for them with Trio. Many thanks to HRT for 72 | supporting open source in this way! 73 | -------------------------------------------------------------------------------- /check.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -ex 4 | 5 | EXIT_STATUS=0 6 | 7 | # Autoformatter *first*, to avoid double-reporting errors 8 | if ! black --check setup.py tricycle; then 9 | EXIT_STATUS=1 10 | black --diff setup.py tricycle 11 | fi 12 | 13 | # Run flake8 without pycodestyle and import-related errors 14 | flake8 tricycle/ \ 15 | --ignore=D,E,W,F401,F403,F405,F821,F822\ 16 | || EXIT_STATUS=$? 17 | 18 | # Run mypy 19 | mypy --strict --implicit-reexport -p tricycle || EXIT_STATUS=$? 
20 | 21 | # Finally, leave a really clear warning of any issues and exit 22 | if [ $EXIT_STATUS -ne 0 ]; then 23 | cat <= 1.7.0 2 | sphinx_rtd_theme 3 | sphinxcontrib-trio 4 | towncrier 5 | trio >= 0.23.0 6 | -------------------------------------------------------------------------------- /docs-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile docs-requirements.in 6 | # 7 | alabaster==0.7.13 8 | # via sphinx 9 | attrs==23.1.0 10 | # via 11 | # outcome 12 | # trio 13 | babel==2.12.1 14 | # via sphinx 15 | certifi==2023.5.7 16 | # via requests 17 | charset-normalizer==3.1.0 18 | # via requests 19 | click==8.1.3 20 | # via 21 | # click-default-group 22 | # towncrier 23 | click-default-group==1.2.2 24 | # via towncrier 25 | docutils==0.18.1 26 | # via 27 | # sphinx 28 | # sphinx-rtd-theme 29 | idna==3.4 30 | # via 31 | # requests 32 | # trio 33 | imagesize==1.4.1 34 | # via sphinx 35 | incremental==22.10.0 36 | # via towncrier 37 | jinja2==3.1.2 38 | # via 39 | # sphinx 40 | # towncrier 41 | markupsafe==2.1.3 42 | # via jinja2 43 | outcome==1.2.0 44 | # via trio 45 | packaging==23.1 46 | # via sphinx 47 | pygments==2.15.1 48 | # via sphinx 49 | requests==2.31.0 50 | # via sphinx 51 | sniffio==1.3.0 52 | # via trio 53 | snowballstemmer==2.2.0 54 | # via sphinx 55 | sortedcontainers==2.4.0 56 | # via trio 57 | sphinx==6.2.1 58 | # via 59 | # -r docs-requirements.in 60 | # sphinx-rtd-theme 61 | # sphinxcontrib-jquery 62 | # sphinxcontrib-trio 63 | sphinx-rtd-theme==1.2.1 64 | # via -r docs-requirements.in 65 | sphinxcontrib-applehelp==1.0.4 66 | # via sphinx 67 | sphinxcontrib-devhelp==1.0.2 68 | # via sphinx 69 | sphinxcontrib-htmlhelp==2.0.1 70 | # via sphinx 71 | sphinxcontrib-jquery==4.1 72 | # via sphinx-rtd-theme 73 | sphinxcontrib-jsmath==1.0.1 74 | # via sphinx 75 | sphinxcontrib-qthelp==1.0.3 76 | # via sphinx 77 | sphinxcontrib-serializinghtml==1.1.5 78 | # via sphinx 79 | sphinxcontrib-trio==1.1.2 80 | # via -r docs-requirements.in 81 | towncrier==22.12.0 82 | # via -r docs-requirements.in 83 | trio==0.24.0 84 | # via -r docs-requirements.in 85 | urllib3==2.0.2 86 | # via requests 87 | 88 | # The following packages are considered to be unsafe in a requirements file: 89 | # setuptools 90 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = tricycle 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=tricycle 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/source/_static/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oremanj/tricycle/29559d1768cc5edb960acc6079ab4e3a6e15ee19/docs/source/_static/.gitkeep -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # Documentation build configuration file, created by 5 | # sphinx-quickstart on Sat Jan 21 19:11:14 2017. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | # If extensions (or modules to document with autodoc) are in another directory, 17 | # add these directories to sys.path here. If the directory is relative to the 18 | # documentation root, use os.path.abspath to make it absolute, like shown here. 
19 | # 20 | import os 21 | import sys 22 | # So autodoc can import our package 23 | sys.path.insert(0, os.path.abspath('../..')) 24 | 25 | # https://docs.readthedocs.io/en/stable/builds.html#build-environment 26 | if "READTHEDOCS" in os.environ: 27 | import glob 28 | 29 | if glob.glob("../../newsfragments/*.*.rst"): 30 | print("-- Found newsfragments; running towncrier --", flush=True) 31 | import subprocess 32 | 33 | subprocess.run( 34 | ["towncrier", "--yes", "--date", "not released yet"], 35 | cwd="../..", 36 | check=True, 37 | ) 38 | 39 | # Warn about all references to unknown targets 40 | nitpicky = True 41 | # Except for these ones, which we expect to point to unknown targets: 42 | nitpick_ignore = [ 43 | # Format is ("sphinx reference type", "string"), e.g.: 44 | ("py:obj", "bytes-like"), 45 | ("py:class", "None"), 46 | ("py:exc", "Anything else"), 47 | ("py:class", "tricycle._rwlock._RWLockStatistics"), 48 | ("py:class", "tricycle._tree_var.T"), 49 | ("py:class", "tricycle._tree_var.U"), 50 | ] 51 | default_role = "obj" 52 | 53 | # -- General configuration ------------------------------------------------ 54 | 55 | # If your documentation needs a minimal Sphinx version, state it here. 56 | # 57 | # needs_sphinx = '1.0' 58 | 59 | # Add any Sphinx extension module names here, as strings. They can be 60 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 61 | # ones. 62 | extensions = [ 63 | 'sphinx.ext.autodoc', 64 | 'sphinx.ext.intersphinx', 65 | 'sphinx.ext.coverage', 66 | 'sphinx.ext.napoleon', 67 | 'sphinxcontrib_trio', 68 | # Would like to use this, but it has too many issues currently: 69 | # 'sphinx_autodoc_typehints', 70 | ] 71 | 72 | intersphinx_mapping = { 73 | "python": ('https://docs.python.org/3', None), 74 | "trio": ('https://trio.readthedocs.io/en/stable', None), 75 | } 76 | 77 | autodoc_member_order = "bysource" 78 | 79 | # Add any paths that contain templates here, relative to this directory. 80 | templates_path = [] 81 | 82 | # The suffix(es) of source filenames. 83 | # You can specify multiple suffix as a list of string: 84 | # 85 | # source_suffix = ['.rst', '.md'] 86 | source_suffix = '.rst' 87 | 88 | # The master toctree document. 89 | master_doc = 'index' 90 | 91 | # General information about the project. 92 | project = 'tricycle' 93 | copyright = '2019, Joshua Oreman' 94 | author = 'Joshua Oreman' 95 | 96 | # The version info for the project you're documenting, acts as replacement for 97 | # |version| and |release|, also used in various other places throughout the 98 | # built documents. 99 | # 100 | # The short X.Y version. 101 | import tricycle 102 | version = tricycle.__version__ 103 | # The full version, including alpha/beta/rc tags. 104 | release = version 105 | 106 | # The language for content autogenerated by Sphinx. Refer to documentation 107 | # for a list of supported languages. 108 | # 109 | # This is also used if you do content translation via gettext catalogs. 110 | # Usually you set "language" from the command line for these cases. 111 | language = "en" 112 | 113 | # List of patterns, relative to source directory, that match files and 114 | # directories to ignore when looking for source files. 115 | # This patterns also effect to html_static_path and html_extra_path 116 | exclude_patterns = [] 117 | 118 | # The name of the Pygments (syntax highlighting) style to use. 
119 | pygments_style = 'sphinx' 120 | 121 | # The default language for :: blocks 122 | highlight_language = 'python3' 123 | 124 | # If true, `todo` and `todoList` produce output, else they produce nothing. 125 | todo_include_todos = False 126 | 127 | # Fold return type into the "Returns:" section, rather than making 128 | # a separate "Return type:" section 129 | napoleon_use_rtype = False 130 | 131 | 132 | # -- Options for HTML output ---------------------------------------------- 133 | 134 | # The theme to use for HTML and HTML Help pages. See the documentation for 135 | # a list of builtin themes. 136 | # 137 | #html_theme = 'alabaster' 138 | 139 | # We have to set this ourselves, not only because it's useful for local 140 | # testing, but also because if we don't then RTD will throw away our 141 | # html_theme_options. 142 | import sphinx_rtd_theme 143 | html_theme = 'sphinx_rtd_theme' 144 | html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] 145 | 146 | # Theme options are theme-specific and customize the look and feel of a theme 147 | # further. For a list of options available for each theme, see the 148 | # documentation. 149 | # 150 | html_theme_options = { 151 | # default is 2 152 | # show deeper nesting in the RTD theme's sidebar TOC 153 | # https://stackoverflow.com/questions/27669376/ 154 | # I'm not 100% sure this actually does anything with our current 155 | # versions/settings... 156 | "navigation_depth": 4, 157 | "logo_only": True, 158 | 'prev_next_buttons_location': 'both' 159 | } 160 | 161 | # Add any paths that contain custom static files (such as style sheets) here, 162 | # relative to this directory. They are copied after the builtin static files, 163 | # so a file named "default.css" will overwrite the builtin "default.css". 164 | html_static_path = ['_static'] 165 | 166 | 167 | # -- Options for HTMLHelp output ------------------------------------------ 168 | 169 | # Output file base name for HTML help builder. 170 | htmlhelp_basename = 'tricycledoc' 171 | 172 | 173 | # -- Options for LaTeX output --------------------------------------------- 174 | 175 | latex_elements = { 176 | # The paper size ('letterpaper' or 'a4paper'). 177 | # 178 | # 'papersize': 'letterpaper', 179 | 180 | # The font size ('10pt', '11pt' or '12pt'). 181 | # 182 | # 'pointsize': '10pt', 183 | 184 | # Additional stuff for the LaTeX preamble. 185 | # 186 | # 'preamble': '', 187 | 188 | # Latex figure (float) alignment 189 | # 190 | # 'figure_align': 'htbp', 191 | } 192 | 193 | # Grouping the document tree into LaTeX files. List of tuples 194 | # (source start file, target name, title, 195 | # author, documentclass [howto, manual, or own class]). 196 | latex_documents = [ 197 | (master_doc, 'tricycle.tex', 'tricycle Documentation', 198 | author, 'manual'), 199 | ] 200 | 201 | 202 | # -- Options for manual page output --------------------------------------- 203 | 204 | # One entry per manual page. List of tuples 205 | # (source start file, name, description, authors, manual section). 206 | man_pages = [ 207 | (master_doc, 'tricycle', 'tricycle Documentation', 208 | [author], 1) 209 | ] 210 | 211 | 212 | # -- Options for Texinfo output ------------------------------------------- 213 | 214 | # Grouping the document tree into Texinfo files. 
List of tuples 215 | # (source start file, target name, title, author, 216 | # dir menu entry, description, category) 217 | texinfo_documents = [ 218 | (master_doc, 'tricycle', 'tricycle Documentation', 219 | author, 'tricycle', 'Experimental extensions for Trio, the friendly async I/O library', 220 | 'Miscellaneous'), 221 | ] 222 | -------------------------------------------------------------------------------- /docs/source/history.rst: -------------------------------------------------------------------------------- 1 | Release history 2 | =============== 3 | 4 | .. currentmodule:: tricycle 5 | 6 | .. towncrier release notes start 7 | 8 | tricycle 0.4.1 (2024-02-02) 9 | --------------------------- 10 | 11 | * :func:`open_service_nursery` no longer assumes that ``TaskStatus.started()`` 12 | will be called from inside the task that was just started. This restores 13 | feature parity with regular Trio nurseries, which allow ``started()`` to be 14 | called anywhere, and fixes 15 | `trio-asyncio issue #135 `__. (`#27 `__) 16 | 17 | * tricycle no longer advertises itself as "experimental"; it has been around 18 | for more than 4 years at this point and is being used in production. 19 | 20 | 21 | tricycle 0.4.0 (2024-01-11) 22 | --------------------------- 23 | 24 | * tricycle now requires Python 3.8 and Trio 0.23.0 or greater. 25 | 26 | * tricycle no longer depends on the ``trio-typing`` library, since Trio now 27 | has upstream support for type hints. 28 | 29 | 30 | tricycle 0.3.0 (2023-06-05) 31 | --------------------------- 32 | 33 | * Added `tricycle.TreeVar`, which acts like a context variable that is 34 | inherited at nursery creation time (and then by child tasks of that 35 | nursery) rather than at task creation time. :ref:`Tree variables 36 | ` are useful for providing safe 'ambient' access to a 37 | resource that is tied to an ``async with`` block in the parent task, 38 | such as an open file or trio-asyncio event loop. (`#18 `__) 39 | 40 | 41 | tricycle 0.2.2 (2023-03-01) 42 | --------------------------- 43 | 44 | * tricycle now explicitly re-exports all names, improving PEP-561 compliance and 45 | allowing type checkers that enforce export strictness (including mypy with 46 | ``--no-implicit-reexport``) to check code using tricycle. 47 | `#14 `__ 48 | 49 | tricycle 0.2.1 (2020-09-30) 50 | --------------------------- 51 | 52 | * Update to support Trio 0.15.0 and later: rename ``trio.hazmat`` references 53 | to the new ``trio.lowlevel``. 54 | 55 | tricycle 0.2.0 (2019-12-12) 56 | --------------------------- 57 | 58 | * Add MultiCancelScope, open_service_nursery, ScopedObject, BackgroundObject. 59 | 60 | tricycle 0.1.0 (2019-05-06) 61 | --------------------------- 62 | 63 | * Initial release. 64 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. documentation master file, created by 2 | sphinx-quickstart on Sat Jan 21 19:11:14 2017. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | 7 | =========================================================================== 8 | tricycle: Miscellaneous extensions for Trio, the friendly async I/O library 9 | =========================================================================== 10 | 11 | ``tricycle`` is a library of miscellaneous extensions for `Trio 12 | `__. 13 | 14 | .. 
toctree:: 15 | :maxdepth: 2 16 | 17 | reference.rst 18 | history.rst 19 | 20 | ==================== 21 | Indices and tables 22 | ==================== 23 | 24 | * :ref:`genindex` 25 | * :ref:`modindex` 26 | * :ref:`search` 27 | * :ref:`glossary` 28 | -------------------------------------------------------------------------------- /docs/source/reference.rst: -------------------------------------------------------------------------------- 1 | API reference 2 | ============= 3 | 4 | .. module:: tricycle 5 | 6 | Synchronization primitives 7 | -------------------------- 8 | 9 | .. autoclass:: RWLock 10 | 11 | .. automethod:: acquire 12 | .. automethod:: acquire_read 13 | .. automethod:: acquire_write 14 | .. automethod:: acquire_nowait 15 | .. automethod:: acquire_read_nowait 16 | .. automethod:: acquire_write_nowait 17 | .. automethod:: release 18 | .. automethod:: read_locked 19 | .. automethod:: write_locked 20 | .. automethod:: locked 21 | .. automethod:: statistics 22 | 23 | 24 | Stream helpers 25 | -------------- 26 | 27 | tricycle comes with two wrappers around Trio receive streams: 28 | :class:`BufferedReceiveStream`, which helps in parsing binary protocols that 29 | use fixed-length fields, and :class:`TextReceiveStream`, which helps in 30 | parsing line-oriented textual data. 31 | 32 | .. autoclass:: BufferedReceiveStream 33 | :show-inheritance: 34 | :members: 35 | 36 | .. autoclass:: TextReceiveStream 37 | :show-inheritance: 38 | :members: 39 | 40 | .. attribute:: transport_stream 41 | .. attribute:: encoding 42 | .. attribute:: errors 43 | .. attribute:: chunk_size 44 | 45 | The values passed as constructor parameters are also available as 46 | attributes on the resulting :class:`TextReceiveStream` object. 47 | :attr:`errors` and :attr:`chunk_size` are writable; the others are read-only. 48 | (For example, if a read fails with a :exc:`UnicodeDecodeError`, it is safe 49 | to set ``stream.errors = "replace"`` and retry the read.) 50 | 51 | 52 | Cancellation helpers 53 | -------------------- 54 | 55 | Gracefully shutting down a complex task tree can sometimes require 56 | tasks to be cancelled in a particular order. 
As a motivating example, 57 | we'll consider a simple protocol implementation where the client and 58 | server exchange newline-terminated textual messages, and the client is 59 | supposed to send a message containing the text "goodbye" before it 60 | disconnects:: 61 | 62 | async def receive_messages( 63 | source: trio.abc.ReceiveStream, sink: trio.abc.SendChannel[str] 64 | ) -> None: 65 | async for line in TextReceiveStream(source, newline="\r\n"): 66 | await sink.send(line.rstrip("\r\n")) 67 | await sink.aclose() 68 | 69 | async def send_messages( 70 | source: trio.abc.ReceiveChannel[str], sink: trio.abc.HalfCloseableStream 71 | ) -> None: 72 | async with source: 73 | async for msg in source: 74 | await sink.send_all(msg.encode("utf-8") + b"\r\n") 75 | await sink.send_eof() 76 | 77 | @asynccontextmanager 78 | async def wrap_stream( 79 | stream: trio.abc.HalfCloseableStream 80 | ) -> AsyncIterator[Tuple[trio.abc.ReceiveChannel[str], trio.abc.SendChannel[str]]]: 81 | async with trio.open_nursery() as nursery: 82 | incoming_w, incoming_r = trio.open_memory_channel[str](0) 83 | outgoing_w, outgoing_r = trio.open_memory_channel[str](0) 84 | nursery.start_soon(receive_messages, stream, incoming_w) 85 | nursery.start_soon(send_messages, outgoing_r, stream) 86 | try: 87 | yield (incoming_r, outgoing_w) 88 | finally: 89 | with trio.move_on_after(1) as scope: 90 | scope.shield = True 91 | await outgoing_w.send("goodbye") 92 | 93 | async def example() -> None: 94 | with trio.move_on_after(5): 95 | async with trio.open_tcp_stream("example.com", 1234) as stream, \ 96 | wrap_stream(stream) as (incoming, outgoing): 97 | async for line in incoming: 98 | await outgoing.send("you said: " + line) 99 | if line == "quit": 100 | break 101 | 102 | The intent is that ``example()`` will echo back each message it receives, 103 | until either it receives a "quit" message or five seconds have elapsed. 104 | ``wrap_stream()`` has carefully set up a shielded cancel scope around 105 | the place where it sends the goodbye message, so that the message can 106 | still be sent if the ``async with wrap_stream(...)`` block is 107 | cancelled. (Without this shield, the call to ``send()`` would 108 | immediately raise :exc:`~trio.Cancelled` without sending anything.) 109 | 110 | If you run this, though, you'll find that it doesn't quite work. 111 | Exiting due to a "quit" will send the goodbye, but exiting on a 112 | cancellation won't. In fact, the cancellation case will probably 113 | crash with a :exc:`~trio.BrokenResourceError` when it tries to send 114 | the goodbye. Why is this? 115 | 116 | The problem is that the call to ``send()`` isn't sufficient on its own to 117 | cause the message to be transmitted. It only places the message into a 118 | channel; nothing will actually be sent until the ``send_messages()`` task 119 | reads from that channel and passes some bytes to ``send_all()``. 120 | Before that can happen, ``send_messages()`` will itself have been cancelled. 121 | 122 | The pattern in this example is a common one: some work running in the body 123 | of a nursery is reliant on services provided by background tasks in that 124 | nursery. A normal Trio nursery doesn't draw any distinctions between the 125 | body of the ``async with`` and the background tasks; if the nursery is 126 | cancelled, everything in it will receive that cancellation immediately.
127 | In this case, though, it seems that all of our troubles would be resolved 128 | if only we could somehow ensure that those background tasks stay running 129 | until the body of the ``async with`` has completed. 130 | 131 | tricycle's *service nursery* does exactly this. 132 | 133 | .. autofunction:: open_service_nursery 134 | 135 | 136 | If you need to do manipulations of this sort yourself, it can be helpful 137 | to be able to treat multiple cancel scopes as a single unit. 138 | 139 | .. autoclass:: MultiCancelScope 140 | 141 | .. automethod:: open_child 142 | .. automethod:: cancel 143 | .. autoattribute:: shield 144 | .. autoattribute:: cancel_called 145 | 146 | 147 | Scoped objects 148 | -------------- 149 | 150 | Trio follows the principles of `structured concurrency 151 | `__: 152 | its general-purpose APIs for spawning background tasks all require that 153 | the lifetime of each task be bounded by an ``async with`` block 154 | in its parent (represented by the :class:`nursery ` object). 155 | Sometimes this can seem rather inconvenient; for example, what if you want 156 | to create a class whose instances spawn tasks that live for the lifetime of 157 | the instance? The traditional approach goes something like this:: 158 | 159 | class WebsocketConnection: 160 | def __init__(self, nursery: trio.Nursery, **etc): 161 | self._nursery = nursery 162 | # initialize other members from **etc 163 | 164 | async def connect(self): 165 | await foo() # can't be in __init__ because __init__ is synchronous 166 | self._nursery.start_soon(self._manage_connection) 167 | 168 | @asynccontextmanager 169 | async def open_websocket_connection(**etc) -> AsyncIterator[WebsocketConnection]: 170 | async with open_service_nursery() as nursery: 171 | conn = WebsocketConnection(nursery, **etc) 172 | await conn.connect() 173 | yield conn 174 | nursery.cancel_scope.cancel() 175 | 176 | async def use_websocket(): 177 | async with open_websocket_connection(**etc) as conn: 178 | await conn.send("Hi!") 179 | 180 | tricycle improves on this by providing the ability to define *scoped objects*, 181 | which can only be instantiated as part of an ``async with`` block. 182 | In addition to the usual synchronous ``__init__`` method, their class can 183 | define async methods called ``__open__`` and/or ``__close__`` which run at the 184 | start and end (respectively) of the ``async with`` block. For greater expressive 185 | power, it is also possible to define a ``__wrap__`` method which returns the 186 | entire async context manager to use. 187 | 188 | .. autoclass:: ScopedObject 189 | 190 | A subclass is provided to handle the common case where a nursery should be 191 | created and remain open for the lifetime of the object: 192 | 193 | .. autoclass:: BackgroundObject 194 | :show-inheritance: 195 | 196 | .. attribute:: nursery 197 | 198 | The nursery that was created for this object. This attribute only 199 | exists within the scope of the object's ``async with`` block, so 200 | it cannot be used from ``__init__``, nor after the block has been 201 | exited. 
202 | 203 | If made to use :class:`BackgroundObject`, the websocket example 204 | from above would reduce to:: 205 | 206 | class WebsocketConnection(BackgroundObject, daemon=True): 207 | def __init__(self, **etc): 208 | ...  # initialize other members from **etc 209 | 210 | async def __open__(self) -> None: 211 | await foo() 212 | self.nursery.start_soon(self._manage_connection) 213 | 214 | async def use_websocket(): 215 | async with WebsocketConnection(**etc) as conn: 216 | await conn.send("Hi!") 217 | 218 | 219 | .. _tree-variables: 220 | 221 | Tree variables 222 | -------------- 223 | 224 | When you start a new Trio task, the initial values of its `context variables 225 | <https://docs.python.org/3/library/contextvars.html>`__ 226 | (`contextvars.ContextVar`) are inherited from the environment of the 227 | `~trio.Nursery.start_soon` or `~trio.Nursery.start` call that 228 | started the new task. For example, this code: 229 | 230 | .. code-block:: python3 231 | 232 | some_cvar = contextvars.ContextVar("some_cvar") 233 | 234 | async def print_in_child(tag): 235 | print("In child", tag, "some_cvar has value", some_cvar.get()) 236 | 237 | some_cvar.set(1) 238 | async with trio.open_nursery() as nursery: 239 | nursery.start_soon(print_in_child, 1) 240 | some_cvar.set(2) 241 | nursery.start_soon(print_in_child, 2) 242 | some_cvar.set(3) 243 | print("In parent some_cvar has value", some_cvar.get()) 244 | 245 | will produce output like:: 246 | 247 | In parent some_cvar has value 3 248 | In child 1 some_cvar has value 1 249 | In child 2 some_cvar has value 2 250 | 251 | (If you run it yourself, you might find that the "child 2" line comes 252 | before "child 1", but it will still be the case that child 1 sees value 1 253 | while child 2 sees value 2.) 254 | 255 | You might wonder why this differs from the behavior of cancel scopes, 256 | which only apply to a new task if they surround the new task's entire 257 | nursery (as explained in the Trio documentation about 258 | `child tasks and cancellation <https://trio.readthedocs.io/en/stable/reference-core.html#child-tasks-and-cancellation>`__). The difference is that a cancel 259 | scope has a limited lifetime (it can't cancel anything once you exit 260 | its ``with`` block), while a context variable's value is just a value 261 | (request #42 can keep being request #42 for as long as it likes, 262 | without any cooperation from the task that created it). 263 | 264 | In specialized cases, you might want to provide a task-local value 265 | that's inherited only from the parent nursery, like cancel scopes are. 266 | For example, maybe you're trying to provide child tasks with access to 267 | a limited-lifetime resource such as a nursery or network connection, 268 | and you only want a task to be able to use the resource if it's going 269 | to remain available for the task's entire lifetime. You can support 270 | this use case using `TreeVar`, which is like `contextvars.ContextVar` 271 | except for the way that it's inherited by new tasks. (It's a "tree" 272 | variable because it's inherited along the parent-child links that form 273 | the Trio task tree.) 274 | 275 | If the above example used `TreeVar`, then its output would be: 276 | 277 | .. code-block:: none 278 | :emphasize-lines: 3 279 | 280 | In parent some_cvar has value 3 281 | In child 1 some_cvar has value 1 282 | In child 2 some_cvar has value 1 283 | 284 | because child 2 would inherit the value from its parent nursery, rather than 285 | from the environment of the ``start_soon()`` call that creates it. 286 | 287 | .. autoclass:: tricycle.TreeVar(name, [*, default]) 288 | 289 | .. automethod:: being 290 | :with: 291 | .. automethod:: get_in(task_or_nursery, [default])
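For instance, a minimal sketch of handing a scoped resource down to child tasks with a tree variable might look like the following; the variable name, file name, and task structure here are purely illustrative, and nothing is assumed beyond the ``TreeVar``, ``being``, and ``get_in`` APIs documented above:

.. code-block:: python3

    import trio
    from tricycle import TreeVar

    # Illustrative tree variable holding an open file object
    open_file: TreeVar = TreeVar("open_file")

    async def worker() -> None:
        # The value is inherited through the nursery, so the file is
        # guaranteed to stay open for this task's entire lifetime.
        f = open_file.get()
        await trio.lowlevel.checkpoint()
        print("worker sees", f)

    async def main() -> None:
        async with await trio.open_file("example.txt", "w") as f:
            with open_file.being(f):
                async with trio.open_nursery() as nursery:
                    nursery.start_soon(worker)
                    print("value seen by the nursery:", open_file.get_in(nursery))

    trio.run(main)

Because ``worker()`` inherits the value through the nursery rather than through the ``start_soon()`` call, it can rely on the file remaining open for as long as the task itself runs.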
292 | -------------------------------------------------------------------------------- /newsfragments/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oremanj/tricycle/29559d1768cc5edb960acc6079ab4e3a6e15ee19/newsfragments/.gitkeep -------------------------------------------------------------------------------- /newsfragments/README.rst: -------------------------------------------------------------------------------- 1 | Adding newsfragments 2 | ==================== 3 | 4 | This directory collects "newsfragments": short files that each contain 5 | a snippet of ReST-formatted text that will be added to the next 6 | release notes. This should be a description of aspects of the change 7 | (if any) that are relevant to users. (This contrasts with your commit 8 | message and PR description, which are a description of the change as 9 | relevant to people working on the code itself.) 10 | 11 | Each file should be named like ``<ISSUE>.<TYPE>.rst``, where 12 | ``<ISSUE>`` is an issue number, and ``<TYPE>`` is one of: 13 | 14 | * ``feature`` 15 | * ``bugfix`` 16 | * ``doc`` 17 | * ``removal`` 18 | * ``misc`` 19 | 20 | So for example: ``123.feature.rst``, ``456.bugfix.rst`` 21 | 22 | If your PR fixes an issue, use that number here. If there is no issue, 23 | then after you submit the PR and get the PR number you can add a 24 | newsfragment using that instead. 25 | 26 | Note that the ``towncrier`` tool will automatically 27 | reflow your text, so don't try to do any fancy formatting. You can 28 | install ``towncrier`` and then run ``towncrier --draft`` if you want 29 | to get a preview of how your change will look in the final release 30 | notes. 31 | 32 | 33 | Making releases 34 | =============== 35 | 36 | ``pip install towncrier``, then run ``towncrier``. (You can use 37 | ``towncrier --draft`` to get a preview of what this will do.) 38 | 39 | You can configure ``towncrier`` (for example: customizing the 40 | different types of changes) by modifying ``pyproject.toml``. 41 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | target-version = ['py38'] 3 | 4 | [tool.towncrier] 5 | package = "tricycle" 6 | filename = "docs/source/history.rst" 7 | directory = "newsfragments" 8 | underlines = ["-", "~", "^"] 9 | issue_format = "`#{issue} <https://github.com/oremanj/tricycle/issues/{issue}>`__" 10 | 11 | [tool.pytest.ini_options] 12 | addopts = ["--strict-markers", "--strict-config"] 13 | xfail_strict = true 14 | faulthandler_timeout = 60 15 | junit_family = "xunit2" 16 | filterwarnings = [ 17 | "error", 18 | # https://gitter.im/python-trio/general?at=63bb8d0740557a3d5c688d67 19 | 'ignore:You are using cryptography on a 32-bit Python on a 64-bit Windows Operating System.
Cryptography will be significantly faster if you switch to using a 64-bit Python.:UserWarning', 20 | # this should remain until https://github.com/pytest-dev/pytest/pull/10894 is merged 21 | 'ignore:ast.Str is deprecated:DeprecationWarning', 22 | 'ignore:Attribute s is deprecated and will be removed:DeprecationWarning', 23 | 'ignore:ast.NameConstant is deprecated:DeprecationWarning', 24 | 'ignore:ast.Num is deprecated:DeprecationWarning', 25 | # https://github.com/python/mypy/issues/15330 26 | 'ignore:ast.Ellipsis is deprecated:DeprecationWarning', 27 | 'ignore:ast.Bytes is deprecated:DeprecationWarning', 28 | ] 29 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | exec(open("tricycle/_version.py", encoding="utf-8").read()) 4 | 5 | LONG_DESC = open("README.rst", encoding="utf-8").read() 6 | 7 | setup( 8 | name="tricycle", 9 | version=__version__, 10 | description="Miscellaneous extensions for Trio, the friendly async I/O library", 11 | url="https://github.com/oremanj/tricycle", 12 | long_description=LONG_DESC, 13 | author="Joshua Oreman", 14 | author_email="oremanj@gmail.com", 15 | license="MIT -or- Apache License 2.0", 16 | packages=find_packages(), 17 | include_package_data=True, 18 | install_requires=["trio >= 0.23.0"], 19 | keywords=["async", "trio"], 20 | python_requires=">=3.8", 21 | classifiers=[ 22 | "License :: OSI Approved :: MIT License", 23 | "License :: OSI Approved :: Apache Software License", 24 | "Framework :: Trio", 25 | "Operating System :: POSIX :: Linux", 26 | "Operating System :: MacOS :: MacOS X", 27 | "Programming Language :: Python :: 3 :: Only", 28 | "Programming Language :: Python :: Implementation :: CPython", 29 | "Development Status :: 3 - Alpha", 30 | "Intended Audience :: Developers", 31 | ], 32 | ) 33 | -------------------------------------------------------------------------------- /test-requirements.in: -------------------------------------------------------------------------------- 1 | # Testing 2 | pytest >= 5.0 3 | pytest-cov 4 | pytest-trio >= 0.6.0 5 | 6 | # Tools 7 | black; implementation_name == "cpython" 8 | mypy >= 1.8.0; implementation_name == "cpython" 9 | # 6.0.0 requires py3.8+: 10 | flake8 < 6.0.0 11 | 12 | # Project dependencies 13 | attrs >= 19.2.0 14 | async_generator >= 1.9 15 | trio >= 0.23.0 16 | -------------------------------------------------------------------------------- /test-requirements.txt: -------------------------------------------------------------------------------- 1 | # 2 | # This file is autogenerated by pip-compile with Python 3.12 3 | # by the following command: 4 | # 5 | # pip-compile test-requirements.in 6 | # 7 | async-generator==1.10 8 | # via -r test-requirements.in 9 | attrs==23.1.0 10 | # via 11 | # -r test-requirements.in 12 | # outcome 13 | # trio 14 | black==23.3.0 ; implementation_name == "cpython" 15 | # via -r test-requirements.in 16 | click==8.1.3 17 | # via black 18 | coverage[toml]==7.2.7 19 | # via 20 | # coverage 21 | # pytest-cov 22 | flake8==5.0.4 23 | # via -r test-requirements.in 24 | idna==3.4 25 | # via trio 26 | iniconfig==2.0.0 27 | # via pytest 28 | mccabe==0.7.0 29 | # via flake8 30 | mypy==1.8.0 ; implementation_name == "cpython" 31 | # via -r test-requirements.in 32 | mypy-extensions==1.0.0 33 | # via 34 | # black 35 | # mypy 36 | outcome==1.2.0 37 | # via 38 | # pytest-trio 39 | # trio 40 | packaging==23.1 41 | 
# via 42 | # black 43 | # pytest 44 | pathspec==0.11.1 45 | # via black 46 | platformdirs==3.5.1 47 | # via black 48 | pluggy==1.0.0 49 | # via pytest 50 | pycodestyle==2.9.1 51 | # via flake8 52 | pyflakes==2.5.0 53 | # via flake8 54 | pytest==7.3.1 55 | # via 56 | # -r test-requirements.in 57 | # pytest-cov 58 | # pytest-trio 59 | pytest-cov==4.1.0 60 | # via -r test-requirements.in 61 | pytest-trio==0.8.0 62 | # via -r test-requirements.in 63 | sniffio==1.3.0 64 | # via trio 65 | sortedcontainers==2.4.0 66 | # via trio 67 | trio==0.24.0 68 | # via 69 | # -r test-requirements.in 70 | # pytest-trio 71 | typing-extensions==4.6.3 72 | # via mypy 73 | -------------------------------------------------------------------------------- /tricycle/__init__.py: -------------------------------------------------------------------------------- 1 | from ._version import __version__ 2 | 3 | from ._rwlock import RWLock as RWLock 4 | from ._streams import ( 5 | BufferedReceiveStream as BufferedReceiveStream, 6 | TextReceiveStream as TextReceiveStream, 7 | ) 8 | from ._multi_cancel import MultiCancelScope as MultiCancelScope 9 | from ._service_nursery import open_service_nursery as open_service_nursery 10 | from ._meta import ScopedObject as ScopedObject, BackgroundObject as BackgroundObject 11 | from ._tree_var import TreeVar as TreeVar, TreeVarToken as TreeVarToken 12 | 13 | # watch this space... 14 | 15 | _export = None 16 | for _export in globals().values(): 17 | if hasattr(_export, "__module__"): 18 | _export.__module__ = __name__ 19 | del _export 20 | -------------------------------------------------------------------------------- /tricycle/_meta.py: -------------------------------------------------------------------------------- 1 | import abc 2 | import functools 3 | from contextlib import asynccontextmanager 4 | from trio import Nursery 5 | from typing import ( 6 | Any, 7 | AsyncIterator, 8 | Awaitable, 9 | Callable, 10 | ClassVar, 11 | Dict, 12 | Optional, 13 | Type, 14 | TypeVar, 15 | TYPE_CHECKING, 16 | ) 17 | from ._service_nursery import open_service_nursery 18 | 19 | 20 | T = TypeVar("T") 21 | 22 | 23 | class ScopedObjectMeta(abc.ABCMeta): 24 | # Metaclass that provides the ScopedObject magic. See ScopedObject 25 | # for the docs. 
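    # In brief: if a subclass defines __open__ and/or __close__, __new__ below
    # synthesizes a __wrap__ async context manager that first enters the base
    # classes' __wrap__, then awaits __open__ on entry and __close__ on exit
    # (even if the body raised). __call__ then makes ``Foo(*args)`` return an
    # async context manager that constructs the instance and enters __wrap__,
    # rather than returning the instance directly.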
26 | def __new__( 27 | mcls, clsname: str, bases: Any, dct: Dict[str, Any], **kwargs: Any 28 | ) -> "ScopedObjectMeta": 29 | if "__open__" in dct or "__close__" in dct: 30 | if "__wrap__" in dct: 31 | raise TypeError( 32 | "ScopedObjects can define __open__/__close__, or __wrap__, " 33 | "but not both" 34 | ) 35 | 36 | async def noop(self: Any) -> None: 37 | pass 38 | 39 | _open_: Callable[[Any], Awaitable[None]] = dct.get("__open__", noop) 40 | _close_: Callable[[Any], Awaitable[None]] = dct.get("__close__", noop) 41 | 42 | @asynccontextmanager 43 | async def wrap(self: Any) -> AsyncIterator[None]: 44 | async with super(cls, self).__wrap__(): # type: ignore 45 | await _open_(self) 46 | try: 47 | yield 48 | finally: 49 | await _close_(self) 50 | 51 | wrap.__name__ = "__wrap__" 52 | wrap.__qualname__ = dct["__qualname__"] + ".__wrap__" 53 | dct["__wrap__"] = wrap 54 | 55 | # NB: wrap() closes over this 'cls' variable 56 | cls: ScopedObjectMeta = super().__new__(mcls, clsname, bases, dct, **kwargs) 57 | return cls 58 | 59 | @asynccontextmanager 60 | async def __call__(cls: Type[T], *args: Any, **kwds: Any) -> AsyncIterator[T]: 61 | self: T = super().__call__(*args, **kwds) # type: ignore 62 | async with self.__wrap__(): # type: ignore 63 | yield self 64 | 65 | 66 | class ScopedObject(metaclass=ScopedObjectMeta): 67 | """An object whose lifetime must be bound to an ``async with`` block. 68 | 69 | Suppose that ``Foo`` is a :class:`ScopedObject` subclass. Then if 70 | you say ``Foo(*args)``, you won't actually get a ``Foo`` object; 71 | instead, you'll get an async context manager that evaluates to a 72 | ``Foo`` object. So you would need to say:: 73 | 74 | async with Foo(*args) as my_foo: 75 | # do stuff with my_foo 76 | 77 | This allows ``Foo`` to have reliable control of its lifetime, so 78 | it can spawn background tasks, deterministically execute cleanup 79 | code, and so on. 80 | 81 | If you want to implement such an object, inherit from :class:`ScopedObject` 82 | and indicate what should happen on entry and exit of the context. 83 | This should be done in one of the following two ways: 84 | 85 | * Define async ``__open__`` and/or ``__close__`` methods, which will 86 | be called from the context ``__aenter__`` and ``__aexit__`` respectively, 87 | taking no arguments and returning ``None``. 88 | ``__close__`` will be called no matter whether the context exits 89 | normally or due to an exception. (It can tell whether there is an 90 | active exception by using :func:`sys.exc_info`, but cannot suppress 91 | it.) If you use this approach, :class:`ScopedObject` takes care of 92 | invoking any initialization and finalization logic 93 | supplied by your base classes. 94 | 95 | * Define a ``__wrap__`` method that returns an async context 96 | manager. This gives you more flexibility than implementing 97 | ``__open__`` and ``__close__``, because you can run some code 98 | outside of your base classes' scope and can swallow exceptions, 99 | but means you have to enter the base classes' scope yourself. 100 | 101 | It is an error to define both ``__wrap__`` and (``__open__`` or 102 | ``__close__``). 
If you don't define ``__wrap__``, 103 | :class:`ScopedObject` generates it for you in terms of 104 | ``__open__`` and ``__close__``, with semantics equivalent to the 105 | following:: 106 | 107 | @asynccontextmanager 108 | async def __wrap__(self): 109 | async with super().__wrap__(): 110 | if hasattr(self, "__open__"): 111 | await self.__open__() 112 | try: 113 | yield 114 | finally: 115 | if hasattr(self, "__close__"): 116 | await self.__close__() 117 | 118 | """ 119 | 120 | __slots__ = ("__weakref__",) 121 | 122 | @asynccontextmanager 123 | async def __wrap__(self) -> AsyncIterator[None]: 124 | yield 125 | 126 | if TYPE_CHECKING: 127 | # These are necessary to placate mypy, which doesn't understand 128 | # the asynccontextmanager metaclass __call__. They should never 129 | # actually get called. 130 | async def __aenter__(self: T) -> T: 131 | raise AssertionError 132 | 133 | async def __aexit__(self, *exc: object) -> None: 134 | raise AssertionError 135 | 136 | 137 | class BackgroundObject(ScopedObject): 138 | """A :class:`ScopedObject` that automatically creates a 139 | :func:`service nursery ` for running background tasks. 140 | 141 | If you pass ``daemon=True`` when inheriting from :class:`BackgroundObject`, 142 | like so:: 143 | 144 | class MyObject(BackgroundObject, daemon=True): 145 | ... 146 | 147 | then the tasks spawned in the nursery will automatically be cancelled 148 | when the ``async with MyObject(...) as obj:`` block exits. 149 | Otherwise, the parent waits for the children to exit normally, like 150 | the default Trio nursery behavior. 151 | 152 | """ 153 | 154 | __slots__ = ("nursery",) 155 | __daemon: ClassVar[bool] 156 | 157 | def __init_subclass__(cls, *, daemon: bool = False, **kwargs: Any): 158 | cls.__daemon = daemon 159 | super().__init_subclass__(**kwargs) 160 | 161 | @asynccontextmanager 162 | async def __wrap__(self) -> AsyncIterator[None]: 163 | async with super().__wrap__(): 164 | try: 165 | async with open_service_nursery() as nursery: 166 | self.nursery = nursery 167 | yield 168 | if type(self).__daemon: 169 | nursery.cancel_scope.cancel() 170 | finally: 171 | try: 172 | del self.nursery 173 | except AttributeError: 174 | pass 175 | -------------------------------------------------------------------------------- /tricycle/_multi_cancel.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import trio 3 | import weakref 4 | from typing import Iterator, MutableSet, Optional 5 | 6 | 7 | @attr.s(eq=False, repr=False) 8 | class MultiCancelScope: 9 | r"""Manages a dynamic set of :class:`trio.CancelScope`\s that can be 10 | shielded and cancelled as a unit. 11 | 12 | New cancel scopes are added to the managed set using 13 | :meth:`open_child`, which returns the child scope so you can enter 14 | it with a ``with`` statement. Calls to :meth:`cancel` and changes 15 | to :attr:`shield` apply to all existing children and set the 16 | initial state for future children. Each child scope has its own 17 | :attr:`~trio.CancelScope.deadline` and :attr:`~trio.CancelScope.shield` 18 | attributes; changes to these do not modify the parent. 19 | 20 | There is no :attr:`~trio.CancelScope.cancelled_caught` attribute 21 | on :class:`MultiCancelScope` because it would be ambiguous; some 22 | of the child scopes might exit via a :exc:`trio.Cancelled` 23 | exception and others not. Look at the child :class:`trio.CancelScope` 24 | if you want to see whether it was cancelled or not. 
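
    A minimal usage sketch (the ``worker`` function here is illustrative,
    not part of the API)::

        parent = MultiCancelScope()

        async def worker():
            with parent.open_child():
                await trio.sleep_forever()

        async with trio.open_nursery() as nursery:
            nursery.start_soon(worker)
            nursery.start_soon(worker)
            parent.cancel()  # cancels every child scope, present and future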
25 | """ 26 | 27 | _child_scopes: MutableSet[trio.CancelScope] = attr.ib( 28 | factory=weakref.WeakSet, init=False 29 | ) 30 | _shield: bool = attr.ib(default=False, kw_only=True) 31 | _cancel_called: bool = attr.ib(default=False, kw_only=True) 32 | 33 | def __repr__(self) -> str: 34 | descr = ["MultiCancelScope"] 35 | if self._shield: 36 | descr.append(" shielded") 37 | if self._cancel_called: 38 | descr.append(" cancelled") 39 | return f"<{''.join(descr)}: {list(self._child_scopes)}>" 40 | 41 | @property 42 | def cancel_called(self) -> bool: 43 | """Returns true if :meth:`cancel` has been called.""" 44 | return self._cancel_called 45 | 46 | @property 47 | def shield(self) -> bool: 48 | """The overall shielding state for this :class:`MultiCancelScope`. 49 | 50 | Setting this attribute sets the :attr:`~trio.CancelScope.shield` 51 | attribute of all children, as well as the default initial shielding 52 | for future children. Individual children may modify their 53 | shield state to be different from the parent value, but further 54 | changes to the parent :attr:`MultiCancelScope.shield` will override 55 | their local choice. 56 | """ 57 | return self._shield 58 | 59 | @shield.setter 60 | def shield(self, new_value: bool) -> None: 61 | self._shield = new_value 62 | for scope in self._child_scopes: 63 | scope.shield = new_value 64 | 65 | def cancel(self) -> None: 66 | """Cancel all child cancel scopes. 67 | 68 | Additional children created after a call to :meth:`cancel` will 69 | start out in the cancelled state. 70 | """ 71 | if not self._cancel_called: 72 | for scope in self._child_scopes: 73 | scope.cancel() 74 | self._cancel_called = True 75 | 76 | def open_child(self, *, shield: Optional[bool] = None) -> trio.CancelScope: 77 | """Return a new child cancel scope. 78 | 79 | The child will start out cancelled if the parent 80 | :meth:`cancel` method has been called. Its initial shield state 81 | is given by the ``shield`` argument, or by the parent's 82 | :attr:`shield` attribute if the ``shield`` argument is not specified. 83 | """ 84 | if shield is None: 85 | shield = self._shield 86 | new_scope = trio.CancelScope(shield=shield) 87 | if self._cancel_called: 88 | new_scope.cancel() 89 | self._child_scopes.add(new_scope) 90 | return new_scope 91 | -------------------------------------------------------------------------------- /tricycle/_rwlock.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import trio 3 | from contextlib import asynccontextmanager 4 | from collections import OrderedDict 5 | from typing import ( 6 | AsyncIterator, 7 | FrozenSet, 8 | List, 9 | Optional, 10 | Sequence, 11 | Set, 12 | TYPE_CHECKING, 13 | ) 14 | 15 | 16 | @attr.s(auto_attribs=True) 17 | class _RWLockStatistics: 18 | locked: str 19 | readers: FrozenSet[trio.lowlevel.Task] 20 | writer: Optional[trio.lowlevel.Task] 21 | readers_waiting: int 22 | writers_waiting: int 23 | 24 | 25 | @attr.s(eq=False, repr=False) 26 | class RWLock: 27 | """A `readers-writer lock 28 | `__. 29 | 30 | Each acquisition of the lock specifies whether it is a "reader" or 31 | a "writer". At any given time, the lock may be held by one writer 32 | and no readers, by many readers and no writer, or by no one. 33 | 34 | This implementation is fair by default: if task A tried to acquire 35 | the lock before task B did, task B won't get it first. 
This 36 | implies that new readers can't acquire a reader-held lock after a 37 | writer has started waiting to acquire it, which helps avoid 38 | starvation of writers by readers. (The Wikipedia article linked 39 | above calls this "write-preferring".) If you want different behavior, 40 | see the :attr:`read_biased` attribute. 41 | 42 | Attributes: 43 | read_biased (bool): Whether new readers should be able to 44 | immediately acquire a readers-held lock even after some 45 | writers have started waiting for it. (The Wikipedia article 46 | linked above calls this "weakly read-preferring".) Note that 47 | setting :attr:`read_biased` to :data:`True` can result in 48 | indefinite starvation of writers if the read workload is 49 | busy enough. Changing this attribute to :data:`True` will 50 | immediately wake up all waiting readers to grant them the 51 | lock if it is currently readers-held with writers waiting. 52 | 53 | """ 54 | 55 | _writer: Optional[trio.lowlevel.Task] = attr.ib(default=None, init=False) 56 | _readers: Set[trio.lowlevel.Task] = attr.ib(factory=set, init=False) 57 | _waiting: "OrderedDict[trio.lowlevel.Task, bool]" = attr.ib( 58 | factory=OrderedDict, init=False 59 | ) 60 | _waiting_writers_count: int = attr.ib(default=0, init=False) 61 | _read_biased: bool = attr.ib(default=False, kw_only=True) 62 | 63 | def __repr__(self) -> str: 64 | state = ( 65 | "write-locked" 66 | if self._writer 67 | else "read-locked" 68 | if self._readers 69 | else "unlocked" 70 | ) 71 | if self._read_biased: 72 | state += " read-biased" 73 | if self._waiting: 74 | waiters_descs: List[str] = [] 75 | if self._waiting_writers_count: 76 | waiters_descs.append(f"{self._waiting_writers_count} writers") 77 | readers_count = len(self._waiting) - self._waiting_writers_count 78 | if readers_count: 79 | waiters_descs.append(f"{readers_count} readers") 80 | waiters = ", {} waiting".format(" and ".join(waiters_descs)) 81 | else: 82 | waiters = "" 83 | return f"<{state} RWLock object at {id(self):#x}{waiters}>" 84 | 85 | def locked(self) -> str: 86 | """Check whether the lock is currently held. 87 | 88 | Returns: 89 | ``"read"`` if the lock is held by reader(s), ``"write"`` 90 | if the lock is held by a writer, or ``""`` (which tests 91 | as false) if the lock is not held. 92 | """ 93 | return "read" if self._readers else "write" if self._writer else "" 94 | 95 | @trio.lowlevel.enable_ki_protection 96 | def acquire_nowait(self, *, for_write: bool) -> None: 97 | """Attempt to acquire the lock, without blocking. 98 | 99 | Args: 100 | for_write: If True, attempt to acquire the lock in write mode, 101 | which provides exclusive access. If False, attempt to acquire the 102 | lock in read mode, which permits other readers to also hold it. 
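
        For example, an opportunistic non-blocking read attempt could look
        like this (``shared_state`` is illustrative)::

            try:
                lock.acquire_nowait(for_write=False)
            except trio.WouldBlock:
                pass  # a writer holds or is waiting for the lock; skip this round
            else:
                try:
                    snapshot = dict(shared_state)
                finally:
                    lock.release()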
103 | 104 | Raises: 105 | trio.WouldBlock: if the lock cannot be acquired without blocking 106 | RuntimeError: if the current task already holds the lock (in either 107 | read or write mode) 108 | """ 109 | task = trio.lowlevel.current_task() 110 | if self._writer is task or task in self._readers: 111 | raise RuntimeError("attempt to re-acquire an already held RWLock") 112 | if self._writer is not None: 113 | raise trio.WouldBlock 114 | 115 | if for_write and not self._readers: 116 | self._writer = task 117 | elif not for_write and (self._read_biased or not self._waiting_writers_count): 118 | self._readers.add(task) 119 | else: 120 | raise trio.WouldBlock 121 | 122 | @trio.lowlevel.enable_ki_protection 123 | async def acquire(self, *, for_write: bool) -> None: 124 | """Acquire the lock, blocking if necessary. 125 | 126 | Args: 127 | for_write: If True, acquire the lock in write mode, 128 | which provides exclusive access. If False, acquire the 129 | lock in read mode, which permits other readers to also hold it. 130 | 131 | Raises: 132 | RuntimeError: if the current task already holds the lock (in either 133 | read or write mode) 134 | """ 135 | await trio.lowlevel.checkpoint_if_cancelled() 136 | try: 137 | self.acquire_nowait(for_write=for_write) 138 | except trio.WouldBlock: 139 | task = trio.lowlevel.current_task() 140 | self._waiting[task] = for_write 141 | self._waiting_writers_count += for_write 142 | 143 | def abort_fn(_: object) -> trio.lowlevel.Abort: 144 | del self._waiting[task] 145 | self._waiting_writers_count -= for_write 146 | return trio.lowlevel.Abort.SUCCEEDED 147 | 148 | await trio.lowlevel.wait_task_rescheduled(abort_fn) 149 | else: 150 | await trio.lowlevel.cancel_shielded_checkpoint() 151 | 152 | @trio.lowlevel.enable_ki_protection 153 | def release(self) -> None: 154 | """Release the lock. 155 | 156 | Raises: 157 | RuntimeError: if the current task does not hold the lock (in either 158 | read or write mode) 159 | """ 160 | task = trio.lowlevel.current_task() 161 | if task is self._writer: 162 | self._writer = None 163 | elif task in self._readers: 164 | self._readers.remove(task) 165 | if self._readers: 166 | return 167 | else: 168 | raise RuntimeError("can't release a RWLock you don't own") 169 | 170 | while self._writer is None and self._waiting: 171 | task, for_write = self._waiting.popitem(last=False) 172 | if not for_write: 173 | # Next task is a reader: since we haven't woken 174 | # a writer yet, we can wake it, 175 | self._readers.add(task) 176 | trio.lowlevel.reschedule(task) 177 | # In read-biased mode we can continue to wake 178 | # all other readers. 179 | if self._read_biased: 180 | self._wake_all_readers() 181 | return 182 | # In fair mode we can only wake the readers that 183 | # arrived before the next writer, so keep iterating 184 | # through self._waiting. 185 | elif not self._readers: 186 | # Next task is a writer and there are no readers; 187 | # wake the writer and we're done. 188 | self._writer = task 189 | self._waiting_writers_count -= 1 190 | trio.lowlevel.reschedule(task) 191 | break 192 | else: 193 | # Next task is a writer, but can't be woken because 194 | # there are readers. Put it back at the front of the 195 | # line. 
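                # (popitem() above already removed it from self._waiting, so we
                # re-insert it and then move it back to the head of the queue.)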
196 | self._waiting[task] = for_write 197 | self._waiting.move_to_end(task, last=False) 198 | break 199 | 200 | def _wake_all_readers(self) -> None: 201 | for task, for_write in list(self._waiting.items()): 202 | if not for_write: 203 | del self._waiting[task] 204 | self._readers.add(task) 205 | trio.lowlevel.reschedule(task) 206 | 207 | # https://github.com/python/mypy/issues/1362: mypy doesn't support 208 | # decorated properties yet 209 | if TYPE_CHECKING: 210 | read_biased: bool 211 | else: 212 | 213 | @property 214 | def read_biased(self) -> bool: 215 | return self._read_biased 216 | 217 | @read_biased.setter 218 | @trio.lowlevel.enable_ki_protection 219 | def read_biased(self, new_value: bool) -> None: 220 | if new_value and not self._read_biased: 221 | self._wake_all_readers() 222 | self._read_biased = new_value 223 | 224 | def acquire_read_nowait(self) -> None: 225 | """Equivalent to ``acquire_nowait(for_write=False)``.""" 226 | return self.acquire_nowait(for_write=False) 227 | 228 | def acquire_write_nowait(self) -> None: 229 | """Equivalent to ``acquire_nowait(for_write=True)``.""" 230 | return self.acquire_nowait(for_write=True) 231 | 232 | async def acquire_read(self) -> None: 233 | """Equivalent to ``acquire(for_write=False)``.""" 234 | return await self.acquire(for_write=False) 235 | 236 | async def acquire_write(self) -> None: 237 | """Equivalent to ``acquire(for_write=True)``.""" 238 | return await self.acquire(for_write=True) 239 | 240 | @trio.lowlevel.enable_ki_protection 241 | @asynccontextmanager 242 | async def read_locked(self) -> AsyncIterator[None]: 243 | """Returns an async context manager whose ``__aenter__`` blocks 244 | to acquire the lock in read mode, and whose ``__aexit__`` 245 | synchronously releases it. 246 | """ 247 | await self.acquire(for_write=False) 248 | try: 249 | yield 250 | finally: 251 | self.release() 252 | 253 | @trio.lowlevel.enable_ki_protection 254 | @asynccontextmanager 255 | async def write_locked(self) -> AsyncIterator[None]: 256 | """Returns an async context manager whose ``__aenter__`` blocks 257 | to acquire the lock in write mode, and whose ``__aexit__`` 258 | synchronously releases it. 259 | """ 260 | await self.acquire(for_write=True) 261 | try: 262 | yield 263 | finally: 264 | self.release() 265 | 266 | def statistics(self) -> _RWLockStatistics: 267 | r"""Return an object containing debugging information. 
268 | 269 | Currently the following fields are defined: 270 | 271 | * ``locked``: boolean indicating whether the lock is held by anyone 272 | * ``state``: string with one of the values ``"read"`` (held by one 273 | or more readers), ``"write"`` (held by one writer), 274 | or ``"unlocked"`` (held by no one) 275 | * ``readers``: a frozenset of the :class:`~trio.lowlevel.Task`\s 276 | currently holding the lock in read mode (may be empty) 277 | * ``writer``: the :class:`trio.lowlevel.Task` currently holding 278 | the lock in write mode, or None if the lock is not held in write mode 279 | * ``readers_waiting``: the number of tasks blocked waiting to acquire 280 | the lock in read mode 281 | * ``writers_waiting``: the number of tasks blocked waiting to acquire 282 | the lock in write mode 283 | 284 | """ 285 | return _RWLockStatistics( 286 | locked=self.locked(), 287 | readers=frozenset(self._readers), 288 | writer=self._writer, 289 | readers_waiting=len(self._waiting) - self._waiting_writers_count, 290 | writers_waiting=self._waiting_writers_count, 291 | ) 292 | -------------------------------------------------------------------------------- /tricycle/_service_nursery.py: -------------------------------------------------------------------------------- 1 | import collections 2 | import trio 3 | from functools import partial 4 | from contextlib import asynccontextmanager 5 | from typing import Any, AsyncIterator, Awaitable, Callable, Optional 6 | from ._multi_cancel import MultiCancelScope 7 | 8 | 9 | def _get_coroutine_or_flag_problem( 10 | async_fn: Callable[..., Awaitable[Any]], *args: Any, **kwargs: Any 11 | ) -> Awaitable[Any]: 12 | """Call async_fn(*args) to produce and return a coroutine. If that 13 | doesn't work or doesn't produce a coroutine, try to get help 14 | from trio in describing what went wrong. 15 | """ 16 | try: 17 | # can we call it? 18 | coro = async_fn(*args, **kwargs) 19 | except TypeError: 20 | probe_fn = async_fn 21 | else: 22 | # did we get a coroutine object back? 23 | if isinstance(coro, collections.abc.Coroutine): 24 | return coro 25 | probe_fn = partial(async_fn, **kwargs) 26 | 27 | # TODO: upstream a change that lets us access just the nice 28 | # error detection logic without running the risk of starting a task 29 | 30 | # If we're not happy with this async_fn, trio won't be either, 31 | # and will tell us why in much greater detail. 32 | try: 33 | trio.lowlevel.spawn_system_task(probe_fn, *args) 34 | except TypeError as ex: 35 | problem_with_async_fn = ex 36 | else: 37 | # we started the task successfully, wtf? 38 | raise trio.TrioInternalError( 39 | "tried to spawn a dummy task to figure out what was wrong with " 40 | "{async_fn!r} as an async function, but it seems to have started " 41 | "successfully -- all bets are off at this point" 42 | ) 43 | raise problem_with_async_fn 44 | 45 | 46 | @asynccontextmanager 47 | async def open_service_nursery() -> AsyncIterator[trio.Nursery]: 48 | """Provides a nursery augmented with a cancellation ordering constraint. 49 | 50 | If an entire service nursery becomes cancelled, either due to an 51 | exception raised by some task in the nursery or due to the 52 | cancellation of a scope that surrounds the nursery, the body of 53 | the nursery ``async with`` block will receive the cancellation 54 | first, and no other tasks in the nursery will be cancelled until 55 | the body of the ``async with`` block has been exited. 
56 | 57 | This is intended to support the common pattern where the body of 58 | the ``async with`` block uses some service that the other 59 | task(s) in the nursery provide. For example, if you have:: 60 | 61 | async with open_websocket(host, port) as conn: 62 | await communicate_with_websocket(conn) 63 | 64 | where ``open_websocket()`` enters a nursery and spawns some tasks 65 | into that nursery to manage the connection, you probably want 66 | ``conn`` to remain usable in any ``finally`` or ``__aexit__`` 67 | blocks in ``communicate_with_websocket()``. With a regular 68 | nursery, this is not guaranteed; with a service nursery, it is. 69 | An example hinting at general usage:: 70 | 71 | @asynccontextmanager 72 | async def open_websocket(host, port): 73 | async with open_service_nursery() as nursery: 74 | try: 75 | # ... make some child tasks ... 76 | yield connection 77 | finally: 78 | # The yield body is already cancelled, and 79 | # child tasks are still available here for cleanup... 80 | pass 81 | 82 | Now, anything in the body of the ``open_websocket()`` context, including 83 | ``communicate_with_websocket()``, will be given first opportunity to cancel 84 | gracefully. Subsequently, the ``finally`` block in the ``open_websocket()`` 85 | implementation runs, and tasks spawned within the ``try`` body are still 86 | available during cleanup. 87 | 88 | Note that child tasks spawned using ``start()`` gain their protection from 89 | premature cancellation only at the point of their call to 90 | ``task_status.started()``. 91 | """ 92 | 93 | async with trio.open_nursery() as nursery: 94 | child_task_scopes = MultiCancelScope(shield=True) 95 | 96 | def start_soon( 97 | async_fn: Callable[..., Awaitable[Any]], 98 | *args: Any, 99 | name: Optional[str] = None, 100 | ) -> None: 101 | async def wrap_child(coro: Awaitable[Any]) -> None: 102 | with child_task_scopes.open_child(): 103 | await coro 104 | 105 | coro = _get_coroutine_or_flag_problem(async_fn, *args) 106 | type(nursery).start_soon(nursery, wrap_child, coro, name=name or async_fn) 107 | 108 | async def start( 109 | async_fn: Callable[..., Awaitable[Any]], 110 | *args: Any, 111 | name: Optional[str] = None, 112 | ) -> Any: 113 | async def wrap_child(*, task_status: trio.TaskStatus[Any]) -> None: 114 | # For start(), the child doesn't get shielded until it 115 | # calls task_status.started(). 
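                # To arrange that, we open the child's scope unshielded and
                # monkeypatch task_status.started(): once the child has been
                # reparented into this nursery, its scope adopts whatever
                # shield state the service nursery is currently applying.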
116 | shield_scope = child_task_scopes.open_child(shield=False) 117 | child_task = trio.lowlevel.current_task() 118 | 119 | def wrap_started(value: object = None) -> None: 120 | type(task_status).started(task_status, value) # type: ignore 121 | if child_task.parent_nursery is not nursery: 122 | # started() didn't move the task due to a cancellation, 123 | # so it doesn't get the shield 124 | return 125 | shield_scope.shield = child_task_scopes.shield 126 | 127 | task_status.started = wrap_started # type: ignore 128 | with shield_scope: 129 | await async_fn(*args, task_status=task_status) 130 | 131 | return await type(nursery).start(nursery, wrap_child, name=name or async_fn) 132 | 133 | nursery.start_soon = start_soon # type: ignore 134 | nursery.start = start # type: ignore 135 | try: 136 | yield nursery 137 | finally: 138 | child_task_scopes.shield = False 139 | -------------------------------------------------------------------------------- /tricycle/_streams.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import codecs 3 | import trio 4 | from typing import Optional, Union, Tuple, AsyncIterator, TypeVar 5 | from io import IncrementalNewlineDecoder 6 | 7 | 8 | __all__ = ["BufferedReceiveStream", "TextReceiveStream"] 9 | 10 | 11 | @attr.s(auto_attribs=True, eq=False) 12 | class BufferedReceiveStream(trio.abc.AsyncResource): 13 | """Wraps a :class:`~trio.abc.ReceiveStream` with buffering capabilities, 14 | so you can receive known amounts of data at a time. 15 | """ 16 | 17 | transport_stream: trio.abc.ReceiveStream 18 | chunk_size: int = 4096 19 | 20 | def __attrs_post_init__(self) -> None: 21 | self._buffer = bytearray() 22 | self._receive_pos = 0 23 | 24 | async def aclose(self) -> None: 25 | """Discard all buffered data and close the underlying stream.""" 26 | del self._buffer[:] 27 | self._receive_pos = 0 28 | await self.transport_stream.aclose() 29 | 30 | async def receive(self, num_bytes: int) -> bytes: 31 | """Receive and return ``num_bytes`` bytes, or fewer if EOF is 32 | encountered. 33 | 34 | Args: 35 | num_bytes (int): The number of bytes to return. Must be 36 | greater than zero. 37 | 38 | Returns: 39 | bytes or bytearray: The data received, exactly ``num_bytes`` bytes 40 | unless EOF is encountered. If there is no data left to return 41 | before EOF, returns an empty bytestring (``b""``). 42 | 43 | Raises: 44 | Exception: Anything raised by the :meth:`~trio.abc.ReceiveStream.receive_some` 45 | method of the underlying transport stream. 46 | 47 | """ 48 | if self._receive_pos + num_bytes > len(self._buffer): 49 | del self._buffer[: self._receive_pos] 50 | self._receive_pos = 0 51 | while num_bytes > len(self._buffer): 52 | to_receive = max(self.chunk_size, num_bytes - len(self._buffer)) 53 | data = await self.transport_stream.receive_some(to_receive) 54 | if data == b"": 55 | break # EOF 56 | self._buffer.extend(data) 57 | else: 58 | await trio.lowlevel.checkpoint() 59 | 60 | data = self._buffer[self._receive_pos : self._receive_pos + num_bytes] 61 | self._receive_pos += len(data) 62 | return data 63 | 64 | async def receive_all_or_none(self, num_bytes: int) -> Optional[bytes]: 65 | """Receive and return exactly ``num_bytes`` bytes, or ``None`` 66 | if EOF is encountered before receiving any bytes. 67 | 68 | Args: 69 | num_bytes (int): The number of bytes to return. Must be 70 | greater than zero. 
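
        For example, reading a length-prefixed frame might look like this
        (sketch of a hypothetical ``receive_frame()`` helper; ``buffered``
        is a :class:`BufferedReceiveStream` and the 4-byte big-endian
        header is illustrative)::

            header = await buffered.receive_all_or_none(4)
            if header is None:
                return None  # clean EOF between frames
            payload = await buffered.receive_exactly(int.from_bytes(header, "big"))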
71 | 72 | Returns: 73 | bytes or None: 74 | The data received, exactly ``num_bytes`` bytes; 75 | unless EOF is encountered before reading any data, in which 76 | case we return ``None``. 77 | 78 | Raises: 79 | ValueError: if EOF is encountered after reading at least one byte 80 | but before reading ``num_bytes`` bytes. 81 | """ 82 | data = await self.receive(num_bytes) 83 | if data == b"": 84 | return None 85 | if len(data) != num_bytes: 86 | self._receive_pos -= len(data) 87 | raise ValueError( 88 | f"unclean EOF ({len(data)} bytes after boundary, " 89 | f"expected at least {num_bytes})" 90 | ) 91 | return data 92 | 93 | async def receive_exactly(self, num_bytes: int) -> bytes: 94 | """Receive and return exactly ``num_bytes`` bytes, throwing an 95 | exception if EOF is encountered before then. 96 | 97 | Args: 98 | num_bytes (int): The number of bytes to return. Must be 99 | greater than zero. 100 | 101 | Returns: 102 | bytes: The data received, exactly ``num_bytes`` bytes. 103 | 104 | Raises: 105 | ValueError: if EOF is encountered before reading ``num_bytes`` bytes. 106 | """ 107 | data = await self.receive(num_bytes) 108 | if len(data) != num_bytes: 109 | self._receive_pos -= len(data) 110 | raise ValueError(f"unclean EOF (read only {len(data)}/{num_bytes} bytes)") 111 | return data 112 | 113 | def unget(self, data: bytes) -> None: 114 | """Put the bytes in ``data`` back into the buffer, so they will be the 115 | next thing received by a call to one of the receive methods. 116 | """ 117 | new_receive_pos = max(0, self._receive_pos - len(data)) 118 | self._buffer[new_receive_pos : self._receive_pos] = data 119 | self._receive_pos = new_receive_pos 120 | 121 | 122 | class TextReceiveStream(trio.abc.AsyncResource): 123 | r"""Wraps a :class:`~trio.abc.ReceiveStream` with buffering and decoding 124 | capabilities for receiving line-oriented text. 125 | 126 | See :class:`io.TextIOWrapper` for more documentation on the ``encoding``, 127 | ``errors``, and ``newline`` arguments. 128 | 129 | Args: 130 | transport_stream (~trio.abc.ReceiveStream): The stream to receive 131 | data on. 132 | encoding (str): The encoding with which to decode received data. 133 | If none is specified, we use the value returned by 134 | :func:`locale.getpreferredencoding`. 135 | errors (str): Controls how to respond to decoding errors; common 136 | values include ``"strict"`` (throw an exception), ``"ignore"`` 137 | (drop the bad character), or ``"replace"`` (replace the bad 138 | character with a replacement marker). The default of ``None`` 139 | is equivalent to ``"strict"``. 140 | newline (str): Controls how line endings are handled. Use 141 | ``None`` to convert any newline format to ``"\n"``, 142 | ``""`` to accept any newline format and pass it through unchanged, 143 | or ``"\r"``, ``"\n"``, or ``"\r\n"`` to only accept that 144 | sequence as a newline. 145 | chunk_size (int): The number of bytes to request in each call to the 146 | underlying transport stream's 147 | :meth:`~trio.abc.ReceiveStream.receive_some` method. 148 | 149 | """ 150 | 151 | transport_stream: trio.abc.ReceiveStream 152 | chunk_size: int 153 | 154 | # Either _decoder is the same as _underlying_decoder, or _decoder 155 | # is an IncrementalNewlineDecoder. We need to remember both 156 | # because IncrementalNewlineDecoder doesn't have a .errors 157 | # attribute. 
158 | _decoder: codecs.IncrementalDecoder 159 | _underlying_decoder: codecs.IncrementalDecoder 160 | 161 | def __init__( 162 | self, 163 | transport_stream: trio.abc.ReceiveStream, 164 | encoding: Optional[str] = None, 165 | *, 166 | errors: Optional[str] = None, 167 | newline: Optional[str] = "", 168 | chunk_size: int = 8192, 169 | ): 170 | if encoding is None: 171 | import locale 172 | 173 | encoding = locale.getpreferredencoding(False) 174 | 175 | self.transport_stream = transport_stream 176 | self.chunk_size = chunk_size 177 | self._encoding = encoding 178 | 179 | # The newline parameter is a newline sequence, or "" to accept 180 | # any of \r \n \r\n, or None to convert all to \n. self._newline 181 | # is the sequence we'll look for, or "" for any of \r \n \r\n. 182 | 183 | self._newline = newline if newline is not None else "\n" 184 | 185 | def make_decoder( 186 | encoding: str, errors: Optional[str], universal: bool, translate: bool 187 | ) -> codecs.IncrementalDecoder: 188 | info = codecs.lookup(encoding) 189 | decoder = info.incrementaldecoder(errors) # type: ignore 190 | self._underlying_decoder = decoder 191 | if universal: 192 | return IncrementalNewlineDecoder(decoder, translate) 193 | return decoder 194 | 195 | self._decoder = make_decoder( 196 | encoding, errors, newline is None or newline == "", newline is None 197 | ) 198 | 199 | # Data that has been received but not yet passed through 200 | # self._decoder. We store it as a member variable to permit recovery 201 | # from decode errors; unless one of those occurs, it will be None 202 | # at every checkpoint. 203 | self._raw_chunk: Optional[bytes] = None 204 | 205 | # self._chunk[self._chunk_pos:] is the data that has been 206 | # passed through self._decoder but not yet returned from 207 | # receive_line(). 208 | self._chunk = "" 209 | self._chunk_pos = 0 210 | 211 | @property 212 | def encoding(self) -> str: 213 | return self._encoding 214 | 215 | @property 216 | def errors(self) -> Optional[str]: 217 | return self._underlying_decoder.errors 218 | 219 | @errors.setter 220 | def errors(self, value: Optional[str]) -> None: 221 | self._underlying_decoder.errors = value # type: ignore 222 | 223 | @property 224 | def newlines(self) -> Union[str, Tuple[str, ...], None]: 225 | r"""The newline sequences that have actually been observed in the input. 226 | 227 | If no newline sequences have been observed, *or* if you specified 228 | a particular ``newline`` type when constructing this stream, 229 | this attribute is ``None``. Otherwise, it is a single string 230 | or a tuple of strings drawn from the set ``{"\r", "\n", "\r\n"}``. 231 | """ 232 | try: 233 | return self._decoder.newlines # type: ignore 234 | except AttributeError: 235 | return None 236 | 237 | async def aclose(self) -> None: 238 | """Discard all buffered data and close the underlying stream.""" 239 | self._raw_chunk = None 240 | self._chunk = "" 241 | self._chunk_pos = 0 242 | await self.transport_stream.aclose() 243 | 244 | async def __aiter__(self) -> AsyncIterator[str]: 245 | """Iterate over the lines in this stream.""" 246 | while True: 247 | line = await self.receive_line() 248 | if line == "": 249 | return 250 | yield line 251 | 252 | async def receive_line(self, max_chars: int = -1) -> str: 253 | """Receive and decode data on this stream until ``max_chars`` have 254 | been received or a newline or end-of-file is encountered. The 255 | meaning of "newline" depends on the ``newline`` argument 256 | passed at construction time. 
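
        For example, a simple line-processing loop might look like this
        (``stream`` and ``process`` are illustrative)::

            while True:
                line = await stream.receive_line()
                if line == "":
                    break  # EOF
                process(line)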
257 | 258 | Args: 259 | max_chars (int): The maximum number of characters to return if 260 | no newline sequence is received. If negative, read until 261 | newline or EOF. 262 | 263 | Returns: 264 | str: The line received. It always ends with a newline unless 265 | we reached ``max_chars`` or EOF. If there is no data left to 266 | return before EOF, returns an empty string (``""``). 267 | 268 | Raises: 269 | UnicodeDecodeError: if the received data can't be decoded 270 | Anything else: that was raised by the underlying transport stream's 271 | :meth:`~trio.abc.ReceiveStream.receive_some` method. 272 | 273 | """ 274 | 275 | await trio.lowlevel.checkpoint_if_cancelled() 276 | 277 | got_more = False 278 | line_end_pos = None 279 | while True: 280 | max_pos = len(self._chunk) 281 | if max_chars > 0: 282 | max_pos = min(max_pos, self._chunk_pos + max_chars) 283 | 284 | if self._newline == "": 285 | # Universal newlines without translation: search for any of \r, 286 | # \n, \r\n. Use of IncrementalNewlineDecoder ensures we never 287 | # split a \r\n sequence across two decoder outputs. 288 | crpos = self._chunk.find("\r", self._chunk_pos, max_pos) 289 | lfpos = self._chunk.find("\n", self._chunk_pos, max_pos) 290 | if crpos != -1 or lfpos != -1: 291 | # Found a newline 292 | if crpos != -1 and (lfpos == -1 or crpos < lfpos): 293 | # CR exists and comes before LF. LF may or 294 | # may not exist. If the first LF is one 295 | # position after the first CR, we have a CRLF 296 | # and must end the line after the entire CRLF 297 | # sequence. Otherwise, end after the CR. 298 | line_end_pos = crpos + 1 + (lfpos == crpos + 1) 299 | else: 300 | # CR either does not exist or comes after LF, 301 | # so this line is delimited by LF. 302 | line_end_pos = lfpos + 1 303 | break 304 | else: 305 | # Just need to end on occurrences of self._newline. 306 | # (If we're using universal newlines with translation, we 307 | # set it to "\n" in the constructor.) 308 | nlpos = self._chunk.find(self._newline, self._chunk_pos, max_pos) 309 | if nlpos != -1: 310 | line_end_pos = nlpos + len(self._newline) 311 | break 312 | 313 | # If we found a newline in self._chunk, we broke out of the 314 | # loop above. Getting here means we either need more data or 315 | # hit our max_chars limit and must return without the newline. 316 | if max_pos == self._chunk_pos + max_chars: 317 | # Hit limit, return what we've got. 318 | line_end_pos = max_pos 319 | break 320 | 321 | # Need to pull down more raw data to decode 322 | if self._raw_chunk is None: 323 | self._raw_chunk = await self.transport_stream.receive_some( 324 | self.chunk_size 325 | ) 326 | got_more = True 327 | 328 | if self._raw_chunk == b"": 329 | # EOF on underlying stream. Pull out whatever the decoder 330 | # has left for us; if that's nothing, return EOF ourselves. 331 | chunk = self._decoder.decode(self._raw_chunk, final=True) 332 | if not chunk: 333 | line_end_pos = len(self._chunk) 334 | break 335 | else: 336 | chunk = self._decoder.decode(self._raw_chunk) 337 | 338 | # We need to reallocate self._chunk in order to append the new 339 | # stuff, so we'll throw away already-consumed output while we're 340 | # at it. We don't do this at every call to receive_line() because 341 | # it would result in quadratic-time performance with short lines. 342 | # (We still get quadratic-time performance with arbitrarily long 343 | # lines, but we'll not worry about that for now.) 
344 | self._chunk = self._chunk[self._chunk_pos :] + chunk 345 | self._chunk_pos = 0 346 | 347 | # We've incorporated _raw_chunk into _chunk, so null it out. 348 | # If decoding failed we would leave _raw_chunk non-null and 349 | # try again to decode it on a future call, maybe with a different 350 | # errors parameter. 351 | self._raw_chunk = None 352 | 353 | # We break out of the loop when we find the point we want to 354 | # chop at. All that's left is to return it to the caller. 355 | 356 | if not got_more: 357 | # If we never called receive_some(), we only did half a checkpoint, 358 | # and need to do the other half before returning. 359 | await trio.lowlevel.cancel_shielded_checkpoint() 360 | 361 | ret = self._chunk[self._chunk_pos : line_end_pos] 362 | self._chunk_pos = line_end_pos 363 | 364 | # If we're consuming the whole buffer, compact it now since 365 | # that's basically free. Otherwise wait until we next pull 366 | # down a chunk, so we don't have too poor performance when 367 | # receiving lots of short lines. 368 | if self._chunk_pos == len(self._chunk): 369 | self._chunk = "" 370 | self._chunk_pos = 0 371 | 372 | return ret 373 | -------------------------------------------------------------------------------- /tricycle/_tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oremanj/tricycle/29559d1768cc5edb960acc6079ab4e3a6e15ee19/tricycle/_tests/__init__.py -------------------------------------------------------------------------------- /tricycle/_tests/conftest.py: -------------------------------------------------------------------------------- 1 | from pytest_trio.enable_trio_mode import * # type: ignore 2 | -------------------------------------------------------------------------------- /tricycle/_tests/test_meta.py: -------------------------------------------------------------------------------- 1 | import attr 2 | import pytest 3 | import types 4 | import trio 5 | import trio.testing 6 | from contextlib import asynccontextmanager 7 | from typing import AsyncIterator, Coroutine, Iterator, List 8 | 9 | from .. 
import ScopedObject, BackgroundObject 10 | 11 | 12 | def test_too_much_magic() -> None: 13 | with pytest.raises(TypeError) as info: 14 | 15 | class TooMuchMagic(ScopedObject): # pragma: no cover 16 | async def __open__(self) -> None: 17 | pass 18 | 19 | @asynccontextmanager 20 | async def __wrap__(self) -> AsyncIterator[None]: 21 | yield 22 | 23 | assert str(info.value) == ( 24 | "ScopedObjects can define __open__/__close__, or __wrap__, but not both" 25 | ) 26 | 27 | 28 | @types.coroutine 29 | def async_yield(value: str) -> Iterator[str]: 30 | yield value 31 | 32 | 33 | def test_mro() -> None: 34 | class A(ScopedObject): 35 | async def __open__(self) -> None: 36 | await async_yield("open A") 37 | 38 | class B(A): 39 | async def __open__(self) -> None: 40 | await async_yield("open B") 41 | 42 | async def __close__(self) -> None: 43 | await async_yield("close B") 44 | 45 | class C(A): 46 | async def __open__(self) -> None: 47 | await async_yield("open C") 48 | 49 | async def __close__(self) -> None: 50 | await async_yield("close C") 51 | 52 | class D(B, C): 53 | def __init__(self, value: int): 54 | self.value = value 55 | 56 | async def __close__(self) -> None: 57 | await async_yield("close D") 58 | 59 | assert D.__mro__ == (D, B, C, A, ScopedObject, object) 60 | d_mgr = D(42) 61 | assert not isinstance(d_mgr, D) 62 | assert not hasattr(d_mgr, "value") 63 | assert hasattr(d_mgr, "__aenter__") 64 | 65 | async def use_it() -> None: 66 | async with d_mgr as d: 67 | assert isinstance(d, D) 68 | assert d.value == 42 69 | await async_yield("body") 70 | 71 | coro: Coroutine[str, None, None] = use_it() 72 | record = [] 73 | while True: 74 | try: 75 | record.append(coro.send(None)) 76 | except StopIteration: 77 | break 78 | assert record == [ 79 | "open A", 80 | "open C", 81 | "open B", 82 | "body", 83 | "close D", 84 | "close B", 85 | "close C", 86 | ] 87 | 88 | 89 | @attr.s(auto_attribs=True) 90 | class Example(BackgroundObject): 91 | ticks: int = 0 92 | record: List[str] = attr.Factory(list) 93 | exiting: bool = False 94 | 95 | def __attrs_post_init__(self) -> None: 96 | assert not hasattr(self, "nursery") 97 | self.record.append("attrs_post_init") 98 | 99 | async def __open__(self) -> None: 100 | self.record.append("open") 101 | await self.nursery.start(self._background_task) 102 | self.record.append("started") 103 | 104 | async def __close__(self) -> None: 105 | assert len(self.nursery.child_tasks) != 0 106 | # Make sure this doesn't raise AttributeError in aexit: 107 | del self.nursery 108 | self.record.append("close") 109 | self.exiting = True 110 | 111 | async def _background_task(self, *, task_status: trio.TaskStatus[None]) -> None: 112 | self.record.append("background") 113 | await trio.sleep(1) 114 | self.record.append("starting") 115 | task_status.started() 116 | self.record.append("running") 117 | while not self.exiting: 118 | await trio.sleep(1) 119 | self.ticks += 1 120 | self.record.append("stopping") 121 | 122 | 123 | class DaemonExample(Example, daemon=True): 124 | pass 125 | 126 | 127 | async def test_background(autojump_clock: trio.testing.MockClock) -> None: 128 | async with Example(ticks=100) as obj: 129 | assert obj.record == [ 130 | "attrs_post_init", 131 | "open", 132 | "background", 133 | "starting", 134 | "running", 135 | "started", 136 | ] 137 | del obj.record[:] 138 | await trio.sleep(5.5) 139 | assert obj.record == ["close", "stopping"] 140 | # 1 sec start + 6 ticks 141 | assert trio.current_time() == 7.0 142 | assert obj.ticks == 106 143 | assert not hasattr(obj, 
"nursery") 144 | 145 | # With daemon=True, the background tasks are cancelled when the parent exits 146 | async with DaemonExample() as obj2: 147 | assert obj2.record == [ 148 | "attrs_post_init", 149 | "open", 150 | "background", 151 | "starting", 152 | "running", 153 | "started", 154 | ] 155 | del obj2.record[:] 156 | await trio.sleep(5.5) 157 | assert obj2.record == ["close"] 158 | assert trio.current_time() == 13.5 159 | assert obj2.ticks == 5 160 | -------------------------------------------------------------------------------- /tricycle/_tests/test_multi_cancel.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import trio 4 | import trio.testing 5 | from .. import MultiCancelScope 6 | 7 | 8 | async def test_basic(autojump_clock: trio.testing.MockClock) -> None: 9 | parent = MultiCancelScope() 10 | finish_order = [] 11 | 12 | async def cancel_child_before_entering() -> None: 13 | child = parent.open_child() 14 | assert not child.cancel_called 15 | child.cancel() 16 | assert child.cancel_called 17 | assert not child.cancelled_caught 18 | await trio.sleep(0.2) 19 | with child: 20 | assert not child.cancelled_caught 21 | await trio.sleep(1) 22 | assert child.cancelled_caught 23 | finish_order.append("cancel_child_before_entering") 24 | 25 | async def cancel_child_after_entering() -> None: 26 | with parent.open_child() as child: 27 | await trio.sleep(0.3) 28 | child.cancel() 29 | await trio.sleep(1) 30 | assert child.cancel_called 31 | assert child.cancelled_caught 32 | finish_order.append("cancel_child_after_entering") 33 | 34 | async def cancel_child_via_local_deadline() -> None: 35 | child = parent.open_child() 36 | child.deadline = trio.current_time() + 0.4 37 | deadline_before_entering = child.deadline 38 | with child: 39 | assert child.deadline == deadline_before_entering 40 | await trio.sleep(1) 41 | assert child.cancel_called 42 | assert child.cancelled_caught 43 | finish_order.append("cancel_child_via_local_deadline") 44 | 45 | async def cancel_child_via_local_deadline_2() -> None: 46 | child = parent.open_child() 47 | child.deadline = trio.current_time() + 1.0 48 | with child: 49 | child.deadline -= 0.9 50 | await trio.sleep(1) 51 | assert child.cancel_called 52 | assert child.cancelled_caught 53 | finish_order.append("cancel_child_via_local_deadline_2") 54 | 55 | async def cancel_parent_before_entering() -> None: 56 | child = parent.open_child() 57 | await trio.sleep(0.6) 58 | assert child.cancel_called 59 | assert not child.cancelled_caught 60 | with child: 61 | await trio.sleep(1) 62 | assert child.cancelled_caught 63 | finish_order.append("cancel_parent_before_entering") 64 | 65 | async def cancel_parent_after_entering() -> None: 66 | with parent.open_child() as child: 67 | await trio.sleep(1) 68 | assert child.cancel_called 69 | assert child.cancelled_caught 70 | finish_order.append("cancel_parent_after_entering") 71 | 72 | async with trio.open_nursery() as nursery: 73 | nursery.start_soon(cancel_child_before_entering) 74 | nursery.start_soon(cancel_child_after_entering) 75 | nursery.start_soon(cancel_child_via_local_deadline) 76 | nursery.start_soon(cancel_child_via_local_deadline_2) 77 | nursery.start_soon(cancel_parent_before_entering) 78 | nursery.start_soon(cancel_parent_after_entering) 79 | await trio.sleep(0.5) 80 | assert "MultiCancelScope cancelled" not in repr(parent) 81 | assert not parent.cancel_called 82 | parent.cancel() 83 | assert parent.cancel_called 84 | assert "MultiCancelScope cancelled" in 
repr(parent) 85 | parent.cancel() 86 | await trio.sleep(0.2) 87 | 88 | nursery.cancel_scope.deadline = trio.current_time() + 0.1 89 | with parent.open_child() as child: 90 | child.deadline = nursery.cancel_scope.deadline 91 | assert child.cancel_called 92 | assert not child.cancelled_caught 93 | await trio.sleep_forever() 94 | assert child.cancelled_caught 95 | finish_order.append("cancel_parent_before_creating") 96 | 97 | assert not nursery.cancel_scope.cancelled_caught 98 | assert finish_order == [ 99 | "cancel_child_via_local_deadline_2", # t+0.1 100 | "cancel_child_before_entering", # t+0.2 101 | "cancel_child_after_entering", # t+0.3 102 | "cancel_child_via_local_deadline", # t+0.4 103 | "cancel_parent_after_entering", # t+0.5 104 | "cancel_parent_before_entering", # t+0.6 105 | "cancel_parent_before_creating", # t+0.7 106 | ] 107 | 108 | 109 | async def test_shielding(autojump_clock: trio.testing.MockClock) -> None: 110 | parent = MultiCancelScope() 111 | finish_order = [] 112 | 113 | async def shield_child_on_creation() -> None: 114 | try: 115 | with parent.open_child(shield=True): 116 | await trio.sleep(1) 117 | assert False # pragma: no cover 118 | finally: 119 | finish_order.append("shield_child_on_creation") 120 | 121 | async def shield_child_before_entering() -> None: 122 | child = parent.open_child() 123 | child.shield = True 124 | try: 125 | with child: 126 | await trio.sleep(1) 127 | assert False # pragma: no cover 128 | finally: 129 | with trio.CancelScope(shield=True): 130 | await trio.sleep(0.1) 131 | finish_order.append("shield_child_before_entering") 132 | 133 | async def shield_child_after_entering() -> None: 134 | try: 135 | with parent.open_child() as child: 136 | child.shield = True 137 | await trio.sleep(1) 138 | assert False # pragma: no cover 139 | finally: 140 | with trio.CancelScope(shield=True): 141 | await trio.sleep(0.2) 142 | finish_order.append("shield_child_after_entering") 143 | 144 | async def shield_child_when_parent_shielded() -> None: 145 | try: 146 | with trio.CancelScope(shield=True): 147 | await trio.sleep(0.3) 148 | with parent.open_child(): 149 | await trio.sleep(1) 150 | finally: 151 | with trio.CancelScope(shield=True): 152 | await trio.sleep(0.3) 153 | finish_order.append("shield_child_when_parent_shielded") 154 | 155 | async def shield_child_after_parent_unshielded() -> None: 156 | with parent.open_child(shield=True) as child: 157 | this_task = trio.lowlevel.current_task() 158 | 159 | def abort_fn(_): # type: ignore 160 | trio.lowlevel.reschedule(this_task) 161 | return trio.lowlevel.Abort.FAILED 162 | 163 | await trio.lowlevel.wait_task_rescheduled(abort_fn) 164 | child.shield = True 165 | await trio.sleep(0.5) 166 | assert not child.cancelled_caught 167 | finish_order.append("shield_child_after_parent_unshielded") 168 | 169 | async with trio.open_nursery() as nursery: 170 | nursery.start_soon(shield_child_on_creation) 171 | nursery.start_soon(shield_child_before_entering) 172 | nursery.start_soon(shield_child_after_entering) 173 | nursery.start_soon(shield_child_when_parent_shielded) 174 | nursery.start_soon(shield_child_after_parent_unshielded) 175 | 176 | nursery.cancel_scope.cancel() 177 | assert parent.shield == False 178 | with trio.CancelScope(shield=True): 179 | await trio.sleep(0.2) 180 | assert "MultiCancelScope shielded" not in repr(parent) 181 | parent.shield = True 182 | assert "MultiCancelScope shielded" in repr(parent) 183 | assert parent.shield == True 184 | with trio.CancelScope(shield=True): 185 | await trio.sleep(0.2) 186 
| parent.shield = False 187 | 188 | assert finish_order == [ 189 | "shield_child_on_creation", # t+0.4 190 | "shield_child_before_entering", # t+0.5 191 | "shield_child_after_entering", # t+0.6 192 | "shield_child_when_parent_shielded", # t+0.7 193 | "shield_child_after_parent_unshielded", # t+0.8 194 | ] 195 | -------------------------------------------------------------------------------- /tricycle/_tests/test_rwlock.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import itertools 3 | import trio 4 | import trio.testing 5 | from .. import RWLock 6 | from typing import List, Optional 7 | 8 | 9 | async def test_rwlock(autojump_clock: trio.testing.MockClock) -> None: 10 | lock = RWLock() 11 | assert not lock.locked() 12 | 13 | lock.acquire_read_nowait() 14 | assert lock.locked() == "read" 15 | 16 | with pytest.raises(RuntimeError): 17 | lock.acquire_read_nowait() 18 | with pytest.raises(RuntimeError): 19 | lock.acquire_write_nowait() 20 | lock.release() 21 | with pytest.raises(RuntimeError): 22 | lock.release() 23 | 24 | with trio.testing.assert_checkpoints(): 25 | await lock.acquire_write() 26 | assert lock.locked() == "write" 27 | with pytest.raises(RuntimeError): 28 | await lock.acquire_read() 29 | with pytest.raises(RuntimeError): 30 | await lock.acquire_write() 31 | lock.release() 32 | 33 | async with lock.read_locked(): 34 | assert lock.locked() == "read" 35 | 36 | async with lock.write_locked(): 37 | assert lock.locked() == "write" 38 | 39 | start_order = itertools.count() 40 | acquire_times: List[Optional[float]] = [None] * 10 41 | 42 | async def holder_task( 43 | for_write: bool, task_status: trio.TaskStatus[trio.lowlevel.Task] 44 | ) -> None: 45 | my_slot = next(start_order) 46 | repr(lock) # smoke test 47 | task_status.started(trio.lowlevel.current_task()) 48 | await lock.acquire(for_write=for_write) 49 | acquire_times[my_slot] = trio.current_time() 50 | try: 51 | await trio.sleep(1) 52 | finally: 53 | lock.release() 54 | 55 | async with trio.open_nursery() as nursery: 56 | t0 = await nursery.start(holder_task, True) 57 | t1a = await nursery.start(holder_task, False) 58 | t1b = await nursery.start(holder_task, False) 59 | t1c = await nursery.start(holder_task, False) 60 | await nursery.start(holder_task, True) # t2 61 | await nursery.start(holder_task, False) # t3a 62 | await nursery.start(holder_task, False) # t3b 63 | await nursery.start(holder_task, True) # t4 64 | await nursery.start(holder_task, True) # t5 65 | t6 = await nursery.start(holder_task, False) 66 | 67 | await trio.sleep(0.5) 68 | assert "write-locked" in repr(lock) 69 | assert lock.statistics().__dict__ == { 70 | "locked": "write", 71 | "readers": frozenset(), 72 | "writer": t0, 73 | "readers_waiting": 6, 74 | "writers_waiting": 3, 75 | } 76 | with pytest.raises(RuntimeError): 77 | lock.release() 78 | with pytest.raises(trio.WouldBlock): 79 | lock.acquire_read_nowait() 80 | with pytest.raises(trio.WouldBlock): 81 | lock.acquire_write_nowait() 82 | 83 | await trio.sleep(1) 84 | assert "read-locked" in repr(lock) 85 | assert lock.statistics().__dict__ == { 86 | "locked": "read", 87 | "readers": frozenset([t1a, t1b, t1c]), 88 | "writer": None, 89 | "readers_waiting": 3, 90 | "writers_waiting": 3, 91 | } 92 | with pytest.raises(RuntimeError): 93 | lock.release() 94 | with pytest.raises(trio.WouldBlock): 95 | # even in read state, can't acquire for read if writers are waiting 96 | lock.acquire_read_nowait() 97 | with pytest.raises(trio.WouldBlock): 98 | 
lock.acquire_write_nowait() 99 | 100 | await trio.sleep(5) 101 | assert "read-locked" in repr(lock) 102 | assert lock.statistics().__dict__ == { 103 | "locked": "read", 104 | "readers": frozenset([t6]), 105 | "writer": None, 106 | "readers_waiting": 0, 107 | "writers_waiting": 0, 108 | } 109 | lock.acquire_read_nowait() 110 | lock.release() 111 | with pytest.raises(trio.WouldBlock): 112 | lock.acquire_write_nowait() 113 | 114 | assert acquire_times == pytest.approx([0, 1, 1, 1, 2, 3, 3, 4, 5, 6]) 115 | 116 | # test cancellation 117 | start_order = itertools.count() 118 | async with trio.open_nursery() as nursery: 119 | await nursery.start(holder_task, True) 120 | await nursery.start(holder_task, True) 121 | await nursery.start(holder_task, False) 122 | await nursery.start(holder_task, False) 123 | await nursery.start(holder_task, False) 124 | await nursery.start(holder_task, True) 125 | await nursery.start(holder_task, False) 126 | await nursery.start(holder_task, False) 127 | await nursery.start(holder_task, True) 128 | await nursery.start(holder_task, True) 129 | await nursery.start(holder_task, False) 130 | 131 | await trio.sleep(0.5) 132 | nursery.cancel_scope.cancel() 133 | 134 | assert nursery.cancel_scope.cancelled_caught 135 | assert trio.current_time() == pytest.approx(7.5) 136 | assert "unlocked" in repr(lock) 137 | assert lock.statistics().__dict__ == { 138 | "locked": "", 139 | "readers": frozenset(), 140 | "writer": None, 141 | "readers_waiting": 0, 142 | "writers_waiting": 0, 143 | } 144 | 145 | 146 | async def test_read_biased(autojump_clock: trio.testing.MockClock) -> None: 147 | lock = RWLock(read_biased=True) 148 | assert "read-biased" in repr(lock) 149 | assert lock.read_biased 150 | 151 | async def holder_task( 152 | for_write: bool, task_status: trio.TaskStatus[trio.lowlevel.Task] 153 | ) -> None: 154 | task_status.started(trio.lowlevel.current_task()) 155 | await lock.acquire(for_write=for_write) 156 | try: 157 | await trio.sleep(1) 158 | finally: 159 | lock.release() 160 | 161 | async with trio.open_nursery() as nursery: 162 | t1a = await nursery.start(holder_task, False) 163 | t1b = await nursery.start(holder_task, False) 164 | t2 = await nursery.start(holder_task, True) 165 | t1c = await nursery.start(holder_task, False) 166 | 167 | # Reader (t1c) that arrives after the writer (t2) can get the 168 | # lock immediately, before the writer does 169 | await trio.sleep(0.5) 170 | assert lock.statistics().readers == frozenset([t1a, t1b, t1c]) 171 | assert lock.statistics().writer is None 172 | 173 | await trio.sleep(1) 174 | assert lock.statistics().readers == frozenset() 175 | assert lock.statistics().writer is t2 176 | 177 | # If an additional writer gets in line for a writer-held lock 178 | # before any readers do, they get it before readers that come later 179 | # (i.e., the bias towards readers is "weak" -- "strong" would wake 180 | # up all readers when any writer released). 
181 | t3 = await nursery.start(holder_task, True) 182 | t4a = await nursery.start(holder_task, False) 183 | t5 = await nursery.start(holder_task, True) 184 | t4b = await nursery.start(holder_task, False) 185 | 186 | await trio.sleep(1) 187 | assert lock.statistics().readers == frozenset() 188 | assert lock.statistics().writer is t3 189 | 190 | await trio.sleep(1) 191 | assert lock.statistics().readers == frozenset([t4a, t4b]) 192 | assert lock.statistics().writer is None 193 | 194 | await trio.sleep(1) 195 | assert lock.statistics().readers == frozenset() 196 | assert lock.statistics().writer is t5 197 | 198 | # Read-bias can be turned on and off dynamically 199 | lock.read_biased = False 200 | assert not lock.read_biased 201 | t6 = await nursery.start(holder_task, False) 202 | t7 = await nursery.start(holder_task, True) 203 | t8 = await nursery.start(holder_task, False) 204 | 205 | # Now reader t8 has to wait because writer t7 arrived first, 206 | # even though the lock is held by reader t6 207 | await trio.sleep(1) 208 | assert lock.statistics().readers == frozenset([t6]) 209 | assert lock.statistics().writer is None 210 | 211 | # If we turn on read-bias again, t8 immediately gets the lock 212 | lock.read_biased = True 213 | assert lock.statistics().readers == frozenset([t6, t8]) 214 | assert lock.statistics().writer is None 215 | 216 | await trio.sleep(0.75) 217 | assert lock.statistics().readers == frozenset([t8]) 218 | assert lock.statistics().writer is None 219 | 220 | await trio.sleep(0.75) 221 | assert lock.statistics().readers == frozenset() 222 | assert lock.statistics().writer is t7 223 | -------------------------------------------------------------------------------- /tricycle/_tests/test_service_nursery.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from typing import Any 3 | 4 | import trio 5 | import trio.testing 6 | from .. 
import open_service_nursery 7 | 8 | 9 | async def test_basic(autojump_clock: trio.testing.MockClock) -> None: 10 | record = [] 11 | async with open_service_nursery() as nursery: 12 | 13 | @nursery.start_soon 14 | async def background_task() -> None: 15 | try: 16 | await trio.sleep_forever() 17 | finally: 18 | record.append("background_task exiting") 19 | 20 | (task,) = nursery.child_tasks 21 | assert "background_task" in task.name 22 | 23 | nursery.cancel_scope.cancel() 24 | with trio.CancelScope(shield=True): 25 | await trio.sleep(1) 26 | record.append("body exiting") 27 | await trio.sleep(0) 28 | pytest.fail("should've been cancelled") # pragma: no cover 29 | 30 | assert nursery.cancel_scope.cancelled_caught 31 | assert record == ["body exiting", "background_task exiting"] 32 | 33 | 34 | async def test_start(autojump_clock: trio.testing.MockClock) -> None: 35 | record = [] 36 | 37 | async def sleep_then_start(val: int, *, task_status: trio.TaskStatus[int]) -> None: 38 | await trio.sleep(1) 39 | task_status.started(val) 40 | try: 41 | await trio.sleep(10) 42 | record.append("background task finished") # pragma: no cover 43 | finally: 44 | record.append("background task exiting") 45 | 46 | async def shielded_sleep_then_start(*, task_status: trio.TaskStatus[None]) -> None: 47 | with trio.CancelScope(shield=True): 48 | await trio.sleep(1) 49 | task_status.started() 50 | await trio.sleep(10) 51 | 52 | async with open_service_nursery() as nursery: 53 | # Child can be cancelled normally while it's starting 54 | with trio.move_on_after(0.5) as scope: 55 | await nursery.start(sleep_then_start, 1) 56 | assert scope.cancelled_caught 57 | assert not nursery.child_tasks 58 | 59 | # If started() is the first thing to notice a cancellation, the task 60 | # stays in the old nursery and remains unshielded 61 | with trio.move_on_after(0.5) as scope: 62 | await nursery.start(shielded_sleep_then_start) 63 | assert scope.cancelled_caught 64 | assert not nursery.child_tasks 65 | 66 | assert trio.current_time() == 1.5 67 | 68 | # Otherwise, once started() is called the child is shielded until 69 | # the 'async with' block exits. 
70 | assert 42 == await nursery.start(sleep_then_start, 42) 71 | assert trio.current_time() == 2.5 72 | 73 | nursery.cancel_scope.cancel() 74 | with trio.CancelScope(shield=True): 75 | await trio.sleep(1) 76 | record.append("parent task finished") 77 | 78 | assert trio.current_time() == 3.5 79 | assert record == ["parent task finished", "background task exiting"] 80 | 81 | 82 | async def test_remote_start(autojump_clock: trio.testing.MockClock) -> None: 83 | record = [] 84 | outer_task_status: trio.TaskStatus[int] 85 | 86 | async def task(*, task_status: trio.TaskStatus[int]) -> None: 87 | nonlocal outer_task_status 88 | outer_task_status = task_status 89 | try: 90 | await trio.sleep(10) 91 | record.append("background task finished") # pragma: no cover 92 | finally: 93 | record.append("background task exiting") 94 | 95 | async def delayed_start() -> None: 96 | await trio.sleep(1) 97 | outer_task_status.started(42) 98 | 99 | async with trio.open_nursery() as outer_nursery: 100 | outer_nursery.start_soon(delayed_start) 101 | async with open_service_nursery() as inner_nursery: 102 | assert 42 == await inner_nursery.start(task) 103 | assert trio.current_time() == 1.0 104 | outer_nursery.cancel_scope.cancel() 105 | with trio.CancelScope(shield=True): 106 | await trio.sleep(1) 107 | record.append("parent task finished") 108 | 109 | assert trio.current_time() == 2.0 110 | assert record == ["parent task finished", "background task exiting"] 111 | 112 | 113 | async def test_problems() -> None: 114 | async with open_service_nursery() as nursery: 115 | with pytest.raises(TypeError) as info: 116 | nursery.start_soon(trio.sleep) # type: ignore[arg-type] 117 | assert "missing 1 required positional argument" in str(info.value) 118 | 119 | with pytest.raises(TypeError) as info: 120 | nursery.start_soon(trio.sleep(1)) # type: ignore 121 | assert "Trio was expecting an async function" in str(info.value) 122 | 123 | with pytest.raises(TypeError) as info: 124 | nursery.start_soon(int, 42) # type: ignore 125 | assert "appears to be synchronous" in str(info.value) 126 | 127 | first_call = True 128 | 129 | def evil() -> Any: 130 | nonlocal first_call 131 | if first_call: 132 | first_call = False 133 | return 42 134 | else: 135 | return trio.sleep(0) 136 | 137 | with pytest.raises(trio.TrioInternalError) as info2: 138 | nursery.start_soon(evil) 139 | assert "all bets are off at this point" in str(info2.value) 140 | -------------------------------------------------------------------------------- /tricycle/_tests/test_streams.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import locale 3 | import random 4 | import sys 5 | 6 | import trio 7 | import trio.testing 8 | from contextlib import asynccontextmanager 9 | from functools import partial 10 | from typing import ( 11 | AsyncContextManager, 12 | AsyncIterator, 13 | Awaitable, 14 | Callable, 15 | List, 16 | Optional, 17 | Sequence, 18 | Tuple, 19 | cast, 20 | ) 21 | from .. 
import BufferedReceiveStream, TextReceiveStream 22 | 23 | 24 | async def test_buffered_receive(autojump_clock: trio.testing.MockClock) -> None: 25 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 26 | receive_stream = BufferedReceiveStream(receive_stream_raw, chunk_size=8) 27 | 28 | orig_receive_some = receive_stream_raw.receive_some 29 | raw_receive_sizes = [] 30 | 31 | async def hooked_receive_some(max_bytes: int) -> bytes: 32 | result = await orig_receive_some(max_bytes) 33 | raw_receive_sizes.append(len(result)) 34 | return result 35 | 36 | receive_stream_raw.receive_some = hooked_receive_some # type: ignore 37 | 38 | # Send a big block, receive a little at a time 39 | data = bytes(val for val in range(64)) 40 | await send_stream.send_all(data) 41 | 42 | async def checked_receive(num_bytes: int, raw_reads: Sequence[int] = ()) -> bytes: 43 | try: 44 | with trio.testing.assert_checkpoints(): 45 | return await receive_stream.receive(num_bytes) 46 | finally: 47 | assert list(raw_reads) == raw_receive_sizes 48 | raw_receive_sizes[:] = [] 49 | 50 | chunks = [ 51 | await checked_receive(4, [8]), 52 | await checked_receive(1), 53 | await checked_receive(3), # go exactly to end of chunk 54 | await checked_receive(1, [8]), 55 | await checked_receive(9, [8]), # receive across chunk boundary 56 | await checked_receive(18, [12]), # >1 chunk beyond buffered amount 57 | await checked_receive(12, [12]), # >1 chunk starting from boundary 58 | await checked_receive(14, [14]), # 2 bytes left after this point 59 | await checked_receive(1, [2]), 60 | await checked_receive(1), # everything consumed 61 | ] 62 | assert b"".join(chunks) == data 63 | 64 | receive_stream.unget(b"1234") 65 | assert await receive_stream.receive(2) == b"12" 66 | receive_stream.unget(b"012") 67 | assert await receive_stream.receive(4) == b"0123" 68 | receive_stream.unget(b"tt") 69 | assert await receive_stream.receive(3) == b"tt4" 70 | 71 | with pytest.raises(trio.TooSlowError), trio.fail_after(1): 72 | await checked_receive(1) 73 | await send_stream.send_all(b"xyz") 74 | with pytest.raises(trio.TooSlowError), trio.fail_after(1): 75 | await checked_receive(10, [3]) 76 | await send_stream.send_all(b"abcdabcdabcd") 77 | with pytest.raises(trio.TooSlowError), trio.fail_after(1): 78 | await checked_receive(16, [12]) 79 | assert b"xyzabcd" == await checked_receive(7) 80 | await send_stream.aclose() 81 | assert b"abcdabcd" == await checked_receive(32, [0]) 82 | assert b"" == await checked_receive(32, [0]) 83 | 84 | # now try a clean EOF 85 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 86 | receive_stream = BufferedReceiveStream(receive_stream_raw, chunk_size=8) 87 | orig_receive_some = receive_stream_raw.receive_some 88 | receive_stream_raw.receive_some = hooked_receive_some # type: ignore 89 | 90 | await send_stream.send_all(b"1234") 91 | assert b"12" == await checked_receive(2, [4]) 92 | with pytest.raises(trio.TooSlowError), trio.fail_after(1): 93 | await checked_receive(3) 94 | await send_stream.aclose() 95 | assert b"34" == await checked_receive(3, [0]) 96 | 97 | await receive_stream.aclose() 98 | 99 | 100 | @pytest.fixture 101 | async def receiver_factory() -> AsyncIterator[ 102 | Callable[[], AsyncContextManager[BufferedReceiveStream]] 103 | ]: 104 | async def send_task(send_stream: trio.abc.SendStream) -> None: 105 | for val in b"0123456789": 106 | await send_stream.send_all(bytes([val])) 107 | await trio.sleep(1) 108 | await send_stream.aclose() 109 | 110 | 
@asynccontextmanager 111 | async def receiver() -> AsyncIterator[BufferedReceiveStream]: 112 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 113 | async with trio.open_nursery() as nursery: 114 | nursery.start_soon(send_task, send_stream) 115 | try: 116 | yield BufferedReceiveStream(receive_stream_raw, chunk_size=8) 117 | finally: 118 | nursery.cancel_scope.cancel() 119 | 120 | yield receiver 121 | 122 | 123 | async def test_buffered_receive_helpers( 124 | autojump_clock: trio.testing.MockClock, 125 | receiver_factory: Callable[[], AsyncContextManager[BufferedReceiveStream]], 126 | ) -> None: 127 | async with receiver_factory() as receive_stream: 128 | assert b"012345" == await receive_stream.receive_all_or_none(6) 129 | assert b"6789" == await receive_stream.receive_all_or_none(4) 130 | assert None is await receive_stream.receive_all_or_none(42) 131 | 132 | async with receiver_factory() as receive_stream: 133 | assert b"0" == await receive_stream.receive_all_or_none(1) 134 | assert b"123456789" == await receive_stream.receive_all_or_none(9) 135 | assert None is await receive_stream.receive_all_or_none(3) 136 | 137 | async with receiver_factory() as receive_stream: 138 | assert b"012345" == await receive_stream.receive_all_or_none(6) 139 | with pytest.raises(ValueError) as info: 140 | await receive_stream.receive_all_or_none(5) 141 | assert str(info.value) == ( 142 | "unclean EOF (4 bytes after boundary, expected at least 5)" 143 | ) 144 | assert b"6789" == await receive_stream.receive(4) 145 | assert None is await receive_stream.receive_all_or_none(4) 146 | assert b"" == await receive_stream.receive(4) 147 | 148 | async with receiver_factory() as receive_stream: 149 | assert b"012345" == await receive_stream.receive_all_or_none(6) 150 | with pytest.raises(ValueError) as info: 151 | await receive_stream.receive_all_or_none(5) 152 | assert str(info.value) == ( 153 | "unclean EOF (4 bytes after boundary, expected at least 5)" 154 | ) 155 | assert b"6789" == await receive_stream.receive(4) 156 | assert None is await receive_stream.receive_all_or_none(4) 157 | assert b"" == await receive_stream.receive(4) 158 | 159 | async with receiver_factory() as receive_stream: 160 | assert b"0" == await receive_stream.receive_exactly(1) 161 | assert b"1234567" == await receive_stream.receive_exactly(7) 162 | with pytest.raises(ValueError) as info: 163 | await receive_stream.receive_exactly(3) 164 | assert str(info.value) == "unclean EOF (read only 2/3 bytes)" 165 | assert b"89" == await receive_stream.receive_exactly(2) 166 | with pytest.raises(ValueError) as info: 167 | await receive_stream.receive_exactly(3) 168 | assert str(info.value) == "unclean EOF (read only 0/3 bytes)" 169 | 170 | 171 | async def test_text_receive(autojump_clock: trio.testing.MockClock) -> None: 172 | test_input = ( 173 | b"The quick brown fox jumps over the lazy dog.\r\n\n\r\r\n\n" 174 | b"Yup.\n" 175 | b"That \xf0\x9f\xa6\x8a is still jumping.\r" # fox emoji 176 | ) 177 | 178 | # Test that encoding=None uses the locale preferred encoding 179 | stream = TextReceiveStream(None) # type: ignore 180 | assert stream.encoding == locale.getpreferredencoding(False) 181 | del stream 182 | 183 | newline: Optional[str] 184 | for newline in ("\r", "\n", "\r\n", "", "jump", None): 185 | output_str = test_input.decode("utf-8") 186 | if newline == "": 187 | output_lines = output_str.splitlines(True) 188 | elif newline is None: 189 | output_lines = ( 190 | output_str.replace("\r\n", "\n").replace("\r", 
"\n").splitlines(True) 191 | ) 192 | else: 193 | output_lines = [line + newline for line in output_str.split(newline)] 194 | if output_lines[-1] == newline: 195 | del output_lines[-1] 196 | else: 197 | output_lines[-1] = output_lines[-1][: -len(newline)] 198 | 199 | Streams = Tuple[trio.testing.MemorySendStream, TextReceiveStream] 200 | 201 | async def make_streams_with_hook( 202 | hook: Optional[ 203 | Callable[ 204 | [trio.testing.MemorySendStream, trio.testing.MemoryReceiveStream], 205 | Awaitable[None], 206 | ] 207 | ] 208 | ) -> Streams: 209 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 210 | receive_stream = TextReceiveStream( 211 | receive_stream_raw, "UTF-8", chunk_size=8, newline=newline 212 | ) 213 | if hook is not None: 214 | send_stream.send_all_hook = partial( 215 | hook, send_stream, receive_stream_raw 216 | ) 217 | assert receive_stream.encoding == "UTF-8" 218 | assert receive_stream.errors is None 219 | return send_stream, receive_stream 220 | 221 | async def make_streams_all_at_once() -> Streams: 222 | return await make_streams_with_hook(None) 223 | 224 | async def make_streams_one_byte_at_a_time() -> Streams: 225 | async def trickle( 226 | send: trio.testing.MemorySendStream, 227 | receive: trio.testing.MemoryReceiveStream, 228 | ) -> None: 229 | while trio.testing.memory_stream_pump(send, receive, max_bytes=1): 230 | await trio.sleep(1) 231 | 232 | return await make_streams_with_hook(trickle) 233 | 234 | async def make_streams_random_small_units() -> Streams: 235 | async def trickle( 236 | send: trio.testing.MemorySendStream, 237 | receive: trio.testing.MemoryReceiveStream, 238 | ) -> None: 239 | while trio.testing.memory_stream_pump( 240 | send, receive, max_bytes=random.randint(1, 16) 241 | ): 242 | await trio.sleep(1) 243 | 244 | return await make_streams_with_hook(trickle) 245 | 246 | for strategy in ( 247 | make_streams_all_at_once, 248 | make_streams_one_byte_at_a_time, 249 | make_streams_random_small_units, 250 | make_streams_random_small_units, 251 | make_streams_random_small_units, 252 | make_streams_random_small_units, 253 | make_streams_random_small_units, 254 | make_streams_random_small_units, 255 | ): 256 | send_stream, receive_stream = await strategy() 257 | received_lines: List[str] = [] 258 | 259 | async def receive_in_background() -> None: 260 | async for line in receive_stream: 261 | received_lines.append(line) 262 | 263 | async with trio.open_nursery() as nursery: 264 | nursery.start_soon(receive_in_background) 265 | await send_stream.send_all(test_input) 266 | await trio.sleep(2) 267 | length_before_eof = len(received_lines) 268 | await send_stream.aclose() 269 | 270 | # Universal newline support will wait for a potential \n 271 | # after the trailing \r and we don't get that line until EOF. 
272 | assert len(received_lines) == length_before_eof + (newline != "\r") 273 | assert received_lines == output_lines 274 | # The .newlines property is broken on PyPy: 275 | # https://bitbucket.org/pypy/pypy/issues/3012 276 | if sys.implementation.name == "cpython": 277 | if not newline: 278 | newlines_seen = cast(Tuple[str, ...], receive_stream.newlines) 279 | assert set(newlines_seen) == {"\r", "\n", "\r\n"} 280 | else: 281 | assert receive_stream.newlines is None 282 | 283 | 284 | async def test_text_receive_fix_errors() -> None: 285 | test_input = b"The quick brown \xf0\x9f\xa6\x8a jumps over the lazy dog.\n" 286 | for chunk_size in range(1, len(test_input)): 287 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 288 | receive_stream = TextReceiveStream( 289 | receive_stream_raw, chunk_size=chunk_size, encoding="ascii" 290 | ) 291 | await send_stream.send_all(test_input) 292 | await send_stream.aclose() 293 | with pytest.raises(UnicodeDecodeError): 294 | await receive_stream.receive_line() 295 | with pytest.raises(UnicodeDecodeError): 296 | await receive_stream.receive_line() 297 | receive_stream.errors = "replace" 298 | assert receive_stream.errors == "replace" 299 | line = await receive_stream.receive_line() 300 | x = chr(65533) 301 | assert f"The quick brown {x}{x}{x}{x} jumps over the lazy dog.\n" == line 302 | assert "" == await receive_stream.receive_line() 303 | await receive_stream.aclose() 304 | 305 | 306 | async def test_text_receive_hits_max_chars() -> None: 307 | test_input = b"The quick\nbrown \xf0\x9f\xa6\x8a\njumps over\r\nthe lazy dog.\n" 308 | for chunk_size in range(1, len(test_input)): 309 | send_stream, receive_stream_raw = trio.testing.memory_stream_one_way_pair() 310 | receive_stream = TextReceiveStream( 311 | receive_stream_raw, chunk_size=chunk_size, encoding="utf-8" 312 | ) 313 | await send_stream.send_all(test_input) 314 | await send_stream.aclose() 315 | 316 | fox = chr(129_418) 317 | assert "The quick\n" == await receive_stream.receive_line(12) 318 | assert f"brown {fox}" == await receive_stream.receive_line(7) 319 | assert "\n" == await receive_stream.receive_line(10) 320 | assert "jumps ov" == await receive_stream.receive_line(8) 321 | assert "er\r" == await receive_stream.receive_line(3) 322 | assert "\n" == await receive_stream.receive_line(1) 323 | assert "the lazy dog.\n" == await receive_stream.receive_line(20) 324 | assert "" == await receive_stream.receive_line(1) 325 | -------------------------------------------------------------------------------- /tricycle/_tests/test_tree_var.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | import trio 3 | import trio.testing 4 | from functools import partial 5 | from typing import Optional, Any, cast 6 | 7 | from .. 
import TreeVar, TreeVarToken 8 | 9 | 10 | async def test_treevar() -> None: 11 | tv1 = TreeVar[int]("tv1") 12 | tv2 = TreeVar[Optional[int]]("tv2", default=None) 13 | tv3 = TreeVar("tv3", default=-1) 14 | assert tv1.name == "tv1" 15 | assert "TreeVar name='tv2'" in repr(tv2) 16 | 17 | with pytest.raises(LookupError): 18 | tv1.get() 19 | assert tv2.get() is None 20 | assert tv1.get(42) == 42 21 | assert tv2.get(42) == 42 22 | 23 | NOTHING = cast(int, object()) 24 | 25 | async def should_be(val1: int, val2: int, new1: int = NOTHING) -> None: 26 | assert tv1.get(NOTHING) == val1 27 | assert tv2.get(NOTHING) == val2 28 | if new1 is not NOTHING: 29 | tv1.set(new1) 30 | 31 | tok1 = tv1.set(10) 32 | async with trio.open_nursery() as outer: 33 | tok2 = tv1.set(15) 34 | with tv2.being(20): 35 | assert tv2.get_in(trio.lowlevel.current_task()) == 20 36 | async with trio.open_nursery() as inner: 37 | tv1.reset(tok2) 38 | outer.start_soon(should_be, 10, NOTHING, 100) 39 | inner.start_soon(should_be, 15, 20, 200) 40 | await trio.testing.wait_all_tasks_blocked() 41 | assert tv1.get_in(trio.lowlevel.current_task()) == 10 42 | await should_be(10, 20, 300) 43 | assert tv1.get_in(inner) == 15 44 | assert tv1.get_in(outer) == 10 45 | assert tv1.get_in(trio.lowlevel.current_task()) == 300 46 | assert tv2.get_in(inner) == 20 47 | assert tv2.get_in(outer) is None 48 | assert tv2.get_in(trio.lowlevel.current_task()) == 20 49 | tv1.reset(tok1) 50 | await should_be(NOTHING, 20) 51 | assert tv1.get_in(inner) == 15 52 | assert tv1.get_in(outer) == 10 53 | with pytest.raises(LookupError): 54 | assert tv1.get_in(trio.lowlevel.current_task()) 55 | # Test get_in() needing to search a parent task but 56 | # finding no value there: 57 | tv3 = TreeVar("tv3", default=-1) 58 | assert tv3.get_in(outer) == -1 59 | assert tv3.get_in(outer, -42) == -42 60 | assert tv2.get() is None 61 | assert tv2.get_in(trio.lowlevel.current_task()) is None 62 | 63 | 64 | def trivial_abort(_: object) -> trio.lowlevel.Abort: 65 | return trio.lowlevel.Abort.SUCCEEDED # pragma: no cover 66 | 67 | 68 | async def test_treevar_follows_eventual_parent() -> None: 69 | tv1 = TreeVar[str]("tv1") 70 | 71 | async def manage_target(task_status: trio.TaskStatus[trio.Nursery]) -> None: 72 | assert tv1.get() == "source nursery" 73 | with tv1.being("target nursery"): 74 | assert tv1.get() == "target nursery" 75 | async with trio.open_nursery() as target_nursery: 76 | with tv1.being("target nested child"): 77 | assert tv1.get() == "target nested child" 78 | task_status.started(target_nursery) 79 | await trio.lowlevel.wait_task_rescheduled(trivial_abort) 80 | assert tv1.get() == "target nested child" 81 | assert tv1.get() == "target nursery" 82 | assert tv1.get() == "target nursery" 83 | assert tv1.get() == "source nursery" 84 | 85 | async def verify( 86 | value: str, *, task_status: trio.TaskStatus[None] = trio.TASK_STATUS_IGNORED 87 | ) -> None: 88 | assert tv1.get() == value 89 | task_status.started() 90 | assert tv1.get() == value 91 | 92 | with tv1.being("source nursery"): 93 | async with trio.open_nursery() as source_nursery: 94 | with tv1.being("source->target start call"): 95 | target_nursery = await source_nursery.start(manage_target) 96 | with tv1.being("verify task"): 97 | source_nursery.start_soon(verify, "source nursery") 98 | target_nursery.start_soon(verify, "target nursery") 99 | await source_nursery.start(verify, "source nursery") 100 | await target_nursery.start(verify, "target nursery") 101 | trio.lowlevel.reschedule(target_nursery.parent_task) 
102 | 103 | 104 | async def test_treevar_token_bound_to_task_that_obtained_it() -> None: 105 | tv1 = TreeVar[int]("tv1") 106 | token: Optional[TreeVarToken[int]] = None 107 | 108 | async def get_token() -> None: 109 | nonlocal token 110 | token = tv1.set(10) 111 | try: 112 | await trio.lowlevel.wait_task_rescheduled(trivial_abort) 113 | finally: 114 | tv1.reset(token) 115 | with pytest.raises(LookupError): 116 | tv1.get() 117 | with pytest.raises(LookupError): 118 | tv1.get_in(trio.lowlevel.current_task()) 119 | 120 | async with trio.open_nursery() as nursery: 121 | nursery.start_soon(get_token) 122 | await trio.testing.wait_all_tasks_blocked() 123 | assert token is not None 124 | with pytest.raises(ValueError, match="different Context"): 125 | tv1.reset(token) 126 | assert tv1.get_in(list(nursery.child_tasks)[0]) == 10 127 | nursery.cancel_scope.cancel() 128 | 129 | 130 | def test_treevar_outside_run() -> None: 131 | async def run_sync(fn: Any, *args: Any) -> Any: 132 | return fn(*args) 133 | 134 | tv1 = TreeVar("tv1", default=10) 135 | for operation in ( 136 | tv1.get, 137 | partial(tv1.get, 20), 138 | partial(tv1.set, 30), 139 | lambda: tv1.reset(trio.run(run_sync, tv1.set, 10)), 140 | tv1.being(40).__enter__, 141 | ): 142 | with pytest.raises(RuntimeError, match="must be called from async context"): 143 | operation() # type: ignore 144 | -------------------------------------------------------------------------------- /tricycle/_tree_var.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import attrs 4 | import contextvars 5 | import trio 6 | import weakref 7 | from contextlib import contextmanager 8 | from typing import ( 9 | TypeVar, 10 | Generic, 11 | Any, 12 | Iterator, 13 | MutableMapping, 14 | Optional, 15 | Union, 16 | cast, 17 | overload, 18 | ) 19 | 20 | T = TypeVar("T") 21 | U = TypeVar("U") 22 | 23 | 24 | __all__ = ["TreeVar", "TreeVarToken"] 25 | 26 | 27 | MISSING: Any = contextvars.Token.MISSING 28 | 29 | 30 | @attrs.define(eq=False) 31 | class _TreeVarState(Generic[T]): 32 | """The value associated with the inner contextvar of a TreeVar.""" 33 | 34 | # Weakref to the task for which this state is valid; used to notice 35 | # when a TreeVar has been inherited across start_soon() and recompute 36 | # its value via tree-based inheritance. 37 | task_ref: weakref.ref[trio.lowlevel.Task] 38 | 39 | # Value accessed by TreeVar.get() and TreeVar.set() within that task. 40 | value_for_task: T = MISSING 41 | 42 | # Value that will be inherited by children of the given nursery within 43 | # that task. Used to avoid having modifications after a nursery 44 | # was opened affect child tasks of that nursery. 45 | value_for_children: MutableMapping[trio.Nursery, T] = attrs.Factory( 46 | weakref.WeakKeyDictionary 47 | ) 48 | 49 | def save_current_for_children(self) -> None: 50 | """Associate the current value_for_task as the value_for_children 51 | of all this task's child nurseries that were not already being tracked. 52 | Call this before modifying the value_for_task. 
53 | """ 54 | task = self.task_ref() 55 | if task is None: # pragma: no cover 56 | return 57 | for nursery in task.child_nurseries: 58 | self.value_for_children.setdefault(nursery, self.value_for_task) 59 | 60 | 61 | @attrs.frozen 62 | class TreeVarToken(Generic[T]): 63 | var: TreeVar[T] 64 | old_value: T 65 | _context: contextvars.Context = attrs.field(repr=False) 66 | 67 | 68 | class TreeVar(Generic[T]): 69 | """A "tree variable": like a context variable except that its value 70 | in a new task is inherited from the new task's parent nursery rather 71 | than from the new task's spawner. 72 | 73 | `TreeVar` objects support all the same methods and attributes as 74 | `~contextvars.ContextVar` objects 75 | (:meth:`~contextvars.ContextVar.get`, 76 | :meth:`~contextvars.ContextVar.set`, 77 | :meth:`~contextvars.ContextVar.reset`, and 78 | `~contextvars.ContextVar.name`), and they are constructed the same 79 | way. They also provide the additional methods :meth:`being` and 80 | :meth:`get_in`, documented below. 81 | 82 | Accessing or changing the value of a `TreeVar` outside of a Trio 83 | task will raise `RuntimeError`. (Exception: :meth:`get_in` still 84 | works outside of a task, as long as you have a reference to the 85 | task or nursery of interest.) 86 | 87 | .. note:: `TreeVar` values are not directly stored in the 88 | `contextvars.Context`, so you can't use `Context.get() 89 | <contextvars.Context.get>` to access them. If you need the value 90 | in a context other than your own, use :meth:`get_in`. 91 | 92 | """ 93 | 94 | __slots__ = ("_cvar", "_default") 95 | 96 | _cvar: contextvars.ContextVar[_TreeVarState[T]] 97 | _default: T 98 | 99 | def __init__(self, name: str, *, default: T = MISSING): 100 | self._cvar = contextvars.ContextVar(name) 101 | self._default = default 102 | 103 | def __repr__(self) -> str: 104 | dflt = "" 105 | if self._default is not MISSING: 106 | dflt = f" default={self._default!r}" 107 | return ( 108 | f"<TreeVar name={self.name!r}{dflt}>" 109 | ) 110 | 111 | @property 112 | def name(self) -> str: 113 | return self._cvar.name 114 | 115 | def _fetch( 116 | self, 117 | for_task: trio.lowlevel.Task, 118 | current_task: Optional[trio.lowlevel.Task], 119 | ) -> _TreeVarState[T]: 120 | """Return the _TreeVarState associated with *for_task*, inheriting 121 | it from a parent nursery if necessary. 122 | """ 123 | try: 124 | current_state = for_task.context[self._cvar] 125 | set_in_task = current_state.task_ref() 126 | except KeyError: 127 | set_in_task = None 128 | if set_in_task is for_task: 129 | return current_state 130 | 131 | # This TreeVar hasn't yet been used in the current task. 132 | # Initialize it based on the value it had when any of our 133 | # enclosing nurseries were opened, nearest first. 134 | nursery = for_task.eventual_parent_nursery or for_task.parent_nursery 135 | inherited_value: T 136 | if nursery is None: 137 | inherited_value = MISSING 138 | else: 139 | parent_state = self._fetch(nursery.parent_task, current_task) 140 | inherited_value = parent_state.value_for_children.get( 141 | nursery, parent_state.value_for_task 142 | ) 143 | new_state = _TreeVarState[T](weakref.ref(for_task), inherited_value) 144 | if current_task is None: 145 | # If no current_task was provided, then we're being called 146 | # from get_in() and should not cache the intermediate 147 | # values in case we're in a different thread where 148 | # context.run() might fail.
149 | pass 150 | elif for_task.context is current_task.context: 151 | self._cvar.set(new_state) 152 | else: 153 | for_task.context.run(self._cvar.set, new_state) 154 | return new_state 155 | 156 | @overload 157 | def get(self) -> T: 158 | ... 159 | 160 | @overload 161 | def get(self, default: U) -> Union[T, U]: 162 | ... 163 | 164 | def get(self, default: U = MISSING) -> Union[T, U]: 165 | this_task = trio.lowlevel.current_task() 166 | state = self._fetch(this_task, this_task) 167 | if state.value_for_task is not MISSING: 168 | return state.value_for_task 169 | elif default is not MISSING: 170 | return default 171 | elif self._default is not MISSING: 172 | return self._default 173 | else: 174 | raise LookupError(self) 175 | 176 | def set(self, value: T) -> TreeVarToken[T]: 177 | this_task = trio.lowlevel.current_task() 178 | state = self._fetch(this_task, this_task) 179 | state.save_current_for_children() 180 | prev_value, state.value_for_task = state.value_for_task, value 181 | return TreeVarToken(self, prev_value, this_task.context) 182 | 183 | def reset(self, token: TreeVarToken[T]) -> None: 184 | this_task = trio.lowlevel.current_task() 185 | if token._context is not this_task.context: 186 | raise ValueError(f"{token!r} was created in a different Context") 187 | state = self._fetch(this_task, this_task) 188 | state.save_current_for_children() 189 | state.value_for_task = token.old_value 190 | 191 | @contextmanager 192 | def being(self, value: T) -> Iterator[None]: 193 | """Returns a context manager which sets the value of this `TreeVar` to 194 | *value* upon entry and restores its previous value upon exit. 195 | """ 196 | token = self.set(value) 197 | try: 198 | yield 199 | finally: 200 | self.reset(token) 201 | 202 | @overload 203 | def get_in(self, task_or_nursery: Union[trio.lowlevel.Task, trio.Nursery]) -> T: 204 | ... 205 | 206 | @overload 207 | def get_in( 208 | self, task_or_nursery: Union[trio.lowlevel.Task, trio.Nursery], default: U 209 | ) -> Union[T, U]: 210 | ... 211 | 212 | def get_in( 213 | self, 214 | task_or_nursery: Union[trio.lowlevel.Task, trio.Nursery], 215 | default: U = MISSING, 216 | ) -> Union[T, U]: 217 | """Gets the value of this `TreeVar` in the given 218 | `~trio.lowlevel.Task` or `~trio.Nursery`. 219 | 220 | The value in a task is the value that would be returned by a 221 | call to :meth:`~contextvars.ContextVar.get` in that task. The 222 | value in a nursery is the value that would be returned by 223 | :meth:`~contextvars.ContextVar.get` at the beginning of a new 224 | child task started in that nursery. The *default* argument has 225 | the same semantics as it does for :meth:`~contextvars.ContextVar.get`. 
226 | """ 227 | if isinstance(task_or_nursery, trio.Nursery): 228 | task = task_or_nursery.parent_task 229 | else: 230 | task = task_or_nursery 231 | state = self._fetch(for_task=task, current_task=None) 232 | if task is task_or_nursery: 233 | result = state.value_for_task 234 | else: 235 | assert isinstance(task_or_nursery, trio.Nursery) 236 | result = state.value_for_children.get(task_or_nursery, state.value_for_task) 237 | if result is not MISSING: 238 | return result 239 | elif default is not MISSING: 240 | return default 241 | elif self._default is not MISSING: 242 | return self._default 243 | else: 244 | raise LookupError(self) 245 | -------------------------------------------------------------------------------- /tricycle/_version.py: -------------------------------------------------------------------------------- 1 | __version__ = "0.4.1+dev" 2 | -------------------------------------------------------------------------------- /tricycle/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/oremanj/tricycle/29559d1768cc5edb960acc6079ab4e3a6e15ee19/tricycle/py.typed --------------------------------------------------------------------------------
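A minimal usage sketch (not a file from this repository) of the MultiCancelScope pattern exercised in tricycle/_tests/test_multi_cancel.py above: one parent object fans cancellation out to every child CancelScope opened from it. The worker/main names are illustrative only.

import trio
from tricycle import MultiCancelScope


async def worker(parent: MultiCancelScope, name: str) -> None:
    # Each worker gets its own child CancelScope tied to the shared parent.
    with parent.open_child() as child:
        try:
            await trio.sleep_forever()
        finally:
            print(name, "cancel_called:", child.cancel_called)


async def main() -> None:
    parent = MultiCancelScope()
    async with trio.open_nursery() as nursery:
        nursery.start_soon(worker, parent, "a")
        nursery.start_soon(worker, parent, "b")
        await trio.sleep(0.1)
        # Cancelling the parent cancels every child scope opened from it,
        # including ones opened later (as the tests above demonstrate).
        parent.cancel()


trio.run(main)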
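A minimal usage sketch (not a file from this repository) of the RWLock API exercised in tricycle/_tests/test_rwlock.py above: read_locked() admits any number of concurrent readers, write_locked() gives exclusive access. The reader/writer helpers are illustrative only.

from typing import List

import trio
from tricycle import RWLock


async def reader(lock: RWLock, shared: List[int], name: str) -> None:
    async with lock.read_locked():
        # Any number of readers may hold the lock at the same time.
        print(name, "sees", list(shared))


async def writer(lock: RWLock, shared: List[int], value: int) -> None:
    async with lock.write_locked():
        # Writers hold the lock exclusively.
        shared.append(value)


async def main() -> None:
    lock = RWLock()
    shared: List[int] = []
    async with trio.open_nursery() as nursery:
        nursery.start_soon(writer, lock, shared, 1)
        for name in ("r1", "r2", "r3"):
            nursery.start_soon(reader, lock, shared, name)
        nursery.start_soon(writer, lock, shared, 2)


trio.run(main)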
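A sketch (not a file from this repository) of reading length-prefixed records with BufferedReceiveStream, using the receive_exactly()/receive_all_or_none() helpers exercised in tricycle/_tests/test_streams.py above. The 4-byte framing format and the read_records helper are illustrative assumptions, not something tricycle defines.

import struct
from typing import List

import trio
import trio.testing
from tricycle import BufferedReceiveStream


async def read_records(raw: trio.abc.ReceiveStream) -> List[bytes]:
    stream = BufferedReceiveStream(raw, chunk_size=4096)
    records: List[bytes] = []
    while True:
        # receive_all_or_none() returns None on a clean EOF at a record
        # boundary and raises ValueError if the prefix is truncated.
        header = await stream.receive_all_or_none(4)
        if header is None:
            return records
        (length,) = struct.unpack("!I", header)
        # receive_exactly() raises ValueError on a short read.
        records.append(await stream.receive_exactly(length))


async def main() -> None:
    send, receive = trio.testing.memory_stream_one_way_pair()
    await send.send_all(
        struct.pack("!I", 5) + b"hello" + struct.pack("!I", 3) + b"fox"
    )
    await send.aclose()
    print(await read_records(receive))  # [b'hello', b'fox']


trio.run(main)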
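A minimal usage sketch (not a file from this repository) of the tree-based inheritance described in the TreeVar docstring above: a task started in a nursery sees the value the variable had when that nursery was opened, not the spawning task's current value. The request_id variable and task names are illustrative only.

import trio
from tricycle import TreeVar

request_id = TreeVar("request_id", default="<unset>")


async def child() -> None:
    # Inherited from the nursery, which was opened while the variable was
    # still at its default -- not from the spawning task's current value.
    print("child sees:", request_id.get())


async def main() -> None:
    async with trio.open_nursery() as nursery:
        with request_id.being("req-1"):
            nursery.start_soon(child)                 # prints "<unset>"
            print("parent sees:", request_id.get())   # prints "req-1"


trio.run(main)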