├── .codecov.yml ├── .coveragerc ├── .github ├── CONTRIBUTING.rst ├── ISSUE_TEMPLATE │ ├── 01-bug_report.yml │ └── 02-feature_request.yml ├── PULL_REQUEST_TEMPLATE │ └── 01-pull_request.md ├── dependabot.yml └── workflows │ ├── codeql.yml │ ├── python-publish.yml │ ├── unittests.yml │ └── verification.yml ├── .gitignore ├── .readthedocs.yml ├── LICENSE ├── README.rst ├── asyncstdlib ├── __init__.py ├── _core.py ├── _lrucache.py ├── _lrucache.pyi ├── _typing.py ├── _utility.py ├── asynctools.py ├── builtins.py ├── builtins.pyi ├── contextlib.py ├── contextlib.pyi ├── functools.py ├── functools.pyi ├── heapq.py ├── heapq.pyi ├── itertools.py ├── itertools.pyi └── py.typed ├── docs ├── Makefile ├── _static │ └── custom.css ├── conf.py ├── index.rst ├── make.bat └── source │ ├── api │ ├── asynctools.rst │ ├── builtins.rst │ ├── contextlib.rst │ ├── functools.rst │ ├── heapq.rst │ └── itertools.rst │ ├── devel │ ├── contributing.rst │ ├── publishing.rst │ └── testloop.rst │ ├── glossary.rst │ └── notes │ ├── compatible.rst │ └── iter_scope.rst ├── pyproject.toml ├── setup.cfg ├── typetests ├── README.rst ├── test_functools.py └── test_itertools.py └── unittests ├── __init__.py ├── test_asynctools.py ├── test_builtins.py ├── test_contextlib.py ├── test_functools.py ├── test_functools_lru.py ├── test_heapq.py ├── test_helpers.py ├── test_itertools.py └── utility.py /.codecov.yml: -------------------------------------------------------------------------------- 1 | ignore: 2 | # type tests are not execute, so there is no code coverage 3 | - "typetests" 4 | -------------------------------------------------------------------------------- /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | source = asyncstdlib 3 | branch = TRUE 4 | cover_pylib = FALSE 5 | parallel = False 6 | 7 | [report] 8 | show_missing = True 9 | exclude_lines = 10 | # default 11 | pragma: no cover 12 | # python debug/internals 13 | def __repr__ 14 | 
if __debug__: 15 | assert 16 | raise AssertionError 17 | raise NotImplementedError 18 | return NotImplemented 19 | if __name__ == "__main__" 20 | if __name__ == '__main__' 21 | @overload 22 | if TYPE_CHECKING 23 | ... 24 | -------------------------------------------------------------------------------- /.github/CONTRIBUTING.rst: -------------------------------------------------------------------------------- 1 | ../docs/source/contributing.rst -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/01-bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Report broken or unexpected features 3 | title: "[Bug]: " 4 | labels: ["bug"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: "Thanks for taking the time to help us help you!" 9 | - type: textarea 10 | id: what-happened 11 | attributes: 12 | label: What happened? 13 | description: "Shortly outline the bug in your own words. A few sentences are enough." 14 | placeholder: "When I do X then Y happens. But Z should happen instead!" 15 | validations: 16 | required: true 17 | - type: textarea 18 | id: mre 19 | attributes: 20 | label: Minimal Reproducible Example 21 | description: Please create a short example code that demonstrates the bug. 22 | render: python 23 | value: | 24 | import asyncio 25 | import asyncstdlib as a 26 | 27 | async def example(): 28 | # TODO: add code that triggers the bug here 29 | 30 | asyncio.run(example()) 31 | - type: checkboxes 32 | id: self-assign 33 | attributes: 34 | label: Request Assignment [Optional] 35 | options: 36 | - label: I already understand the cause and want to submit a bugfix. 
37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/02-feature_request.yml: -------------------------------------------------------------------------------- 1 | name: Feature Request 2 | description: Request new or missing features 3 | title: "[Feature]: " 4 | labels: ["enhancement"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: "Thanks for taking the time to help us help you!" 9 | - type: textarea 10 | id: outline 11 | attributes: 12 | label: Describe the feature 13 | description: "Shortly outline the feature in your own words. A few sentences are enough." 14 | placeholder: "When I do X then Y should happen. It is important that X/Y can do Z!" 15 | validations: 16 | required: true 17 | - type: checkboxes 18 | id: self-assign 19 | attributes: 20 | label: Request Assignment [Optional] 21 | options: 22 | - label: I already have a plan and want to submit an implementation. 23 | -------------------------------------------------------------------------------- /.github/PULL_REQUEST_TEMPLATE/01-pull_request.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | This PR changes/fixes . 
Notable changes include: 4 | 5 | 6 | * Fixed some thing 7 | * Changed another thing 8 | 9 | Closes # 10 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | reviewers: 8 | - "maxfischer2781" 9 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "master" ] 6 | pull_request: 7 | branches: [ "master" ] 8 | schedule: 9 | - cron: "4 9 * * 4" 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ python ] 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v4 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v3 31 | with: 32 | languages: ${{ matrix.language }} 33 | queries: +security-and-quality 34 | 35 | - name: Autobuild 36 | uses: github/codeql-action/autobuild@v3 37 | 38 | - name: Perform CodeQL Analysis 39 | uses: github/codeql-action/analyze@v3 40 | with: 41 | category: "/language:${{ matrix.language }}" 42 | -------------------------------------------------------------------------------- /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | # This workflow uses actions that 
are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation. 8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | jobs: 16 | deploy: 17 | 18 | runs-on: ubuntu-latest 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Set up Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.x' 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install build 30 | - name: Build package 31 | run: python -m build 32 | - name: Publish package 33 | uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc 34 | with: 35 | user: __token__ 36 | password: ${{ secrets.PYPI_API_TOKEN }} 37 | -------------------------------------------------------------------------------- /.github/workflows/unittests.yml: -------------------------------------------------------------------------------- 1 | name: Unit Tests 2 | 3 | on: 4 | push: 5 | pull_request: 6 | schedule: 7 | - cron: '0 10 * * 3,6' 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | strategy: 13 | matrix: 14 | python-version: [ 15 | '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 16 | 'pypy-3.8', 'pypy-3.10' 17 | ] 18 | 19 | steps: 20 | - uses: actions/checkout@v4 21 | - name: Set up Python ${{ matrix.python-version }} 22 | uses: actions/setup-python@v5 23 | with: 24 | python-version: ${{ matrix.python-version }} 25 | allow-prereleases: true 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install --upgrade pip 29 | pip install .[test] 30 | - name: Test with pytest 31 | run: | 32 | pytest --durations=0 --cov=./ 33 | - name: Upload coverage to Codecov 34 | uses: codecov/codecov-action@v5 35 | -------------------------------------------------------------------------------- /.github/workflows/verification.yml: 
-------------------------------------------------------------------------------- 1 | name: Static Checks 2 | 3 | on: 4 | push: 5 | pull_request: 6 | schedule: 7 | - cron: '0 10 * * 3,6' 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | - name: Set up Python 15 | uses: actions/setup-python@v5 16 | with: 17 | python-version: '3.x' 18 | - name: Install dependencies 19 | run: | 20 | python -m pip install --upgrade pip 21 | pip install .[test,typetest] 22 | - name: Lint with flake8 23 | run: | 24 | flake8 asyncstdlib unittests 25 | - name: Format with black 26 | run: | 27 | black asyncstdlib unittests --diff --check 28 | - name: Verify with MyPy 29 | run: | 30 | mypy --pretty 31 | - name: Verify with PyRight 32 | uses: jakebailey/pyright-action@v2 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Windows template 3 | # Windows thumbnail cache files 4 | Thumbs.db 5 | ehthumbs.db 6 | ehthumbs_vista.db 7 | 8 | # Dump file 9 | *.stackdump 10 | 11 | # Folder config file 12 | [Dd]esktop.ini 13 | 14 | # Recycle Bin used on file shares 15 | $RECYCLE.BIN/ 16 | 17 | # Windows Installer files 18 | *.cab 19 | *.msi 20 | *.msm 21 | *.msp 22 | 23 | # Windows shortcuts 24 | *.lnk 25 | ### JetBrains template 26 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm 27 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 28 | 29 | # User-specific stuff: 30 | .idea 31 | .idea/**/workspace.xml 32 | .idea/**/tasks.xml 33 | .idea/dictionaries 34 | 35 | # Sensitive or high-churn files: 36 | .idea/**/dataSources/ 37 | .idea/**/dataSources.ids 38 | .idea/**/dataSources.xml 39 | .idea/**/dataSources.local.xml 40 | .idea/**/sqlDataSources.xml 41 | .idea/**/dynamic.xml 42 | 
.idea/**/uiDesigner.xml 43 | 44 | # Gradle: 45 | .idea/**/gradle.xml 46 | .idea/**/libraries 47 | 48 | # CMake 49 | cmake-build-debug/ 50 | cmake-build-release/ 51 | 52 | # Mongo Explorer plugin: 53 | .idea/**/mongoSettings.xml 54 | 55 | ## File-based project format: 56 | *.iws 57 | 58 | ## Plugin-specific files: 59 | 60 | # IntelliJ 61 | out/ 62 | 63 | # mpeltonen/sbt-idea plugin 64 | .idea_modules/ 65 | 66 | # JIRA plugin 67 | atlassian-ide-plugin.xml 68 | 69 | # Cursive Clojure plugin 70 | .idea/replstate.xml 71 | 72 | # Crashlytics plugin (for Android Studio and IntelliJ) 73 | com_crashlytics_export_strings.xml 74 | crashlytics.properties 75 | crashlytics-build.properties 76 | fabric.properties 77 | ### Python template 78 | # Byte-compiled / optimized / DLL files 79 | __pycache__/ 80 | *.py[cod] 81 | *$py.class 82 | 83 | # C extensions 84 | *.so 85 | 86 | # Distribution / packaging 87 | .Python 88 | build/ 89 | develop-eggs/ 90 | dist/ 91 | downloads/ 92 | eggs/ 93 | .eggs/ 94 | lib/ 95 | lib64/ 96 | parts/ 97 | sdist/ 98 | var/ 99 | wheels/ 100 | *.egg-info/ 101 | .installed.cfg 102 | *.egg 103 | MANIFEST 104 | 105 | # PyInstaller 106 | # Usually these files are written by a python script from a template 107 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
108 | *.manifest 109 | *.spec 110 | 111 | # Installer logs 112 | pip-log.txt 113 | pip-delete-this-directory.txt 114 | 115 | # Unit test / coverage reports 116 | htmlcov/ 117 | .tox/ 118 | .coverage 119 | .coverage.* 120 | .cache 121 | nosetests.xml 122 | coverage.xml 123 | *.cover 124 | .hypothesis/ 125 | .pytest_cache 126 | 127 | # Translations 128 | *.mo 129 | *.pot 130 | 131 | # Django stuff: 132 | *.log 133 | .static_storage/ 134 | .media/ 135 | local_settings.py 136 | 137 | # Flask stuff: 138 | instance/ 139 | .webassets-cache 140 | 141 | # Scrapy stuff: 142 | .scrapy 143 | 144 | # Sphinx documentation 145 | docs/_build/ 146 | docs/_static/ 147 | docs/_templates/ 148 | 149 | # PyBuilder 150 | target/ 151 | 152 | # Jupyter Notebook 153 | .ipynb_checkpoints 154 | 155 | # pyenv 156 | .python-version 157 | 158 | # celery beat schedule file 159 | celerybeat-schedule 160 | 161 | # SageMath parsed files 162 | *.sage.py 163 | 164 | # Environments 165 | .env 166 | .venv 167 | env/ 168 | venv/ 169 | ENV/ 170 | env.bak/ 171 | venv.bak/ 172 | 173 | # Spyder project settings 174 | .spyderproject 175 | .spyproject 176 | 177 | # Rope project settings 178 | .ropeproject 179 | 180 | # mkdocs documentation 181 | /site 182 | 183 | # mypy 184 | .mypy_cache/ 185 | ### Linux template 186 | *~ 187 | 188 | # temporary files which can be created if a process still has a handle open of a deleted file 189 | .fuse_hidden* 190 | 191 | # KDE directory preferences 192 | .directory 193 | 194 | # Linux trash folder which might appear on any partition or disk 195 | .Trash-* 196 | 197 | # .nfs files are created when an open file is removed but is still being accessed 198 | .nfs* 199 | ### macOS template 200 | # General 201 | .DS_Store 202 | .AppleDouble 203 | .LSOverride 204 | 205 | # Icon must end with two \r 206 | Icon 207 | 208 | # Thumbnails 209 | ._* 210 | 211 | # Files that might appear in the root of a volume 212 | .DocumentRevisions-V100 213 | .fseventsd 214 | .Spotlight-V100 215 | 
.TemporaryItems 216 | .Trashes 217 | .VolumeIcon.icns 218 | .com.apple.timemachine.donotpresent 219 | 220 | # Directories potentially created on remote AFP share 221 | .AppleDB 222 | .AppleDesktop 223 | Network Trash Folder 224 | Temporary Items 225 | .apdisk 226 | 227 | -------------------------------------------------------------------------------- /.readthedocs.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.11" 7 | 8 | python: 9 | install: 10 | - method: pip 11 | path: . 12 | extra_requirements: 13 | - doc 14 | 15 | sphinx: 16 | builder: html 17 | configuration: docs/conf.py 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 - 2024 Max Kühn 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ================================================ 2 | ``asyncstdlib`` -- the missing ``async`` toolbox 3 | ================================================ 4 | 5 | .. image:: https://readthedocs.org/projects/asyncstdlib/badge/?version=latest 6 | :target: http://asyncstdlib.readthedocs.io/en/latest/?badge=latest 7 | :alt: Documentation Status 8 | 9 | .. image:: https://img.shields.io/pypi/v/asyncstdlib.svg 10 | :alt: Available on PyPI 11 | :target: https://pypi.python.org/pypi/asyncstdlib/ 12 | 13 | .. image:: https://anaconda.org/conda-forge/asyncstdlib/badges/version.svg 14 | :alt: Available on Conda-Forge 15 | :target: https://anaconda.org/conda-forge/asyncstdlib 16 | 17 | .. image:: https://img.shields.io/github/license/maxfischer2781/asyncstdlib.svg 18 | :alt: License 19 | :target: https://github.com/maxfischer2781/asyncstdlib/blob/master/LICENSE 20 | 21 | .. image:: https://badges.gitter.im/maxfischer2781/asyncstdlib.svg 22 | :target: https://gitter.im/maxfischer2781/asyncstdlib?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge 23 | :alt: Development Chat 24 | 25 | The ``asyncstdlib`` library re-implements functions and classes of the Python 26 | standard library to make them compatible with ``async`` callables, iterables 27 | and context managers. 28 | It is fully agnostic to ``async`` event loops and seamlessly works with 29 | ``asyncio``, third-party libraries such as ``trio``, as well as 30 | any custom ``async`` event loop. 
31 | 32 | * Full set of ``async`` versions of advantageous standard library helpers, 33 | such as ``zip``, ``map``, ``enumerate``, ``functools.reduce``, 34 | ``itertools.tee``, ``itertools.groupby`` and many others. 35 | * Safe handling of ``async`` iterators to ensure prompt cleanup, as well as 36 | various helpers to simplify safely using custom ``async`` iterators. 37 | * Small but powerful toolset to seamlessly integrate existing sync code 38 | into ``async`` programs and libraries. 39 | 40 | Check out the `documentation`_ to get started or take a look around. 41 | 42 | .. _documentation: http://asyncstdlib.readthedocs.io/ -------------------------------------------------------------------------------- /asyncstdlib/__init__.py: -------------------------------------------------------------------------------- 1 | """The missing async toolbox""" 2 | 3 | from .builtins import ( 4 | anext, 5 | zip, 6 | map, 7 | filter, 8 | enumerate, 9 | iter, 10 | all, 11 | any, 12 | max, 13 | min, 14 | sum, 15 | list, 16 | dict, 17 | set, 18 | tuple, 19 | sorted, 20 | ) 21 | from .functools import reduce, lru_cache, cache, cached_property 22 | from .contextlib import ( 23 | closing, 24 | ContextDecorator, 25 | contextmanager, 26 | nullcontext, 27 | ExitStack, 28 | ) 29 | from .itertools import ( 30 | accumulate, 31 | batched, 32 | cycle, 33 | chain, 34 | compress, 35 | dropwhile, 36 | filterfalse, 37 | islice, 38 | takewhile, 39 | starmap, 40 | tee, 41 | pairwise, 42 | zip_longest, 43 | groupby, 44 | ) 45 | from .asynctools import borrow, scoped_iter, await_each, any_iter, apply, sync 46 | from .heapq import merge, nlargest, nsmallest 47 | 48 | __version__ = "3.13.1" 49 | 50 | __all__ = [ 51 | "anext", 52 | "zip", 53 | "map", 54 | "filter", 55 | "enumerate", 56 | "iter", 57 | "all", 58 | "any", 59 | "max", 60 | "min", 61 | "sum", 62 | "list", 63 | "dict", 64 | "set", 65 | "tuple", 66 | "sorted", 67 | # functools 68 | "reduce", 69 | "lru_cache", 70 | "cache", 71 | 
"cached_property", 72 | # contextlib 73 | "closing", 74 | "ContextDecorator", 75 | "contextmanager", 76 | "nullcontext", 77 | "ExitStack", 78 | # itertools 79 | "accumulate", 80 | "batched", 81 | "cycle", 82 | "chain", 83 | "compress", 84 | "dropwhile", 85 | "filterfalse", 86 | "takewhile", 87 | "islice", 88 | "starmap", 89 | "tee", 90 | "pairwise", 91 | "zip_longest", 92 | "groupby", 93 | # asynctools 94 | "borrow", 95 | "scoped_iter", 96 | "await_each", 97 | "any_iter", 98 | "apply", 99 | "sync", 100 | # heapq 101 | "merge", 102 | "nlargest", 103 | "nsmallest", 104 | ] 105 | -------------------------------------------------------------------------------- /asyncstdlib/_core.py: -------------------------------------------------------------------------------- 1 | """ 2 | Internal helpers to safely build async abstractions 3 | 4 | While some of these helpers have public siblings 5 | (e.g. :py:class:`~.ScopedIter` and :py:func:`~.asynctools.scoped_iter`) 6 | they are purposely kept separate. 7 | Any helpers in this module are *not* bound to maintaining a public interface, 8 | and offer less convenience to save on overhead. 
9 | """ 10 | 11 | from inspect import iscoroutinefunction 12 | from typing import ( 13 | Any, 14 | AsyncIterator, 15 | AsyncGenerator, 16 | Iterable, 17 | AsyncIterable, 18 | Generic, 19 | Optional, 20 | Awaitable, 21 | Callable, 22 | Type, 23 | ) 24 | from types import TracebackType 25 | 26 | from ._typing import T, AnyIterable 27 | 28 | 29 | class Sentinel: 30 | """Placeholder with configurable ``repr``""" 31 | 32 | __slots__ = ("name",) 33 | 34 | def __init__(self, name: str): 35 | self.name = name 36 | 37 | def __repr__(self) -> str: 38 | return self.name 39 | 40 | 41 | def aiter(subject: AnyIterable[T]) -> AsyncIterator[T]: 42 | """ 43 | An async iterator object yielding elements from ``subject`` 44 | 45 | :raises TypeError: if ``subject`` does not support any iteration protocol 46 | 47 | The ``subject`` must support 48 | the async iteration protocol (the :py:meth:`object.__aiter__` method), 49 | the regular iteration protocol (the :py:meth:`object.__iter__` method), 50 | or it must support the sequence protocol (the :py:meth:`object.__getitem__` 51 | method with integer arguments starting at 0). 52 | In either case, an async iterator is returned. 53 | """ 54 | if isinstance(subject, AsyncIterable): 55 | return subject.__aiter__() 56 | else: 57 | return _aiter_sync(subject).__aiter__() 58 | 59 | 60 | async def _aiter_sync(iterable: Iterable[T]) -> AsyncIterator[T]: 61 | """Helper to provide an async iterator for a regular iterable""" 62 | for item in iterable: 63 | yield item 64 | 65 | 66 | class ScopedIter(Generic[T]): 67 | """ 68 | Context manager that provides and cleans up an iterator for an iterable 69 | 70 | Note that unlike :py:func:`~.asynctools.scoped_iter`, this helper does 71 | *not* borrow the iterator automatically. Use :py:func:`~.borrow` if needed. 
72 | """ 73 | 74 | __slots__ = ("_iterator",) 75 | 76 | def __init__(self, iterable: AnyIterable[T]): 77 | self._iterator: AsyncIterator[T] = aiter(iterable) 78 | 79 | async def __aenter__(self) -> AsyncIterator[T]: 80 | return self._iterator 81 | 82 | async def __aexit__( 83 | self, 84 | exc_type: Optional[Type[BaseException]], 85 | exc_val: Optional[BaseException], 86 | exc_tb: Optional[TracebackType], 87 | ) -> None: 88 | try: 89 | aclose = self._iterator.aclose() # type: ignore 90 | except AttributeError: 91 | pass 92 | else: 93 | await aclose 94 | 95 | 96 | def borrow(iterator: AsyncIterator[T]) -> AsyncGenerator[T, None]: 97 | """Borrow an async iterator for iteration, preventing it from being closed""" 98 | return (item async for item in iterator) 99 | 100 | 101 | def awaitify( 102 | function: "Callable[..., Awaitable[T]] | Callable[..., T]", 103 | ) -> Callable[..., Awaitable[T]]: 104 | """Ensure that ``function`` can be used in ``await`` expressions""" 105 | if iscoroutinefunction(function): 106 | return function 107 | else: 108 | return Awaitify(function) 109 | 110 | 111 | class Awaitify(Generic[T]): 112 | """Helper to peek at the return value of ``function`` and make it ``async``""" 113 | 114 | __slots__ = "__wrapped__", "_async_call" 115 | 116 | def __init__(self, function: "Callable[..., Awaitable[T]] | Callable[..., T]"): 117 | self.__wrapped__ = function 118 | self._async_call: "Callable[..., Awaitable[T]] | None" = None 119 | 120 | def __call__(self, *args: Any, **kwargs: Any) -> Awaitable[T]: 121 | if (async_call := self._async_call) is None: 122 | value = self.__wrapped__(*args, **kwargs) 123 | if isinstance(value, Awaitable): 124 | self._async_call = self.__wrapped__ # type: ignore 125 | return value # pyright: ignore 126 | else: 127 | self._async_call = force_async(self.__wrapped__) # type: ignore 128 | return await_value(value) 129 | else: 130 | return async_call(*args, **kwargs) 131 | 132 | 133 | async def await_value(value: T) -> T: 134 | 
return value 135 | 136 | 137 | def force_async(call: Callable[..., T]) -> Callable[..., Awaitable[T]]: 138 | async def async_wrapped(*args: Any, **kwargs: Any) -> T: 139 | return call(*args, **kwargs) 140 | 141 | return async_wrapped 142 | -------------------------------------------------------------------------------- /asyncstdlib/_lrucache.pyi: -------------------------------------------------------------------------------- 1 | from typing import ( 2 | TypeVar, 3 | Any, 4 | Awaitable, 5 | Callable, 6 | Coroutine, 7 | Generic, 8 | NamedTuple, 9 | overload, 10 | Protocol, 11 | ) 12 | from types import CoroutineType 13 | from typing_extensions import ParamSpec, Concatenate 14 | 15 | from ._typing import AC, TypedDict 16 | 17 | class CacheInfo(NamedTuple): 18 | hits: int 19 | misses: int 20 | maxsize: int | None 21 | currsize: int 22 | 23 | class CacheParameters(TypedDict): 24 | maxsize: int | None 25 | typed: bool 26 | 27 | R = TypeVar("R") 28 | P = ParamSpec("P") 29 | S = TypeVar("S") 30 | S2 = TypeVar("S2") 31 | 32 | class LRUAsyncCallable(Protocol[AC]): 33 | __slots__: tuple[str, ...] 34 | __call__: AC 35 | @overload 36 | def __get__( 37 | self: LRUAsyncCallable[AC], instance: None, owner: type | None = ... 38 | ) -> LRUAsyncCallable[AC]: ... 39 | @overload 40 | def __get__( 41 | self: LRUAsyncCallable[Callable[Concatenate[S, P], Coroutine[Any, Any, R]]], 42 | instance: S, 43 | owner: type | None = ..., 44 | ) -> LRUAsyncBoundCallable[S, P, R]: ... 45 | @overload 46 | def __get__( 47 | self: LRUAsyncCallable[Callable[Concatenate[S, P], CoroutineType[Any, Any, R]]], 48 | instance: S, 49 | owner: type | None = ..., 50 | ) -> LRUAsyncBoundCallable[S, P, R]: ... 51 | @overload 52 | def __get__( 53 | self: LRUAsyncCallable[Callable[Concatenate[S, P], Awaitable[R]]], 54 | instance: S, 55 | owner: type | None = ..., 56 | ) -> LRUAsyncBoundCallable[S, P, R]: ... 57 | @property 58 | def __wrapped__(self) -> AC: ... 59 | def cache_parameters(self) -> CacheParameters: ... 
60 | def cache_info(self) -> CacheInfo: ... 61 | def cache_clear(self) -> None: ... 62 | def cache_discard(self, *args: Any, **kwargs: Any) -> None: ... 63 | 64 | class LRUAsyncBoundCallable(Generic[S, P, R]): 65 | __slots__: tuple[str, ...] 66 | __self__: S 67 | __call__: Callable[P, Awaitable[R]] 68 | @overload 69 | def __get__( 70 | self, instance: None, owner: type | None = ... 71 | ) -> LRUAsyncBoundCallable[S, P, R]: ... 72 | @overload 73 | def __get__( 74 | self, instance: S2, owner: type | None = ... 75 | ) -> LRUAsyncBoundCallable[S2, P, R]: ... 76 | def __init__( 77 | self, 78 | lru: LRUAsyncCallable[Callable[Concatenate[S, P], Awaitable[R]]], 79 | __self__: S, 80 | ) -> None: ... 81 | @property 82 | def __wrapped__(self) -> Callable[Concatenate[S, P], Awaitable[R]]: ... 83 | @property 84 | def __func__( 85 | self, 86 | ) -> LRUAsyncCallable[Callable[Concatenate[S, P], Awaitable[R]]]: ... 87 | def cache_parameters(self) -> CacheParameters: ... 88 | def cache_info(self) -> CacheInfo: ... 89 | def cache_clear(self) -> None: ... 90 | def cache_discard(self, *args: Any, **kwargs: Any) -> None: ... 91 | 92 | @overload 93 | def lru_cache(maxsize: AC, typed: bool = ...) -> LRUAsyncCallable[AC]: ... 94 | @overload 95 | def lru_cache( 96 | maxsize: int | None = ..., typed: bool = ... 97 | ) -> Callable[[AC], LRUAsyncCallable[AC]]: ... 98 | -------------------------------------------------------------------------------- /asyncstdlib/_typing.py: -------------------------------------------------------------------------------- 1 | """ 2 | Helper module to simplify version specific typing imports 3 | 4 | This module is for internal use only. Do *not* put any new 5 | "async typing" definitions here. 
6 | """ 7 | 8 | from typing import ( 9 | TypeVar, 10 | Hashable, 11 | Union, 12 | AsyncIterable, 13 | Iterable, 14 | Callable, 15 | Any, 16 | Awaitable, 17 | runtime_checkable, 18 | Protocol, 19 | ContextManager, 20 | TypedDict, 21 | ) 22 | 23 | __all__ = [ 24 | "Protocol", 25 | "ContextManager", 26 | "TypedDict", 27 | "T", 28 | "T1", 29 | "T2", 30 | "T3", 31 | "T4", 32 | "T5", 33 | "R", 34 | "C", 35 | "AC", 36 | "HK", 37 | "LT", 38 | "ADD", 39 | "AClose", 40 | "ACloseable", 41 | "AnyIterable", 42 | ] 43 | 44 | # TypeVars for argument/return type 45 | T = TypeVar("T") 46 | T1 = TypeVar("T1") 47 | T2 = TypeVar("T2") 48 | T3 = TypeVar("T3") 49 | T4 = TypeVar("T4") 50 | T5 = TypeVar("T5") 51 | R = TypeVar("R", covariant=True) 52 | C = TypeVar("C", bound=Callable[..., Any]) 53 | AC = TypeVar("AC", bound=Callable[..., Awaitable[Any]]) 54 | 55 | #: Hashable Key 56 | HK = TypeVar("HK", bound=Hashable) 57 | 58 | # LT < LT 59 | LT = TypeVar("LT", bound="SupportsLT") 60 | 61 | 62 | class SupportsLT(Protocol): 63 | def __lt__(self: LT, other: LT) -> bool: 64 | raise NotImplementedError 65 | 66 | 67 | # ADD + ADD 68 | ADD = TypeVar("ADD", bound="SupportsAdd") 69 | 70 | 71 | class SupportsAdd(Protocol): 72 | def __add__(self: ADD, other: ADD, /) -> ADD: 73 | raise NotImplementedError 74 | 75 | 76 | # await AClose.aclose() 77 | AClose = TypeVar("AClose", bound="ACloseable") 78 | 79 | 80 | @runtime_checkable 81 | class ACloseable(Protocol): 82 | async def aclose(self) -> None: 83 | """Asynchronously close this object""" 84 | 85 | 86 | #: (async) iter T 87 | AnyIterable = Union[Iterable[T], AsyncIterable[T]] 88 | -------------------------------------------------------------------------------- /asyncstdlib/_utility.py: -------------------------------------------------------------------------------- 1 | from typing import TypeVar, Optional, Callable 2 | 3 | from ._typing import Protocol 4 | 5 | 6 | class Definition(Protocol): 7 | """ 8 | Type of objects created from a class or 
function definition 9 | """ 10 | 11 | __name__: str 12 | __module__: str 13 | __qualname__: str 14 | 15 | 16 | D = TypeVar("D", bound=Definition) 17 | 18 | 19 | def public_module( 20 | module_name: str, qual_name: Optional[str] = None 21 | ) -> Callable[[D], D]: 22 | """Set the module name of a function or class""" 23 | 24 | def decorator(thing: D) -> D: 25 | thing.__module__ = module_name 26 | if qual_name is not None: 27 | thing.__qualname__ = qual_name 28 | thing.__name__ = qual_name.rpartition(".")[-1] 29 | return thing 30 | 31 | return decorator 32 | -------------------------------------------------------------------------------- /asyncstdlib/asynctools.py: -------------------------------------------------------------------------------- 1 | from asyncio import iscoroutinefunction 2 | from functools import wraps 3 | from typing import ( 4 | Union, 5 | AsyncContextManager, 6 | AsyncIterator, 7 | TypeVar, 8 | AsyncGenerator, 9 | Iterable, 10 | Awaitable, 11 | AsyncIterable, 12 | Callable, 13 | Coroutine, 14 | Any, 15 | overload, 16 | Optional, 17 | ) 18 | 19 | from ._typing import T, T1, T2, T3, T4, T5, AnyIterable 20 | from ._core import aiter 21 | from .contextlib import nullcontext 22 | 23 | 24 | S = TypeVar("S") 25 | 26 | 27 | class _BorrowedAsyncIterator(AsyncGenerator[T, S]): 28 | """ 29 | Borrowed async iterator/generator, preventing to ``aclose`` the ``iterable`` 30 | """ 31 | 32 | # adding special methods such as `__anext__` as `__slots__` allows to set them 33 | # on the instance: the interpreter expects *descriptors* not methods, and 34 | # `__slots__` are descriptors just like methods. 
35 | __slots__ = "__wrapped__", "__anext__", "asend", "athrow", "_wrapper" 36 | 37 | # Type checker does not understand `__slot__` definitions 38 | __anext__: Callable[[Any], Coroutine[Any, Any, T]] 39 | asend: Any 40 | athrow: Any 41 | 42 | def __init__(self, iterator: Union[AsyncIterator[T], AsyncGenerator[T, S]]): 43 | self.__wrapped__ = iterator 44 | # Create an actual async generator wrapper that we can close. Otherwise, 45 | # if we pass on the original iterator methods we cannot disable them if 46 | # anyone has a reference to them. 47 | self._wrapper: AsyncGenerator[T, None] = (item async for item in iterator) 48 | # Forward all async iterator/generator methods but __aiter__ and aclose: 49 | # An async *iterator* (e.g. `async def: yield`) must return 50 | # itself from __aiter__. If we do not shadow this then 51 | # running aiter(self).aclose closes the underlying iterator. 52 | self.__anext__ = self._wrapper.__anext__ # type: ignore 53 | if hasattr(iterator, "asend"): 54 | self.asend = ( 55 | iterator.asend # pyright: ignore[reportUnknownMemberType,reportAttributeAccessIssue] 56 | ) 57 | if hasattr(iterator, "athrow"): 58 | self.athrow = ( 59 | iterator.athrow # pyright: ignore[reportUnknownMemberType,reportAttributeAccessIssue] 60 | ) 61 | 62 | def __aiter__(self) -> AsyncGenerator[T, S]: 63 | return self 64 | 65 | def __repr__(self) -> str: 66 | return f"" 67 | 68 | async def _aclose_wrapper(self) -> None: 69 | wrapper_iterator = self._wrapper 70 | # allow closing the intermediate wrapper 71 | # this prevents a resource warning if the wrapper is GC'd 72 | # the underlying iterator is NOT affected by this 73 | await wrapper_iterator.aclose() 74 | # disable direct asend/athrow to the underlying iterator 75 | if hasattr(self, "asend"): 76 | self.asend = wrapper_iterator.asend 77 | if hasattr(self, "athrow"): 78 | self.athrow = wrapper_iterator.athrow 79 | 80 | def aclose(self) -> Coroutine[Any, Any, None]: 81 | return self._aclose_wrapper() 82 | 83 | 84 | 
class _ScopedAsyncIterator(_BorrowedAsyncIterator[T, S]):
    """Borrowed iterator for a scope; closing is deferred to the owning scope"""

    __slots__ = ()

    def __repr__(self) -> str:
        # BUG FIX: this previously returned an empty f-string (`f""`);
        # provide a conventional repr naming the wrapped iterator.
        return f"<{type(self).__name__} of {self.__wrapped__!r} at 0x{id(self):x}>"

    async def aclose(self) -> None:
        # the scope that created this iterator is responsible for closing it
        pass


class _ScopedAsyncIteratorContext(AsyncContextManager[AsyncIterator[T]]):
    """
    Context restricting the lifetime of ``iterator`` to the context scope

    This is an internal helper that relies on ``iterator`` belonging to the scope
    and having an ``aclose`` method.
    """

    __slots__ = "_borrowed_iter", "_iterator"

    def __init__(self, iterator: AsyncIterator[T]):
        self._iterator: AsyncIterator[T] = iterator
        self._borrowed_iter: Optional[_ScopedAsyncIterator[T, Any]] = None

    async def __aenter__(self) -> AsyncIterator[T]:
        # a second __aenter__ would hand out a new borrowed view while the
        # first is still live; forbid it explicitly
        if self._borrowed_iter is not None:
            raise RuntimeError("scoped_iter is not re-entrant")
        self._borrowed_iter = _ScopedAsyncIterator(self._iterator)
        return self._borrowed_iter

    async def __aexit__(self, *args: Any) -> None:
        # first close the borrowed view, then the underlying iterator
        await self._borrowed_iter._aclose_wrapper()  # type: ignore
        await self._iterator.aclose()  # type: ignore

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__} of {self._iterator!r} at 0x{(id(self)):x}>"


@overload
def borrow(iterator: AsyncGenerator[T, S], /) -> AsyncGenerator[T, S]: ...


@overload
def borrow(iterator: AsyncIterator[T], /) -> AsyncIterator[T]: ...


def borrow(
    iterator: Union[AsyncIterator[T], AsyncGenerator[T, Any]], /
) -> Union[AsyncIterator[T], AsyncGenerator[T, Any]]:
    """
    Borrow an async iterator, preventing to ``aclose`` it

    When :term:`borrowing` an async iterator, the original owner assures
    to close the iterator as needed. In turn, the borrowed iterator does
    not allow closing the underlying iterator.

    The borrowed iterator supports :py:meth:`~agen.asend` and
    :py:meth:`~agen.athrow` if the underlying iterator supports them as well;
    this allows borrowing either an :py:class:`~collections.abc.AsyncIterator`
    or :py:class:`~collections.abc.AsyncGenerator`. Regardless of iterator type,
    :py:meth:`~agen.aclose` is always provided; it closes only the borrowed
    iterator, not the underlying iterator.

    .. seealso:: Use :py:func:`~.scoped_iter` to ensure an (async) iterable
        is eventually closed and only :term:`borrowed <borrowing>` until then.
    """
    if not hasattr(iterator, "__anext__") or not hasattr(iterator, "__aiter__"):
        raise TypeError(
            "borrowing requires an async iterator "
            + f"with __aiter__ and __anext__ method, got {type(iterator).__name__}"
        )
    return _BorrowedAsyncIterator[T, Any](iterator)


def scoped_iter(iterable: AnyIterable[T], /) -> AsyncContextManager[AsyncIterator[T]]:
    """
    Context manager that provides an async iterator for an (async) ``iterable``

    Roughly equivalent to combining :py:func:`~asyncstdlib.builtins.iter` with
    :py:class:`~asyncstdlib.contextlib.closing`. The resulting
    :term:`asynchronous iterator` is automatically :term:`borrowed <borrowing>`
    to prevent premature closing when passing the iterator around.

    .. code-block:: python3

        from collections import deque
        import asyncstdlib as a

        async def head_tail(iterable, leading=5, trailing=5):
            '''Provide the first ``leading`` and last ``trailing`` items'''
            # create async iterator valid for the entire block
            async with a.scoped_iter(iterable) as async_iter:
                # ... safely pass it on without it being closed ...
                async for item in a.islice(async_iter, leading):
                    yield item
                tail = deque(maxlen=trailing)
                # ... and use it again in the block
                async for item in async_iter:
                    tail.append(item)
            for item in tail:
                yield item

    Nested scoping of the same iterator is safe: inner scopes automatically forfeit
    closing the underlying iterator in favour of the outermost scope. This allows
    passing the scoped iterator to other functions that use :py:func:`scoped_iter`.
    """
    # The iterable cannot be closed.
    # We do not need to take care of it.
    if not hasattr(iterator := aiter(iterable), "aclose"):
        return nullcontext(iterator)
    # `iterator` might be already borrowed. We must not special-case this as:
    # - we cannot unwrap the underlying iterator, as this would give us longer access
    # - we still must create a separate scope with its own lifetime
    return _ScopedAsyncIteratorContext(iterator)


async def await_each(awaitables: Iterable[Awaitable[T]], /) -> AsyncIterable[T]:
    """
    Iterate through ``awaitables`` and await each item

    This converts an *iterable of async* into an *async iterator* of awaited values.
    Consequently, we can apply various functions made for ``AsyncIterable[T]`` to
    ``Iterable[Awaitable[T]]`` as well.

    Example:

    .. code-block:: python3

        import asyncstdlib as a

        async def check1() -> bool:
            ...

        async def check2() -> bool:
            ...

        async def check3() -> bool:
            ...

        okay = await a.all(
            a.await_each([check1(), check2(), check3()])
        )
    """
    for awaitable in awaitables:
        yield await awaitable


@overload
async def apply(
    __func: Callable[[T1], T],
    __arg1: Awaitable[T1],
    /,
) -> T: ...


@overload
async def apply(
    __func: Callable[[T1, T2], T],
    __arg1: Awaitable[T1],
    __arg2: Awaitable[T2],
    /,
) -> T: ...
@overload
async def apply(
    __func: Callable[[T1, T2, T3], T],
    __arg1: Awaitable[T1],
    __arg2: Awaitable[T2],
    __arg3: Awaitable[T3],
    /,
) -> T: ...


@overload
async def apply(
    __func: Callable[[T1, T2, T3, T4], T],
    __arg1: Awaitable[T1],
    __arg2: Awaitable[T2],
    __arg3: Awaitable[T3],
    __arg4: Awaitable[T4],
    /,
) -> T: ...


@overload
async def apply(
    __func: Callable[[T1, T2, T3, T4, T5], T],
    __arg1: Awaitable[T1],
    __arg2: Awaitable[T2],
    __arg3: Awaitable[T3],
    __arg4: Awaitable[T4],
    __arg5: Awaitable[T5],
    /,
) -> T: ...


@overload
async def apply(
    __func: Callable[..., T],
    /,
    *args: Awaitable[Any],
    **kwargs: Awaitable[Any],
) -> T: ...


async def apply(
    __func: Callable[..., T], /, *args: Awaitable[Any], **kwargs: Awaitable[Any]
) -> T:
    """
    Await the arguments and keyword arguments and then apply ``func`` on them

    Example:

    .. code-block:: python3

        async def compute_something() -> float:
            ...

        async def compute_something_else() -> float:
            ...

        result = await apply(
            lambda x, y: x ** y,
            compute_something(),
            compute_something_else())

    The function ``apply`` serves, for example, a practical use case when you want to
    chain operations on awaitables and need to pass around the final awaitable
    for further operations.
    """
    # await every positional argument in order, then every keyword argument
    # in mapping order, before invoking the callable once with plain values
    positional = []
    for awaitable in args:
        positional.append(await awaitable)
    keywords = {}
    for name, awaitable in kwargs.items():
        keywords[name] = await awaitable
    return __func(*positional, **keywords)


@overload
def sync(function: Callable[..., Awaitable[T]], /) -> Callable[..., Awaitable[T]]: ...


@overload
def sync(function: Callable[..., T], /) -> Callable[..., Awaitable[T]]: ...
326 | 327 | 328 | def sync(function: Callable[..., Any], /) -> Callable[..., Any]: 329 | r""" 330 | Wraps a callable to ensure its result can be ``await``\ ed 331 | 332 | Useful to write :term:`async neutral` functions by wrapping callable arguments, 333 | or to use synchronous functions where asynchronous ones are expected. 334 | Wrapping a regular function defined using ``def`` or ``lambda`` makes it 335 | behave roughly as if it were defined using ``async def`` instead. 336 | 337 | Example: 338 | 339 | .. code-block:: python3 340 | 341 | import asyncstdlib as a 342 | 343 | def test1_sync(x, y): 344 | ... 345 | 346 | async def test1_async(x): 347 | ... 348 | 349 | async def main(): 350 | await a.sync(test1_sync)(x=1, y=2) 351 | await a.sync(test1_async)(x=8) 352 | await a.sync(lambda x: x ** 3)(x=5) 353 | 354 | if __name__ == "__main__": 355 | asyncio.run(main()) 356 | 357 | .. note:: 358 | 359 | This should never be applied as the sole decorator on a function. 360 | Define the function as `async def` instead. 361 | """ 362 | if not callable(function): 363 | raise TypeError("function argument should be Callable") 364 | 365 | if iscoroutinefunction(function): 366 | return function 367 | 368 | @wraps(function) 369 | async def async_wrapped(*args: Any, **kwargs: Any) -> Any: 370 | result = function(*args, **kwargs) 371 | if isinstance(result, Awaitable): 372 | return await result # pyright: ignore[reportUnknownVariableType] 373 | return result 374 | 375 | return async_wrapped 376 | 377 | 378 | async def any_iter( 379 | __iter: Union[ 380 | Awaitable[AnyIterable[Awaitable[T]]], 381 | Awaitable[AnyIterable[T]], 382 | AnyIterable[Awaitable[T]], 383 | AnyIterable[T], 384 | ], 385 | /, 386 | ) -> AsyncIterator[T]: 387 | """ 388 | Provide an async iterator for various forms of "asynchronous iterable" 389 | 390 | Useful to uniformly handle async iterables, awaitable iterables, iterables of 391 | awaitables, and similar in an ``async for`` loop. 
Among other things, this 392 | matches all forms of ``async def`` functions providing iterables. 393 | 394 | .. code-block:: python3 395 | 396 | import random 397 | import asyncstdlib as a 398 | 399 | # AsyncIterator[T] 400 | async def async_iter(n): 401 | for i in range(n): 402 | yield i 403 | 404 | # Awaitable[Iterator[T]] 405 | async def await_iter(n): 406 | return [*range(n)] 407 | 408 | some_iter = random.choice([async_iter, await_iter, range]) 409 | async for item in a.any_iter(some_iter(4)): 410 | print(item) 411 | 412 | This function must eagerly resolve each "async layer" before checking if 413 | the next layer is as expected. This incurs a performance penalty and 414 | non-iterables may be left unusable by this. 415 | Prefer :py:func:`~.builtins.iter` to test for iterables with :term:`EAFP` 416 | and for performance when only simple iterables need handling. 417 | """ 418 | iterable = __iter if not isinstance(__iter, Awaitable) else await __iter 419 | if isinstance(iterable, AsyncIterable): 420 | async for item in iterable: 421 | yield ( 422 | item if not isinstance(item, Awaitable) else await item 423 | ) # pyright: ignore[reportReturnType] 424 | else: 425 | for item in iterable: 426 | yield ( 427 | item if not isinstance(item, Awaitable) else await item 428 | ) # pyright: ignore[reportReturnType] 429 | -------------------------------------------------------------------------------- /asyncstdlib/builtins.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any, AsyncIterator, Awaitable, Callable, overload 2 | from typing_extensions import TypeGuard 3 | import builtins 4 | 5 | from ._typing import ADD, AnyIterable, HK, LT, R, T, T1, T2, T3, T4, T5 6 | 7 | @overload 8 | async def anext(iterator: AsyncIterator[T]) -> T: ... 9 | @overload 10 | async def anext(iterator: AsyncIterator[T], default: T) -> T: ... 11 | @overload 12 | def iter(subject: AnyIterable[T]) -> AsyncIterator[T]: ... 
13 | @overload 14 | def iter( 15 | subject: Callable[[], Awaitable[T | None]], sentinel: None 16 | ) -> AsyncIterator[T]: ... 17 | @overload 18 | def iter(subject: Callable[[], Awaitable[T]], sentinel: T) -> AsyncIterator[T]: ... 19 | async def all(iterable: AnyIterable[Any]) -> bool: ... 20 | async def any(iterable: AnyIterable[Any]) -> bool: ... 21 | @overload 22 | def zip( 23 | *, 24 | strict: bool = ..., 25 | ) -> AsyncIterator[Any]: ... 26 | @overload 27 | def zip( 28 | __it1: AnyIterable[T1], 29 | *, 30 | strict: bool = ..., 31 | ) -> AsyncIterator[builtins.tuple[T1]]: ... 32 | @overload 33 | def zip( 34 | __it1: AnyIterable[T1], 35 | __it2: AnyIterable[T2], 36 | *, 37 | strict: bool = ..., 38 | ) -> AsyncIterator[builtins.tuple[T1, T2]]: ... 39 | @overload 40 | def zip( 41 | __it1: AnyIterable[T1], 42 | __it2: AnyIterable[T2], 43 | __it3: AnyIterable[T3], 44 | *, 45 | strict: bool = ..., 46 | ) -> AsyncIterator[builtins.tuple[T1, T2, T3]]: ... 47 | @overload 48 | def zip( 49 | __it1: AnyIterable[T1], 50 | __it2: AnyIterable[T2], 51 | __it3: AnyIterable[T3], 52 | __it4: AnyIterable[T4], 53 | *, 54 | strict: bool = ..., 55 | ) -> AsyncIterator[builtins.tuple[T1, T2, T3, T4]]: ... 56 | @overload 57 | def zip( 58 | __it1: AnyIterable[T1], 59 | __it2: AnyIterable[T2], 60 | __it3: AnyIterable[T3], 61 | __it4: AnyIterable[T4], 62 | __it5: AnyIterable[T5], 63 | *, 64 | strict: bool = ..., 65 | ) -> AsyncIterator[builtins.tuple[T1, T2, T3, T4, T5]]: ... 66 | @overload 67 | def zip( 68 | __it1: AnyIterable[Any], 69 | __it2: AnyIterable[Any], 70 | __it3: AnyIterable[Any], 71 | __it4: AnyIterable[Any], 72 | __it5: AnyIterable[Any], 73 | *iterables: AnyIterable[Any], 74 | strict: bool = ..., 75 | ) -> AsyncIterator[builtins.tuple[Any, ...]]: ... 76 | @overload 77 | def map( 78 | function: Callable[[T1], Awaitable[R]], 79 | __it1: AnyIterable[T1], 80 | /, 81 | ) -> AsyncIterator[R]: ... 
82 | @overload 83 | def map( 84 | function: Callable[[T1], R], 85 | __it1: AnyIterable[T1], 86 | /, 87 | ) -> AsyncIterator[R]: ... 88 | @overload 89 | def map( 90 | function: Callable[[T1, T2], Awaitable[R]], 91 | __it1: AnyIterable[T1], 92 | __it2: AnyIterable[T2], 93 | /, 94 | ) -> AsyncIterator[R]: ... 95 | @overload 96 | def map( 97 | function: Callable[[T1, T2], R], 98 | __it1: AnyIterable[T1], 99 | __it2: AnyIterable[T2], 100 | /, 101 | ) -> AsyncIterator[R]: ... 102 | @overload 103 | def map( 104 | function: Callable[[T1, T2, T3], Awaitable[R]], 105 | __it1: AnyIterable[T1], 106 | __it2: AnyIterable[T2], 107 | __it3: AnyIterable[T3], 108 | /, 109 | ) -> AsyncIterator[R]: ... 110 | @overload 111 | def map( 112 | function: Callable[[T1, T2, T3], R], 113 | __it1: AnyIterable[T1], 114 | __it2: AnyIterable[T2], 115 | __it3: AnyIterable[T3], 116 | /, 117 | ) -> AsyncIterator[R]: ... 118 | @overload 119 | def map( 120 | function: Callable[[T1, T2, T3, T4], Awaitable[R]], 121 | __it1: AnyIterable[T1], 122 | __it2: AnyIterable[T2], 123 | __it3: AnyIterable[T3], 124 | __it4: AnyIterable[T4], 125 | /, 126 | ) -> AsyncIterator[R]: ... 127 | @overload 128 | def map( 129 | function: Callable[[T1, T2, T3, T4], R], 130 | __it1: AnyIterable[T1], 131 | __it2: AnyIterable[T2], 132 | __it3: AnyIterable[T3], 133 | __it4: AnyIterable[T4], 134 | /, 135 | ) -> AsyncIterator[R]: ... 136 | @overload 137 | def map( 138 | function: Callable[[T1, T2, T3, T4, T5], Awaitable[R]], 139 | __it1: AnyIterable[T1], 140 | __it2: AnyIterable[T2], 141 | __it3: AnyIterable[T3], 142 | __it4: AnyIterable[T4], 143 | __it5: AnyIterable[T5], 144 | /, 145 | ) -> AsyncIterator[R]: ... 146 | @overload 147 | def map( 148 | function: Callable[[T1, T2, T3, T4, T5], R], 149 | __it1: AnyIterable[T1], 150 | __it2: AnyIterable[T2], 151 | __it3: AnyIterable[T3], 152 | __it4: AnyIterable[T4], 153 | __it5: AnyIterable[T5], 154 | /, 155 | ) -> AsyncIterator[R]: ... 
156 | @overload 157 | def map( 158 | function: Callable[..., Awaitable[R]], 159 | __it1: AnyIterable[Any], 160 | __it2: AnyIterable[Any], 161 | __it3: AnyIterable[Any], 162 | __it4: AnyIterable[Any], 163 | __it5: AnyIterable[Any], 164 | /, 165 | *iterable: AnyIterable[Any], 166 | ) -> AsyncIterator[R]: ... 167 | @overload 168 | def map( 169 | function: Callable[..., R], 170 | __it1: AnyIterable[Any], 171 | __it2: AnyIterable[Any], 172 | __it3: AnyIterable[Any], 173 | __it4: AnyIterable[Any], 174 | __it5: AnyIterable[Any], 175 | /, 176 | *iterable: AnyIterable[Any], 177 | ) -> AsyncIterator[R]: ... 178 | @overload 179 | async def max(iterable: AnyIterable[LT], *, key: None = ...) -> LT: ... 180 | @overload 181 | async def max(iterable: AnyIterable[LT], *, key: None = ..., default: T) -> LT | T: ... 182 | @overload 183 | async def max(iterable: AnyIterable[T1], *, key: Callable[[T1], LT] = ...) -> T1: ... 184 | @overload 185 | async def max( 186 | iterable: AnyIterable[T1], *, key: Callable[[T1], LT] = ..., default: T2 187 | ) -> T1 | T2: ... 188 | @overload 189 | async def min(iterable: AnyIterable[LT], *, key: None = ...) -> LT: ... 190 | @overload 191 | async def min(iterable: AnyIterable[LT], *, key: None = ..., default: T) -> LT | T: ... 192 | @overload 193 | async def min(iterable: AnyIterable[T1], *, key: Callable[[T1], LT] = ...) -> T1: ... 194 | @overload 195 | async def min( 196 | iterable: AnyIterable[T1], *, key: Callable[[T1], LT] = ..., default: T2 197 | ) -> T1 | T2: ... 198 | @overload 199 | def filter( 200 | function: None, 201 | iterable: AnyIterable[T | None], 202 | ) -> AsyncIterator[T]: ... 203 | @overload 204 | def filter( 205 | function: Callable[[T], TypeGuard[R]], 206 | iterable: AnyIterable[T], 207 | ) -> AsyncIterator[R]: ... 208 | @overload 209 | def filter( 210 | function: Callable[[T], Any], 211 | iterable: AnyIterable[T], 212 | ) -> AsyncIterator[T]: ... 
213 | def enumerate( 214 | iterable: AnyIterable[T], start: int = 0 215 | ) -> AsyncIterator[builtins.tuple[int, T]]: ... 216 | @overload 217 | async def sum(iterable: AnyIterable[int]) -> int: ... 218 | @overload 219 | async def sum(iterable: AnyIterable[float]) -> float: ... 220 | @overload 221 | async def sum(iterable: AnyIterable[ADD], start: ADD) -> ADD: ... 222 | @overload 223 | async def list() -> builtins.list[Any]: ... 224 | @overload 225 | async def list(iterable: AnyIterable[T]) -> builtins.list[T]: ... 226 | @overload 227 | async def tuple() -> builtins.tuple[()]: ... 228 | @overload 229 | async def tuple(iterable: AnyIterable[T]) -> builtins.tuple[T, ...]: ... 230 | @overload 231 | async def dict() -> builtins.dict[Any, Any]: ... 232 | @overload 233 | async def dict( 234 | iterable: AnyIterable[builtins.tuple[HK, T]], 235 | ) -> builtins.dict[HK, T]: ... 236 | @overload 237 | async def dict( 238 | iterable: AnyIterable[builtins.tuple[str, T]] = ..., **kwargs: T 239 | ) -> builtins.dict[str, T]: ... 240 | @overload 241 | async def set() -> builtins.set[Any]: ... 242 | @overload 243 | async def set(iterable: AnyIterable[T] = ()) -> builtins.set[T]: ... 244 | @overload 245 | async def sorted( 246 | iterable: AnyIterable[LT], *, key: None = ..., reverse: bool = ... 247 | ) -> builtins.list[LT]: ... 248 | @overload 249 | async def sorted( 250 | iterable: AnyIterable[T], *, key: Callable[[T], LT], reverse: bool = ... 251 | ) -> builtins.list[T]: ... 
252 | -------------------------------------------------------------------------------- /asyncstdlib/contextlib.pyi: -------------------------------------------------------------------------------- 1 | from typing import ( 2 | TypeVar, 3 | Generic, 4 | AsyncGenerator, 5 | Callable, 6 | Optional, 7 | Any, 8 | Awaitable, 9 | overload, 10 | AsyncContextManager, 11 | ) 12 | from typing_extensions import ParamSpec, Self 13 | from types import TracebackType 14 | from abc import ABCMeta 15 | 16 | from ._typing import AClose, ContextManager, AC, T, R 17 | 18 | AnyContextManager = AsyncContextManager[T] | ContextManager[T] 19 | 20 | class ContextDecorator(AsyncContextManager[T], metaclass=ABCMeta): 21 | """ 22 | Base class for an async context manager useable as a decorator as well 23 | 24 | Inheriting from this class adds the scaffolding to automatically enter 25 | an async context manager on awaiting any callable decorated with it: 26 | 27 | .. code:: python3 28 | 29 | class DecoratorAndContext(AsyncContextDecorator): 30 | async def __aenter__(self) -> Any: 31 | print("entering", self) 32 | 33 | async def __aexit__(self, *exc): 34 | print("exiting", self) 35 | 36 | @DecoratorAndContext() 37 | async def func(): 38 | # DecoratorAndContext has been entered already 39 | print("running some function...") 40 | # DecoratorAndContext will be exited immediately 41 | 42 | The context manager can still be used regularly in `async with` statements. 43 | 44 | Since functions are decorated with an existing context manager instance, 45 | the same instance is entered and exited on every call. If the context is 46 | not safe to be entered multiple times or even concurrently the subclass 47 | should implement the method `_recreate_cm(:Self) -> Self` to create a copy. 48 | """ 49 | 50 | __slots__ = () 51 | 52 | def _recreate_cm(self: Self) -> Self: ... 53 | def __call__(self, func: AC, /) -> AC: ... 
54 | 55 | P = ParamSpec("P") 56 | 57 | def contextmanager( 58 | func: Callable[P, AsyncGenerator[T, None]], 59 | ) -> Callable[P, ContextDecorator[T]]: ... 60 | 61 | class closing(Generic[AClose]): 62 | def __init__(self, thing: AClose) -> None: ... 63 | async def __aenter__(self: Self) -> AClose: ... 64 | async def __aexit__( 65 | self, 66 | exc_type: type[BaseException] | None, 67 | exc_val: BaseException | None, 68 | exc_tb: TracebackType | None, 69 | ) -> None: ... 70 | 71 | class nullcontext(AsyncContextManager[T]): 72 | enter_result: T 73 | 74 | @overload 75 | def __init__(self: nullcontext[None], enter_result: None = ...) -> None: ... 76 | @overload 77 | def __init__( 78 | self: nullcontext[T], # pyright: ignore[reportInvalidTypeVarUse] 79 | enter_result: T, 80 | ) -> None: ... 81 | async def __aenter__(self: nullcontext[T]) -> T: ... 82 | async def __aexit__( 83 | self, 84 | exc_type: type[BaseException] | None, 85 | exc_val: BaseException | None, 86 | exc_tb: TracebackType | None, 87 | ) -> None: ... 88 | 89 | SE = TypeVar( 90 | "SE", 91 | bound=AsyncContextManager[Any] 92 | | ContextManager[Any] 93 | | Callable[ 94 | [type[BaseException] | None, BaseException | None, TracebackType | None], 95 | Optional[bool], 96 | ] 97 | | Callable[ 98 | [type[BaseException] | None, BaseException | None, TracebackType | None], 99 | Awaitable[Optional[bool]], 100 | ], 101 | ) 102 | 103 | class ExitStack: 104 | def __init__(self) -> None: ... 105 | def pop_all(self: Self) -> Self: ... 106 | def push(self, exit: SE) -> SE: ... 107 | def callback( 108 | self, callback: Callable[P, R], *args: P.args, **kwargs: P.kwargs 109 | ) -> Callable[P, R]: ... 110 | async def enter_context(self, cm: AnyContextManager[T]) -> T: ... 111 | async def aclose(self) -> None: ... 112 | async def __aenter__(self: Self) -> Self: ... 
113 | async def __aexit__( 114 | self, 115 | exc_type: type[BaseException] | None, 116 | exc_val: BaseException | None, 117 | tb: TracebackType | None, 118 | ) -> bool: ... 119 | -------------------------------------------------------------------------------- /asyncstdlib/functools.py: -------------------------------------------------------------------------------- 1 | from asyncio import iscoroutinefunction 2 | from typing import ( 3 | Callable, 4 | Awaitable, 5 | Union, 6 | Any, 7 | Generic, 8 | Generator, 9 | Optional, 10 | AsyncContextManager, 11 | Type, 12 | cast, 13 | ) 14 | 15 | from ._typing import T, AC, AnyIterable, R 16 | from ._core import ScopedIter, awaitify as _awaitify, Sentinel 17 | from .builtins import anext 18 | from .contextlib import nullcontext 19 | 20 | from ._lrucache import ( 21 | lru_cache, 22 | CacheInfo, 23 | CacheParameters, 24 | LRUAsyncCallable, 25 | LRUAsyncBoundCallable, 26 | ) 27 | 28 | __all__ = [ 29 | "cache", 30 | "lru_cache", 31 | "CacheInfo", 32 | "CacheParameters", 33 | "LRUAsyncCallable", 34 | "LRUAsyncBoundCallable", 35 | "reduce", 36 | "cached_property", 37 | "CachedProperty", 38 | ] 39 | 40 | 41 | def cache(user_function: AC) -> LRUAsyncCallable[AC]: 42 | """ 43 | Simple unbounded cache, aka memoization, for async functions 44 | 45 | This is a convenience function, equivalent to :py:func:`~.lru_cache` 46 | with a ``maxsize`` of :py:data:`None`. 
47 | """ 48 | return lru_cache(maxsize=None)(user_function) 49 | 50 | 51 | class AwaitableValue(Generic[R]): 52 | """Helper to provide an arbitrary value in ``await``""" 53 | 54 | __slots__ = ("value",) 55 | 56 | def __init__(self, value: R): 57 | self.value = value 58 | 59 | # noinspection PyUnreachableCode 60 | def __await__(self) -> Generator[None, None, R]: 61 | return self.value 62 | yield # type: ignore # pragma: no cover 63 | 64 | def __repr__(self) -> str: 65 | return f"{self.__class__.__name__}({self.value!r})" 66 | 67 | 68 | class _FutureCachedPropertyValue(Generic[R, T]): 69 | """ 70 | A placeholder object to control concurrent access to a cached awaitable value 71 | 72 | When given a lock to coordinate access, only the first task to await on a 73 | cached property triggers the underlying coroutine. Once a value has been 74 | produced, all tasks are unblocked and given the same, single value. 75 | """ 76 | 77 | __slots__ = ("_func", "_instance", "_name", "_lock") 78 | 79 | def __init__( 80 | self, 81 | func: Callable[[T], Awaitable[R]], 82 | instance: T, 83 | name: str, 84 | lock: AsyncContextManager[Any], 85 | ): 86 | self._func = func 87 | self._instance = instance 88 | self._name = name 89 | self._lock = lock 90 | 91 | def __await__(self) -> Generator[None, None, R]: 92 | return self._await_impl().__await__() 93 | 94 | @property 95 | def _instance_value(self) -> Awaitable[R]: 96 | """Retrieve whatever is currently cached on the instance 97 | 98 | If the instance (no longer) has this attribute, it was deleted and the 99 | process is restarted by delegating to the descriptor. 100 | """ 101 | try: 102 | return self._instance.__dict__[self._name] 103 | except KeyError: 104 | # something deleted the cached value or future cached value placeholder. Restart 105 | # the fetch by delegating to the cached_property descriptor. 
106 | return getattr(self._instance, self._name) 107 | 108 | async def _await_impl(self) -> R: 109 | if (stored := self._instance_value) is self: 110 | # attempt to get the lock 111 | async with self._lock: 112 | # check again for a cached value 113 | if (stored := self._instance_value) is self: 114 | # the instance attribute is still this placeholder, and we 115 | # hold the lock. Start the getter to store the value on the 116 | # instance and return the value. 117 | return await self._get_attribute() 118 | 119 | # another task produced a value, or the instance.__dict__ object was 120 | # deleted in the interim. 121 | return await stored 122 | 123 | async def _get_attribute(self) -> R: 124 | value = await self._func(self._instance) 125 | self._instance.__dict__[self._name] = AwaitableValue(value) 126 | return value 127 | 128 | def __repr__(self) -> str: 129 | return ( 130 | f"<{type(self).__name__} for '{type(self._instance).__name__}." 131 | f"{self._name}' at {id(self):#x}>" 132 | ) 133 | 134 | 135 | class CachedProperty(Generic[T, R]): 136 | def __init__( 137 | self, 138 | getter: Callable[[T], Awaitable[R]], 139 | asynccontextmanager_type: Type[AsyncContextManager[Any]] = nullcontext, 140 | ): 141 | self.func = self.__wrapped__ = getter 142 | self.attrname = None 143 | self.__doc__ = getter.__doc__ 144 | self.__module__ = getter.__module__ 145 | self._asynccontextmanager_type = asynccontextmanager_type 146 | 147 | def __set_name__(self, owner: Any, name: str) -> None: 148 | if self.attrname is None: 149 | self.attrname = name 150 | elif name != self.attrname: 151 | raise TypeError( 152 | "Cannot assign the same cached_property to two different names " 153 | f"({self.attrname!r} and {name!r})." 
154 | ) 155 | 156 | def __get__( 157 | self, instance: Optional[T], owner: Optional[Type[Any]] 158 | ) -> Union["CachedProperty[T, R]", Awaitable[R]]: 159 | if instance is None: 160 | return self 161 | 162 | name = self.attrname 163 | if name is None: 164 | raise TypeError( 165 | "Cannot use cached_property instance without calling __set_name__ on it." 166 | ) 167 | 168 | # check for write access first; not all objects have __dict__ (e.g. class defines slots) 169 | try: 170 | cache = instance.__dict__ 171 | except AttributeError: 172 | msg = ( 173 | f"No '__dict__' attribute on {type(instance).__name__!r} " 174 | f"instance to cache {name!r} property." 175 | ) 176 | raise TypeError(msg) from None 177 | 178 | # store a placeholder for other tasks to access the future cached value 179 | # on this instance. It takes care of coordinating between different 180 | # tasks awaiting on the placeholder until the cached value has been 181 | # produced. 182 | wrapper = _FutureCachedPropertyValue( 183 | self.func, instance, name, self._asynccontextmanager_type() 184 | ) 185 | cache[name] = wrapper 186 | return wrapper 187 | 188 | 189 | def cached_property( 190 | type_or_getter: Union[Type[AsyncContextManager[Any]], Callable[[T], Awaitable[R]]], 191 | /, 192 | ) -> Union[ 193 | Callable[[Callable[[T], Awaitable[R]]], CachedProperty[T, R]], 194 | CachedProperty[T, R], 195 | ]: 196 | """ 197 | Transform a method into an attribute whose value is cached 198 | 199 | When applied to an asynchronous method of a class, instances have an attribute 200 | of the same name as the method (similar to :py:class:`property`). Using this 201 | attribute with ``await`` provides the value of using the method with ``await``. 202 | 203 | The attribute value is cached on the instance after being computed; 204 | subsequent uses of the attribute with ``await`` provide the cached value, 205 | without executing the method again. 
206 | The cached value can be cleared using ``del``, in which case the next 207 | access will recompute the value using the wrapped method. 208 | 209 | .. code-block:: python3 210 | 211 | import asyncstdlib as a 212 | 213 | class Resource: 214 | def __init__(self, url): 215 | self.url = url 216 | 217 | @a.cached_property 218 | async def data(self): 219 | return await asynclib.get(self.url) 220 | 221 | resource = Resource("http://example.com") 222 | print(await resource.data) # needs some time... 223 | print(await resource.data) # finishes instantly 224 | del resource.data 225 | print(await resource.data) # needs some time... 226 | 227 | Unlike a :py:class:`property`, this type does not support 228 | :py:meth:`~property.setter` or :py:meth:`~property.deleter`. 229 | 230 | If the attribute is accessed by multiple tasks before a cached value has 231 | been produced, the getter can be run more than once. The final cached value 232 | is determined by the last getter coroutine to return. To enforce that the 233 | getter is executed at most once, provide an appropriate lock type - e.g. the 234 | :py:class:`asyncio.Lock` class in an :py:mod:`asyncio` application - and 235 | access is automatically synchronised. 236 | 237 | .. code-block:: python3 238 | 239 | from asyncio import Lock, gather 240 | 241 | class Resource: 242 | def __init__(self, url): 243 | self.url = url 244 | 245 | @a.cached_property(Lock) 246 | async def data(self): 247 | return await asynclib.get(self.url) 248 | 249 | resource = Resource("http://example.com") 250 | print(*(await gather(resource.data, resource.data))) 251 | 252 | .. note:: 253 | 254 | Instances on which a value is to be cached must have a 255 | ``__dict__`` attribute that is a mutable mapping. 
256 | """ 257 | if iscoroutinefunction(type_or_getter): 258 | return CachedProperty(type_or_getter) 259 | elif isinstance(type_or_getter, type) and issubclass( 260 | type_or_getter, AsyncContextManager 261 | ): 262 | 263 | def decorator( 264 | coroutine: Callable[[T], Awaitable[R]], 265 | ) -> CachedProperty[T, R]: 266 | return CachedProperty( 267 | coroutine, 268 | asynccontextmanager_type=cast( 269 | Type[AsyncContextManager[Any]], type_or_getter 270 | ), 271 | ) 272 | 273 | return decorator 274 | else: 275 | raise ValueError("cached_property can only be used with a coroutine function") 276 | 277 | 278 | __REDUCE_SENTINEL = Sentinel("") 279 | 280 | 281 | async def reduce( 282 | function: Union[Callable[[T, T], T], Callable[[T, T], Awaitable[T]]], 283 | iterable: AnyIterable[T], 284 | initial: T = __REDUCE_SENTINEL, # type: ignore 285 | ) -> T: 286 | """ 287 | Reduce an (async) iterable by cumulative application of an (async) function 288 | 289 | :raises TypeError: if ``iterable`` is empty and ``initial`` is not given 290 | 291 | Applies the ``function`` from the beginning of ``iterable``, as if executing 292 | ``await function(current, anext(iterable))`` until ``iterable`` is exhausted. 293 | Note that the output of ``function`` should be valid as its first input. 294 | 295 | The optional ``initial`` is prepended to all items of ``iterable`` 296 | when applying ``function``. If the combination of ``initial`` 297 | and ``iterable`` contains exactly one item, it is returned without 298 | calling ``function``. 
299 | """ 300 | async with ScopedIter(iterable) as item_iter: 301 | try: 302 | value = ( 303 | initial if initial is not __REDUCE_SENTINEL else await anext(item_iter) 304 | ) 305 | except StopAsyncIteration: 306 | raise TypeError( 307 | "reduce() of empty sequence with no initial value" 308 | ) from None 309 | function = _awaitify(function) 310 | async for head in item_iter: 311 | value = await function(value, head) 312 | return value 313 | -------------------------------------------------------------------------------- /asyncstdlib/functools.pyi: -------------------------------------------------------------------------------- 1 | from typing import Any, AsyncContextManager, Awaitable, Callable, Generic, overload 2 | 3 | from ._typing import T, T1, T2, AC, AnyIterable, R 4 | 5 | from ._lrucache import ( 6 | LRUAsyncCallable as LRUAsyncCallable, 7 | LRUAsyncBoundCallable as LRUAsyncBoundCallable, 8 | lru_cache as lru_cache, 9 | ) 10 | 11 | def cache(user_function: AC) -> LRUAsyncCallable[AC]: ... 12 | 13 | class CachedProperty(Generic[T, R]): 14 | def __init__( 15 | self, 16 | getter: Callable[[T], Awaitable[R]], 17 | lock_type: type[AsyncContextManager[Any]] = ..., 18 | ) -> None: ... 19 | def __set_name__(self, owner: Any, name: str) -> None: ... 20 | @overload 21 | def __get__(self, instance: None, owner: type[Any]) -> "CachedProperty[T, R]": ... 22 | @overload 23 | def __get__(self, instance: T, owner: type | None) -> Awaitable[R]: ... 24 | # __set__ is not defined at runtime, but you are allowed to replace the cached value 25 | def __set__(self, instance: T, value: R) -> None: ... # type: ignore[misc] # pyright: ignore[reportGeneralTypeIssues] 26 | # __del__ is not defined at runtime, but you are allowed to delete the cached value 27 | def __del__(self, instance: T) -> None: ... 28 | 29 | @overload 30 | def cached_property(getter: Callable[[T], Awaitable[R]], /) -> CachedProperty[T, R]: ... 
@overload
def cached_property(
    asynccontextmanager_type: type[AsyncContextManager[Any]], /
) -> Callable[[Callable[[T], Awaitable[R]]], CachedProperty[T, R]]: ...
@overload
async def reduce(
    function: Callable[[T1, T2], T1], iterable: AnyIterable[T2], initial: T1
) -> T1: ...
@overload
async def reduce(function: Callable[[T, T], T], iterable: AnyIterable[T]) -> T: ...
--------------------------------------------------------------------------------
/asyncstdlib/heapq.py:
--------------------------------------------------------------------------------
from __future__ import annotations
from typing import (
    Generic,
    AsyncIterator,
    Optional,
    Callable,
    Any,
    overload,
    Awaitable,
)
import heapq as _heapq

from .builtins import enumerate as a_enumerate, zip as a_zip
from ._core import aiter, awaitify, ScopedIter, borrow
from ._typing import AnyIterable, ACloseable, LT, T


class _KeyIter(Generic[LT]):
    """
    An async iterator paired with its current ``head`` item and sort key

    Instances order themselves by ``head_key`` so they can be placed
    directly on a :py:mod:`heapq` heap during merging.
    """

    __slots__ = ("head", "tail", "reverse", "head_key", "key")

    @overload
    def __init__(
        self,
        head: T,
        tail: AsyncIterator[T],
        reverse: bool,
        head_key: LT,
        key: Callable[[T], Awaitable[LT]],
    ) -> None: ...

    @overload
    def __init__(
        self, head: LT, tail: AsyncIterator[LT], reverse: bool, head_key: LT, key: None
    ) -> None: ...

    def __init__(
        self,
        head: Any,
        tail: AsyncIterator[Any],
        reverse: bool,
        head_key: LT,
        key: Any,
    ) -> None:
        self.head = head
        self.head_key = head_key
        self.tail = tail
        self.key = key
        self.reverse = reverse

    @overload
    @classmethod
    def from_iters(
        cls,
        iterables: "tuple[AnyIterable[T], ...]",
        reverse: bool,
        key: Callable[[T], Awaitable[LT]],
    ) -> "AsyncIterator[_KeyIter[LT]]": ...

    @overload
    @classmethod
    def from_iters(
        cls, iterables: "tuple[AnyIterable[LT], ...]", reverse: bool, key: None
    ) -> "AsyncIterator[_KeyIter[LT]]": ...

    @classmethod
    async def from_iters(
        cls,
        iterables: "tuple[AnyIterable[Any], ...]",
        reverse: bool,
        key: Optional[Callable[[Any], Any]],
    ) -> "AsyncIterator[_KeyIter[Any]]":
        # yield one primed _KeyIter per iterable; empty iterables are skipped
        for iterable in iterables:
            iterator = aiter(iterable)
            try:
                head = await iterator.__anext__()
            except StopAsyncIteration:
                pass
            else:
                head_key = await key(head) if key is not None else head
                yield cls(head, iterator, reverse, head_key, key)

    async def pull_head(self) -> bool:
        """
        Pull the next ``head`` element from the iterator and signal success
        """
        try:
            self.head = head = await self.tail.__anext__()
        except StopAsyncIteration:
            return False
        else:
            self.head_key = await self.key(head) if self.key is not None else head
            return True

    def __lt__(self, other: _KeyIter[LT]) -> bool:
        # `reverse` flips the comparison; equal keys never reach this method in
        # heap tuples since tuple comparison consults __eq__ first and ties then
        # fall through to the secondary index element
        return self.reverse ^ (self.head_key < other.head_key)

    def __eq__(self, other: _KeyIter[LT]) -> bool:  # type: ignore[override]
        # key-equality derived purely from `<` so only LT support is required
        return not (self.head_key < other.head_key or other.head_key < self.head_key)


async def merge(
    *iterables: AnyIterable[Any],
    key: Optional[Callable[[Any], Any]] = None,
    reverse: bool = False,
) -> AsyncIterator[Any]:
    """
    Merge all pre-sorted (async) ``iterables`` into a single sorted iterator

    This works similar to ``sorted(chain(*iterables), key=key, reverse=reverse)`` but
    operates lazily: at any moment only one item of each iterable is stored for the
    comparison. This allows merging streams of pre-sorted items, such as timestamped
    records from multiple sources.

    The optional ``key`` argument specifies a one-argument (async) callable, which
    provides a substitute for determining the sort order of each item.
    The special value and default :py:data:`None` represents the identity function,
    comparing items directly.

    The default sort order is ascending, that is items with ``a < b`` imply ``a``
    is yielded before ``b``. Use ``reverse=True`` for descending sort order.
    The ``iterables`` must be pre-sorted in the same order.
    """
    a_key = awaitify(key) if key is not None else None
    # sortable iterators with (reverse) position to ensure stable sort for ties
    iter_heap: "list[tuple[_KeyIter[Any], int]]" = [
        (itr, idx if not reverse else -idx)
        async for idx, itr in a_enumerate(
            _KeyIter[Any].from_iters(iterables, reverse, a_key)
        )
    ]
    try:
        _heapq.heapify(iter_heap)
        # there are at least two iterators that need merging
        while len(iter_heap) > 1:
            while True:
                itr, idx = iter_heap[0]
                yield itr.head
                if await itr.pull_head():
                    _heapq.heapreplace(iter_heap, (itr, idx))
                else:
                    _heapq.heappop(iter_heap)
                    break
        # there is only one iterator left, no need for merging
        if iter_heap:
            itr, idx = iter_heap[0]
            yield itr.head
            async for item in itr.tail:
                yield item
    finally:
        # close every remaining iterator, including on early generator shutdown
        for itr, _ in iter_heap:
            if isinstance(itr.tail, ACloseable):
                await itr.tail.aclose()


class ReverseLT(Generic[LT]):
    """Helper to reverse ``a < b`` ordering"""

    __slots__ = ("key",)

    def __init__(self, key: LT):
        self.key = key

    def __lt__(self, other: ReverseLT[LT]) -> bool:
        return other.key < self.key


# Python's heapq provides a *min*-heap
# When finding the n largest items, heapq tracks the *minimum* item still large enough.
# In other words, during search we maintain opposite sort order than what is requested.
# We turn the min-heap into a max-sort in the end.
async def _largest(
    iterable: AnyIterable[T],
    n: int,
    key: Callable[[T], Awaitable[LT]],
    reverse: bool,
) -> "list[T]":
    ordered: Callable[[LT], LT] = ReverseLT if reverse else lambda x: x  # type: ignore
    async with ScopedIter(iterable) as iterator:
        # assign an ordering to items to solve ties
        order_sign = -1 if reverse else 1
        n_heap = [
            (ordered(await key(item)), index * order_sign, item)
            async for index, item in a_zip(range(n), borrow(iterator))
        ]
        if not n_heap:
            return []
        _heapq.heapify(n_heap)
        worst_key = n_heap[0][0]
        next_index = n * order_sign
        async for item in iterator:
            item_key = ordered(await key(item))
            if worst_key < item_key:
                _heapq.heapreplace(n_heap, (item_key, next_index, item))
                worst_key = n_heap[0][0]
                next_index += 1 * order_sign
        n_heap.sort(reverse=True)
        return [item for _, _, item in n_heap]


async def _identity(x: T) -> T:
    # async identity function; default `key` for nlargest/nsmallest
    return x


async def nlargest(
    iterable: AnyIterable[T],
    n: int,
    key: Optional[Callable[[Any], Awaitable[Any]]] = None,
) -> "list[T]":
    """
    Return a sorted list of the ``n`` largest elements from the (async) iterable

    The optional ``key`` argument specifies a one-argument (async) callable, which
    provides a substitute for determining the sort order of each item.
    The special value and default :py:data:`None` represents the identity function,
    comparing items directly.

    The result is equivalent to ``sorted(iterable, key=key, reverse=True)[:n]``,
    but ``iterable`` is consumed lazily and items are discarded eagerly.
219 | """ 220 | a_key: Callable[[Any], Awaitable[Any]] = ( 221 | awaitify(key) if key is not None else _identity # type: ignore 222 | ) 223 | return await _largest(iterable=iterable, n=n, key=a_key, reverse=False) 224 | 225 | 226 | async def nsmallest( 227 | iterable: AnyIterable[T], 228 | n: int, 229 | key: Optional[Callable[[Any], Awaitable[Any]]] = None, 230 | ) -> "list[T]": 231 | """ 232 | Return a sorted list of the ``n`` smallest elements from the (async) iterable 233 | 234 | Provides the reverse functionality to :py:func:`~.nlargest`. 235 | """ 236 | a_key: Callable[[Any], Awaitable[Any]] = ( 237 | awaitify(key) if key is not None else _identity # type: ignore 238 | ) 239 | return await _largest(iterable=iterable, n=n, key=a_key, reverse=True) 240 | -------------------------------------------------------------------------------- /asyncstdlib/heapq.pyi: -------------------------------------------------------------------------------- 1 | from typing import AsyncIterator, Awaitable, Callable, overload 2 | 3 | from ._typing import AnyIterable, T, LT 4 | 5 | @overload 6 | def merge( 7 | *iterables: AnyIterable[LT], key: None = ..., reverse: bool = ... 8 | ) -> AsyncIterator[LT]: ... 9 | @overload 10 | def merge( 11 | *iterables: AnyIterable[T], key: Callable[[T], Awaitable[LT]], reverse: bool = ... 12 | ) -> AsyncIterator[T]: ... 13 | @overload 14 | def merge( 15 | *iterables: AnyIterable[T], key: Callable[[T], LT], reverse: bool = ... 16 | ) -> AsyncIterator[T]: ... 17 | @overload 18 | async def nlargest( 19 | iterable: AsyncIterator[LT], n: int, key: None = ... 20 | ) -> list[LT]: ... 21 | @overload 22 | async def nlargest( 23 | iterable: AsyncIterator[T], n: int, key: Callable[[T], Awaitable[LT]] 24 | ) -> list[T]: ... 25 | @overload 26 | async def nlargest( 27 | iterable: AsyncIterator[T], n: int, key: Callable[[T], LT] 28 | ) -> list[T]: ... 29 | @overload 30 | async def nsmallest( 31 | iterable: AsyncIterator[LT], n: int, key: None = ... 
32 | ) -> list[LT]: ... 33 | @overload 34 | async def nsmallest( 35 | iterable: AsyncIterator[T], n: int, key: Callable[[T], Awaitable[LT]] 36 | ) -> list[T]: ... 37 | @overload 38 | async def nsmallest( 39 | iterable: AsyncIterator[T], n: int, key: Callable[[T], LT] 40 | ) -> list[T]: ... 41 | -------------------------------------------------------------------------------- /asyncstdlib/itertools.pyi: -------------------------------------------------------------------------------- 1 | from typing import ( 2 | Any, 3 | AsyncIterator, 4 | AsyncContextManager, 5 | Awaitable, 6 | Generic, 7 | Iterator, 8 | Iterable, 9 | Callable, 10 | TypeVar, 11 | overload, 12 | ) 13 | from typing_extensions import Literal, Self 14 | 15 | from ._typing import AnyIterable, ADD, T, T1, T2, T3, T4, T5 16 | 17 | def cycle(iterable: AnyIterable[T]) -> AsyncIterator[T]: ... 18 | @overload 19 | def accumulate(iterable: AnyIterable[ADD]) -> AsyncIterator[ADD]: ... 20 | @overload 21 | def accumulate(iterable: AnyIterable[ADD], *, initial: ADD) -> AsyncIterator[ADD]: ... 22 | @overload 23 | def accumulate( 24 | iterable: AnyIterable[T], 25 | function: Callable[[T, T], T] | Callable[[T, T], Awaitable[T]], 26 | ) -> AsyncIterator[T]: ... 27 | @overload 28 | def accumulate( 29 | iterable: AnyIterable[T2], 30 | function: Callable[[T1, T2], T1] | Callable[[T1, T2], Awaitable[T1]], 31 | *, 32 | initial: T1, 33 | ) -> AsyncIterator[T1]: ... 34 | @overload 35 | def batched( 36 | iterable: AnyIterable[T], n: Literal[1], strict: bool = ... 37 | ) -> AsyncIterator[tuple[T]]: ... 38 | @overload 39 | def batched( 40 | iterable: AnyIterable[T], n: Literal[2], strict: bool = ... 41 | ) -> AsyncIterator[tuple[T, T]]: ... 42 | @overload 43 | def batched( 44 | iterable: AnyIterable[T], n: Literal[3], strict: bool = ... 45 | ) -> AsyncIterator[tuple[T, T, T]]: ... 46 | @overload 47 | def batched( 48 | iterable: AnyIterable[T], n: Literal[4], strict: bool = ... 49 | ) -> AsyncIterator[tuple[T, T, T, T]]: ... 
50 | @overload 51 | def batched( 52 | iterable: AnyIterable[T], n: Literal[5], strict: bool = ... 53 | ) -> AsyncIterator[tuple[T, T, T, T, T]]: ... 54 | @overload 55 | def batched( 56 | iterable: AnyIterable[T], n: Literal[6], strict: bool = ... 57 | ) -> AsyncIterator[tuple[T, T, T, T, T, T]]: ... 58 | @overload 59 | def batched( 60 | iterable: AnyIterable[T], n: int, strict: bool = ... 61 | ) -> AsyncIterator[tuple[T, ...]]: ... 62 | 63 | class chain(AsyncIterator[T]): 64 | __slots__: tuple[str, ...] 65 | def __init__(self, *iterables: AnyIterable[T]) -> None: ... 66 | @classmethod 67 | def from_iterable(cls, iterable: AnyIterable[AnyIterable[T]]) -> chain[T]: ... 68 | async def __anext__(self) -> T: ... 69 | async def aclose(self) -> None: ... 70 | 71 | def compress(data: AnyIterable[T], selectors: AnyIterable[Any]) -> AsyncIterator[T]: ... 72 | def dropwhile( 73 | predicate: Callable[[T], Any], iterable: AnyIterable[T] 74 | ) -> AsyncIterator[T]: ... 75 | def filterfalse( 76 | predicate: Callable[[T], Any] | None, iterable: AnyIterable[T] 77 | ) -> AsyncIterator[T]: ... 78 | @overload 79 | def islice(iterable: AnyIterable[T], start: int | None, /) -> AsyncIterator[T]: ... 80 | @overload 81 | def islice( 82 | iterable: AnyIterable[T], 83 | start: int | None, 84 | stop: int | None, 85 | step: int | None = None, 86 | /, 87 | ) -> AsyncIterator[T]: ... 88 | @overload 89 | def starmap( 90 | function: Callable[[T1], T] | Callable[[T1], Awaitable[T]], 91 | iterable: AnyIterable[tuple[T1]], 92 | ) -> AsyncIterator[T]: ... 93 | @overload 94 | def starmap( 95 | function: Callable[[T1, T2], T] | Callable[[T1, T2], Awaitable[T]], 96 | iterable: AnyIterable[tuple[T1, T2]], 97 | ) -> AsyncIterator[T]: ... 98 | @overload 99 | def starmap( 100 | function: Callable[[T1, T2, T3], T] | Callable[[T1, T2, T3], Awaitable[T]], 101 | iterable: AnyIterable[tuple[T1, T2, T3]], 102 | ) -> AsyncIterator[T]: ... 
103 | @overload 104 | def starmap( 105 | function: Callable[[T1, T2, T3, T4], T] | Callable[[T1, T2, T3, T4], Awaitable[T]], 106 | iterable: AnyIterable[tuple[T1, T2, T3, T4]], 107 | ) -> AsyncIterator[T]: ... 108 | @overload 109 | def starmap( 110 | function: ( 111 | Callable[[T1, T2, T3, T4, T5], T] | Callable[[T1, T2, T3, T4, T5], Awaitable[T]] 112 | ), 113 | iterable: AnyIterable[tuple[T1, T2, T3, T4, T5]], 114 | ) -> AsyncIterator[T]: ... 115 | @overload 116 | def starmap( 117 | function: Callable[..., T] | Callable[..., Awaitable[T]], 118 | iterable: AnyIterable[Iterable[Any]], 119 | ) -> AsyncIterator[T]: ... 120 | def takewhile( 121 | predicate: Callable[[T], Any], iterable: AnyIterable[T] 122 | ) -> AsyncIterator[T]: ... 123 | 124 | class tee(Generic[T]): 125 | __slots__: tuple[str, ...] 126 | 127 | def __init__( 128 | self, 129 | iterable: AnyIterable[T], 130 | n: int = ..., 131 | *, 132 | lock: AsyncContextManager[Any] | None = ..., 133 | ) -> None: ... 134 | def __len__(self) -> int: ... 135 | @overload 136 | def __getitem__(self, item: int) -> AsyncIterator[T]: ... 137 | @overload 138 | def __getitem__(self, item: slice) -> tuple[AsyncIterator[T], ...]: ... 139 | def __iter__(self) -> Iterator[AsyncIterator[T]]: ... 140 | async def __aenter__(self: Self) -> Self: ... 141 | async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: ... 142 | async def aclose(self) -> None: ... 143 | 144 | def pairwise(iterable: AnyIterable[T]) -> AsyncIterator[tuple[T, T]]: ... 145 | 146 | F = TypeVar("F") 147 | 148 | @overload 149 | def zip_longest( 150 | __it1: AnyIterable[T1], *, fillvalue: Any = ... 151 | ) -> AsyncIterator[tuple[T1]]: ... 152 | @overload 153 | def zip_longest( 154 | __it1: AnyIterable[T1], 155 | __it2: AnyIterable[T2], 156 | ) -> AsyncIterator[tuple[T1 | None, T2 | None]]: ... 
157 | @overload 158 | def zip_longest( 159 | __it1: AnyIterable[T1], 160 | __it2: AnyIterable[T2], 161 | *, 162 | fillvalue: F, 163 | ) -> AsyncIterator[tuple[T1 | F, T2 | F]]: ... 164 | @overload 165 | def zip_longest( 166 | __it1: AnyIterable[T1], 167 | __it2: AnyIterable[T2], 168 | __it3: AnyIterable[T3], 169 | ) -> AsyncIterator[tuple[T1 | None, T2 | None, T3 | None]]: ... 170 | @overload 171 | def zip_longest( 172 | __it1: AnyIterable[T1], 173 | __it2: AnyIterable[T2], 174 | __it3: AnyIterable[T3], 175 | *, 176 | fillvalue: F, 177 | ) -> AsyncIterator[tuple[T1 | F, T2 | F, T3 | F]]: ... 178 | @overload 179 | def zip_longest( 180 | __it1: AnyIterable[T1], 181 | __it2: AnyIterable[T2], 182 | __it3: AnyIterable[T3], 183 | __it4: AnyIterable[T4], 184 | ) -> AsyncIterator[tuple[T1 | None, T2 | None, T3 | None, T4 | None]]: ... 185 | @overload 186 | def zip_longest( 187 | __it1: AnyIterable[T1], 188 | __it2: AnyIterable[T2], 189 | __it3: AnyIterable[T3], 190 | __it4: AnyIterable[T4], 191 | *, 192 | fillvalue: F, 193 | ) -> AsyncIterator[tuple[T1 | F, T2 | F, T3 | F, T4 | F]]: ... 194 | @overload 195 | def zip_longest( 196 | __it1: AnyIterable[T1], 197 | __it2: AnyIterable[T2], 198 | __it3: AnyIterable[T3], 199 | __it4: AnyIterable[T4], 200 | __it5: AnyIterable[T5], 201 | ) -> AsyncIterator[tuple[T1 | None, T2 | None, T3 | None, T4 | None, T5 | None]]: ... 202 | @overload 203 | def zip_longest( 204 | __it1: AnyIterable[T1], 205 | __it2: AnyIterable[T2], 206 | __it3: AnyIterable[T3], 207 | __it4: AnyIterable[T4], 208 | __it5: AnyIterable[T5], 209 | *, 210 | fillvalue: F, 211 | ) -> AsyncIterator[tuple[T1 | F, T2 | F, T3 | F, T4 | F, T5 | F]]: ... 212 | @overload 213 | def zip_longest( 214 | __it1: AnyIterable[T], 215 | __it2: AnyIterable[T], 216 | __it3: AnyIterable[T], 217 | __it4: AnyIterable[T], 218 | __it5: AnyIterable[T], 219 | *iterables: AnyIterable[T], 220 | ) -> AsyncIterator[tuple[T | None, ...]]: ... 
221 | @overload 222 | def zip_longest( 223 | __it1: AnyIterable[T], 224 | __it2: AnyIterable[T], 225 | __it3: AnyIterable[T], 226 | __it4: AnyIterable[T], 227 | __it5: AnyIterable[T], 228 | *iterables: AnyIterable[T], 229 | fillvalue: F, 230 | ) -> AsyncIterator[tuple[T | F, ...]]: ... 231 | 232 | K_co = TypeVar("K_co", covariant=True) 233 | T_co = TypeVar("T_co", covariant=True) 234 | 235 | @overload 236 | def groupby( 237 | iterable: AnyIterable[T_co], key: None = ... 238 | ) -> AsyncIterator[tuple[T_co, AsyncIterator[T_co]]]: ... 239 | @overload 240 | def groupby( 241 | iterable: AnyIterable[T_co], 242 | key: Callable[[T_co], Awaitable[K_co]] | Callable[[T], K_co], 243 | ) -> AsyncIterator[tuple[K_co, AsyncIterator[T_co]]]: ... 244 | -------------------------------------------------------------------------------- /asyncstdlib/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/maxfischer2781/asyncstdlib/aa76c1d28196df695d69a31d3078b6e48e9e0fbe/asyncstdlib/py.typed -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = . 8 | BUILDDIR = _build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 13 | 14 | .PHONY: help Makefile 15 | 16 | # Catch-all target: route all unknown targets to Sphinx using the new 17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
18 | %: Makefile 19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | dl { 2 | margin-bottom: 15px; 3 | } 4 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # Configuration file for the Sphinx documentation builder. 4 | # 5 | # This file does only contain a selection of the most common options. For a 6 | # full list see the documentation: 7 | # http://www.sphinx-doc.org/en/master/config 8 | 9 | # -- Path setup -------------------------------------------------------------- 10 | 11 | # If extensions (or modules to document with autodoc) are in another directory, 12 | # add these directories to sys.path here. If the directory is relative to the 13 | # documentation root, use os.path.abspath to make it absolute, like shown here. 14 | # 15 | # import os 16 | # import sys 17 | # sys.path.insert(0, os.path.abspath('.')) 18 | import os 19 | import sys 20 | 21 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 22 | from asyncstdlib import __version__ 23 | 24 | 25 | # -- Project information ----------------------------------------------------- 26 | 27 | project = "asyncstdlib" 28 | author = "Max Kühn" 29 | copyright = f"2019-2024 {author}" 30 | 31 | # The short X.Y version 32 | version = __version__ 33 | # The full version, including alpha/beta/rc tags 34 | release = version 35 | 36 | 37 | # -- General configuration --------------------------------------------------- 38 | 39 | # If your documentation needs a minimal Sphinx version, state it here. 40 | # 41 | # needs_sphinx = '1.0' 42 | 43 | # Add any Sphinx extension module names here, as strings. 
They can be 44 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 45 | # ones. 46 | extensions = [ 47 | "sphinx.ext.autodoc", 48 | "sphinx.ext.intersphinx", 49 | "sphinx.ext.todo", 50 | "sphinx.ext.imgmath", 51 | "sphinx.ext.viewcode", 52 | "sphinxcontrib_trio", 53 | ] 54 | 55 | # Add any paths that contain templates here, relative to this directory. 56 | templates_path = ["_templates"] 57 | 58 | # The suffix(es) of source filenames. 59 | # You can specify multiple suffix as a list of string: 60 | # 61 | # source_suffix = ['.rst', '.md'] 62 | source_suffix = ".rst" 63 | 64 | # The master toctree document. 65 | master_doc = "index" 66 | 67 | # The language for content autogenerated by Sphinx. Refer to documentation 68 | # for a list of supported languages. 69 | # 70 | # This is also used if you do content translation via gettext catalogs. 71 | # Usually you set "language" from the command line for these cases. 72 | language = "en" 73 | 74 | # List of patterns, relative to source directory, that match files and 75 | # directories to ignore when looking for source files. 76 | # This pattern also affects html_static_path and html_extra_path. 77 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] 78 | 79 | # The name of the Pygments (syntax highlighting) style to use. 80 | pygments_style = None 81 | 82 | # whether to include module paths in names 83 | add_module_names = False 84 | 85 | 86 | # -- Options for HTML output ------------------------------------------------- 87 | 88 | # The theme to use for HTML and HTML Help pages. See the documentation for 89 | # a list of builtin themes. 90 | # 91 | html_theme = "alabaster" 92 | 93 | # Theme options are theme-specific and customize the look and feel of a theme 94 | # further. For a list of options available for each theme, see the 95 | # documentation. 
96 | # 97 | html_theme_options = { 98 | "description": "The missing async toolbox", 99 | "github_user": "maxfischer2781", 100 | "github_repo": "asyncstdlib", 101 | "fixed_sidebar": True, 102 | } 103 | 104 | # Add any paths that contain custom static files (such as style sheets) here, 105 | # relative to this directory. They are copied after the builtin static files, 106 | # so a file named "default.css" will overwrite the builtin "default.css". 107 | html_static_path = ["_static"] 108 | 109 | # Custom sidebar templates, must be a dictionary that maps document names 110 | # to template names. 111 | # 112 | # The default sidebars (for documents that don't match any pattern) are 113 | # defined by theme itself. Builtin themes are using these templates by 114 | # default: ``['localtoc.html', 'relations.html', 'sourcelink.html', 115 | # 'searchbox.html']``. 116 | # 117 | # html_sidebars = {} 118 | 119 | 120 | # -- Options for HTMLHelp output --------------------------------------------- 121 | 122 | # Output file base name for HTML help builder. 123 | htmlhelp_basename = "asyncstdlibdoc" 124 | 125 | # -- Extension configuration ------------------------------------------------- 126 | 127 | # -- Options for intersphinx extension --------------------------------------- 128 | 129 | # Example configuration for intersphinx: refer to the Python standard library. 130 | intersphinx_mapping = { 131 | "python": ("https://docs.python.org/3", None), 132 | "trio": ("https://trio.readthedocs.io/en/stable", None), 133 | "simpy": ("https://simpy.readthedocs.io/en/stable", None), 134 | } 135 | 136 | # -- Options for todo extension ---------------------------------------------- 137 | 138 | # If true, `todo` and `todoList` produce output, else they produce nothing. 
139 | todo_include_todos = True 140 | 141 | # -- Sphinx Patches/Fixes ---------------------------------------------------- 142 | 143 | # disable overload detection – conflicts with manual/steno signatures 144 | from sphinx.pycode.parser import VariableCommentPicker 145 | 146 | VariableCommentPicker.add_overload_entry = lambda self, *args, **kwargs: () 147 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. asyncstdlib documentation master file, created by 2 | sphinx-quickstart on Tue Mar 26 15:06:09 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | ============================= 7 | The missing ``async`` toolbox 8 | ============================= 9 | 10 | .. image:: https://readthedocs.org/projects/asyncstdlib/badge/?version=latest 11 | :target: http://asyncstdlib.readthedocs.io/en/latest/?badge=latest 12 | :alt: Documentation Status 13 | 14 | .. image:: https://img.shields.io/pypi/v/asyncstdlib.svg 15 | :alt: Available on PyPI 16 | :target: https://pypi.python.org/pypi/asyncstdlib/ 17 | 18 | .. image:: https://anaconda.org/conda-forge/asyncstdlib/badges/version.svg 19 | :alt: Available on Conda-Forge 20 | :target: https://anaconda.org/conda-forge/asyncstdlib 21 | 22 | .. image:: https://img.shields.io/github/license/maxfischer2781/asyncstdlib.svg 23 | :alt: License 24 | :target: https://github.com/maxfischer2781/asyncstdlib/blob/master/LICENSE 25 | 26 | .. image:: https://badges.gitter.im/maxfischer2781/asyncstdlib.svg 27 | :target: https://gitter.im/maxfischer2781/asyncstdlib?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge 28 | :alt: Development Chat 29 | 30 | .. py:module:: asyncstdlib 31 | :synopsis: The async standard library 32 | 33 | .. 
toctree:: 34 | :maxdepth: 2 35 | :caption: The Async Toolbox 36 | :hidden: 37 | 38 | source/api/builtins 39 | source/api/functools 40 | source/api/contextlib 41 | source/api/itertools 42 | source/api/heapq 43 | source/api/asynctools 44 | 45 | .. toctree:: 46 | :maxdepth: 2 47 | :caption: Guides and Notes 48 | :hidden: 49 | 50 | source/notes/iter_scope 51 | source/notes/compatible 52 | source/glossary 53 | 54 | .. toctree:: 55 | :maxdepth: 2 56 | :caption: Development & Maintenance 57 | :hidden: 58 | 59 | source/devel/contributing 60 | source/devel/testloop 61 | source/devel/publishing 62 | 63 | The ``asyncstdlib`` library re-implements functions and classes of the Python 64 | standard library to make them compatible with ``async`` callables, iterables 65 | and context managers. 66 | It is fully agnostic to ``async`` event loops and seamlessly works with 67 | :py:mod:`asyncio`, third-party libraries such as :py:mod:`trio`, as well as 68 | any custom ``async`` event loop. 69 | 70 | Standard Library Modules 71 | ======================== 72 | 73 | All re-implementations are located in submodules of :py:mod:`asyncstdlib` 74 | with the same name as those of the Python standard library. 75 | 76 | :py:mod:`asyncstdlib.builtins` 77 | Replicates any :ref:`built-in-funcs` that benefit from being asynchronous, 78 | such as :py:func:`~asyncstdlib.builtins.zip`, 79 | :py:func:`~asyncstdlib.builtins.sum`, 80 | or :py:func:`~asyncstdlib.builtins.list`. 81 | 82 | :py:mod:`asyncstdlib.functools` 83 | Replicates any :py:mod:`functools` that benefit from being asynchronous, 84 | which is just :py:func:`~asyncstdlib.functools.reduce`, 85 | :py:func:`~asyncstdlib.functools.cached_property`, and 86 | :py:func:`~asyncstdlib.functools.lru_cache`. 87 | 88 | :py:mod:`asyncstdlib.contextlib` 89 | Replicates any :py:mod:`contextlib` tools that benefit from being asynchronous, 90 | such as :py:func:`~asyncstdlib.contextlib.contextmanager`, or 91 | :py:func:`~asyncstdlib.contextlib.closing`. 
92 | 93 | :py:mod:`asyncstdlib.itertools` 94 | Replicates any :py:mod:`itertools` that benefit from being asynchronous, 95 | such as :py:func:`~asyncstdlib.itertools.cycle`, 96 | :py:func:`~asyncstdlib.itertools.chain`, 97 | or :py:func:`~asyncstdlib.itertools.accumulate`. 98 | 99 | :py:mod:`asyncstdlib.heapq` 100 | Replicates any :py:mod:`heapq` tools that benefit from being asynchronous, 101 | which is just :py:func:`~asyncstdlib.heapq.merge`, 102 | :py:func:`~asyncstdlib.heapq.nlargest`, and 103 | :py:func:`~asyncstdlib.heapq.nsmallest`. 104 | 105 | For simplicity, the :py:mod:`asyncstdlib` namespace also exposes all individual 106 | functions and classes directly. 107 | For example, :py:func:`asyncstdlib.builtins.enumerate` is also available 108 | as ``asyncstdlib.enumerate``. 109 | 110 | The Async Library Module 111 | ======================== 112 | 113 | The core toolset used by :py:mod:`asyncstdlib` itself is available 114 | as a separate submodule. 115 | 116 | :py:mod:`asyncstdlib.asynctools` 117 | Collects any :py:mod:`asyncstdlib` tools useful for building 118 | well-behaved ``async`` helpers and programs. 119 | 120 | Async Neutral Arguments 121 | ======================= 122 | 123 | Many objects of :py:mod:`asyncstdlib` are :term:`async neutral` -- they accept 124 | *both* regular and async arguments. 125 | Type annotations use parentheses to denote this; 126 | for example, "*(async) iter T*" in the signature **zip(**\ *\*iterables: (async) iter T*\ **)** 127 | means that :py:mod:`asyncstdlib`'s :py:func:`~builtins.zip` 128 | can handle both synchronous and asynchronous iterables. 129 | 130 | Whether a callable is regular or async is determined by inspecting its 131 | return type at runtime. 132 | This supports async-producing factories, such as an ``async def`` 133 | function wrapped in :py:class:`functools.partial`. 134 | However, this also means that the result must consistently be *either* 135 | regular or async.
136 | 137 | Note that only *arguments* to :py:mod:`asyncstdlib` may be async neutral. 138 | All callables of :py:mod:`asyncstdlib` consistently provide 139 | :term:`awaitables `, 140 | :term:`asynchronous iterators `, and 141 | :term:`asynchronous context managers `. 142 | 143 | Async Iterator Cleanup 144 | ====================== 145 | 146 | Cleanup of async iterables is special in that :py:meth:`~agen.aclose` may require 147 | an active event loop. Thus, all utilities of :py:mod:`asyncstdlib` that work on async 148 | iterators will eagerly :py:meth:`~agen.aclose` them. 149 | Use :py:func:`~asyncstdlib.asynctools.borrow` to prevent automatic cleanup, 150 | and :py:func:`~asyncstdlib.asynctools.scoped_iter` to guarantee cleanup in custom code. 151 | 152 | See the guide on :ref:`guide_iteration` for details and usage examples. 153 | 154 | Indices and tables 155 | ================== 156 | 157 | * :ref:`genindex` 158 | * :ref:`modindex` 159 | * :ref:`search` 160 | 161 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=. 11 | set BUILDDIR=_build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 
23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/api/asynctools.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | The asynctools library 3 | ====================== 4 | 5 | .. py:module:: asyncstdlib.asynctools 6 | :synopsis: core asynctools variants 7 | 8 | The :py:mod:`asyncstdlib.asynctools` library implements the core toolset used by 9 | :py:mod:`asyncstdlib` itself and similar utilities. 10 | All documented members of this module are separate from internal implementation 11 | and stable regardless of :py:mod:`asyncstdlib` internals. 12 | 13 | .. versionadded:: 1.1.0 14 | 15 | Iterator lifetime 16 | ================= 17 | 18 | .. autofunction:: borrow(iterator: async iter T) -> async iter T 19 | 20 | .. autofunction:: scoped_iter(iterable: (async) iter T) 21 | :async-with: :async iter T 22 | 23 | Async transforming 24 | ================== 25 | 26 | .. autofunction:: sync(function: (...) -> (await) T) -> (...) -> await T 27 | 28 | .. versionadded:: 3.9.3 29 | 30 | .. autofunction:: any_iter(iter: (await) (async) iter (await) T) 31 | :async-for: :T 32 | 33 | .. versionadded:: 3.10.3 34 | 35 | .. autofunction:: await_each(awaitables: iter await T) 36 | :async-for: :T 37 | 38 | .. versionadded:: 3.9.1 39 | 40 | .. autofunction:: apply(func: (*T, **T) -> R, *args: await T, **kwargs: await T) -> R 41 | :async: 42 | 43 | .. 
versionadded:: 3.9.1 44 | -------------------------------------------------------------------------------- /docs/source/api/builtins.rst: -------------------------------------------------------------------------------- 1 | ==================== 2 | The builtins library 3 | ==================== 4 | 5 | .. py:module:: asyncstdlib.builtins 6 | :synopsis: async builtins variants 7 | 8 | The :py:mod:`asyncstdlib.builtins` library implements 9 | Python's :ref:`built-in-funcs` for (async) functions and (async) iterables. 10 | 11 | Iterator reducing 12 | ================= 13 | 14 | .. autofunction:: anext(iterable: async iter T [, default: T]) -> T 15 | :async: 16 | 17 | .. autofunction:: all(iterable: (async) iter T) -> bool 18 | :async: 19 | 20 | .. autofunction:: any(iterable: (async) iter T) -> bool 21 | :async: 22 | 23 | .. autofunction:: max(iterable: (async) iter T, *, key: (T) → Any, default: T) -> T 24 | :async: 25 | 26 | .. autofunction:: min(iterable: (async) iter T, *, key: (T) → Any, default: T) -> T 27 | :async: 28 | 29 | .. autofunction:: sum(iterable: (async) iter T, start: T = 0) -> T 30 | :async: 31 | 32 | Iterator transforming 33 | ===================== 34 | 35 | .. autofunction:: iter(iterable: (async) iter T) 36 | :async-for: :T 37 | 38 | .. autofunction:: filter(function: (T) → (await) bool, iterable: (async) iter T) 39 | :async-for: :T 40 | 41 | .. autofunction:: zip(*iterables: (async) iter T, strict: bool = False) 42 | :async-for: :(T, ...) 43 | 44 | .. versionadded:: 3.10.0 45 | 46 | The ``strict`` parameter. 47 | 48 | .. autofunction:: map(function: (T, ...) → (await) R, iterable: (async) iter T, ...) 49 | :async-for: :R 50 | 51 | .. autofunction:: enumerate(iterable: (async) iter T, start=0) 52 | :async-for: :(int, T) 53 | 54 | Standard types 55 | ============== 56 | 57 | .. autofunction:: dict(iterable: (async) iter (str, T) = ()) -> {str: T, ...} 58 | :async: 59 | 60 | .. autofunction:: list(iterable: (async) iter T = ()) -> [T, ...]
61 | :async: 62 | 63 | .. autofunction:: set(iterable: (async) iter T = ()) -> {T, ...} 64 | :async: 65 | 66 | .. autofunction:: tuple(iterable: (async) iter T = ()) -> (T, ...) 67 | :async: 68 | 69 | .. autofunction:: sorted(iterable: (async) iter T, *, key: (T) → (await) Any, reverse: bool) -> [T, ...] 70 | :async: 71 | 72 | .. versionadded:: 3.9.0 73 | -------------------------------------------------------------------------------- /docs/source/api/contextlib.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | The contextlib library 3 | ====================== 4 | 5 | .. py:module:: asyncstdlib.contextlib 6 | :synopsis: async contextlib variants 7 | 8 | The :py:mod:`asyncstdlib.contextlib` library implements 9 | Python's :py:mod:`contextlib` for (async) iterables and (async) context managers. 10 | 11 | Context Managers 12 | ================ 13 | 14 | .. py:class:: AbstractContextManager 15 | 16 | An :term:`abstract base class` for asynchronous context managers 17 | 18 | This class can be used to check whether some object is an 19 | asynchronous context manager. A class may inherit from 20 | ``AbstractContextManager``, in which case it must implement 21 | an ``__aenter__`` method; the default ``__aenter__`` returns 22 | the asynchronous context manager itself. 23 | 24 | .. versionadded:: 1.1.0 25 | 26 | .. autoclass:: ContextDecorator[T] 27 | 28 | .. py:function:: contextmanager(func: (...) → async iter T) (...) 29 | :async-with: :T 30 | :noindex: 31 | 32 | .. autofunction:: contextmanager(func: (...) → async iter T) -> (...) → async with T 33 | :decorator: 34 | 35 | .. versionadded:: 3.12.2 36 | 37 | The created context manager is a :py:class:`~.ContextDecorator`. 38 | 39 | .. autofunction:: closing(thing: AC) 40 | :async-with: :AC 41 | 42 | .. autofunction:: nullcontext(enter_result: T) 43 | :async-with: :T 44 | 45 | .. autoclass:: ExitStack 46 | 47 | .. 
automethod:: enter_context(cm: (async) with T) -> T 48 | 49 | .. automethod:: callback(callback: T as (*args, **kwargs) -> None, *args, **kwargs) -> T 50 | 51 | .. py:method:: push(exit: T as {.__aexit__}) -> T 52 | :noindex: 53 | 54 | .. py:method:: push(exit: T as {.__exit__}) -> T 55 | :noindex: 56 | 57 | .. automethod:: push(exit: T as (Type[BaseException], BaseException, traceback) -> (await) bool) -> T 58 | 59 | .. automethod:: pop_all 60 | 61 | .. automethod:: aclose 62 | 63 | .. versionadded:: 1.1.0 -------------------------------------------------------------------------------- /docs/source/api/functools.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | The functools library 3 | ===================== 4 | 5 | .. py:module:: asyncstdlib.functools 6 | :synopsis: async functools variants 7 | 8 | The :py:mod:`asyncstdlib.functools` library implements 9 | Python's :py:mod:`functools` for (async) functions and (async) iterables. 10 | 11 | Iterator reducing 12 | ================= 13 | 14 | .. autofunction:: reduce(function: (T, T) → (await) T, iterable: (async) iter T, initial: T) -> T 15 | :async: 16 | 17 | Async Caches 18 | ============ 19 | 20 | The regular :py:func:`functools.lru_cache` and :py:func:`functools.cached_property` 21 | are not appropriate for 22 | async callables, such as an ``async def`` :term:`coroutine function`: 23 | their direct return value is an :term:`awaitable` instead of the desired value. 24 | This causes the cache to store only temporary helpers, not the actual values. 25 | 26 | Both :py:func:`~asyncstdlib.functools.lru_cache` 27 | and :py:func:`~asyncstdlib.functools.cached_property` 28 | of :py:mod:`asyncstdlib` work only with async callables 29 | (they are not :term:`async neutral`). 30 | Notably, they also work with regular callables that return an :term:`awaitable`, 31 | such as an ``async def`` function wrapped by :py:func:`~functools.partial`. 
32 | 33 | Attribute Caches 34 | ---------------- 35 | 36 | This type of cache tracks ``await``\ ing an attribute. 37 | 38 | .. py:function:: cached_property(getter: (Self) → await T, /) -> await T 39 | :decorator: 40 | 41 | .. autofunction:: cached_property(context_type: Type[AsyncContextManager], /)((Self) → await T) -> await T 42 | :decorator: 43 | :noindex: 44 | 45 | .. versionadded:: 1.1.0 46 | .. versionadded:: 3.12.5 47 | 48 | The ``context_type`` decorator parameter. 49 | 50 | 51 | Callable Caches 52 | --------------- 53 | 54 | This type of cache tracks *call argument patterns* and their return values. 55 | A pattern is an *ordered* representation of positional and keyword arguments; 56 | notably, this disregards defaults and overlap between positional and keyword arguments. 57 | This means that for a function ``f(a, b)``, the calls ``f(1, 2)``, ``f(a=1, b=2)`` 58 | and ``f(b=2, a=1)`` are considered three distinct patterns. 59 | 60 | Note that exceptions are not considered return values and thus never cached. This makes 61 | the caches suitable for queries that may fail, caching any *eventual* result for 62 | quick and reliable lookup. 63 | 64 | .. autofunction:: cache((...) → await R) -> LRUAsyncCallable 65 | :decorator: 66 | 67 | .. versionadded:: 3.9.0 68 | 69 | .. py:function:: lru_cache((...) → await R) -> LRUAsyncCallable 70 | :decorator: 71 | :noindex: 72 | 73 | .. autofunction:: lru_cache(maxsize: ?int = 128, typed: bool = False)((...) → await R) -> LRUAsyncCallable 74 | :decorator: 75 | 76 | A cached async callable can be queried for its cache metadata and allows clearing 77 | entries from the cache. This can be useful to explicitly monitor cache performance, 78 | and to manage caches of unrestricted size. 79 | While the ``maxsize`` of a cache cannot be changed at runtime, 80 | the ``__wrapped__`` callable may be wrapped with a new cache of different size. 81 | 82 | .. autoclass:: LRUAsyncCallable() 83 | 84 | .. 
py:attribute:: __wrapped__ 85 | 86 | The callable wrapped by this cache 87 | 88 | .. automethod:: __call__(...) -> await R 89 | 90 | .. automethod:: cache_clear() 91 | 92 | .. automethod:: cache_discard(...) 93 | 94 | .. versionchanged:: Python3.9 95 | :py:func:`classmethod` properly wraps caches. 96 | 97 | .. versionchanged:: Python3.13 98 | :py:func:`classmethod` no longer wraps caches in a way that supports `cache_discard`. 99 | 100 | .. versionadded:: 3.10.4 101 | 102 | .. automethod:: cache_info() -> (hits=..., misses=..., maxsize=..., currsize=...) 103 | 104 | .. automethod:: cache_parameters() -> {"maxsize": ..., "typed": ...} 105 | 106 | .. versionadded:: 3.9.0 107 | -------------------------------------------------------------------------------- /docs/source/api/heapq.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | The heapq library 3 | ================= 4 | 5 | .. py:module:: asyncstdlib.heapq 6 | :synopsis: async heapq variants 7 | 8 | The :py:mod:`asyncstdlib.heapq` library implements 9 | Python's :py:mod:`heapq` for (async) functions and (async) iterables. 10 | 11 | .. versionadded:: 3.10.3 12 | 13 | This module does not re-implement the functions to maintain a heap data structure. 14 | Since Python's :py:mod:`heapq` module does not support an internal ``key`` function 15 | but relies on ``(key, item)`` pairs as needed, 16 | the same interface can be used for ``async`` key function. 17 | For example, an ``async`` key function would be used as 18 | ``heappush(heap, (await key_func(item), item))`` instead of 19 | ``heappush(heap, (key_func(item), item))``. 20 | 21 | Iterator merging 22 | ================ 23 | 24 | .. autofunction:: merge(*iterables: (async) iter T, key: (T) → (await) Any = None, reverse: bool = False) 25 | :async-for: :T 26 | 27 | Iterator selecting 28 | ================== 29 | 30 | .. 
autofunction:: nlargest(*iterables: (async) iter T, n: int, key: (T) → (await) Any = None) -> [T, ...] 31 | 32 | .. autofunction:: nsmallest(*iterables: (async) iter T, n: int, key: (T) → (await) Any = None) -> [T, ...] 33 | 34 | -------------------------------------------------------------------------------- /docs/source/api/itertools.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | The itertools library 3 | ===================== 4 | 5 | .. py:module:: asyncstdlib.itertools 6 | :synopsis: async itertools variants 7 | 8 | The :py:mod:`asyncstdlib.itertools` library implements 9 | Python's :py:mod:`itertools` for (async) functions and (async) iterables. 10 | 11 | .. note:: 12 | 13 | Only functions which benefit from an explicit ``async`` implementation are provided. 14 | Other functions from ``itertools`` can be turned asynchronous using :py:func:`~asyncstdlib.iter`, 15 | e.g. `asyncstdlib.iter(itertools.count(5))`. 16 | 17 | .. note:: 18 | 19 | To avoid leaking resources, all utilities in this module explicitly close their 20 | iterable arguments when done. 21 | This can be unexpected for non-exhausting utilities such as :py:func:`~.dropwhile` 22 | and may require explicit scoping. 23 | See the guide on :ref:`guide_iteration` for details and usage examples. 24 | 25 | Infinite iterators 26 | ================== 27 | 28 | .. autofunction:: cycle(iterable: (async) iter T) 29 | :async-for: :T 30 | 31 | Iterator merging 32 | ================ 33 | 34 | .. autofunction:: chain(*iterables: (async) iter T) 35 | :async-for: :T 36 | 37 | .. autofunction:: asyncstdlib.itertools::chain.from_iterable(iterable: (async) iter (async) iter T) 38 | :async-for: :T 39 | 40 | .. autofunction:: zip_longest(*iterables: (async) iter T, fillvalue: S = None) 41 | :async-for: :(T or S, ...) 42 | 43 | Iterator filtering 44 | ================== 45 | 46 | .. 
autofunction:: compress(data: (async) iter T, selectors: (async) iter T) 47 | :async-for: :T 48 | 49 | .. autofunction:: dropwhile(predicate: (T) → (await) bool, iterable: (async) iter T) 50 | :async-for: :T 51 | 52 | .. autofunction:: filterfalse(predicate: None | (T) → (await) bool, iterable: (async) iter T) 53 | :async-for: :T 54 | 55 | .. autofunction:: takewhile(predicate: (T) → (await) bool, iterable: (async) iter T) 56 | :async-for: :T 57 | 58 | .. py:function:: islice(iterable: (async) iter T, stop: int) 59 | :async-for: :T 60 | :noindex: 61 | 62 | .. autofunction:: islice(iterable: (async) iter T, start: int, stop: int, step: int = 1) 63 | :async-for: :T 64 | 65 | Iterator transforming 66 | ===================== 67 | 68 | .. autofunction:: accumulate(iterable: (async) iter T, function: (T, T) → (await) T = add [, initial: T]) 69 | :async-for: :T 70 | 71 | .. autofunction:: starmap(function: (*A) → (await) T, iterable: (async) iter (A, ...)) 72 | :async-for: :T 73 | 74 | Iterator splitting 75 | ================== 76 | 77 | .. autofunction:: tee(iterable: (async) iter T, n: int = 2, [*, lock: async with Any]) 78 | :for: :(async iter T, ...) 79 | 80 | .. versionadded:: 3.10.5 81 | 82 | The ``lock`` keyword parameter. 83 | 84 | .. autofunction:: pairwise(iterable: (async) iter T) 85 | :async-for: :(T, T) 86 | 87 | .. versionadded:: 3.10.0 88 | 89 | .. autofunction:: batched(iterable: (async) iter T, n: int, strict: bool = False) 90 | :async-for: :T 91 | 92 | .. versionadded:: 3.11.0 93 | 94 | .. versionadded:: 3.13.0 95 | 96 | The ``strict`` parameter. 97 | 98 | .. py:function:: groupby(iterable: (async) iter T) 99 | :async-for: :(T, async iter T) 100 | :noindex: 101 | .. autofunction:: groupby(iterable: (async) iter T, key: (T) → (await) R) 102 | :async-for: :(R, async iter T) 103 | 104 | .. 
versionadded:: 1.1.0 105 | -------------------------------------------------------------------------------- /docs/source/devel/contributing.rst: -------------------------------------------------------------------------------- 1 | ======================= 2 | Contribution Guidelines 3 | ======================= 4 | 5 | Contributions to ``asyncstdlib`` are highly welcome! 6 | The place to go is the `asyncstdlib GitHub repository`_ 7 | where you can report bugs, request improvements or propose changes. 8 | 9 | - For bug reports and feature requests simply `open a new issue`_ 10 | and fill in the appropriate template. 11 | - Even for content submissions make sure `an issue exists`_ - this 12 | allows you to get early feedback and document the development. 13 | You can use whatever tooling you like to create the content, 14 | but the next sections give a rough outline on how to proceed. 15 | 16 | .. _asyncstdlib GitHub repository: https://github.com/maxfischer2781/asyncstdlib 17 | .. _open a new issue: https://github.com/maxfischer2781/asyncstdlib/issues/new/choose 18 | .. _an issue exists: https://github.com/maxfischer2781/asyncstdlib/issues 19 | 20 | Submitting Content 21 | ================== 22 | 23 | To submit concrete content suggestions you *must* use a `GitHub Fork and Pull Request`_. 24 | This lets you create the content at your own pace yet still receive direct feedback. 25 | Feel free to start with a *Draft Pull Request* to get feedback early. 26 | 27 | All content goes through mandatory automated and manual review. 28 | You can run most of the automated review yourself to get faster feedback, 29 | yet it is also fine to wait for the checks run on GitHub itself. 30 | Dependencies for automated code and documentation checking is available via 31 | the extras ``test`` and ``doc``, respectively. 32 | 33 | .. note:: 34 | 35 | Ideally you develop with the repository checked out locally and a separate `Python venv`_. 
36 | If you have the venv active and are at the repository root, 37 | run ``python -m pip install -e '.[test,typetest,doc]'`` to install all dependencies. 38 | 39 | .. _`GitHub Fork and Pull Request`: https://guides.github.com/activities/forking/ 40 | .. _`Python venv`: https://docs.python.org/3/library/venv.html 41 | 42 | Testing Code 43 | ------------ 44 | 45 | Code can be verified locally using the tools `flake8`, `black`, `pytest`, `pyright` and `mypy`. 46 | You should always verify that the basic checks pass: 47 | 48 | .. code:: bash 49 | 50 | python -m black asyncstdlib unittests 51 | python -m flake8 asyncstdlib unittests 52 | python -m pytest 53 | 54 | This runs tests from simplest to most advanced and should allow a quick development cycle. 55 | 56 | In many cases you can rely on your IDE for type checking. 57 | For major typing related changes, run the full type checking: 58 | 59 | .. code:: bash 60 | 61 | python -m mypy --pretty 62 | python -m pyright 63 | 64 | Note that some additional checks are run on GitHub to check test coverage and code health. 65 | 66 | Building Docs 67 | ------------- 68 | 69 | If you change the documentation, either directly or via significant edits to docstrings, 70 | you can build the documentation yourself to check if everything renders as expected. 71 | To do so, trigger a `Sphinx build`_ to generate a HTML version of the docs: 72 | 73 | .. code:: bash 74 | 75 | sphinx-build -M html ./docs ./docs/_build 76 | 77 | On success, simply open `./docs/_build/html/index.html` in your favourite browser. 78 | 79 | .. _`Sphinx build`: https://www.sphinx-doc.org/en/master/man/sphinx-build.html 80 | 81 | The Review 82 | ---------- 83 | 84 | Once you mark your pull request as ready for review, be prepared for one or more rounds of comments. 85 | These can range from general commentary, to code suggestions, to inquiries why a specific change was made. 
86 | We strive to give actionable advice, but whenever you have trouble understanding how to proceed - 87 | please just reply with a comment of your own and ask how to proceed! 88 | 89 | Once all comments are resolved and your pull request was approved, sit back and relax! 90 | We will merge your pull request in due time and include it in the next release. 91 | Thanks for contributing! 92 | -------------------------------------------------------------------------------- /docs/source/devel/publishing.rst: -------------------------------------------------------------------------------- 1 | ===================== 2 | Versions and Releases 3 | ===================== 4 | 5 | The ``asyncstdlib`` versioning closely follows 6 | versioning of the Python standard library. 7 | New versions are published via `PyPI`_ and `Conda-Forge`_ 8 | for installation via ``pip`` and ``conda``. 9 | 10 | Versioning and feature coverage 11 | =============================== 12 | 13 | The ``asyncstdlib`` mimics the versioning of the Python standard library: 14 | 15 | * *Major and Minor version* indicate which Python feature set is supported, and 16 | * *Patch version* indicates the iteration of this feature set. 17 | 18 | For example, ``asyncstdlib`` version 3.9.2 provides the feature set of Python 3.9, 19 | such as :py:func:`~asyncstdlib.functools.cache` added in 3.9 20 | and :py:func:`~asyncstdlib.functools.cached_property` added previously. 21 | 22 | The ``asyncstdlib.asynctools`` feature set does not follow a strict version model. 23 | New features may be added at minor or patch releases. 24 | 25 | Release workflow 26 | ================ 27 | 28 | .. note:: 29 | 30 | This section is only relevant for maintainers of ``asyncstdlib``. 31 | 32 | Releases are performed manually but should happen at least when 33 | an important fix or major feature is added. 34 | Most releases will bump the *patch* version number; 35 | only bump the *minor* or *major* version number to match a new Python release. 
36 | 37 | 1. Review all changes added by the new releases: 38 | * Naming of functions/classes/parameters 39 | * Docs are up to date and consistent 40 | * Unittests cover all obvious cases 41 | 42 | 2. Bump the version number: 43 | * Adjust and commit ``asyncstdlib.__init__.__version__`` 44 | * Create a git tag such as ``git tag -a "v3.9.2" -m "description"`` 45 | * Push the commit and tags to github 46 | 47 | 3. Publish the new release: 48 | * Create a new `GitHub release`_ from the recent version tag 49 | * PyPI will be automatically published to via GitHub actions 50 | * Handle the automatic PR at the `Conda-Forge asyncstdlib recipe`_ 51 | 52 | .. _PyPI: https://pypi.org 53 | .. _Conda-Forge: https://conda-forge.org 54 | .. _`PyPI asyncstdlib project`: https://pypi.org/project/asyncstdlib/ 55 | .. _`GitHub release`: https://docs.github.com/en/repositories/releasing-projects-on-github/about-releases 56 | .. _`Conda-Forge asyncstdlib recipe`: https://github.com/conda-forge/asyncstdlib-feedstock 57 | .. _`PyPI release`: https://pypi.org/project/asyncstdlib/#files 58 | -------------------------------------------------------------------------------- /docs/source/devel/testloop.rst: -------------------------------------------------------------------------------- 1 | =============== 2 | Test Event Loop 3 | =============== 4 | 5 | .. py:module:: unittests.utility 6 | :synopsis: testing utilities 7 | 8 | To facilitate event loop agnostic features, :py:mod:`asyncstdlib` includes 9 | its own custom event loop implementation for testing. 10 | This is provided as a simple decorator that is compatible with :py:mod:`pytest`, 11 | as well as a number of `async` commands specific to the event loop. 12 | 13 | Event Loops 14 | =========== 15 | 16 | The test event loop is available via a decorator that should be directly applied 17 | to an ``async def`` test case. 18 | 19 | .. autofunction:: sync(test_case: (...) -> (await) None) -> (...) 
-> None 20 | 21 | Async commands 22 | ============== 23 | 24 | .. autoclass:: Schedule(*await Any) 25 | 26 | .. py:class:: Switch(skip: int, /) 27 | :no-index: 28 | 29 | .. py:class:: Switch(min: int, max: int, /) 30 | :no-index: 31 | 32 | .. autoclass:: Switch() 33 | 34 | .. autoclass:: Lock 35 | -------------------------------------------------------------------------------- /docs/source/glossary.rst: -------------------------------------------------------------------------------- 1 | ================= 2 | Glossary of Terms 3 | ================= 4 | 5 | .. Using references in the glossary itself: 6 | When mentioning other items, always reference them. 7 | When mentioning the current item, never reference it. 8 | 9 | 10 | .. glossary:: 11 | 12 | async neutral 13 | Types that support either of a regular or asynchronous implementation. 14 | For example, an async neutral iterable may provide either regular 15 | ``for _ in iterable`` or asynchronous ``async for _ in iterable`` iteration. 16 | Commonly, callables have async neutral parameters to simplify using them 17 | with a mixture of synchronous and regular arguments. 18 | 19 | borrowing 20 | borrowed object 21 | Many ``async`` objects need to be cleaned up explicitly – for example, 22 | an :term:`asynchronous iterator` should generally be ``aclose``\ d after use 23 | (see `PEP 533`_ for details). When *borrowing* such an object to a temporary 24 | owner, the original owner guarantees to clean up the object but prevents the 25 | temporary owner from doing so. 26 | 27 | .. _PEP 533: https://www.python.org/dev/peps/pep-0533/ -------------------------------------------------------------------------------- /docs/source/notes/compatible.rst: -------------------------------------------------------------------------------- 1 | .. 
_guide_compatible: 2 | 3 | ================ 4 | Sync/Async Reuse 5 | ================ 6 | 7 | The :py:mod:`asyncstdlib` only re-implements functions and classes 8 | that benefit from an async implementation. 9 | In some cases, a synchronous implementation is already 10 | sufficient to cover the async case as well. 11 | 12 | Example: async property 13 | ======================= 14 | 15 | A prominent example is an "``async`` ``property``": 16 | a computed attribute that allows to run ``async`` code as well. 17 | This is useful for example to fetch data for the attribute 18 | from a remote database or server. 19 | 20 | As it turns out, we can directly use the builtin :py:class:`property` for this! 21 | 22 | .. code-block:: python3 23 | 24 | # python3 -m asyncio 25 | class Remote: 26 | _count = 0 27 | @property # <== builtin @property ... 28 | async def attribute(self): # ... around an async method 29 | await asyncio.sleep(1) # let's pretend to do some work... 30 | self._count += 1 31 | return "Na" * self._count 32 | 33 | instance = Remote() 34 | print(await instance.attribute) # waits 1 second, prints Na 35 | print(await instance.attribute) # waits 1 second, prints NaNa 36 | 37 | In principle, we could also define setters and deleters 38 | – however, Python has no syntax for async assignment or deletion 39 | which limits the advantage of using a :py:class:`property` in the first place. [1]_ 40 | 41 | Identifying reusability 42 | ======================= 43 | 44 | In general, a utility is sync/async compatible when it takes a callable but does not 45 | depend on the concrete result. 46 | For example, a `property` getter just prepares some attribute value 47 | – which may as well be an awaitable. 48 | In contrast, the similar :py:func:`~asyncstdlib.functools.cached_property` must access 49 | the concrete result to store it – this requires async capabilities for the async case. 
50 | 51 | Some examples for async compatible parts of the standard library include: 52 | 53 | * Factory descriptors such as :py:class:`property`, :py:class:`classmethod` and :py:class:`staticmethod` 54 | * Factories such as :py:func:`functools.partial` and :py:func:`functools.partialmethod` 55 | * Selectors such as :py:func:`functools.singledispatch` and :py:func:`functools.singledispatchmethod` 56 | * Modifiers such as :py:func:`functools.wraps` and :py:func:`functools.update_wrapper` 57 | * Special method operators not enforcing result types such as :py:func:`reversed` and :py:func:`~operator.__add__` 58 | 59 | Most of these merely wrap a callable to either modify it directly 60 | (such as :py:func:`functools.wraps`) 61 | or call it regardless of the return type 62 | (such as :py:func:`functools.partial`). 63 | Note that some functions such as :py:func:`~operator.__add__` *usually* work for the 64 | `async` case, but may fail in some subtle edge case – such as not being able to see 65 | a :py:data:`NotImplemented` return value. 66 | 67 | .. [1] Using `setattr` and `delattr` one can asynchronously run a setter/deleter, 68 | for example `await setattr(instance, "attribute", value)`. However, with the lack 69 | of specific syntax this offers little to no advantage over using a method. 70 | -------------------------------------------------------------------------------- /docs/source/notes/iter_scope.rst: -------------------------------------------------------------------------------- 1 | .. _guide_iteration: 2 | 3 | ================ 4 | Iterator Scoping 5 | ================ 6 | 7 | Cleanup of ``async`` resources is special in that it may require an active event loop. 8 | Since :term:`asynchronous iterators <asynchronous iterator>` can hold resources 9 | indefinitely, they should be cleaned up deterministically whenever possible 10 | (see `PEP 533`_ for discussion). 11 | Thus, ``asyncstdlib`` defaults to deterministic cleanup but provides tools to explicitly 12 | manage the lifetime of iterators.
13 | 14 | Cleanup in ``asyncstdlib`` 15 | ========================== 16 | 17 | All async iterators of :py:mod:`asyncstdlib` that work on other iterators 18 | assume sole ownership of the iterators passed to them. 19 | Passed in async iterators are guaranteed to be :py:meth:`~agen.aclose`\ d 20 | as soon as the :py:mod:`asyncstdlib` async iterator itself is cleaned up. 21 | This provides a resource-safe default for the most common operation of 22 | exhausting iterators. 23 | 24 | .. code-block:: python3 25 | 26 | >>> import asyncio 27 | >>> import asyncstdlib as a 28 | >>> 29 | >>> async def async_squares(i=0): 30 | ... """Provide an infinite stream of squared numbers""" 31 | ... while True: 32 | ... await asyncio.sleep(0.1) 33 | ... yield i**2 34 | ... i += 1 35 | ... 36 | >>> async def main(): 37 | ... async_iter = async_squares() 38 | ... # loop until we are done 39 | ... async for i, s in a.zip(range(5), async_iter): 40 | ... print(f"{i}: {s}") 41 | ... assert await a.anext(async_iter, "Closed!") == "Closed!" 42 | ... 43 | >>> asyncio.run(main()) 44 | 45 | For consistency, every :py:mod:`asyncstdlib` async iterator performs such cleanup. 46 | This may be unexpected for async variants of iterator utilities that are usually 47 | applied multiple times, such as :py:func:`itertools.islice`. 48 | Thus, to manage the lifetime of async iterators one can explicitly scope them. 49 | 50 | Scoping async iterator lifetime 51 | =============================== 52 | 53 | In order to use a single async iterator across several iterations but guarantee cleanup, 54 | the iterator can be scoped to an ``async with`` block: 55 | using :py:func:`asyncstdlib.scoped_iter` creates an async iterator that is guaranteed 56 | to :py:meth:`~agen.aclose` at the end of the block, but cannot be closed before. 57 | 58 | .. code-block:: python3 59 | 60 | >>> import asyncio 61 | >>> import asyncstdlib as a 62 | >>> 63 | >>> async def async_squares(i=0): 64 | ... 
"""Provide an infinite stream of squared numbers""" 65 | ... while True: 66 | ... await asyncio.sleep(0.1) 67 | ... yield i**2 68 | ... i += 1 69 | ... 70 | >>> async def main(): 71 | ... # iterator can be re-used in the async with block 72 | ... async with a.scoped_iter(async_squares()) as async_iter: 73 | ... async for s in a.islice(async_iter, 3): 74 | ... print(f"1st Batch: {s}") 75 | ... # async_iter is still open for further iteration 76 | ... async for s in a.islice(async_iter, 3): 77 | ... print(f"2nd Batch: {s}") 78 | ... async for s in a.islice(async_iter, 3): 79 | ... print(f"3rd Batch: {s}") 80 | ... # iterator is closed after the async with block 81 | ... assert await a.anext(async_iter, "Closed!") == "Closed!" 82 | ... 83 | >>> asyncio.run(main()) 84 | 85 | Scoped iterators should be the go-to approach for managing iterator lifetimes. 86 | However, not all lifetimes correspond to well-defined lexical scopes; 87 | for these cases, one can :term:`borrow ` an iterator instead. 88 | 89 | .. 
_PEP 533: https://www.python.org/dev/peps/pep-0533/ 90 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["flit_core"] 3 | build-backend = "flit_core.buildapi" 4 | 5 | [project] 6 | dynamic = ["version", "description"] 7 | name = "asyncstdlib" 8 | authors = [ 9 | {name = "Max Kühn", email = "maxfischer2781@gmail.com"}, 10 | ] 11 | readme = "README.rst" 12 | classifiers = [ 13 | "Development Status :: 5 - Production/Stable", 14 | "Framework :: AsyncIO", 15 | "Intended Audience :: Developers", 16 | "License :: OSI Approved :: MIT License", 17 | "Programming Language :: Python :: 3 :: Only", 18 | "Programming Language :: Python :: 3.8", 19 | "Programming Language :: Python :: 3.9", 20 | "Programming Language :: Python :: 3.10", 21 | "Programming Language :: Python :: 3.11", 22 | "Programming Language :: Python :: 3.12", 23 | "Programming Language :: Python :: 3.13", 24 | ] 25 | license = {"file" = "LICENSE"} 26 | keywords = ["async", "enumerate", "itertools", "builtins", "functools", "contextlib"] 27 | requires-python = "~=3.8" 28 | dependencies = [] 29 | 30 | [project.optional-dependencies] 31 | test = [ 32 | "pytest", 33 | "flake8", 34 | "flake8-bugbear", 35 | "black; implementation_name=='cpython'", 36 | "coverage", 37 | "pytest-cov", 38 | "flake8-2020", 39 | "mypy; implementation_name=='cpython'", 40 | ] 41 | typetest = [ 42 | "mypy; implementation_name=='cpython'", 43 | "pyright", 44 | "typing-extensions", 45 | ] 46 | doc = ["sphinx", "sphinxcontrib-trio"] 47 | 48 | [project.urls] 49 | Documentation = "https://asyncstdlib.readthedocs.io/en/latest/" 50 | Source = "https://github.com/maxfischer2781/asyncstdlib" 51 | 52 | [tool.flit.sdist] 53 | include = ["unittests"] 54 | 55 | [tool.mypy] 56 | files = ["asyncstdlib", "typetests"] 57 | check_untyped_defs = true 58 | no_implicit_optional = true 59 | 
warn_redundant_casts = true 60 | warn_unused_ignores = true 61 | warn_unreachable = true 62 | disallow_any_generics = true 63 | disallow_subclassing_any = true 64 | disallow_untyped_calls = true 65 | disallow_untyped_defs = true 66 | disallow_incomplete_defs = true 67 | disallow_untyped_decorators = true 68 | warn_return_any = true 69 | no_implicit_reexport = true 70 | strict_equality = true 71 | 72 | [tool.pyright] 73 | include = ["asyncstdlib", "typetests"] 74 | typeCheckingMode = "strict" 75 | pythonPlatform = "All" 76 | pythonVersion = "3.8" 77 | verboseOutput = true 78 | 79 | [tool.pytest.ini_options] 80 | testpaths = [ 81 | "unittests", 82 | ] 83 | -------------------------------------------------------------------------------- /setup.cfg: 1 | [flake8] 2 | statistics = True 3 | max-line-length = 80 4 | ignore = E302, E501, E704, B008, B011, B905, B950, W503, W504 5 | select = C,E,F,W,B,B9 6 | exclude = docs,.svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg 7 | -------------------------------------------------------------------------------- /typetests/README.rst: 1 | ================= 2 | MyPy Type Testing 3 | ================= 4 | 5 | This suite contains *type* tests for ``asyncstdlib``. 6 | These tests follow similar conventions to unittests but are checked by MyPy. 7 | 8 | Test Files 9 | ========== 10 | 11 | Tests MUST be organised into files, with similar tests grouped together. 12 | Each test file SHOULD be called as per the pattern ``test_<scope>.py``, 13 | where ``<scope>`` describes what the tests cover; 14 | for example, ``test_functools.py`` type-tests the ``functools`` package. 15 | 16 | An individual test is a function, method or class and SHOULD be named 17 | with a `test_` or `Test` prefix for functions/methods or classes, respectively.
18 | A class SHOULD be considered a test if it contains any tests. 19 | Tests MUST contain statements to be type-checked: 20 | - plain statements required to be type consistent, 21 | such as passing parameters of expected correct type to a function. 22 | - assertions about types and exhaustiveness, 23 | using `typing.assert_type` or `typing.assert_never`. 24 | - statements required to be type inconsistent with an expected type error, 25 | such as passing parameters of wrong type with `# type: ignore[arg-type]`. 26 | 27 | Test files MAY contain non-test functions, methods or classes for use inside tests. 28 | These SHOULD be type-consistent and not require any type assertions or expected errors. 29 | 30 | Test Execution 31 | ============== 32 | 33 | Tests MUST be checked by MyPy using 34 | the ``warn_unused_ignores`` configuration or ``--warn-unused-ignores`` command line 35 | option. 36 | This is required for negative type consistency checks, 37 | i.e. using expected type errors such as ``# type: ignore[arg-type]``. 
38 | -------------------------------------------------------------------------------- /typetests/test_functools.py: -------------------------------------------------------------------------------- 1 | from asyncstdlib import lru_cache 2 | 3 | 4 | @lru_cache() 5 | async def lru_function(a: int) -> int: 6 | return a 7 | 8 | 9 | async def test_cache_parameters() -> None: 10 | await lru_function(12) 11 | await lru_function("wrong parameter type") # type: ignore[arg-type] 12 | 13 | 14 | class TestLRUMethod: 15 | """ 16 | Test that `lru_cache` works on methods 17 | """ 18 | 19 | @lru_cache() 20 | async def cached(self, a: int = 0) -> int: 21 | return a 22 | 23 | async def test_implicit_self(self) -> int: 24 | return await self.cached() 25 | 26 | async def test_method_parameters(self) -> int: 27 | await self.cached("wrong parameter type") # type: ignore[arg-type] 28 | return await self.cached(12) 29 | -------------------------------------------------------------------------------- /typetests/test_itertools.py: -------------------------------------------------------------------------------- 1 | from typing import AsyncIterator 2 | from asyncstdlib import itertools 3 | from typing_extensions import assert_type 4 | 5 | 6 | async def test_cycle() -> None: 7 | async for x in itertools.cycle([1]): 8 | assert_type(x, int) 9 | 10 | 11 | async def test_accumulate() -> None: 12 | async for x in itertools.accumulate([1]): 13 | assert_type(x, int) 14 | 15 | 16 | async def test_batched() -> None: 17 | async for x in itertools.batched([1], 1): 18 | assert_type(x, "tuple[int]") 19 | 20 | 21 | async def test_chain() -> None: 22 | async for x in itertools.chain([1]): 23 | assert_type(x, int) 24 | 25 | 26 | async def test_compress() -> None: 27 | async for x in itertools.compress([1], [1]): 28 | assert_type(x, int) 29 | 30 | 31 | async def test_dropwhile() -> None: 32 | async for x in itertools.dropwhile(lambda x: True, [1]): 33 | assert_type(x, int) 34 | 35 | 36 | async def 
test_filterfalse() -> None: 37 | async for x in itertools.filterfalse(lambda x: True, [1]): 38 | assert_type(x, int) 39 | 40 | 41 | async def test_starmap() -> None: 42 | def f(x: str) -> int: 43 | return int(x) 44 | 45 | async for x in itertools.starmap(f, [("1",)]): 46 | assert_type(x, int) 47 | 48 | 49 | async def test_takewhile() -> None: 50 | async for x in itertools.takewhile(lambda x: True, [1]): 51 | assert_type(x, int) 52 | 53 | 54 | async def test_tee() -> None: 55 | async for x in itertools.tee([1])[0]: 56 | assert_type(x, int) 57 | 58 | 59 | async def test_tee_iter() -> None: 60 | x1, x2 = itertools.tee([1], n=2) 61 | assert_type(x1, AsyncIterator[int]) 62 | assert_type(x2, AsyncIterator[int]) 63 | 64 | for xi in itertools.tee([1], n=2): 65 | async for x in xi: 66 | assert_type(x, int) 67 | 68 | 69 | async def test_pairwise() -> None: 70 | async for x in itertools.pairwise([1]): 71 | assert_type(x, "tuple[int, int]") 72 | 73 | 74 | async def test_zip_longest() -> None: 75 | async for x in itertools.zip_longest([1]): 76 | assert_type(x, "tuple[int]") 77 | 78 | 79 | async def test_groupby() -> None: 80 | async for x in itertools.groupby([1]): 81 | assert_type(x, "tuple[int, AsyncIterator[int]]") 82 | -------------------------------------------------------------------------------- /unittests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/maxfischer2781/asyncstdlib/aa76c1d28196df695d69a31d3078b6e48e9e0fbe/unittests/__init__.py -------------------------------------------------------------------------------- /unittests/test_asynctools.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | import asyncstdlib as a 4 | 5 | from .utility import sync, asyncify 6 | 7 | 8 | CLOSED = "closed" 9 | 10 | 11 | @sync 12 | async def test_scoped_iter_iterclose(): 13 | """A `scoped_iter` cannot be closed via its public interface""" 14 | 
async_iterable, iterable = asyncify(range(10)), iter(range(10)) 15 | async with a.scoped_iter(async_iterable) as a1: 16 | assert await a.anext(a1) == next(iterable) 17 | # closing a scoped iterator is a no-op 18 | await a1.aclose() 19 | assert await a.anext(a1) == next(iterable) 20 | # explicitly test #68 21 | await a.iter(a1).aclose() 22 | assert await a.anext(a1) == next(iterable) 23 | assert await a.list(async_iterable) == list(iterable) 24 | assert await a.anext(a1, CLOSED) == CLOSED 25 | assert await a.anext(async_iterable, CLOSED) == CLOSED 26 | 27 | 28 | @sync 29 | async def test_nested_lifetime(): 30 | async_iterable, iterable = asyncify(range(10)), iter(range(10)) 31 | async with a.scoped_iter(async_iterable) as a1: 32 | assert await a.anext(a1) == next(iterable) 33 | async with a.scoped_iter(a1) as a2: 34 | assert await a.anext(a2) == next(iterable) 35 | assert await a.anext(a1) == next(iterable) 36 | # scoped iter can only be closed by scope 37 | await a2.aclose() 38 | assert await a.anext(a2) == next(iterable) 39 | # scoped iterator is closed by its own scope 40 | assert await a.anext(a2, CLOSED) == CLOSED 41 | # scoped iterator is not implicitly closed by inner scope 42 | assert await a.anext(a1) == next(iterable) 43 | assert await a.anext(async_iterable) == next(iterable) 44 | assert await a.anext(a2, CLOSED) == CLOSED 45 | assert await a.anext(a1, CLOSED) == CLOSED 46 | assert await a.anext(async_iterable, CLOSED) == CLOSED 47 | 48 | 49 | @sync 50 | async def test_nested_lifetime_closed_outer(): 51 | """outer lifetime restricts inner lifetime""" 52 | async_iterable, iterable = asyncify(range(10)), iter(range(10)) 53 | async with a.scoped_iter(async_iterable) as a1: 54 | assert await a.anext(a1) == next(iterable) 55 | b1 = a.borrow(a1) 56 | async with a.scoped_iter(b1) as a2: 57 | assert await a.anext(a2) == next(iterable) 58 | await b1.aclose() 59 | # scope iterator is closed alongside parent 60 | assert await a.anext(a2, CLOSED) == CLOSED 61 | 
assert await a.anext(b1, CLOSED) == CLOSED 62 | assert await a.anext(a2, CLOSED) == CLOSED 63 | assert await a.anext(b1, CLOSED) == CLOSED 64 | # scoped iterator is not implicitly closed by inner scope/borrow 65 | assert await a.anext(a1) == next(iterable) 66 | assert await a.anext(async_iterable) == next(iterable) 67 | assert await a.anext(a2, CLOSED) == CLOSED 68 | assert await a.anext(a1, CLOSED) == CLOSED 69 | assert await a.anext(async_iterable, CLOSED) == CLOSED 70 | 71 | 72 | @sync 73 | async def test_borrow_explicitly(): 74 | async_iterable = asyncify(range(10)) 75 | values = [] 76 | borrowed_aiterable = a.borrow(async_iterable) 77 | values.append(await a.anext(async_iterable)) 78 | values.append(await a.anext(borrowed_aiterable)) 79 | await a.borrow(borrowed_aiterable).aclose() 80 | values.append(await a.anext(borrowed_aiterable)) 81 | await borrowed_aiterable.aclose() 82 | values.append(await a.anext(async_iterable)) 83 | assert values == list(range(4)) 84 | async for value in async_iterable: 85 | values.append(value) 86 | assert values == list(range(10)) 87 | 88 | 89 | class Uncloseable: 90 | def __init__(self, iterator): 91 | self.iterator = iterator 92 | 93 | def __aiter__(self): 94 | return self 95 | 96 | async def __anext__(self): 97 | return await a.anext(self.iterator) 98 | 99 | 100 | @sync 101 | async def test_borrow_iterable(): 102 | async_iterable = Uncloseable(asyncify(range(10))) 103 | values = [] 104 | async with a.scoped_iter(async_iterable) as a1: 105 | values.append(await a.anext(a1)) 106 | values.append(await a.anext(a.borrow(async_iterable))) 107 | assert values == [0, 1] 108 | 109 | 110 | class Closeable: 111 | def __init__(self, iterator): 112 | self.iterator = iterator 113 | 114 | def __aiter__(self): 115 | return self 116 | 117 | async def __anext__(self): 118 | return await a.anext(self.iterator) 119 | 120 | async def aclose(self): 121 | await self.iterator.aclose() 122 | 123 | 124 | @pytest.mark.parametrize( 125 | 
"async_iterable_t", 126 | [ 127 | lambda: asyncify(range(10)), 128 | lambda: Closeable(asyncify(range(10))), 129 | lambda: Uncloseable(asyncify(range(10))), 130 | ], 131 | ) 132 | @sync 133 | async def test_borrow_methods(async_iterable_t): 134 | async_iterable = async_iterable_t() 135 | values = [] 136 | async with a.scoped_iter(async_iterable) as a1: 137 | values.append(await a.anext(a1)) 138 | assert hasattr(a1, "athrow") == hasattr(async_iterable, "athrow") 139 | assert hasattr(a1, "asend") == hasattr(async_iterable, "asend") 140 | assert values == [0] 141 | 142 | 143 | @sync 144 | async def test_scoped_iter_misuse(): 145 | scoped_iter = a.scoped_iter(asyncify(range(5))) 146 | async with scoped_iter: 147 | with pytest.raises(RuntimeError): 148 | async with scoped_iter: 149 | pytest.fail("may not enter scoped_iter twice") 150 | 151 | 152 | @sync 153 | async def test_borrow_misuse(): 154 | with pytest.raises(TypeError): 155 | a.borrow([1, 2, 3]) 156 | 157 | 158 | @sync 159 | async def test_await_each(): 160 | async def check1() -> bool: 161 | return True 162 | 163 | async def check2() -> bool: 164 | return True 165 | 166 | async def check3() -> bool: 167 | return True 168 | 169 | all_non_negative = await a.all(a.await_each([check1(), check2(), check3()])) 170 | 171 | assert all_non_negative 172 | 173 | 174 | @sync 175 | async def test_apply_with_no_arguments(): 176 | # This is probably a meaningless use case, but we want to make sure that 177 | # ``apply`` is still robust. 
178 | result = await a.apply(lambda: 42) 179 | 180 | assert result == 42 181 | 182 | 183 | @sync 184 | async def test_apply_with_an_argument(): 185 | async def compute_something() -> int: 186 | return 42 187 | 188 | result = await a.apply(lambda x: 2 * x, compute_something()) 189 | 190 | assert result == 84 191 | 192 | 193 | @sync 194 | async def test_apply_with_keyword_arguments(): 195 | async def compute_something() -> int: 196 | return 42 197 | 198 | async def compute_something_else() -> int: 199 | return 1984 200 | 201 | result = await a.apply( 202 | lambda x, y: x - y, x=compute_something(), y=compute_something_else() 203 | ) 204 | 205 | assert result == 42 - 1984 206 | 207 | 208 | @sync 209 | async def test_apply_with_an_argument_and_a_keyword_argument(): 210 | async def compute_something() -> int: 211 | return 42 212 | 213 | async def compute_something_else() -> int: 214 | return 1984 215 | 216 | result = await a.apply( 217 | lambda x, y: x - y, compute_something(), y=compute_something_else() 218 | ) 219 | 220 | assert result == 42 - 1984 221 | 222 | 223 | @sync 224 | async def test_sync(): 225 | def check_3(x: int) -> int: 226 | return x + 10 227 | 228 | async def check_4(x: int, y: int, z: int) -> int: 229 | return x + y + z + 100 230 | 231 | t1 = await a.sync(check_3)(x=100) 232 | t2 = await a.sync(check_4)(x=5, y=5, z=10) 233 | t3 = await a.sync(lambda x: x**3)(x=5) 234 | 235 | with pytest.raises(TypeError): 236 | a.sync("string")(10) 237 | 238 | assert t1 == 110 239 | assert t2 == 120 240 | assert t3 == 125 241 | 242 | 243 | @sync 244 | async def test_sync_awaitable(): 245 | """Test any (…) -> await T is recognised""" 246 | 247 | @a.sync 248 | def nocoro_async(value): 249 | async def coro(): 250 | return value 251 | 252 | return coro() 253 | 254 | assert await nocoro_async(5) == 5 255 | 256 | 257 | async def await_iter(n: int): 258 | return [*range(n)] 259 | 260 | 261 | async def async_iter(n: int): 262 | for i in range(n): 263 | yield i 264 | 265 | 266 
| async def await_value(i): 267 | return i 268 | 269 | 270 | async def await_iter_await(n: int): 271 | return [await_value(i) for i in range(n)] 272 | 273 | 274 | async def await_async_iter_await(n: int): 275 | for i in range(n): 276 | yield await_value(i) 277 | 278 | 279 | @pytest.mark.parametrize("n", [0, 1, 12]) 280 | @pytest.mark.parametrize( 281 | "any_iterable_t", 282 | [range, await_iter, async_iter, await_iter_await, await_async_iter_await], 283 | ) 284 | @sync 285 | async def test_any_iter(n, any_iterable_t): 286 | iterable = any_iterable_t(n) 287 | assert [item async for item in a.any_iter(iterable)] == [*range(n)] 288 | -------------------------------------------------------------------------------- /unittests/test_builtins.py: -------------------------------------------------------------------------------- 1 | import random 2 | 3 | import pytest 4 | 5 | import asyncstdlib as a 6 | 7 | from .utility import sync, asyncify, awaitify 8 | 9 | 10 | def hide_coroutine(corofunc): 11 | def wrapper(*args, **kwargs): 12 | return corofunc(*args, **kwargs) 13 | 14 | return wrapper 15 | 16 | 17 | @sync 18 | async def test_iter1(): 19 | for iterable in ([], [1, 2, 5, 20], range(20)): 20 | assert await a.list(a.iter(iterable)) == list(iterable) 21 | assert await a.list(a.iter(asyncify(iterable))) == list(iterable) 22 | 23 | 24 | @sync 25 | async def test_iter2(): 26 | for call, sentinel in ( 27 | (lambda: lambda: 1, 1), 28 | (lambda: lambda x=[]: x.append(1) or len(x), 5), 29 | ): 30 | assert await a.list(a.iter(call(), sentinel)) == list(iter(call(), sentinel)) 31 | assert await a.list(a.iter(awaitify(call()), sentinel)) == list( 32 | iter(call(), sentinel) 33 | ) 34 | with pytest.raises(TypeError): 35 | a.iter(1, 1) 36 | 37 | 38 | @sync 39 | async def test_all(): 40 | assert await a.all(asyncify((True, True, True))) 41 | assert not await a.all(asyncify((True, False, True))) 42 | 43 | 44 | @sync 45 | async def test_any(): 46 | assert await a.any(asyncify((False, True, 
False))) 47 | assert not await a.any(asyncify((False, False, False))) 48 | 49 | 50 | @sync 51 | async def test_zip(): 52 | async for va, vb in a.zip(asyncify(range(5)), range(5)): 53 | assert va == vb 54 | async for idx, vs in a.enumerate(a.zip(asyncify(range(5)), range(5))): 55 | assert vs[0] == vs[1] == idx 56 | async for _ in a.zip(): 57 | assert False 58 | 59 | 60 | @sync 61 | async def test_zip_strict(): 62 | async for va, vb in a.zip(asyncify(range(5)), range(5), strict=True): 63 | assert va == vb 64 | with pytest.raises(ValueError): 65 | async for _ in a.zip(asyncify(range(5)), range(6), strict=True): 66 | pass 67 | with pytest.raises(ValueError): 68 | async for _ in a.zip(asyncify(range(6)), range(5), strict=True): 69 | pass 70 | with pytest.raises(ValueError): 71 | async for _ in a.zip(*[range(5)] * 6, range(6), strict=True): 72 | pass 73 | 74 | 75 | @sync 76 | async def test_zip_close_immediately(): 77 | closed = False 78 | 79 | class SomeIterable: 80 | async def __aiter__(self): 81 | try: 82 | while True: 83 | yield 1 84 | finally: 85 | nonlocal closed 86 | closed = True 87 | 88 | zip_iter = a.zip(asyncify(range(-5, 0)), SomeIterable()) 89 | async for va, vb in zip_iter: 90 | assert va < 0 91 | assert vb == 1 92 | assert closed is True 93 | 94 | 95 | @sync 96 | async def test_map_as(): 97 | async def map_op(value): 98 | return value * 2 99 | 100 | assert [value async for value in a.map(map_op, range(5))] == list(range(0, 10, 2)) 101 | assert [ 102 | value async for value in a.map(hide_coroutine(map_op), range(5, 10, 2)) 103 | ] == list(range(10, 20, 4)) 104 | 105 | 106 | @sync 107 | async def test_map_sa(): 108 | def map_op(value): 109 | return value * 2 110 | 111 | assert [value async for value in a.map(map_op, asyncify(range(5)))] == list( 112 | range(0, 10, 2) 113 | ) 114 | assert [value async for value in a.map(map_op, asyncify(range(5, 10, 2)))] == list( 115 | range(10, 20, 4) 116 | ) 117 | 118 | 119 | @sync 120 | async def test_map_aa(): 121 | 
async def map_op(value): 122 | return value * 2 123 | 124 | assert [value async for value in a.map(map_op, asyncify(range(5)))] == list( 125 | range(0, 10, 2) 126 | ) 127 | assert [ 128 | value 129 | async for value in a.map(hide_coroutine(map_op), asyncify(range(5, 10, 2))) 130 | ] == list(range(10, 20, 4)) 131 | 132 | 133 | @sync 134 | async def test_max_default(): 135 | assert await a.max((), default=3) == 3 136 | assert await a.max((), key=lambda x: x, default=3) == 3 137 | with pytest.raises(ValueError): 138 | assert await a.max(()) == 3 139 | with pytest.raises(ValueError): 140 | assert await a.max((), key=lambda x: x) == 3 141 | 142 | 143 | @sync 144 | async def test_max_sa(): 145 | async def minus(x): 146 | return -x 147 | 148 | assert await a.max(asyncify((1, 2, 3, 4))) == 4 149 | assert await a.max(asyncify((1, 4, 3, 2))) == 4 150 | assert await a.max(asyncify((1, 2, 3, 4)), key=lambda x: -x) == 1 151 | assert await a.max(asyncify((4, 2, 3, 1)), key=lambda x: -x) == 1 152 | assert await a.max(asyncify((1, 2, 3, 4)), key=minus) == 1 153 | assert await a.max(asyncify((4, 2, 3, 1)), key=minus) == 1 154 | 155 | 156 | @sync 157 | async def test_min_default(): 158 | assert await a.min((), default=3) == 3 159 | assert await a.min((), key=lambda x: x, default=3) == 3 160 | # default does not override items 161 | assert await a.min((3, 2, 1), default=3) == 1 162 | with pytest.raises(ValueError): 163 | assert await a.min(()) == 3 164 | with pytest.raises(ValueError): 165 | assert await a.min((), key=lambda x: x) == 3 166 | 167 | 168 | @sync 169 | async def test_min_sa(): 170 | async def minus(x): 171 | return -x 172 | 173 | assert await a.min(asyncify((1, 2, 3, 4))) == 1 174 | assert await a.min(asyncify((4, 1, 3, 2))) == 1 175 | assert await a.min(asyncify((1, 2, 3, 4)), key=lambda x: -x) == 4 176 | assert await a.min(asyncify((4, 2, 3, 1)), key=lambda x: -x) == 4 177 | assert await a.min(asyncify((1, 2, 3, 4)), key=minus) == 4 178 | assert await 
a.min(asyncify((4, 2, 3, 1)), key=minus) == 4 179 | 180 | 181 | @sync 182 | async def test_filter_as(): 183 | async def map_op(value): 184 | return value % 2 == 0 185 | 186 | assert [value async for value in a.filter(map_op, range(5))] == list(range(0, 5, 2)) 187 | assert [ 188 | value async for value in a.filter(hide_coroutine(map_op), range(5, 10, 2)) 189 | ] == [] 190 | assert [value async for value in a.filter(map_op, range(4, 10, 2))] == list( 191 | range(4, 10, 2) 192 | ) 193 | 194 | 195 | @sync 196 | async def test_filter_sa(): 197 | def map_op(value): 198 | return value % 2 == 0 199 | 200 | assert [value async for value in a.filter(map_op, asyncify(range(5)))] == list( 201 | range(0, 5, 2) 202 | ) 203 | assert [value async for value in a.filter(map_op, asyncify(range(5, 10, 2)))] == [] 204 | assert [ 205 | value async for value in a.filter(map_op, asyncify(range(4, 10, 2))) 206 | ] == list(range(4, 10, 2)) 207 | 208 | 209 | @sync 210 | async def test_filter_aa(): 211 | async def map_op(value): 212 | return value % 2 == 0 213 | 214 | assert [value async for value in a.filter(map_op, asyncify(range(5)))] == list( 215 | range(0, 5, 2) 216 | ) 217 | assert [value async for value in a.filter(map_op, asyncify(range(5, 10, 2)))] == [] 218 | assert [ 219 | value 220 | async for value in a.filter(hide_coroutine(map_op), asyncify(range(4, 10, 2))) 221 | ] == list(range(4, 10, 2)) 222 | 223 | 224 | @sync 225 | async def test_filter_na(): 226 | assert [value async for value in a.filter(None, asyncify(range(5)))] == list( 227 | range(1, 5) 228 | ) 229 | assert [value async for value in a.filter(None, asyncify(range(5, 10, 2)))] == list( 230 | range(5, 10, 2) 231 | ) 232 | assert [value async for value in a.filter(None, asyncify(range(-10, 10)))] == list( 233 | range(-10, 0) 234 | ) + list(range(1, 10)) 235 | 236 | 237 | @sync 238 | async def test_enumerate(): 239 | async for count, value in a.enumerate(asyncify(range(5))): 240 | assert count == value 241 | async for 
count, value in a.enumerate(asyncify(range(5, 10)), start=5): 242 | assert count == value 243 | 244 | 245 | @sync 246 | async def test_sum(): 247 | assert await a.sum(asyncify((1, 2, 3, 4))) == 10 248 | assert await a.sum(asyncify((4, 3, 2, 1)), start=5) == 15 249 | assert await a.sum((), start=5) == 5 250 | 251 | 252 | @sync 253 | async def test_types(): 254 | assert await a.list(asyncify(range(5))) == list(range(5)) 255 | assert await a.list(asyncify(range(0))) == list(range(0)) 256 | assert await a.list() == list() 257 | assert await a.tuple(asyncify(range(5))) == tuple(range(5)) 258 | assert await a.tuple(asyncify(range(0))) == tuple(range(0)) 259 | assert await a.tuple() == tuple() 260 | assert await a.set(asyncify(range(5))) == set(range(5)) 261 | assert await a.set(asyncify(range(0))) == set(range(0)) 262 | assert await a.set() == set() 263 | assert await a.dict(a.zip((str(i) for i in range(5)), range(5))) == dict( 264 | zip((str(i) for i in range(5)), range(5)) 265 | ) 266 | assert await a.dict(a.zip((str(i) for i in range(0)), range(0))) == dict( 267 | zip((str(i) for i in range(0)), range(0)) 268 | ) 269 | assert await a.dict(b=3) == dict(b=3) 270 | assert await a.dict(a.zip((str(i) for i in range(5)), range(5)), b=3) == dict( 271 | zip((str(i) for i in range(5)), range(5)), b=3 272 | ) 273 | assert await a.dict() == dict() 274 | 275 | 276 | sortables = [ 277 | [], 278 | [0], 279 | [1, 4, 17, -12, 3, 47, -98, 72, -1138], 280 | list(range(20)), 281 | list(range(2000)), 282 | [random.random() for _ in range(2000)], 283 | ] 284 | 285 | 286 | @pytest.mark.parametrize("sortable", sortables) 287 | @pytest.mark.parametrize("reverse", [True, False]) 288 | @sync 289 | async def test_sorted_direct(sortable, reverse): 290 | assert await a.sorted(sortable, reverse=reverse) == sorted( 291 | sortable, reverse=reverse 292 | ) 293 | assert await a.sorted(asyncify(sortable), reverse=reverse) == sorted( 294 | sortable, reverse=reverse 295 | ) 296 | assert await 
a.sorted(sortable, key=lambda x: x, reverse=reverse) == sorted( 297 | sortable, key=lambda x: x, reverse=reverse 298 | ) 299 | assert await a.sorted( 300 | sortable, key=awaitify(lambda x: x), reverse=reverse 301 | ) == sorted(sortable, key=lambda x: x, reverse=reverse) 302 | 303 | 304 | @sync 305 | async def test_sorted_stable(): 306 | values = [-i for i in range(20)] 307 | 308 | def collision_key(x): 309 | return x // 2 310 | 311 | # test the test... 312 | assert sorted(values, key=collision_key) != [ 313 | item for key, item in sorted([(collision_key(i), i) for i in values]) 314 | ] 315 | # test the implementation 316 | assert await a.sorted(values, key=awaitify(collision_key)) == sorted( 317 | values, key=collision_key 318 | ) 319 | -------------------------------------------------------------------------------- /unittests/test_contextlib.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | from contextlib import contextmanager 3 | 4 | import pytest 5 | 6 | import asyncstdlib as a 7 | 8 | from .utility import sync 9 | 10 | 11 | @sync 12 | async def test_closing(): 13 | class ACloseable: 14 | def __init__(self): 15 | self.closed = False 16 | 17 | async def aclose(self): 18 | self.closed = True 19 | 20 | async with a.closing(ACloseable()) as async_closeable: 21 | pass 22 | assert async_closeable.closed 23 | 24 | 25 | @sync 26 | async def test_contextmanager(): 27 | @a.contextmanager 28 | async def valid(value): 29 | yield value 30 | 31 | async with valid(1337) as context: 32 | assert context == 1337 33 | 34 | 35 | @sync 36 | async def test_contextmanager_decorator(): 37 | witness: list[str] = [] 38 | 39 | @a.contextmanager 40 | async def decorator(): 41 | witness.append("enter") 42 | yield 43 | witness.append("exit") 44 | 45 | @decorator() 46 | async def func(): 47 | witness.append("inner") 48 | 49 | for repetition in range(1, 4): 50 | await func() 51 | assert witness == ["enter", "inner", 
"exit"] * repetition 52 | 53 | 54 | @sync 55 | async def test_contextmanager_no_yield(): 56 | """Test that it is an error for a context to not yield""" 57 | 58 | @a.contextmanager 59 | async def no_yield(): 60 | if False: 61 | yield 62 | 63 | with pytest.raises(RuntimeError): 64 | async with no_yield(): 65 | assert False 66 | 67 | 68 | @sync 69 | async def test_contextmanager_no_stop(): 70 | """Test that it is an error for a context to yield again after stopping""" 71 | 72 | @a.contextmanager 73 | async def no_stop(): 74 | yield 75 | yield 76 | 77 | with pytest.raises(RuntimeError): 78 | async with no_stop(): 79 | assert True 80 | 81 | @a.contextmanager 82 | async def supress_no_stop(): 83 | try: 84 | yield 85 | except BaseException: # noqa: B036 86 | yield 87 | 88 | with pytest.raises(RuntimeError): 89 | async with supress_no_stop(): 90 | raise StopAsyncIteration() 91 | 92 | 93 | @sync 94 | async def test_contextmanager_raise_asyncstop(): 95 | """Test that StopAsyncIteration may propagate out of a context block""" 96 | 97 | @a.contextmanager 98 | async def no_raise(): 99 | yield 100 | 101 | with pytest.raises(StopAsyncIteration, match="outside"): 102 | async with no_raise(): 103 | raise StopAsyncIteration("outside") 104 | 105 | @a.contextmanager 106 | async def replace(): 107 | try: 108 | yield 109 | except StopAsyncIteration: 110 | raise StopAsyncIteration("inside") from None 111 | 112 | with pytest.raises(RuntimeError): 113 | async with replace(): 114 | raise StopAsyncIteration("outside") 115 | 116 | 117 | @sync 118 | async def test_contextmanager_raise_runtimeerror(): 119 | @a.contextmanager 120 | async def no_raise(): 121 | yield 122 | 123 | with pytest.raises(RuntimeError, match="outside"): 124 | async with no_raise(): 125 | raise RuntimeError("outside") 126 | 127 | @a.contextmanager 128 | async def replace(): 129 | try: 130 | yield 131 | except RuntimeError: 132 | raise RuntimeError("inside") from None 133 | 134 | with pytest.raises(RuntimeError, 
match="inside"): 135 | async with replace(): 136 | raise RuntimeError("outside") 137 | 138 | 139 | @sync 140 | async def test_contextmanager_raise_same(): 141 | """Test that outer exceptions do not shadow inner/newer ones""" 142 | 143 | @a.contextmanager 144 | async def reraise(): 145 | try: 146 | yield 147 | except BaseException as err: 148 | raise err 149 | 150 | with pytest.raises(KeyError, match="outside"): 151 | async with reraise(): 152 | raise KeyError("outside") 153 | 154 | @a.contextmanager 155 | async def recreate(): 156 | try: 157 | yield 158 | except BaseException as err: # noqa: B036 159 | raise type(err)("inside") from None 160 | 161 | with pytest.raises(KeyError, match="inside"): 162 | async with recreate(): 163 | raise KeyError("outside") 164 | 165 | 166 | @sync 167 | async def test_contextmanager_raise_generatorexit(): 168 | """Test that shutdown via GeneratorExit is propagated""" 169 | 170 | @a.contextmanager 171 | async def no_op(): 172 | yield 173 | 174 | with pytest.raises(GeneratorExit): 175 | async with no_op(): 176 | raise GeneratorExit("used to tear down coroutines") 177 | 178 | # during shutdown, generators may be killed in arbitrary order 179 | # make sure we do not suppress GeneratorExit 180 | context = no_op() 181 | with pytest.raises(GeneratorExit, match="inner"): 182 | async with context: 183 | # simulate cleanup closing the child early 184 | await context.gen.aclose() 185 | raise GeneratorExit("inner") 186 | 187 | 188 | @sync 189 | async def test_contextmanager_no_suppress_generatorexit(): 190 | """Test that GeneratorExit is not suppressed""" 191 | 192 | @a.contextmanager 193 | async def no_op(): 194 | yield 195 | 196 | exc = GeneratorExit("GE should not be replaced normally") 197 | with pytest.raises(type(exc)) as exc_info: 198 | async with no_op(): 199 | raise exc 200 | assert exc_info.value is exc 201 | 202 | @a.contextmanager 203 | async def exit_ge(): 204 | try: 205 | yield 206 | except GeneratorExit: 207 | # treat GE as regular 
exit 208 | pass 209 | 210 | with pytest.raises(GeneratorExit): 211 | async with exit_ge(): 212 | raise GeneratorExit("Resume teardown if child exited") 213 | 214 | @a.contextmanager 215 | async def ignore_ge(): 216 | try: 217 | yield 218 | except GeneratorExit: 219 | yield 220 | 221 | with pytest.raises(RuntimeError): 222 | async with ignore_ge(): 223 | raise GeneratorExit("Warn if child does not exit") 224 | 225 | 226 | @sync 227 | async def test_nullcontext(): 228 | async with a.nullcontext(1337) as value: 229 | assert value == 1337 230 | 231 | 232 | class MockAsyncContext: 233 | def __init__(self, value=None): 234 | self._value = value 235 | self.entered = False 236 | self.exited = False 237 | 238 | async def __aenter__(self): 239 | self.entered = True 240 | return self._value 241 | 242 | async def __aexit__(self, exc_type, exc_val, exc_tb): 243 | self.exited = True 244 | 245 | 246 | class MockContext: 247 | def __init__(self, value=None): 248 | self._value = value 249 | self.entered = False 250 | self.exited = False 251 | 252 | def __enter__(self): 253 | self.entered = True 254 | return self._value 255 | 256 | def __exit__(self, exc_type, exc_val, exc_tb): 257 | self.exited = True 258 | 259 | 260 | @sync 261 | async def test_exist_stack(): 262 | async with a.ExitStack() as exit_stack: 263 | for value in (0, 1, 2, 3, -5, None, "Hello"): 264 | assert value == await exit_stack.enter_context(a.nullcontext(value)) 265 | 266 | 267 | @sync 268 | async def test_exit_stack_pop_all(): 269 | async with a.ExitStack() as exit_stack: 270 | contexts = list( 271 | map(lambda v: MockAsyncContext(v) if v % 2 else MockContext(v), range(10)) 272 | ) 273 | values = await a.list(a.map(exit_stack.enter_context, contexts)) 274 | assert values == list(range(10)) 275 | assert all(cm.entered for cm in contexts) 276 | assert all(not cm.exited for cm in contexts) 277 | clone_stack = exit_stack.pop_all() 278 | assert all(not cm.exited for cm in contexts) 279 | await clone_stack.aclose() 280 
| assert all(cm.exited for cm in contexts) 281 | 282 | 283 | @sync 284 | async def test_exit_stack_callback(): 285 | """Test that callbacks are run regardless of exceptions""" 286 | unwind_values = [] 287 | 288 | async def push(value): 289 | unwind_values.append(value) 290 | return True # attempt to suppress - this must not succeed 291 | 292 | with pytest.raises(KeyError): 293 | async with a.ExitStack() as exit_stack: 294 | for value in range(5): 295 | exit_stack.callback(push, value) 296 | raise KeyError() 297 | assert unwind_values == list(reversed(range(5))) 298 | 299 | 300 | @sync 301 | async def test_exit_stack_push(): 302 | seen = [] 303 | 304 | @contextmanager 305 | def observe(): 306 | try: 307 | yield 308 | except BaseException as exc_val: 309 | seen.append(exc_val) 310 | raise 311 | 312 | @a.contextmanager 313 | async def suppress(): 314 | try: 315 | yield 316 | except BaseException as exc_val: # noqa: B036 317 | seen.append(exc_val) 318 | 319 | async def replace(exc_type, exc_val, tb, new): 320 | seen.append(exc_val) 321 | raise new 322 | 323 | with pytest.raises(TypeError) as exc_info: 324 | async with a.ExitStack() as exit_stack: 325 | exit_stack.push(partial(replace, new=TypeError())) 326 | exit_stack.push(partial(replace, new=ValueError())) 327 | s = suppress() 328 | await s.__aenter__() 329 | exit_stack.push(s) 330 | exit_stack.push(partial(replace, new=IndexError())) 331 | o = observe() 332 | o.__enter__() 333 | exit_stack.push(o) 334 | raise KeyError() 335 | assert list(map(type, seen)) == [ 336 | KeyError, 337 | KeyError, 338 | IndexError, 339 | type(None), 340 | ValueError, 341 | ] 342 | assert seen[2].__context__ == seen[1] 343 | assert exc_info.type == TypeError 344 | assert exc_info.value.__context__ == seen[-1] 345 | 346 | 347 | @sync 348 | async def test_exit_stack_stitch_context(): 349 | async def replace(exc_type, exc_val, tb, new): 350 | try: 351 | {}["a"] 352 | except KeyError: 353 | raise new 354 | 355 | async def extend(exc_type, 
exc_val, tb, new): 356 | try: 357 | raise exc_val 358 | except exc_type: 359 | raise new 360 | 361 | replacement_exc, middle_exc, initial_exc = TypeError(), ValueError(), IndexError() 362 | with pytest.raises(type(replacement_exc)) as exc_info: 363 | async with a.ExitStack() as exit_stack: 364 | exit_stack.push(partial(extend, new=replacement_exc)) 365 | exit_stack.push(partial(replace, new=middle_exc)) 366 | raise initial_exc 367 | assert exc_info.value.__context__ == middle_exc 368 | assert exc_info.value.__context__.__context__ == initial_exc 369 | 370 | 371 | @sync 372 | async def test_misuse_enter_context(): 373 | async with a.ExitStack() as exit_stack: 374 | with pytest.raises(AttributeError): 375 | await exit_stack.enter_context(None) 376 | async with a.ExitStack() as exit_stack: 377 | with pytest.raises(AttributeError) as exc_info: 378 | try: 379 | {}[1] 380 | except KeyError: 381 | await exit_stack.enter_context(None) 382 | assert type(exc_info.value.__context__.__context__) is KeyError 383 | -------------------------------------------------------------------------------- /unittests/test_functools.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | import pytest 4 | 5 | import asyncstdlib as a 6 | from asyncstdlib.functools import CachedProperty 7 | 8 | from .utility import Lock, Schedule, Switch, asyncify, sync 9 | 10 | 11 | @sync 12 | async def test_cached_property(): 13 | class Pair: 14 | def __init__(self, a, b): 15 | self.a = a 16 | self.b = b 17 | 18 | @a.cached_property 19 | async def total(self): 20 | return self.a + self.b 21 | 22 | pair = Pair(1, 2) 23 | assert (await pair.total) == 3 24 | pair.a = 2 25 | assert (await pair.total) == 3 26 | del pair.total 27 | assert (await pair.total) == 4 28 | assert type(Pair.total) is CachedProperty 29 | 30 | 31 | @sync 32 | async def test_cache_property_nodict(): 33 | class Foo: 34 | __slots__ = () 35 | 36 | def __init__(self): 37 | pass # pragma: 
no cover 38 | 39 | @a.cached_property 40 | async def bar(self): 41 | pass # pragma: no cover 42 | 43 | with pytest.raises(TypeError): 44 | Foo().bar 45 | 46 | 47 | @sync 48 | async def test_cache_property_order(): 49 | class Value: 50 | def __init__(self, value): 51 | self.value = value 52 | 53 | @a.cached_property 54 | async def cached(self): 55 | value = self.value 56 | await Switch() 57 | return value 58 | 59 | async def check_increment(to): 60 | val.value = to 61 | assert (await val.cached) == to 62 | 63 | val = Value(0) 64 | await Schedule(check_increment(5), check_increment(12), check_increment(1337)) 65 | assert (await val.cached) != 0 66 | assert (await val.cached) == 1337 # last value fetched 67 | 68 | 69 | @sync 70 | async def test_cache_property_lock_order(): 71 | class Value: 72 | def __init__(self, value): 73 | self.value = value 74 | 75 | @a.cached_property(Lock) 76 | async def cached(self): 77 | value = self.value 78 | await Switch() 79 | return value 80 | 81 | async def check_cached(to, expected): 82 | val.value = to 83 | assert (await val.cached) == expected 84 | 85 | val = Value(0) 86 | await Schedule(check_cached(5, 5), check_cached(12, 5), check_cached(1337, 5)) 87 | assert (await val.cached) == 5 # first value fetched 88 | 89 | 90 | @sync 91 | async def test_cache_property_lock_deletion(): 92 | class Value: 93 | def __init__(self, value): 94 | self.value = value 95 | 96 | @a.cached_property(Lock) 97 | async def cached(self): 98 | value = self.value 99 | await Switch() 100 | return value 101 | 102 | async def check_cached(to, expected): 103 | val.value = to 104 | assert (await val.cached) == expected 105 | 106 | async def delete_attribute(to): 107 | val.value = to 108 | awaitable = val.cached 109 | del val.cached 110 | assert (await awaitable) == to 111 | 112 | val = Value(0) 113 | await Schedule(check_cached(5, 5), delete_attribute(12), check_cached(1337, 12)) 114 | assert (await val.cached) == 12 # first value fetch after deletion 115 | 116 | 
@sync
async def test_reduce():
    """Test that `a.reduce` matches `functools.reduce` on equivalent input"""

    async def reduction(x, y):
        return x + y

    # cover an async and a plain reducer against async and plain iterables
    for reducer in (reduction, lambda x, y: x + y):
        for itertype in (asyncify, list):
            # several elements, no initial value
            assert await a.reduce(reducer, itertype([0, 1])) == functools.reduce(
                lambda x, y: x + y, [0, 1]
            )
            assert await a.reduce(
                reducer, itertype([0, 1, 2, 3, 4, 0, -5])
            ) == functools.reduce(lambda x, y: x + y, [0, 1, 2, 3, 4, 0, -5])
            # explicit initial value
            assert await a.reduce(reducer, itertype([1]), 23) == functools.reduce(
                lambda x, y: x + y, [1], 23
            )
            # single element, no initial value
            assert await a.reduce(reducer, itertype([12])) == functools.reduce(
                lambda x, y: x + y, [12]
            )
            # empty iterable is fine when an initial value is given
            assert await a.reduce(reducer, itertype([]), 42) == functools.reduce(
                lambda x, y: x + y, [], 42
            )


@sync
async def test_reduce_misuse():
    """Test that reducing an empty iterable without initial value is a TypeError"""
    with pytest.raises(TypeError):
        await a.reduce(lambda x, y: x + y, [])
    with pytest.raises(TypeError):
        await a.reduce(lambda x, y: x + y, asyncify([]))
    # make sure the stdlib behaves the same
    with pytest.raises(TypeError):
        functools.reduce(lambda x, y: x + y, [])


@sync
async def test_lru_cache_bounded():
    """Test hit/miss/call accounting of `a.lru_cache` with a bounded cache"""
    calls = []

    @a.lru_cache(maxsize=4)
    async def pingpong(*args, **kwargs):
        calls.append(args[0])
        return args, kwargs

    for kwargs in ({}, {"foo": "bar"}, {"foo": "bar", "baz": 12}):
        # fill the cache with 4 keys – every call is a miss
        for val in range(4):
            assert await pingpong(val, **kwargs) == ((val,), kwargs)
            assert pingpong.cache_info().hits == 0
            assert pingpong.cache_info().misses == val + 1
        assert len(calls) == 4
        # repeat the cached arguments – every call is a hit
        for idx in range(5):
            for val in range(4):
                assert await pingpong(val, **kwargs) == ((val,), kwargs)
            assert len(calls) == 4
            assert pingpong.cache_info().hits == (idx + 1) * 4
        # 5 fresh keys do not fit the 4-entry cache – each round adds 5 calls
        for idx in range(5):
            for val in range(4, 9):
                assert await pingpong(val, val, **kwargs) == ((val, val), kwargs)
            assert len(calls) == (idx + 1) * 5 + 4

        # reset call log and cache before the next argument pattern
        calls.clear()
        pingpong.cache_clear()
        assert pingpong.cache_info().hits == 0
        assert pingpong.cache_info().misses == 0


@sync
async def test_lru_cache_unbounded():
    """Test hit/miss/call accounting of `a.lru_cache` with an unbounded cache"""
    calls = []

    @a.lru_cache(maxsize=None)
    async def pingpong(*args, **kwargs):
        calls.append(args[0])
        return args, kwargs

    for kwargs in ({}, {"foo": "bar"}, {"foo": "bar", "baz": 12}):
        # initial calls with new keys are misses
        for val in range(4):
            assert await pingpong(val, **kwargs) == ((val,), kwargs)
            assert pingpong.cache_info().hits == 0
            assert pingpong.cache_info().misses == val + 1
        # repeating the same keys only produces hits
        for idx in range(5):
            for val in range(4):
                assert await pingpong(val, **kwargs) == ((val,), kwargs)
            assert len(calls) == 4
            assert pingpong.cache_info().hits == (idx + 1) * 4
        # fresh keys miss only once – an unbounded cache never evicts
        for _ in range(5):
            for val in range(4, 9):
                assert await pingpong(val, val, **kwargs) == ((val, val), kwargs)
            assert len(calls) == 9

        # reset call log and cache before the next argument pattern
        calls.clear()
        pingpong.cache_clear()
        assert pingpong.cache_info().hits == 0
        assert pingpong.cache_info().misses == 0


@sync
async def test_lru_cache_empty():
    """Test `a.lru_cache` with ``maxsize=0`` – nothing is ever cached"""
    calls = []

    @a.lru_cache(maxsize=0)
    async def pingpong(*args, **kwargs):
        calls.append(args[0])
        return args, kwargs

    # without cache space, every call is a miss
    for val in range(20):
        assert await pingpong(val) == ((val,), {})
        assert pingpong.cache_info().hits == 0
        assert pingpong.cache_info().misses == val + 1
    assert len(calls) == 20 == pingpong.cache_info().misses
    # repeating previous arguments still does not hit
    for idx in range(5):
        for val in range(5):
            assert await pingpong(val) == ((val,), {})
            assert len(calls) == 20 + idx * 5 + val + 1
            assert pingpong.cache_info().misses == 20 + idx * 5 + val + 1

    calls.clear()
    pingpong.cache_clear()
    assert pingpong.cache_info().hits == 0
    assert pingpong.cache_info().misses == 0


@sync
async def test_lru_cache_typed():
    """Test that ``typed=True`` caches equal values of distinct types separately"""

    @a.lru_cache(maxsize=4, typed=True)
    async def pingpong(arg):
        return arg

    for val in range(20):
        # int and float compare equal but are separate keys – each misses once…
        assert await pingpong(val) == val
        assert await pingpong(float(val)) == val
        assert pingpong.cache_info().misses == (val + 1) * 2
        assert pingpong.cache_info().hits == val * 2
        # …and afterwards each hits separately
        assert await pingpong(val) == val
        assert await pingpong(float(val)) == val
        assert pingpong.cache_info().misses == (val + 1) * 2
        assert pingpong.cache_info().hits == (val + 1) * 2


@sync
async def test_lru_cache_method():
    """
    Test that the lru_cache can be used on methods
    """

    class SelfCached:
        def __init__(self, ident: int):
            self.ident = ident

        @a.lru_cache()
        async def pingpong(self, arg):
            # return identifier of instance to separate cache entries per instance
            return arg, self.ident

    # NOTE(review): cache statistics accumulate across instances here, so the
    # cache appears to be shared per method, keyed on (self, arg) — the
    # `20 * iteration` offsets below rely on that.
    for iteration in range(4):
        instance = SelfCached(iteration)
        for val in range(20):
            # 1 read initializes, 2 reads hit
            assert await instance.pingpong(val) == (val, iteration)
            assert await instance.pingpong(float(val)) == (val, iteration)
            assert await instance.pingpong(val) == (val, iteration)
            assert instance.pingpong.cache_info().misses == val + 1 + 20 * iteration
            assert instance.pingpong.cache_info().hits == (val + 1 + 20 * iteration) * 2


@sync
async def test_lru_cache_bare():
    """Test that `a.lru_cache` also works when applied without parentheses"""

    @a.lru_cache
    async def pingpong(arg):
        return arg

    # check that we are properly wrapped
    assert pingpong.cache_info().hits == 0
    assert pingpong.cache_info().misses == 0


@sync
async def test_lru_cache_misuse():
    """Test that a non-integer ``maxsize`` is rejected with a TypeError"""
    with pytest.raises(TypeError):

        @a.lru_cache(maxsize=1.5)
        async def pingpong(arg):
            return arg


| @pytest.mark.parametrize("size", [16, None]) 303 | @sync 304 | async def test_lru_cache_concurrent(size): 305 | current = 0 306 | 307 | @a.lru_cache(maxsize=size) 308 | async def count(): 309 | nonlocal current 310 | value = current = current + 1 311 | await Switch() 312 | return value 313 | 314 | async def verify(expected): 315 | assert (await count()) == expected 316 | 317 | await Schedule(*(verify(n + 1) for n in range(5))) 318 | await verify(6) 319 | await Switch() 320 | await verify(1) 321 | 322 | 323 | @sync 324 | async def test_cache(): 325 | calls = [] 326 | 327 | @a.cache 328 | async def pingpong(*args, **kwargs): 329 | calls.append(args[0]) 330 | return args, kwargs 331 | 332 | for kwargs in ({}, {"one": 1}, {"one": 1, "two": 2}): 333 | # fill with initial argument patterns 334 | for val in range(4): 335 | assert await pingpong(val, **kwargs) == ((val,), kwargs) 336 | assert len(calls) == val + 1 337 | assert pingpong.cache_info().hits == 0 338 | assert pingpong.cache_info().misses == val + 1 339 | # repeat argument patterns several times 340 | for idx in range(5): 341 | for val in range(4): 342 | assert await pingpong(val, **kwargs) == ((val,), kwargs) 343 | assert len(calls) == 4 344 | assert pingpong.cache_info().hits == (idx + 1) * 4 345 | # fill with new argument patterns 346 | for _ in range(5): 347 | for val in range(4, 9): 348 | assert await pingpong(val, val, **kwargs) == ((val, val), kwargs) 349 | assert len(calls) == 9 350 | 351 | calls.clear() 352 | pingpong.cache_clear() 353 | assert pingpong.cache_info().hits == 0 354 | assert pingpong.cache_info().misses == 0 355 | 356 | 357 | metadata_cases = [ 358 | (a.cache, None), 359 | (lambda func: a.lru_cache(None)(func), None), 360 | (lambda func: a.lru_cache(0)(func), 0), 361 | (lambda func: a.lru_cache(1)(func), 1), 362 | (lambda func: a.lru_cache(256)(func), 256), 363 | ] 364 | 365 | 366 | @pytest.mark.parametrize("cache, maxsize", metadata_cases) 367 | def test_caches_metadata(cache, maxsize): 
368 | @cache 369 | async def pingpong(*args, **kwargs): 370 | return args, kwargs 371 | 372 | assert pingpong.cache_info().maxsize == pingpong.cache_parameters()["maxsize"] 373 | assert not pingpong.cache_parameters()["typed"] 374 | # state for unfilled cache should always be the same 375 | assert pingpong.cache_info().hits == 0 376 | assert pingpong.cache_info().misses == 0 377 | assert pingpong.cache_info().currsize == 0 378 | -------------------------------------------------------------------------------- /unittests/test_functools_lru.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Any 2 | import sys 3 | 4 | import pytest 5 | 6 | import asyncstdlib as a 7 | 8 | from .utility import sync 9 | 10 | 11 | class Counter: 12 | kind: object 13 | count: Any 14 | 15 | 16 | def method_counter(size: "int | None") -> "type[Counter]": 17 | class Counter: 18 | kind = None 19 | 20 | def __init__(self): 21 | self._count = 0 22 | 23 | @a.lru_cache(maxsize=size) 24 | async def count(self): 25 | self._count += 1 26 | return self._count 27 | 28 | return Counter 29 | 30 | 31 | def classmethod_counter(size: "int | None") -> "type[Counter]": 32 | class Counter: 33 | _count = 0 34 | kind = classmethod 35 | 36 | def __init__(self): 37 | type(self)._count = 0 38 | 39 | @classmethod 40 | @a.lru_cache(maxsize=size) 41 | async def count(cls): 42 | cls._count += 1 43 | return cls._count 44 | 45 | return Counter 46 | 47 | 48 | def staticmethod_counter(size: "int | None") -> "type[Counter]": 49 | # I'm sorry for writing this test – please don't do this at home! 
50 | count: int = 0 51 | 52 | class Counter: 53 | kind = staticmethod 54 | 55 | def __init__(self): 56 | nonlocal count 57 | count = 0 58 | 59 | @staticmethod 60 | @a.lru_cache(maxsize=size) 61 | async def count(): 62 | nonlocal count 63 | count += 1 64 | return count 65 | 66 | return Counter 67 | 68 | 69 | counter_factories: "list[Callable[[int | None], type[Counter]]]" = [ 70 | method_counter, 71 | classmethod_counter, 72 | staticmethod_counter, 73 | ] 74 | 75 | 76 | @pytest.mark.parametrize("size", [0, 3, 10, None]) 77 | @pytest.mark.parametrize("counter_factory", counter_factories) 78 | @sync 79 | async def test_method_plain( 80 | size: "int | None", counter_factory: "Callable[[int | None], type[Counter]]" 81 | ): 82 | """Test caching without resetting""" 83 | 84 | counter_type = counter_factory(size) 85 | for _instance in range(4): 86 | instance = counter_type() 87 | for reset in range(5): 88 | for access in range(5): 89 | misses = 1 if size != 0 else reset * 5 + access + 1 90 | assert misses == await instance.count() 91 | 92 | 93 | @pytest.mark.parametrize("size", [0, 3, 10, None]) 94 | @pytest.mark.parametrize("counter_factory", counter_factories) 95 | @sync 96 | async def test_method_clear( 97 | size: "int | None", counter_factory: "Callable[[int | None], type[Counter]]" 98 | ): 99 | """Test caching with resetting everything""" 100 | counter_type = counter_factory(size) 101 | for _instance in range(4): 102 | instance = counter_type() 103 | for reset in range(5): 104 | for access in range(5): 105 | misses = reset + 1 if size != 0 else reset * 5 + access + 1 106 | assert misses == await instance.count() 107 | instance.count.cache_clear() 108 | 109 | 110 | @pytest.mark.parametrize("size", [0, 3, 10, None]) 111 | @pytest.mark.parametrize("counter_factory", counter_factories) 112 | @sync 113 | async def test_method_discard( 114 | size: "int | None", counter_factory: "Callable[[int | None], type[Counter]]" 115 | ): 116 | """Test caching with resetting specific 
item""" 117 | counter_type = counter_factory(size) 118 | if not ( 119 | (3, 9) <= sys.version_info[:2] <= (3, 12) 120 | or counter_type.kind is not classmethod 121 | ): 122 | pytest.skip("classmethod only respects descriptors between 3.9 and 3.12") 123 | for _instance in range(4): 124 | instance = counter_type() 125 | for reset in range(5): 126 | for access in range(5): 127 | misses = reset * 5 + access + 1 128 | assert misses == await instance.count() 129 | instance.count.cache_discard() 130 | 131 | 132 | @pytest.mark.parametrize("size", [0, 3, 10, None]) 133 | @pytest.mark.parametrize("counter_factory", counter_factories) 134 | @sync 135 | async def test_method_metadata( 136 | size: "int | None", counter_factory: "Callable[[int | None], type[Counter]]" 137 | ): 138 | """Test cache metadata on methods""" 139 | tp = counter_factory(size) 140 | for instance in range(4): 141 | ct = tp() 142 | for reset in range(5): 143 | for access in range(5): 144 | await ct.count() 145 | assert tp.count.__wrapped__ == ct.count.__wrapped__ 146 | assert tp.count.cache_parameters() == ct.count.cache_parameters() 147 | assert tp.count.cache_info() == ct.count.cache_info() 148 | assert ( 149 | ct.count.cache_info().maxsize 150 | == ct.count.cache_parameters()["maxsize"] 151 | == size 152 | ) 153 | totals = instance * 25 + reset * 5 + (access + 1) 154 | hits, misses, *_ = ct.count.cache_info() 155 | assert totals == hits + misses 156 | 157 | 158 | @pytest.mark.parametrize("size", [None, 0, 10, 128]) 159 | def test_wrapper_attributes(size: "int | None"): 160 | class Bar: 161 | @a.lru_cache 162 | async def method(self, int_arg: int): 163 | """Method docstring""" 164 | 165 | @a.lru_cache 166 | async def other_method(self): 167 | """Method docstring""" 168 | 169 | assert Bar.method.__doc__ == """Method docstring""" 170 | for name in ("method", "other_method"): 171 | for cache in (getattr(Bar, name), getattr(Bar(), name)): 172 | assert cache.__doc__ == """Method docstring""" 173 | assert 
cache.__name__ == name 174 | assert cache.__qualname__.endswith(f"Bar.{name}") 175 | if name != "method": 176 | continue 177 | # test direct and literal annotation styles 178 | assert Bar.method.__annotations__["int_arg"] in {int, "int"} 179 | assert Bar().method.__annotations__["int_arg"] in {int, "int"} 180 | -------------------------------------------------------------------------------- /unittests/test_heapq.py: -------------------------------------------------------------------------------- 1 | import heapq 2 | 3 | import pytest 4 | import random 5 | 6 | import asyncstdlib as a 7 | 8 | from .utility import sync, asyncify, awaitify 9 | 10 | 11 | MERGE_SAMPLES = [ 12 | [[1, 2], [3, 4]], 13 | [[1, 2, 3], [4, 5, 6], [7, 8, 9]], 14 | [[1, 4, 7], [2, 5, 8], [3, 6, 9]], 15 | [[1], [2, 3], [4, 5, 6], [7, 8, 9]], 16 | [[1], [2, 4, 6], [3], [5, 7, 8, 9]], 17 | [[1, 2, 3, 4] for _ in range(3)], 18 | [sorted(random.random() for _ in range(5)) for _ in range(5)], 19 | [[], []], 20 | [[]], 21 | ] 22 | 23 | 24 | @pytest.mark.parametrize("samples", MERGE_SAMPLES) 25 | @pytest.mark.parametrize("reverse", [False, True]) 26 | @sync 27 | async def test_merge_stdlib(samples, reverse): 28 | """Compare `heapq.merge` against stdlib implementation""" 29 | samples = samples if not reverse else [sample[::-1] for sample in samples] 30 | assert list(heapq.merge(*samples, reverse=reverse)) == [ 31 | item async for item in a.merge(*map(asyncify, samples), reverse=reverse) 32 | ] 33 | 34 | 35 | @pytest.mark.parametrize("samples", MERGE_SAMPLES) 36 | @pytest.mark.parametrize("reverse", [False, True]) 37 | @sync 38 | async def test_merge_stdlib_key(samples, reverse): 39 | """Compare `heapq.merge` with key against stdlib implementation""" 40 | # use a key that reverses the result => must reverse input 41 | samples = samples if reverse else [sample[::-1] for sample in samples] 42 | assert list(heapq.merge(*samples, key=lambda x: -x, reverse=reverse)) == [ 43 | item 44 | async for item in 
a.merge( 45 | *map(asyncify, samples), key=awaitify(lambda x: -x), reverse=reverse 46 | ) 47 | ] 48 | 49 | 50 | MINMAX_SAMPLES = [ 51 | [0, 3, 1, 4, 2, 5], 52 | [0, -3, 1, -4, -2, 5], 53 | # [i * 1.1 for i in range(20)], 54 | # [random.randint(-5, 5) in range(20)], 55 | # [i * i if i % 3 else -i * i for i in range(2000)], 56 | ] 57 | 58 | 59 | @pytest.mark.parametrize("sample", MINMAX_SAMPLES) 60 | @pytest.mark.parametrize("n", [0, 1, 2, 10, 100, 400, 999, 1000, 1100]) 61 | @sync 62 | async def test_nsmallest_stdlib(sample, n): 63 | assert heapq.nsmallest(n, sample) == await a.nsmallest(asyncify(sample), n) 64 | 65 | 66 | @pytest.mark.parametrize("sample", MINMAX_SAMPLES) 67 | @pytest.mark.parametrize("n", [0, 1, 2, 10, 100, 400, 999, 1000, 1100]) 68 | @sync 69 | async def test_nlargest_stdlib(sample, n): 70 | assert heapq.nlargest(n, sample) == await a.nlargest(asyncify(sample), n) 71 | 72 | 73 | @pytest.mark.parametrize("sample", MINMAX_SAMPLES) 74 | @pytest.mark.parametrize("n", [0, 1, 2, 10, 100, 400, 999, 1000, 1100]) 75 | @sync 76 | async def test_nsmallest_stdlib_key(sample, n): 77 | assert heapq.nsmallest(n, sample, key=lambda x: -x) == await a.nsmallest( 78 | asyncify(sample), n, key=lambda x: -x 79 | ) 80 | 81 | 82 | @pytest.mark.parametrize("sample", MINMAX_SAMPLES) 83 | @pytest.mark.parametrize("n", [0, 1, 2, 10, 100, 400, 999, 1000, 1100]) 84 | @sync 85 | async def test_nlargest_stdlib_key(sample, n): 86 | assert heapq.nlargest(n, sample, key=lambda x: -x) == await a.nlargest( 87 | asyncify(sample), n, key=lambda x: -x 88 | ) 89 | -------------------------------------------------------------------------------- /unittests/test_helpers.py: -------------------------------------------------------------------------------- 1 | from asyncstdlib import _core 2 | 3 | from .utility import sync 4 | 5 | 6 | @sync 7 | async def test_scoped_iter_graceful(): 8 | """Test that ScopedIter handlers non-generator iterables""" 9 | 10 | class AIterator: 11 | async def 
__anext__(self): 12 | return 1 13 | 14 | def __aiter__(self): 15 | return self 16 | 17 | class AIterable: 18 | def __aiter__(self): 19 | return AIterator() 20 | 21 | async_iterable = AIterable() 22 | async with _core.ScopedIter(async_iterable) as async_iterator: 23 | # test that no error from calling the missing `aclose` is thrown 24 | assert async_iterator is not async_iterable 25 | # check that the iterator satisfies iter(iterator) is iterator 26 | assert _core.aiter(async_iterator) is async_iterator 27 | assert (await async_iterator.__anext__()) == 1 28 | assert (await async_iterator.__anext__()) == 1 29 | -------------------------------------------------------------------------------- /unittests/test_itertools.py: -------------------------------------------------------------------------------- 1 | import itertools 2 | import sys 3 | import platform 4 | 5 | import pytest 6 | 7 | import asyncstdlib as a 8 | 9 | from .utility import sync, asyncify, awaitify, Schedule, Switch, Lock 10 | 11 | 12 | @sync 13 | async def test_accumulate(): 14 | async def reduction(x, y): 15 | return x + y 16 | 17 | for reducer in (reduction, lambda x, y: x + y): 18 | for itertype in (asyncify, list): 19 | assert await a.list(a.accumulate(itertype([0, 1]), reducer)) == list( 20 | itertools.accumulate([0, 1], lambda x, y: x + y) 21 | ) 22 | assert await a.list( 23 | a.accumulate(itertype([0, 1, 2, 3, 4, 0, -5]), reducer) 24 | ) == list(itertools.accumulate([0, 1, 2, 3, 4, 0, -5], lambda x, y: x + y)) 25 | assert await a.list(a.accumulate(itertype([12]), reducer)) == list( 26 | itertools.accumulate([12], lambda x, y: x + y) 27 | ) 28 | assert await a.list(a.accumulate(itertype([1]), reducer, initial=23)) == [ 29 | 23, 30 | 24, 31 | ] 32 | assert await a.list(a.accumulate(itertype([]), reducer, initial=42)) == [42] 33 | 34 | 35 | @sync 36 | async def test_accumulate_default(): 37 | for itertype in (asyncify, list): 38 | assert await a.list(a.accumulate(itertype([0, 1]))) == list( 39 | 
itertools.accumulate([0, 1]) 40 | ) 41 | assert await a.list(a.accumulate(itertype([0, 1, 2, 3, 4, 0, -5]))) == list( 42 | itertools.accumulate([0, 1, 2, 3, 4, 0, -5]) 43 | ) 44 | assert await a.list(a.accumulate(itertype([12]))) == list( 45 | itertools.accumulate([12]) 46 | ) 47 | assert await a.list(a.accumulate(itertype([1]), initial=23)) == [ 48 | 23, 49 | 24, 50 | ] 51 | assert await a.list(a.accumulate(itertype([]), initial=42)) == [42] 52 | 53 | 54 | @sync 55 | async def test_accumulate_misuse(): 56 | with pytest.raises(TypeError): 57 | assert await a.list(a.accumulate([])) 58 | 59 | 60 | batched_cases = [ 61 | (range(10), 2, [(0, 1), (2, 3), (4, 5), (6, 7), (8, 9)]), 62 | (range(10), 3, [(0, 1, 2), (3, 4, 5), (6, 7, 8), (9,)]), 63 | (range(10), 17, [tuple(range(10))]), 64 | ([], 2, []), 65 | ] 66 | 67 | 68 | @pytest.mark.parametrize("iterable, length, result", batched_cases) 69 | @sync 70 | async def test_batched(iterable, length, result): 71 | assert await a.list(a.batched(iterable, length)) == result 72 | assert await a.list(a.batched(asyncify(iterable), length)) == result 73 | 74 | 75 | @sync 76 | @pytest.mark.parametrize("length", (0, -1)) 77 | async def test_batched_invalid(length): 78 | with pytest.raises(ValueError): 79 | await a.list(a.batched(range(10), length)) 80 | 81 | 82 | @sync 83 | @pytest.mark.parametrize("values", ([1, 2, 3, 4], [1, 2, 3, 4, 5], [1])) 84 | async def test_batched_strict(values: "list[int]"): 85 | for n in range(1, len(values) + 1): 86 | batches = a.batched(values, n, strict=True) 87 | if len(values) % n == 0: 88 | assert values == list(await a.reduce(lambda a, b: a + b, batches)) 89 | else: 90 | assert await a.anext(batches) 91 | with pytest.raises(ValueError): 92 | await a.list(batches) 93 | 94 | 95 | @sync 96 | async def test_cycle(): 97 | async for _ in a.cycle([]): 98 | assert False 99 | assert True 100 | async for idx, val in a.enumerate(a.cycle([0, 1])): 101 | assert idx % 2 == val 102 | if idx == 6: 103 | break 104 | 
assert idx == 6 105 | 106 | 107 | chains = [ 108 | ([0, 1, 2, 3], [4, 5, 6, 7]), 109 | ([4], [3], [2], [1]), 110 | ([], [], [], [1], [], []), 111 | ] 112 | 113 | 114 | @pytest.mark.parametrize("iterables", chains) 115 | @sync 116 | async def test_chain(iterables): 117 | for itertype in (asyncify, list): 118 | assert await a.list(a.chain(*map(itertype, iterables))) == list( 119 | itertools.chain(*iterables) 120 | ) 121 | assert await a.list(a.chain.from_iterable(map(itertype, iterables))) == list( 122 | itertools.chain.from_iterable(iterables) 123 | ) 124 | 125 | 126 | class ACloseFacade: 127 | """Wrapper to check if an iterator has been closed""" 128 | 129 | def __init__(self, iterable): 130 | self.closed = False 131 | self.__wrapped__ = iterable 132 | self._iterator = a.iter(iterable) 133 | 134 | async def __anext__(self): 135 | if self.closed: 136 | raise StopAsyncIteration() 137 | return await self._iterator.__anext__() 138 | 139 | def __aiter__(self): 140 | return self 141 | 142 | async def aclose(self): 143 | if hasattr(self._iterator, "aclose"): 144 | await self._iterator.aclose() 145 | self.closed = True 146 | 147 | 148 | @pytest.mark.parametrize("iterables", chains) 149 | @sync 150 | async def test_chain_close_auto(iterables): 151 | """Test that `chain` closes exhausted iterators""" 152 | closeable_iterables = [ACloseFacade(iterable) for iterable in iterables] 153 | assert await a.list(a.chain(*closeable_iterables)) == list( 154 | itertools.chain(*iterables) 155 | ) 156 | assert all(iterable.closed for iterable in closeable_iterables) 157 | 158 | 159 | # insert a known filled iterable since chain closes all that are exhausted 160 | @pytest.mark.parametrize("iterables", [([1], *chain) for chain in chains]) 161 | @pytest.mark.parametrize( 162 | "chain_type, must_close", 163 | [(lambda iterators: a.chain(*iterators), True), (a.chain.from_iterable, False)], 164 | ) 165 | @sync 166 | async def test_chain_close_partial(iterables, chain_type, must_close): 167 | 
"""Test that `chain` closes owned iterators""" 168 | closeable_iterables = [ACloseFacade(iterable) for iterable in iterables] 169 | chain = chain_type(closeable_iterables) 170 | assert await a.anext(chain) == next(itertools.chain(*iterables)) 171 | await chain.aclose() 172 | assert all(iterable.closed == must_close for iterable in closeable_iterables[1:]) 173 | # closed chain must remain closed regardless of iterators 174 | assert await a.anext(chain, "sentinel") == "sentinel" 175 | 176 | 177 | compress_cases = [ 178 | (range(20), [idx % 2 for idx in range(20)]), 179 | ([1] * 5, [True, True, False, True, True]), 180 | ([1] * 128, [False]), 181 | ([], []), 182 | ] 183 | 184 | 185 | @pytest.mark.parametrize("data, selectors", compress_cases) 186 | @sync 187 | async def test_compress(data, selectors): 188 | expected = list(itertools.compress(data, selectors)) 189 | assert await a.list(a.compress(data, selectors)) == expected 190 | assert await a.list(a.compress(asyncify(data), selectors)) == expected 191 | assert await a.list(a.compress(data, asyncify(selectors))) == expected 192 | assert await a.list(a.compress(asyncify(data), asyncify(selectors))) == expected 193 | 194 | 195 | droptakewhile_cases = [ 196 | (range(20), lambda x: x < 5), 197 | (range(20), lambda x: x > 5), 198 | ([1] * 12, lambda x: x > 5), 199 | ([1, 2, 3, 4] * 4, lambda x: x < 3), 200 | ([1, 2, 3, 4] * 4, lambda x: True), 201 | ([1, 2, 3, 4] * 4, lambda x: False), 202 | ] 203 | 204 | 205 | @pytest.mark.parametrize("iterable, predicate", droptakewhile_cases) 206 | @sync 207 | async def test_dropwhile(iterable, predicate): 208 | expected = list(itertools.dropwhile(predicate, iterable)) 209 | assert await a.list(a.dropwhile(predicate, iterable)) == expected 210 | assert await a.list(a.dropwhile(awaitify(predicate), iterable)) == expected 211 | assert await a.list(a.dropwhile(predicate, asyncify(iterable))) == expected 212 | assert ( 213 | await a.list(a.dropwhile(awaitify(predicate), 
asyncify(iterable))) == expected 214 | ) 215 | 216 | 217 | filterfalse_cases = ( 218 | (lambda x: True, [0, 1] * 5), 219 | (lambda x: False, [0, 1] * 5), 220 | (lambda x: x, [0, 1] * 5), 221 | (lambda x: x < 5, range(20)), 222 | (lambda x: x > 5, range(20)), 223 | ) 224 | 225 | 226 | @pytest.mark.parametrize("predicate, iterable", filterfalse_cases) 227 | @sync 228 | async def test_filterfalse(predicate, iterable): 229 | expected = list(itertools.filterfalse(predicate, iterable)) 230 | assert await a.list(a.filterfalse(predicate, iterable)) == expected 231 | assert await a.list(a.filterfalse(awaitify(predicate), iterable)) == expected 232 | assert await a.list(a.filterfalse(predicate, asyncify(iterable))) == expected 233 | assert ( 234 | await a.list(a.filterfalse(awaitify(predicate), asyncify(iterable))) == expected 235 | ) 236 | 237 | 238 | @pytest.mark.parametrize("predicate, iterable", filterfalse_cases) 239 | @sync 240 | async def test_filterfalse_predicate_none(predicate, iterable): 241 | expected = list(itertools.filterfalse(None, iterable)) 242 | assert await a.list(a.filterfalse(None, iterable)) == expected 243 | assert await a.list(a.filterfalse(None, asyncify(iterable))) == expected 244 | 245 | 246 | @pytest.mark.parametrize("iterable, predicate", droptakewhile_cases) 247 | @sync 248 | async def test_takewhile(iterable, predicate): 249 | expected = list(itertools.takewhile(predicate, iterable)) 250 | assert await a.list(a.takewhile(predicate, iterable)) == expected 251 | assert await a.list(a.takewhile(awaitify(predicate), iterable)) == expected 252 | assert await a.list(a.takewhile(predicate, asyncify(iterable))) == expected 253 | assert ( 254 | await a.list(a.takewhile(awaitify(predicate), asyncify(iterable))) == expected 255 | ) 256 | 257 | 258 | @pytest.mark.parametrize("iterable", ((), (1, 2, 3, 4), range(25), range(500))) 259 | @pytest.mark.parametrize( 260 | "slicing", 261 | ((None, None, None), (0,), (5,), (0, 20, 3), (5, 0, 1), (3, 50, 4), (5, 
None, 6)), 262 | ) 263 | @sync 264 | async def test_islice(iterable, slicing): 265 | expected = list(itertools.islice(iterable, *slicing)) 266 | assert await a.list(a.islice(iterable, *slicing)) == expected 267 | assert await a.list(a.islice(asyncify(iterable), *slicing)) == expected 268 | 269 | 270 | async def ayield_exactly(count: int): 271 | for item in range(count): 272 | yield item 273 | assert False, "Too many `anext` items requested" 274 | 275 | 276 | @sync 277 | @pytest.mark.parametrize("slicing", ((0,), (5,), (0, 20, 3), (5, 0, 1), (3, 50, 4))) 278 | async def test_islice_exact(slicing): 279 | """`isclice` consumes exactly as many items as needed""" 280 | boundary = slice(*slicing) if len(slicing) > 1 else slice(0, slicing[0]) 281 | expected = list(range(boundary.stop)[boundary]) 282 | assert ( 283 | await a.list( 284 | a.islice(ayield_exactly(max(boundary.start, boundary.stop)), *slicing) 285 | ) 286 | == expected 287 | ) 288 | 289 | 290 | @sync 291 | async def test_islice_scoped_iter(): 292 | """multiple `isclice` on borrowed iterator are consecutive""" 293 | async_iterable, iterable = asyncify(range(10)), iter(range(10)) 294 | async with a.scoped_iter(async_iterable) as a1: 295 | assert await a.list(a.islice(a1, 5)) == list(itertools.islice(iterable, 5)) 296 | assert await a.list(a.islice(a1, 5)) == list(itertools.islice(iterable, 5)) 297 | assert await a.list(a.islice(a1, 5)) == list(itertools.islice(iterable, 5)) 298 | 299 | 300 | starmap_cases = [ 301 | (lambda x, y: x + y, [(1, 2), (3, 4)]), 302 | (lambda *args: sum(args), [range(i) for i in range(1, 10)]), 303 | ] 304 | 305 | 306 | @pytest.mark.parametrize("function, iterable", starmap_cases) 307 | @sync 308 | async def test_starmap(function, iterable): 309 | expected = list(itertools.starmap(function, iterable)) 310 | assert await a.list(a.starmap(function, iterable)) == expected 311 | assert await a.list(a.starmap(awaitify(function), iterable)) == expected 312 | assert await 
a.list(a.starmap(function, asyncify(iterable))) == expected 313 | assert await a.list(a.starmap(awaitify(function), asyncify(iterable))) == expected 314 | 315 | 316 | @sync 317 | async def test_tee(): 318 | iterable = [1, 2, 3, -5, 12, 78, -1, 111] 319 | async with a.tee(iterable, n=3) as iterators: 320 | assert len(iterators) == 3 321 | for idx, iterator in enumerate(iterators): 322 | assert iterators[idx] == iterator 323 | for iterator in (iterators[1], iterators[0], iterators[2]): 324 | assert await a.list(iterator) == iterable 325 | async with a.tee(asyncify(iterable), n=3) as iterators: 326 | for iterator in iterators: 327 | assert await a.list(iterator) == iterable 328 | 329 | 330 | @sync 331 | async def test_tee_concurrent_locked(): 332 | """Test that properly uses a lock for synchronisation""" 333 | items = [1, 2, 3, -5, 12, 78, -1, 111] 334 | 335 | async def iter_values(): 336 | for item in items: 337 | # switch to other tasks a few times to guarantees another runs 338 | await Switch(5) 339 | yield item 340 | 341 | async def test_peer(peer_tee): 342 | assert await a.list(peer_tee) == items 343 | 344 | head_peer, *peers = a.tee(iter_values(), n=len(items) // 2, lock=Lock()) 345 | await Schedule(*map(test_peer, peers)) 346 | await Switch() 347 | results = [item async for item in head_peer] 348 | assert results == items 349 | 350 | 351 | # see https://github.com/python/cpython/issues/74956 352 | @pytest.mark.skipif( 353 | sys.version_info < (3, 8), 354 | reason="async generators only protect against concurrent access since 3.8", 355 | ) 356 | @pytest.mark.skipif( 357 | platform.python_implementation() != "CPython", 358 | reason="async generators only protect against concurrent access on CPython", 359 | ) 360 | @sync 361 | async def test_tee_concurrent_unlocked(): 362 | """Test that tee does not prevent concurrency without a lock""" 363 | items = list(range(12)) 364 | 365 | # concurrency-unsafe iterator that task-switches between yields 366 | async def 
iter_values(): 367 | for item in items: 368 | # switch to other tasks a few times to guarantee another runs 369 | await Switch(5) 370 | yield item 371 | 372 | async def test_peer(peer_tee): 373 | assert await a.list(peer_tee) == items 374 | 375 | # schedule two tasks that read via tee from the same iterator 376 | this, peer = a.tee(iter_values(), n=2) 377 | await Schedule(test_peer(peer)) 378 | await Switch() 379 | # underlying generator raises RuntimeError when `__anext__` is interleaved 380 | with pytest.raises(RuntimeError): 381 | await test_peer(this) 382 | 383 | 384 | @sync 385 | async def test_pairwise(): 386 | assert await a.list(a.pairwise(range(5))) == [(0, 1), (1, 2), (2, 3), (3, 4)] 387 | assert await a.list(a.pairwise(range(2))) == [(0, 1)] 388 | assert await a.list(a.pairwise(range(1))) == [] 389 | assert await a.list(a.pairwise(range(0))) == [] 390 | for count in range(5): 391 | assert await a.list(a.pairwise([0] * count)) == [(0, 0)] * (count - 1) 392 | 393 | 394 | @sync 395 | async def test_zip_longest(): 396 | async for va, vb in a.zip_longest(asyncify(range(5)), range(5)): 397 | assert va == vb 398 | async for idx, vs in a.enumerate(a.zip_longest(asyncify(range(5)), range(5), [])): 399 | assert vs[0] == vs[1] == idx 400 | assert vs[2] is None 401 | async for _ in a.zip_longest(): 402 | assert False 403 | 404 | 405 | groupby_cases = [ 406 | [0, 1, 1, 2, 2, 2, 3, 2], 407 | list(range(15)) + list(range(15)) + list(range(15)), 408 | [num for num in range(5) for _ in range(5)], 409 | [num for num in range(5) for _ in range(num)], 410 | [1, 1, 2, 2, 2, 2, 3, 3, 1, 2, 5, 5, 2, 2], 411 | ] 412 | 413 | 414 | def identity(x): 415 | return x 416 | 417 | 418 | def modulo(x): 419 | return x % 3 420 | 421 | 422 | def divide(x): 423 | return x // 3 424 | 425 | 426 | async def keys(gby): 427 | return [k async for k, _ in a.iter(gby)] 428 | 429 | 430 | async def values(gby): 431 | return [await a.list(g) async for _, g in a.iter(gby)] 432 | 433 | 434 | 
@pytest.mark.parametrize("iterable", groupby_cases)
@pytest.mark.parametrize("key", [identity, modulo, divide])
@pytest.mark.parametrize("view", [keys, values])
@sync
async def test_groupby(iterable, key, view):
    """`groupby` matches `itertools.groupby` for sync/async iterables and keys"""
    for akey in (key, awaitify(key)):
        # no key function: elements group by their own value
        assert await view(a.groupby(iterable)) == await view(
            itertools.groupby(iterable)
        )
        assert await view(a.groupby(asyncify(iterable))) == await view(
            itertools.groupby(iterable)
        )
        # explicit key function, both as plain callable and as coroutine function
        assert await view(a.groupby(iterable, key=akey)) == await view(
            itertools.groupby(iterable, key=key)
        )
        assert await view(a.groupby(asyncify(iterable), key=akey)) == await view(
            itertools.groupby(iterable, key=key)
        )
# --------------------------------------------------------------------------
# /unittests/utility.py
# --------------------------------------------------------------------------
from typing import (
    Callable,
    Coroutine,
    Iterable,
    AsyncIterator,
    TypeVar,
    Awaitable,
    Deque,
    Tuple,
    Any,
)
from functools import wraps
from collections import deque
from random import randint


T = TypeVar("T")


async def asyncify(iterable: Iterable[T]) -> AsyncIterator[T]:
    """
    Convert an iterable into an async iterable

    This is intended to sequence literals like lists to `async` iterators
    in order to force usage of `async` code paths. There is no functional
    or other advantage otherwise.
    """
    for value in iterable:
        yield value


def awaitify(call: Callable[..., T]) -> Callable[..., Awaitable[T]]:
    """
    Convert a callable (`foo()`) into an async callable (`await foo()`)

    This is intended to convert `lambda` expressions to `async` functions
    in order to force usage of `async` code paths. There is no functional
    or other advantage otherwise.
    """

    async def await_wrapper(*args: Any, **kwargs: Any) -> T:
        return call(*args, **kwargs)

    return await_wrapper


class Schedule:
    r"""
    Signal to the event loop to adopt and run new coroutines

    :param coros: The coroutines to start running

    In order to communicate with the event loop and start the coroutines,
    the :py:class:`Schedule` must be `await`\ ed.
    """

    def __init__(self, *coros: Coroutine[Any, Any, Any]):
        self.coros = coros

    def __await__(self):
        # yield ourselves once so the `sync` loop can pick up `self.coros`
        yield self


class Switch:
    """
    Signal to the event loop to run another coroutine

    Pauses the coroutine but immediately continues after
    all other runnable coroutines of the event loop.
    This is similar to the common ``sleep(0)`` function
    of regular event loop frameworks.

    If a single argument is given, this specifies how many
    turns should be skipped. The default corresponds to `0`.
    If two arguments are given, this is interpreted as an
    inclusive interval to randomly select the skip count.
    """

    def __init__(self, skip: int = 0, limit: int = 0, /) -> None:
        if limit <= 0:
            self._idle_count = skip
        else:
            self._idle_count = randint(skip, limit)

    def __await__(self):
        # always yield at least once, then idle for the requested turns
        yield self
        for _ in range(self._idle_count):
            yield self


class Lock:
    """Simple FIFO lock for exclusive access"""

    def __init__(self):
        self._owned = False
        # tokens of tasks waiting for the lock, in acquisition order
        self._waiting: list[object] = []

    async def __aenter__(self):
        # wait if the lock is held *or* other tasks already queued up;
        # checking `_waiting` too keeps acquisition FIFO instead of letting
        # a newcomer barge in ahead of tasks that queued earlier
        if self._owned or self._waiting:
            # wait until it is our turn to take the lock
            token = object()
            self._waiting.append(token)
            # a spin-lock should be fine since tests are short anyways
            while self._owned or self._waiting[0] is not token:
                await Switch()
            # we will take the lock now, remove our wait claim
            self._waiting.pop(0)
        self._owned = True

    async def __aexit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):
        self._owned = False


def sync(test_case: Callable[..., Coroutine[None, Any, Any]], /) -> Callable[..., None]:
    """
    Mark an ``async def`` test case to be run synchronously with children

    This provides a primitive "event loop" which only responds
    to :py:class:`Schedule`, :py:class:`Switch` and :py:class:`Lock`.

    It should be applied as a decorator on an ``async def`` function, which
    is then turned into a synchronous callable that will run the ``async def``
    function and all tasks it spawns.
    Other decorators, most prominently :py:func:`pytest.mark.parametrize`,
    can be applied around it.
    """

    @wraps(test_case)
    def run_sync(*args: Any, **kwargs: Any):
        # round-robin queue of (coroutine, value to send into it)
        run_queue: Deque[Tuple[Coroutine[Any, Any, Any], Any]] = deque()
        run_queue.append((test_case(*args, **kwargs), None))
        while run_queue:
            coro, event = run_queue.popleft()
            try:
                event = coro.send(event)
            except StopIteration as e:
                # a test coroutine must not return anything but `None`
                result = e.value
                assert result is None, f"got '{result!r}' expected 'None'"
            else:
                if isinstance(event, Schedule):
                    run_queue.extend((new_coro, None) for new_coro in event.coros)
                    run_queue.append((coro, event))
                elif isinstance(event, Switch):
                    run_queue.append((coro, event))
                else:  # pragma: no cover
                    raise RuntimeError(
                        f"test case {test_case} yielded an unexpected event {event}"
                    )

    return run_sync