├── .github ├── FUNDING.yml ├── SECURITY.md └── workflows │ ├── main.yml │ └── pypi-package.yml ├── .gitignore ├── CHANGELOG.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── NOTICE ├── README.md ├── pdm.lock ├── pyproject.toml ├── src └── aiofiles │ ├── __init__.py │ ├── base.py │ ├── os.py │ ├── ospath.py │ ├── tempfile │ ├── __init__.py │ └── temptypes.py │ └── threadpool │ ├── __init__.py │ ├── binary.py │ ├── text.py │ └── utils.py ├── tests ├── resources │ ├── multiline_file.txt │ └── test_file1.txt ├── test_os.py ├── test_simple.py ├── test_stdio.py ├── test_tempfile.py └── threadpool │ ├── test_binary.py │ ├── test_concurrency.py │ ├── test_open.py │ ├── test_text.py │ └── test_wrap.py └── tox.ini /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | --- 2 | tidelift: "pypi/aiofiles" 3 | github: Tinche 4 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | ## Security contact information 2 | 3 | To report a security vulnerability, please use the 4 | [Tidelift security contact](https://tidelift.com/security). 5 | Tidelift will coordinate the fix and disclosure. 6 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: 5 | push: 6 | branches: ["main"] 7 | pull_request: 8 | branches: ["main"] 9 | workflow_dispatch: 10 | 11 | jobs: 12 | tests: 13 | name: "Python ${{ matrix.python-version }}" 14 | runs-on: ${{ matrix.os }} 15 | strategy: 16 | matrix: 17 | os: [ubuntu-latest, windows-latest] 18 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13", "pypy-3.9"] 19 | 20 | steps: 21 | - uses: "actions/checkout@v4" 22 | 23 | - uses: "actions/setup-python@v5" 24 | with: 25 | python-version: "${{ matrix.python-version }}" 26 | allow-prereleases: true 27 | 28 | - name: "Install dependencies" 29 | run: | 30 | python -VV 31 | python -m site 32 | python -m pip install --upgrade pip wheel pdm 33 | python -m pip install --upgrade tox tox-gh-actions 34 | 35 | - name: "Run tox targets for ${{ matrix.python-version }}" 36 | run: "python -m tox" 37 | 38 | - name: "Upload coverage data" 39 | uses: "actions/upload-artifact@v4" 40 | with: 41 | name: coverage-data-${{ matrix.python-version }} 42 | path: ".coverage.*" 43 | if-no-files-found: "ignore" 44 | include-hidden-files: true 45 | if: runner.os == 'Linux' 46 | 47 | coverage: 48 | name: "Combine & check coverage." 49 | needs: "tests" 50 | runs-on: "ubuntu-latest" 51 | 52 | steps: 53 | - uses: "actions/checkout@v4" 54 | 55 | - uses: "actions/setup-python@v5" 56 | with: 57 | cache: "pip" 58 | python-version: "3.13" 59 | 60 | - run: "python -Im pip install --upgrade coverage[toml]" 61 | 62 | - uses: "actions/download-artifact@v4" 63 | with: 64 | pattern: "coverage-data-*" 65 | merge-multiple: true 66 | 67 | - name: "Combine coverage" 68 | run: | 69 | python -Im coverage combine 70 | python -Im coverage html --skip-covered --skip-empty 71 | python -Im coverage json 72 | 73 | # Report and write to summary. 74 | python -Im coverage report | sed 's/^/ /' >> $GITHUB_STEP_SUMMARY 75 | 76 | export TOTAL=$(python -c "import json;print(json.load(open('coverage.json'))['totals']['percent_covered_display'])") 77 | echo "total=$TOTAL" >> $GITHUB_ENV 78 | 79 | - name: "Upload HTML report." 
80 | uses: "actions/upload-artifact@v4" 81 | with: 82 | name: "html-report" 83 | path: "htmlcov" 84 | 85 | - name: "Make badge" 86 | if: github.ref == 'refs/heads/main' 87 | uses: "schneegans/dynamic-badges-action@v1.4.0" 88 | with: 89 | # GIST_TOKEN is a GitHub personal access token with scope "gist". 90 | auth: ${{ secrets.GIST_TOKEN }} 91 | gistID: 882f02e3df32136c847ba90d2688f06e 92 | filename: covbadge.json 93 | label: Coverage 94 | message: ${{ env.total }}% 95 | minColorRange: 50 96 | maxColorRange: 90 97 | valColorRange: ${{ env.total }} 98 | 99 | package: 100 | name: "Build & verify package" 101 | runs-on: "ubuntu-latest" 102 | 103 | steps: 104 | - uses: "actions/checkout@v3" 105 | - uses: "actions/setup-python@v4" 106 | with: 107 | python-version: "3.12" 108 | 109 | - name: "Install PDM and twine" 110 | run: "python -m pip install pdm twine check-wheel-contents" 111 | - name: "Build package" 112 | run: "pdm build" 113 | - name: "List result" 114 | run: "ls -l dist" 115 | - name: "Check wheel contents" 116 | run: "check-wheel-contents dist/*.whl" 117 | - name: "Check long_description" 118 | run: "python -m twine check dist/*" 119 | -------------------------------------------------------------------------------- /.github/workflows/pypi-package.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: Build & maybe upload PyPI package 3 | 4 | on: 5 | push: 6 | branches: [main] 7 | tags: ["*"] 8 | release: 9 | types: 10 | - published 11 | workflow_dispatch: 12 | 13 | permissions: 14 | contents: read 15 | id-token: write 16 | 17 | jobs: 18 | build-package: 19 | name: Build & verify package 20 | runs-on: ubuntu-latest 21 | 22 | steps: 23 | - uses: actions/checkout@v4 24 | with: 25 | fetch-depth: 0 26 | 27 | - uses: hynek/build-and-inspect-python-package@v2 28 | 29 | # Upload to Test PyPI on every commit on main. 30 | release-test-pypi: 31 | name: Publish in-dev package to test.pypi.org 32 | environment: release-test-pypi 33 | if: github.event_name == 'push' && github.ref == 'refs/heads/main' 34 | runs-on: ubuntu-latest 35 | needs: build-package 36 | 37 | steps: 38 | - name: Download packages built by build-and-inspect-python-package 39 | uses: actions/download-artifact@v4 40 | with: 41 | name: Packages 42 | path: dist 43 | 44 | - name: Upload package to Test PyPI 45 | uses: pypa/gh-action-pypi-publish@release/v1 46 | with: 47 | repository-url: https://test.pypi.org/legacy/ 48 | 49 | # Upload to real PyPI on GitHub Releases. 
50 | release-pypi: 51 | name: Publish released package to pypi.org 52 | environment: release-pypi 53 | if: github.event.action == 'published' 54 | runs-on: ubuntu-latest 55 | needs: build-package 56 | 57 | steps: 58 | - name: Download packages built by build-and-inspect-python-package 59 | uses: actions/download-artifact@v4 60 | with: 61 | name: Packages 62 | path: dist 63 | 64 | - name: Upload package to PyPI 65 | uses: pypa/gh-action-pypi-publish@release/v1 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Sphinx documentation 59 | docs/_build/ 60 | 61 | # PyBuilder 62 | target/ 63 | 64 | # pyenv 65 | # For a library or package, you might want to ignore these files since the code is 66 | # intended to run in multiple environments; otherwise, check them in: 67 | .python-version 68 | 69 | # pdm 70 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 71 | #pdm.lock 72 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 73 | # in version control. 74 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 75 | .pdm.toml 76 | .pdm-python 77 | .pdm-build/ 78 | 79 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 80 | __pypackages__/ 81 | 82 | # Environments 83 | .env 84 | .venv 85 | env/ 86 | venv/ 87 | ENV/ 88 | env.bak/ 89 | venv.bak/ 90 | 91 | # PyCharm 92 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 93 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 94 | # and can be added to the global gitignore or merged into this file. For a more nuclear 95 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 96 | .idea/ 97 | 98 | # Visual Studio Code 99 | .vscode/ 100 | 101 | # Ruff stuff: 102 | .ruff_cache/ 103 | 104 | # PyPI configuration file 105 | .pypirc 106 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # History 2 | 3 | ## 25.1.0 (UNRELEASED) 4 | 5 | - Add `ruff` formatter and linter. 6 | [#216](https://github.com/Tinche/aiofiles/pull/216) 7 | - Dropped Python 3.8 support. If you require it, use version 24.1.0. 
8 | [#204](https://github.com/Tinche/aiofiles/pull/204) 9 | 10 | ## 24.1.0 (2024-06-24) 11 | 12 | - Import `os.link` conditionally to fix importing on Android. 13 | [#175](https://github.com/Tinche/aiofiles/issues/175) 14 | - Remove spurious items from `aiofiles.os.__all__` when running on Windows. 15 | - Switch to more modern async idioms: Remove `types.coroutine` and make `AiofilesContextManager` an awaitable instead of a coroutine. 16 | - Add `aiofiles.os.path.abspath` and `aiofiles.os.getcwd`. 17 | [#174](https://github.com/Tinche/aiofiles/issues/181) 18 | - _aiofiles_ is now tested on Python 3.13 too. 19 | [#184](https://github.com/Tinche/aiofiles/pull/184) 20 | - Dropped Python 3.7 support. If you require it, use version 23.2.1. 21 | 22 | ## 23.2.1 (2023-08-09) 23 | 24 | - Import `os.statvfs` conditionally to fix importing on non-UNIX systems. 25 | [#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172) 26 | - aiofiles is now also tested on Windows. 27 | 28 | ## 23.2.0 (2023-08-09) 29 | 30 | - aiofiles is now tested on Python 3.12 too. 31 | [#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168) 32 | - On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version. 33 | - On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version. 34 | - Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`. 35 | [#162](https://github.com/Tinche/aiofiles/pull/162) 36 | - Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry. 37 | [#169](https://github.com/Tinche/aiofiles/pull/169) 38 | 39 | ## 23.1.0 (2023-02-09) 40 | 41 | - Added `aiofiles.os.access`. 42 | [#146](https://github.com/Tinche/aiofiles/pull/146) 43 | - Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`. 44 | [#151](https://github.com/Tinche/aiofiles/pull/151) 45 | - Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams. 46 | [#154](https://github.com/Tinche/aiofiles/pull/154) 47 | - Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally. 48 | 49 | ## 22.1.0 (2022-09-04) 50 | 51 | - Added `aiofiles.os.path.islink`. 52 | [#126](https://github.com/Tinche/aiofiles/pull/126) 53 | - Added `aiofiles.os.readlink`. 54 | [#125](https://github.com/Tinche/aiofiles/pull/125) 55 | - Added `aiofiles.os.symlink`. 56 | [#124](https://github.com/Tinche/aiofiles/pull/124) 57 | - Added `aiofiles.os.unlink`. 58 | [#123](https://github.com/Tinche/aiofiles/pull/123) 59 | - Added `aiofiles.os.link`. 60 | [#121](https://github.com/Tinche/aiofiles/pull/121) 61 | - Added `aiofiles.os.renames`. 62 | [#120](https://github.com/Tinche/aiofiles/pull/120) 63 | - Added `aiofiles.os.{listdir, scandir}`. 64 | [#143](https://github.com/Tinche/aiofiles/pull/143) 65 | - Switched to CalVer. 66 | - Dropped Python 3.6 support. If you require it, use version 0.8.0. 67 | - aiofiles is now tested on Python 3.11. 68 | 69 | ## 0.8.0 (2021-11-27) 70 | 71 | - aiofiles is now tested on Python 3.10. 72 | - Added `aiofiles.os.replace`. 73 | [#107](https://github.com/Tinche/aiofiles/pull/107) 74 | - Added `aiofiles.os.{makedirs, removedirs}`. 75 | - Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`. 76 | [#63](https://github.com/Tinche/aiofiles/pull/63) 77 | - Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`.
78 | [#116](https://github.com/Tinche/aiofiles/pull/116) 79 | 80 | ## 0.7.0 (2021-05-17) 81 | 82 | - Added the `aiofiles.tempfile` module for async temporary files. 83 | [#56](https://github.com/Tinche/aiofiles/pull/56) 84 | - Switched to Poetry and GitHub actions. 85 | - Dropped 3.5 support. 86 | 87 | ## 0.6.0 (2020-10-27) 88 | 89 | - `aiofiles` is now tested on ppc64le. 90 | - Added `name` and `mode` properties to async file objects. 91 | [#82](https://github.com/Tinche/aiofiles/pull/82) 92 | - Fixed a DeprecationWarning internally. 93 | [#75](https://github.com/Tinche/aiofiles/pull/75) 94 | - Python 3.9 support and tests. 95 | 96 | ## 0.5.0 (2020-04-12) 97 | 98 | - Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`). 99 | - Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`. 100 | [#62](https://github.com/Tinche/aiofiles/pull/62) 101 | 102 | ## 0.4.0 (2018-08-11) 103 | 104 | - Python 3.7 support. 105 | - Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x. 106 | 107 | ## 0.3.2 (2017-09-23) 108 | 109 | - The LICENSE is now included in the sdist. 110 | [#31](https://github.com/Tinche/aiofiles/pull/31) 111 | 112 | ## 0.3.1 (2017-03-10) 113 | 114 | - Introduced a changelog. 115 | - `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function. 116 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 
39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "{}" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright {yyyy} {name of copyright owner} 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | 203 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | TEST_DIR := tests 2 | DIRS := src $(TEST_DIR) 3 | 4 | .PHONY: test lint 5 | 6 | check: 7 | pdm run ruff format --check $(DIRS) 8 | pdm run ruff check $(DIRS) 9 | 10 | format: 11 | pdm run ruff format $(DIRS) 12 | 13 | lint: format 14 | pdm run ruff check --fix $(DIRS) 15 | 16 | test: 17 | pdm run pytest -x --ff $(TEST_DIR) 18 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Asyncio support for files 2 | Copyright 2016 Tin Tvrtkovic 3 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # aiofiles: file support for asyncio 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/aiofiles.svg)](https://pypi.python.org/pypi/aiofiles) 4 | [![Build](https://github.com/Tinche/aiofiles/workflows/CI/badge.svg)](https://github.com/Tinche/aiofiles/actions) 5 | [![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/882f02e3df32136c847ba90d2688f06e/raw/covbadge.json)](https://github.com/Tinche/aiofiles/actions/workflows/main.yml) 6 | [![Supported Python versions](https://img.shields.io/pypi/pyversions/aiofiles.svg)](https://github.com/Tinche/aiofiles) 7 | [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) 8 | 9 | **aiofiles** is an Apache2 licensed library, written in Python, for handling local 10 | disk files in asyncio applications. 11 | 12 | Ordinary local file IO is blocking, and cannot easily and portably be made 13 | asynchronous. This means doing file IO may interfere with asyncio applications, 14 | which shouldn't block the executing thread. aiofiles helps with this by 15 | introducing asynchronous versions of files that support delegating operations to 16 | a separate thread pool. 17 | 18 | ```python 19 | async with aiofiles.open('filename', mode='r') as f: 20 | contents = await f.read() 21 | print(contents) 22 | 'My file contents' 23 | ``` 24 | 25 | Asynchronous iteration is also supported. 26 | 27 | ```python 28 | async with aiofiles.open('filename') as f: 29 | async for line in f: 30 | ... 31 | ``` 32 | 33 | Asynchronous interface to tempfile module. 34 | 35 | ```python 36 | async with aiofiles.tempfile.TemporaryFile('wb') as f: 37 | await f.write(b'Hello, World!') 38 | ``` 39 | 40 | ## Features 41 | 42 | - a file API very similar to Python's standard, blocking API 43 | - support for buffered and unbuffered binary files, and buffered text files 44 | - support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs 45 | - async interface to tempfile module 46 | 47 | ## Installation 48 | 49 | To install aiofiles, simply: 50 | 51 | ```bash 52 | $ pip install aiofiles 53 | ``` 54 | 55 | ## Usage 56 | 57 | Files are opened using the `aiofiles.open()` coroutine, which in addition to 58 | mirroring the builtin `open` accepts optional `loop` and `executor` 59 | arguments. If `loop` is absent, the default loop will be used, as per the 60 | set asyncio policy. 
If `executor` is not specified, the default event loop 61 | executor will be used; a short sketch of passing a custom executor appears at the end of this README. 62 | 63 | In case of success, an asynchronous file object is returned with an 64 | API identical to an ordinary file, except the following methods are coroutines 65 | and delegate to an executor: 66 | 67 | - `close` 68 | - `flush` 69 | - `isatty` 70 | - `read` 71 | - `readall` 72 | - `read1` 73 | - `readinto` 74 | - `readline` 75 | - `readlines` 76 | - `seek` 77 | - `seekable` 78 | - `tell` 79 | - `truncate` 80 | - `writable` 81 | - `write` 82 | - `writelines` 83 | 84 | In case of failure, one of the usual exceptions will be raised. 85 | 86 | `aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`, 87 | `aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and 88 | `aiofiles.stderr_bytes` provide async access to `sys.stdin`, 89 | `sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties. 90 | 91 | The `aiofiles.os` module contains executor-enabled coroutine versions of 92 | several useful `os` functions that deal with files (a brief usage sketch appears at the end of this README): 93 | 94 | - `stat` 95 | - `statvfs` 96 | - `sendfile` 97 | - `rename` 98 | - `renames` 99 | - `replace` 100 | - `remove` 101 | - `unlink` 102 | - `mkdir` 103 | - `makedirs` 104 | - `rmdir` 105 | - `removedirs` 106 | - `link` 107 | - `symlink` 108 | - `readlink` 109 | - `listdir` 110 | - `scandir` 111 | - `access` 112 | - `getcwd` 113 | - `path.abspath` 114 | - `path.exists` 115 | - `path.isfile` 116 | - `path.isdir` 117 | - `path.islink` 118 | - `path.ismount` 119 | - `path.getsize` 120 | - `path.getatime` 121 | - `path.getctime` 122 | - `path.samefile` 123 | - `path.sameopenfile` 124 | 125 | ### Tempfile 126 | 127 | **aiofiles.tempfile** implements the following interfaces: 128 | 129 | - TemporaryFile 130 | - NamedTemporaryFile 131 | - SpooledTemporaryFile 132 | - TemporaryDirectory 133 | 134 | Results are returned wrapped in a context manager, allowing use with `async with` and `async for`. 135 | 136 | ```python 137 | async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f: 138 | await f.write(b'Line1\n Line2') 139 | await f.seek(0) 140 | async for line in f: 141 | print(line) 142 | 143 | async with aiofiles.tempfile.TemporaryDirectory() as d: 144 | filename = os.path.join(d, "file.ext") 145 | ``` 146 | 147 | ### Writing tests for aiofiles 148 | 149 | Real file IO can be mocked by patching `aiofiles.threadpool.sync_open` 150 | as desired. The return type also needs to be registered with the 151 | `aiofiles.threadpool.wrap` dispatcher: 152 | 153 | ```python 154 | aiofiles.threadpool.wrap.register(mock.MagicMock)( 155 | lambda *args, **kwargs: aiofiles.threadpool.AsyncBufferedIOBase(*args, **kwargs) 156 | ) 157 | 158 | async def test_stuff(): 159 | write_data = 'data' 160 | read_file_chunks = [ 161 | b'file chunks 1', 162 | b'file chunks 2', 163 | b'file chunks 3', 164 | b'', 165 | ] 166 | file_chunks_iter = iter(read_file_chunks) 167 | 168 | mock_file_stream = mock.MagicMock( 169 | read=lambda *args, **kwargs: next(file_chunks_iter) 170 | ) 171 | 172 | with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file_stream) as mock_open: 173 | async with aiofiles.open('filename', 'w') as f: 174 | await f.write(write_data) 175 | assert await f.read() == b'file chunks 1' 176 | 177 | mock_file_stream.write.assert_called_once_with(write_data) 178 | ``` 179 | 180 | ### Contributing 181 | 182 | Contributions are very welcome. Tests can be run with `tox`; please ensure 183 | the coverage at least stays the same before you submit a pull request.
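### Usage sketches

A minimal sketch of the `executor` argument described in the Usage section above. It assumes a readable file named `filename` exists, as in the earlier examples; the dedicated thread pool and its size are illustrative choices, not something aiofiles requires.

```python
import asyncio
from concurrent.futures import ThreadPoolExecutor

import aiofiles


async def main():
    # Delegate blocking file IO to a dedicated thread pool instead of the
    # event loop's default executor by passing it to aiofiles.open().
    with ThreadPoolExecutor(max_workers=4) as pool:
        async with aiofiles.open("filename", mode="r", executor=pool) as f:
            contents = await f.read()
    print(contents)


asyncio.run(main())
```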
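A similar sketch of the `aiofiles.os` and `aiofiles.os.path` helpers listed above. The directory and file names are hypothetical, used only for illustration.

```python
import asyncio

import aiofiles
import aiofiles.os


async def main():
    # Create a directory and a file, then inspect them with the
    # executor-backed coroutine versions of the os helpers.
    await aiofiles.os.makedirs("example_dir", exist_ok=True)

    async with aiofiles.open("example_dir/data.txt", mode="w") as f:
        await f.write("hello")

    if await aiofiles.os.path.exists("example_dir/data.txt"):
        stat_result = await aiofiles.os.stat("example_dir/data.txt")
        print(stat_result.st_size)


asyncio.run(main())
```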
184 | -------------------------------------------------------------------------------- /pdm.lock: -------------------------------------------------------------------------------- 1 | # This file is @generated by PDM. 2 | # It is not intended for manual editing. 3 | 4 | [metadata] 5 | groups = ["default", "lint", "test"] 6 | strategy = [] 7 | lock_version = "4.5.0" 8 | content_hash = "sha256:75d219641baf2a4f27a1b934d0958d2649aef7ae87bcd895ceefb13262cff0c0" 9 | 10 | [[metadata.targets]] 11 | requires_python = ">=3.9" 12 | 13 | [[package]] 14 | name = "colorama" 15 | version = "0.4.6" 16 | requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 17 | summary = "Cross-platform colored terminal text." 18 | files = [ 19 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 20 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 21 | ] 22 | 23 | [[package]] 24 | name = "coverage" 25 | version = "7.2.7" 26 | requires_python = ">=3.7" 27 | summary = "Code coverage measurement for Python" 28 | files = [ 29 | {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, 30 | {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, 31 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, 32 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, 33 | {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, 34 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, 35 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, 36 | {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, 37 | {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, 38 | {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, 39 | {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, 40 | {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, 41 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, 42 | {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, 43 | {file = 
"coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, 44 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, 45 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, 46 | {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, 47 | {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, 48 | {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, 49 | {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, 50 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, 51 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, 52 | {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, 53 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, 54 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, 55 | {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, 56 | {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, 57 | {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, 58 | {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, 59 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, 60 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, 61 | {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, 62 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, 63 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, 64 | {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, 65 | {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, 66 | {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, 67 | {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, 68 | {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, 69 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, 70 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, 71 | {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, 72 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, 73 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, 74 | {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, 75 | {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, 76 | {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, 77 | {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, 78 | {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, 79 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, 80 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, 81 | {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, 82 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, 83 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, 84 | {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, 85 | {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, 86 | {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, 87 | {file = 
"coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, 88 | {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, 89 | ] 90 | 91 | [[package]] 92 | name = "distlib" 93 | version = "0.3.7" 94 | summary = "Distribution utilities" 95 | files = [ 96 | {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, 97 | {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, 98 | ] 99 | 100 | [[package]] 101 | name = "exceptiongroup" 102 | version = "1.1.2" 103 | requires_python = ">=3.7" 104 | summary = "Backport of PEP 654 (exception groups)" 105 | files = [ 106 | {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, 107 | {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, 108 | ] 109 | 110 | [[package]] 111 | name = "filelock" 112 | version = "3.12.2" 113 | requires_python = ">=3.7" 114 | summary = "A platform independent file lock." 115 | files = [ 116 | {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, 117 | {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, 118 | ] 119 | 120 | [[package]] 121 | name = "iniconfig" 122 | version = "2.0.0" 123 | requires_python = ">=3.7" 124 | summary = "brain-dead simple config-ini parsing" 125 | files = [ 126 | {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, 127 | {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, 128 | ] 129 | 130 | [[package]] 131 | name = "packaging" 132 | version = "23.1" 133 | requires_python = ">=3.7" 134 | summary = "Core utilities for Python packages" 135 | files = [ 136 | {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, 137 | {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, 138 | ] 139 | 140 | [[package]] 141 | name = "platformdirs" 142 | version = "2.6.2" 143 | requires_python = ">=3.7" 144 | summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
145 | dependencies = [ 146 | "typing-extensions>=4.4; python_version < \"3.8\"", 147 | ] 148 | files = [ 149 | {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, 150 | {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, 151 | ] 152 | 153 | [[package]] 154 | name = "pluggy" 155 | version = "1.5.0" 156 | requires_python = ">=3.8" 157 | summary = "plugin and hook calling mechanisms for python" 158 | files = [ 159 | {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, 160 | {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, 161 | ] 162 | 163 | [[package]] 164 | name = "py" 165 | version = "1.11.0" 166 | requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" 167 | summary = "library with cross-python path, ini-parsing, io, code, log facilities" 168 | files = [ 169 | {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, 170 | {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, 171 | ] 172 | 173 | [[package]] 174 | name = "pytest" 175 | version = "8.2.2" 176 | requires_python = ">=3.8" 177 | summary = "pytest: simple powerful testing with Python" 178 | dependencies = [ 179 | "colorama; sys_platform == \"win32\"", 180 | "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", 181 | "iniconfig", 182 | "packaging", 183 | "pluggy<2.0,>=1.5", 184 | "tomli>=1; python_version < \"3.11\"", 185 | ] 186 | files = [ 187 | {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, 188 | {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, 189 | ] 190 | 191 | [[package]] 192 | name = "pytest-asyncio" 193 | version = "1.0.0" 194 | requires_python = ">=3.9" 195 | summary = "Pytest support for asyncio" 196 | dependencies = [ 197 | "pytest<9,>=8.2", 198 | "typing-extensions>=4.12; python_version < \"3.10\"", 199 | ] 200 | files = [ 201 | {file = "pytest_asyncio-1.0.0-py3-none-any.whl", hash = "sha256:4f024da9f1ef945e680dc68610b52550e36590a67fd31bb3b4943979a1f90ef3"}, 202 | {file = "pytest_asyncio-1.0.0.tar.gz", hash = "sha256:d15463d13f4456e1ead2594520216b225a16f781e144f8fdf6c5bb4667c48b3f"}, 203 | ] 204 | 205 | [[package]] 206 | name = "ruff" 207 | version = "0.11.8" 208 | requires_python = ">=3.7" 209 | summary = "An extremely fast Python linter and code formatter, written in Rust." 
210 | files = [ 211 | {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, 212 | {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, 213 | {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, 214 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, 215 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, 216 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, 217 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, 218 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, 219 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, 220 | {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, 221 | {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, 222 | {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, 223 | {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, 224 | {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, 225 | {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, 226 | {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, 227 | {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, 228 | {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, 229 | ] 230 | 231 | [[package]] 232 | name = "six" 233 | version = "1.16.0" 234 | requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 235 | summary = "Python 2 and 3 compatibility utilities" 236 | files = [ 237 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 238 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 239 | ] 240 | 241 | [[package]] 242 | name = "tomli" 243 | version = "2.0.1" 244 | requires_python = ">=3.7" 245 | summary = "A lil' TOML parser" 246 | files = [ 247 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 248 | {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 249 | ] 250 | 251 | [[package]] 252 | name = "tox" 253 | version = "3.28.0" 254 | requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" 255 | summary = "tox is a generic virtualenv management and test command line tool" 256 | dependencies = [ 257 | "colorama>=0.4.1; platform_system == \"Windows\"", 258 | "filelock>=3.0.0", 259 | "importlib-metadata>=0.12; python_version < \"3.8\"", 260 | "packaging>=14", 261 | "pluggy>=0.12.0", 262 | "py>=1.4.17", 263 | "six>=1.14.0", 264 | "toml>=0.10.2; python_version <= \"3.6\"", 265 | "tomli>=2.0.1; python_version >= \"3.7\" and python_version < \"3.11\"", 266 | "virtualenv!=20.0.0,!=20.0.1,!=20.0.2,!=20.0.3,!=20.0.4,!=20.0.5,!=20.0.6,!=20.0.7,>=16.0.0", 267 | ] 268 | files = [ 269 | {file = "tox-3.28.0-py2.py3-none-any.whl", hash = "sha256:57b5ab7e8bb3074edc3c0c0b4b192a4f3799d3723b2c5b76f1fa9f2d40316eea"}, 270 | {file = "tox-3.28.0.tar.gz", hash = "sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640"}, 271 | ] 272 | 273 | [[package]] 274 | name = "typing-extensions" 275 | version = "4.13.2" 276 | requires_python = ">=3.8" 277 | summary = "Backported and Experimental Type Hints for Python 3.8+" 278 | files = [ 279 | {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, 280 | {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, 281 | ] 282 | 283 | [[package]] 284 | name = "virtualenv" 285 | version = "20.16.2" 286 | requires_python = ">=3.6" 287 | summary = "Virtual Python Environment builder" 288 | dependencies = [ 289 | "distlib<1,>=0.3.1", 290 | "filelock<4,>=3.2", 291 | "importlib-metadata>=0.12; python_version < \"3.8\"", 292 | "importlib-resources>=1.0; python_version < \"3.7\"", 293 | "platformdirs<3,>=2", 294 | ] 295 | files = [ 296 | {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, 297 | {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, 298 | ] 299 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["hatchling", "hatch-vcs"] 3 | build-backend = "hatchling.build" 4 | 5 | [project] 6 | name = "aiofiles" 7 | description = "File support for asyncio." 
8 | authors = [ 9 | {name = "Tin Tvrtkovic", email = "tinchester@gmail.com"}, 10 | ] 11 | dependencies = [] 12 | requires-python = ">=3.9" 13 | readme = "README.md" 14 | license = {text = "Apache-2.0"} 15 | classifiers = [ 16 | "Development Status :: 5 - Production/Stable", 17 | "License :: OSI Approved :: Apache Software License", 18 | "Operating System :: OS Independent", 19 | "Programming Language :: Python :: 3.9", 20 | "Programming Language :: Python :: 3.10", 21 | "Programming Language :: Python :: 3.11", 22 | "Programming Language :: Python :: 3.12", 23 | "Programming Language :: Python :: 3.13", 24 | "Programming Language :: Python :: Implementation :: CPython", 25 | "Programming Language :: Python :: Implementation :: PyPy", 26 | "Framework :: AsyncIO", 27 | ] 28 | dynamic = ["version"] 29 | 30 | [project.urls] 31 | Changelog = "https://github.com/Tinche/aiofiles#history" 32 | "Bug Tracker" = "https://github.com/Tinche/aiofiles/issues" 33 | Repository = "https://github.com/Tinche/aiofiles" 34 | 35 | [dependency-groups] 36 | test = [ 37 | "pytest>=8.2.2", 38 | "pytest-asyncio>=0.23.7", 39 | "coverage>=6.4.4", 40 | "tox>=3.25.1", 41 | ] 42 | lint = [ 43 | "ruff>=0.11.8", 44 | ] 45 | 46 | [tool.hatch.version] 47 | source = "vcs" 48 | raw-options = { local_scheme = "no-local-version" } 49 | 50 | [tool.coverage.run] 51 | parallel = true 52 | source_pkgs = ["aiofiles"] 53 | 54 | [tool.coverage.paths] 55 | source = [ 56 | "src", 57 | ".tox/*/lib/python*/site-packages", 58 | ".tox/pypy*/site-packages", 59 | ] 60 | 61 | [tool.pytest.ini_options] 62 | minversion = "8.2" 63 | asyncio_mode = "auto" 64 | asyncio_default_fixture_loop_scope = "function" 65 | 66 | [tool.ruff] 67 | indent-width = 4 68 | line-length = 88 69 | target-version = "py39" 70 | 71 | [tool.ruff.format] 72 | docstring-code-format = false 73 | docstring-code-line-length = "dynamic" 74 | indent-style = "space" 75 | line-ending = "auto" 76 | quote-style = "double" 77 | skip-magic-trailing-comma = false 78 | 79 | [tool.ruff.lint] 80 | select = [ 81 | # "A", # flake8-builtins (A) 82 | # "ANN", # flake8-annotations (ANN) 83 | # "ARG", # flake8-unused-arguments (ARG) 84 | "ASYNC", # flake8-async (ASYNC) 85 | "B", # flake8-bugbear (B) 86 | "BLE", # flake8-blind-except (BLE) 87 | "C4", # flake8-comprehensions (C4) 88 | # "COM", # flake8-commas (COM) 89 | # "D", # pydocstyle (D) 90 | "E", # Error (E) 91 | "EM", # flake8-errmsg (EM) 92 | "ERA", # eradicate (ERA) 93 | "F", # Pyflakes (F) 94 | # "FBT", # flake8-boolean-trap (FBT) 95 | "I", # isort (I) 96 | "ICN", # flake8-import-conventions (ICN) 97 | "ISC", # flake8-implicit-str-concat (ISC) 98 | # "N", # pep8-naming (N) 99 | "PIE", # flake8-pie (PIE) 100 | "PLE", # Error (PLE) 101 | # "PLR", # Refactor (PLR) 102 | "PLW", # Warning (PLW) 103 | # "PT", # flake8-pytest-style (PT) 104 | "PTH", # flake8-use-pathlib (PTH) 105 | "PYI", # flake8-pyi (PYI) 106 | "Q", # flake8-quotes (Q) 107 | "RET", # flake8-return (RET) 108 | "RSE", # flake8-raise (RSE) 109 | "S", # flake8-bandit (S) 110 | "SIM", # flake8-simplify (SIM) 111 | "T10", # flake8-debugger (T10) 112 | "T20", # flake8-print (T20) 113 | "TC", # flake8-type-checking (TC) 114 | "TID", # flake8-tidy-imports (TID) 115 | "TRY", # tryceratops (TRY) 116 | "UP", # pyupgrade (UP) 117 | "W", # Warning (W) 118 | "YTT" # flake8-2020 (YTT) 119 | ] 120 | ignore = [ 121 | "COM812", 122 | "ISC001", 123 | ] 124 | fixable = [ 125 | "COM", 126 | "I" 127 | ] 128 | 129 | [tool.ruff.lint.per-file-ignores] 130 | "__init__.py" = ["F401"] 131 | "src/**/*.py" 
= [ 132 | "TID252", # https://docs.astral.sh/ruff/rules/relative-imports/ 133 | ] 134 | "tests/**/*.py" = [ 135 | "ARG", 136 | "ASYNC", 137 | "BLE", 138 | "PTH", 139 | "S", 140 | "SIM", 141 | "T20" 142 | ] 143 | -------------------------------------------------------------------------------- /src/aiofiles/__init__.py: -------------------------------------------------------------------------------- 1 | """Utilities for asyncio-friendly file handling.""" 2 | 3 | from . import tempfile 4 | from .threadpool import ( 5 | open, 6 | stderr, 7 | stderr_bytes, 8 | stdin, 9 | stdin_bytes, 10 | stdout, 11 | stdout_bytes, 12 | ) 13 | 14 | __all__ = [ 15 | "open", 16 | "tempfile", 17 | "stdin", 18 | "stdout", 19 | "stderr", 20 | "stdin_bytes", 21 | "stdout_bytes", 22 | "stderr_bytes", 23 | ] 24 | -------------------------------------------------------------------------------- /src/aiofiles/base.py: -------------------------------------------------------------------------------- 1 | from asyncio import get_running_loop 2 | from collections.abc import Awaitable 3 | from contextlib import AbstractAsyncContextManager 4 | from functools import partial, wraps 5 | 6 | 7 | def wrap(func): 8 | @wraps(func) 9 | async def run(*args, loop=None, executor=None, **kwargs): 10 | if loop is None: 11 | loop = get_running_loop() 12 | pfunc = partial(func, *args, **kwargs) 13 | return await loop.run_in_executor(executor, pfunc) 14 | 15 | return run 16 | 17 | 18 | class AsyncBase: 19 | def __init__(self, file, loop, executor): 20 | self._file = file 21 | self._executor = executor 22 | self._ref_loop = loop 23 | 24 | @property 25 | def _loop(self): 26 | return self._ref_loop or get_running_loop() 27 | 28 | def __aiter__(self): 29 | """We are our own iterator.""" 30 | return self 31 | 32 | def __repr__(self): 33 | return super().__repr__() + " wrapping " + repr(self._file) 34 | 35 | async def __anext__(self): 36 | """Simulate normal file iteration.""" 37 | 38 | if line := await self.readline(): 39 | return line 40 | raise StopAsyncIteration 41 | 42 | 43 | class AsyncIndirectBase(AsyncBase): 44 | def __init__(self, name, loop, executor, indirect): 45 | self._indirect = indirect 46 | self._name = name 47 | super().__init__(None, loop, executor) 48 | 49 | @property 50 | def _file(self): 51 | return self._indirect() 52 | 53 | @_file.setter 54 | def _file(self, v): 55 | pass # discard writes 56 | 57 | 58 | class AiofilesContextManager(Awaitable, AbstractAsyncContextManager): 59 | """An adjusted async context manager for aiofiles.""" 60 | 61 | __slots__ = ("_coro", "_obj") 62 | 63 | def __init__(self, coro): 64 | self._coro = coro 65 | self._obj = None 66 | 67 | def __await__(self): 68 | if self._obj is None: 69 | self._obj = yield from self._coro.__await__() 70 | return self._obj 71 | 72 | async def __aenter__(self): 73 | return await self 74 | 75 | async def __aexit__(self, exc_type, exc_val, exc_tb): 76 | await get_running_loop().run_in_executor( 77 | None, self._obj._file.__exit__, exc_type, exc_val, exc_tb 78 | ) 79 | self._obj = None 80 | -------------------------------------------------------------------------------- /src/aiofiles/os.py: -------------------------------------------------------------------------------- 1 | """Async executor versions of file functions from the os module.""" 2 | 3 | import os 4 | 5 | from . 
import ospath as path 6 | from .base import wrap 7 | 8 | __all__ = [ 9 | "path", 10 | "stat", 11 | "rename", 12 | "renames", 13 | "replace", 14 | "remove", 15 | "unlink", 16 | "mkdir", 17 | "makedirs", 18 | "rmdir", 19 | "removedirs", 20 | "symlink", 21 | "readlink", 22 | "listdir", 23 | "scandir", 24 | "access", 25 | "wrap", 26 | "getcwd", 27 | ] 28 | 29 | access = wrap(os.access) 30 | 31 | getcwd = wrap(os.getcwd) 32 | 33 | listdir = wrap(os.listdir) 34 | 35 | makedirs = wrap(os.makedirs) 36 | mkdir = wrap(os.mkdir) 37 | 38 | readlink = wrap(os.readlink) 39 | remove = wrap(os.remove) 40 | removedirs = wrap(os.removedirs) 41 | rename = wrap(os.rename) 42 | renames = wrap(os.renames) 43 | replace = wrap(os.replace) 44 | rmdir = wrap(os.rmdir) 45 | 46 | scandir = wrap(os.scandir) 47 | stat = wrap(os.stat) 48 | symlink = wrap(os.symlink) 49 | 50 | unlink = wrap(os.unlink) 51 | 52 | 53 | if hasattr(os, "link"): 54 | __all__ += ["link"] 55 | link = wrap(os.link) 56 | if hasattr(os, "sendfile"): 57 | __all__ += ["sendfile"] 58 | sendfile = wrap(os.sendfile) 59 | if hasattr(os, "statvfs"): 60 | __all__ += ["statvfs"] 61 | statvfs = wrap(os.statvfs) 62 | -------------------------------------------------------------------------------- /src/aiofiles/ospath.py: -------------------------------------------------------------------------------- 1 | """Async executor versions of file functions from the os.path module.""" 2 | 3 | from os import path 4 | 5 | from .base import wrap 6 | 7 | __all__ = [ 8 | "abspath", 9 | "getatime", 10 | "getctime", 11 | "getmtime", 12 | "getsize", 13 | "exists", 14 | "isdir", 15 | "isfile", 16 | "islink", 17 | "ismount", 18 | "samefile", 19 | "sameopenfile", 20 | ] 21 | 22 | abspath = wrap(path.abspath) 23 | 24 | getatime = wrap(path.getatime) 25 | getctime = wrap(path.getctime) 26 | getmtime = wrap(path.getmtime) 27 | getsize = wrap(path.getsize) 28 | 29 | exists = wrap(path.exists) 30 | 31 | isdir = wrap(path.isdir) 32 | isfile = wrap(path.isfile) 33 | islink = wrap(path.islink) 34 | ismount = wrap(path.ismount) 35 | 36 | samefile = wrap(path.samefile) 37 | sameopenfile = wrap(path.sameopenfile) 38 | -------------------------------------------------------------------------------- /src/aiofiles/tempfile/__init__.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import sys 3 | from functools import partial, singledispatch 4 | from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase 5 | from tempfile import NamedTemporaryFile as syncNamedTemporaryFile 6 | from tempfile import SpooledTemporaryFile as syncSpooledTemporaryFile 7 | from tempfile import TemporaryDirectory as syncTemporaryDirectory 8 | from tempfile import TemporaryFile as syncTemporaryFile 9 | from tempfile import _TemporaryFileWrapper as syncTemporaryFileWrapper 10 | 11 | from ..base import AiofilesContextManager 12 | from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO 13 | from ..threadpool.text import AsyncTextIOWrapper 14 | from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory 15 | 16 | __all__ = [ 17 | "NamedTemporaryFile", 18 | "TemporaryFile", 19 | "SpooledTemporaryFile", 20 | "TemporaryDirectory", 21 | ] 22 | 23 | 24 | # ================================================================ 25 | # Public methods for async open and return of temp file/directory 26 | # objects with async interface 27 | # ================================================================ 28 | if 
sys.version_info >= (3, 12): 29 | 30 | def NamedTemporaryFile( 31 | mode="w+b", 32 | buffering=-1, 33 | encoding=None, 34 | newline=None, 35 | suffix=None, 36 | prefix=None, 37 | dir=None, 38 | delete=True, 39 | delete_on_close=True, 40 | loop=None, 41 | executor=None, 42 | ): 43 | """Async open a named temporary file""" 44 | return AiofilesContextManager( 45 | _temporary_file( 46 | named=True, 47 | mode=mode, 48 | buffering=buffering, 49 | encoding=encoding, 50 | newline=newline, 51 | suffix=suffix, 52 | prefix=prefix, 53 | dir=dir, 54 | delete=delete, 55 | delete_on_close=delete_on_close, 56 | loop=loop, 57 | executor=executor, 58 | ) 59 | ) 60 | 61 | else: 62 | 63 | def NamedTemporaryFile( 64 | mode="w+b", 65 | buffering=-1, 66 | encoding=None, 67 | newline=None, 68 | suffix=None, 69 | prefix=None, 70 | dir=None, 71 | delete=True, 72 | loop=None, 73 | executor=None, 74 | ): 75 | """Async open a named temporary file""" 76 | return AiofilesContextManager( 77 | _temporary_file( 78 | named=True, 79 | mode=mode, 80 | buffering=buffering, 81 | encoding=encoding, 82 | newline=newline, 83 | suffix=suffix, 84 | prefix=prefix, 85 | dir=dir, 86 | delete=delete, 87 | loop=loop, 88 | executor=executor, 89 | ) 90 | ) 91 | 92 | 93 | def TemporaryFile( 94 | mode="w+b", 95 | buffering=-1, 96 | encoding=None, 97 | newline=None, 98 | suffix=None, 99 | prefix=None, 100 | dir=None, 101 | loop=None, 102 | executor=None, 103 | ): 104 | """Async open an unnamed temporary file""" 105 | return AiofilesContextManager( 106 | _temporary_file( 107 | named=False, 108 | mode=mode, 109 | buffering=buffering, 110 | encoding=encoding, 111 | newline=newline, 112 | suffix=suffix, 113 | prefix=prefix, 114 | dir=dir, 115 | loop=loop, 116 | executor=executor, 117 | ) 118 | ) 119 | 120 | 121 | def SpooledTemporaryFile( 122 | max_size=0, 123 | mode="w+b", 124 | buffering=-1, 125 | encoding=None, 126 | newline=None, 127 | suffix=None, 128 | prefix=None, 129 | dir=None, 130 | loop=None, 131 | executor=None, 132 | ): 133 | """Async open a spooled temporary file""" 134 | return AiofilesContextManager( 135 | _spooled_temporary_file( 136 | max_size=max_size, 137 | mode=mode, 138 | buffering=buffering, 139 | encoding=encoding, 140 | newline=newline, 141 | suffix=suffix, 142 | prefix=prefix, 143 | dir=dir, 144 | loop=loop, 145 | executor=executor, 146 | ) 147 | ) 148 | 149 | 150 | def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None): 151 | """Async open a temporary directory""" 152 | return AiofilesContextManagerTempDir( 153 | _temporary_directory( 154 | suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor 155 | ) 156 | ) 157 | 158 | 159 | # ========================================================= 160 | # Internal coroutines to open new temp files/directories 161 | # ========================================================= 162 | if sys.version_info >= (3, 12): 163 | 164 | async def _temporary_file( 165 | named=True, 166 | mode="w+b", 167 | buffering=-1, 168 | encoding=None, 169 | newline=None, 170 | suffix=None, 171 | prefix=None, 172 | dir=None, 173 | delete=True, 174 | delete_on_close=True, 175 | loop=None, 176 | executor=None, 177 | max_size=0, 178 | ): 179 | """Async method to open a temporary file with async interface""" 180 | if loop is None: 181 | loop = asyncio.get_running_loop() 182 | 183 | if named: 184 | cb = partial( 185 | syncNamedTemporaryFile, 186 | mode=mode, 187 | buffering=buffering, 188 | encoding=encoding, 189 | newline=newline, 190 | suffix=suffix, 191 | 
prefix=prefix, 192 | dir=dir, 193 | delete=delete, 194 | delete_on_close=delete_on_close, 195 | ) 196 | else: 197 | cb = partial( 198 | syncTemporaryFile, 199 | mode=mode, 200 | buffering=buffering, 201 | encoding=encoding, 202 | newline=newline, 203 | suffix=suffix, 204 | prefix=prefix, 205 | dir=dir, 206 | ) 207 | 208 | f = await loop.run_in_executor(executor, cb) 209 | 210 | # Wrap based on type of underlying IO object 211 | if type(f) is syncTemporaryFileWrapper: 212 | # _TemporaryFileWrapper was used (named files) 213 | result = wrap(f.file, f, loop=loop, executor=executor) 214 | result._closer = f._closer 215 | return result 216 | # IO object was returned directly without wrapper 217 | return wrap(f, f, loop=loop, executor=executor) 218 | 219 | else: 220 | 221 | async def _temporary_file( 222 | named=True, 223 | mode="w+b", 224 | buffering=-1, 225 | encoding=None, 226 | newline=None, 227 | suffix=None, 228 | prefix=None, 229 | dir=None, 230 | delete=True, 231 | loop=None, 232 | executor=None, 233 | max_size=0, 234 | ): 235 | """Async method to open a temporary file with async interface""" 236 | if loop is None: 237 | loop = asyncio.get_running_loop() 238 | 239 | if named: 240 | cb = partial( 241 | syncNamedTemporaryFile, 242 | mode=mode, 243 | buffering=buffering, 244 | encoding=encoding, 245 | newline=newline, 246 | suffix=suffix, 247 | prefix=prefix, 248 | dir=dir, 249 | delete=delete, 250 | ) 251 | else: 252 | cb = partial( 253 | syncTemporaryFile, 254 | mode=mode, 255 | buffering=buffering, 256 | encoding=encoding, 257 | newline=newline, 258 | suffix=suffix, 259 | prefix=prefix, 260 | dir=dir, 261 | ) 262 | 263 | f = await loop.run_in_executor(executor, cb) 264 | 265 | # Wrap based on type of underlying IO object 266 | if type(f) is syncTemporaryFileWrapper: 267 | # _TemporaryFileWrapper was used (named files) 268 | result = wrap(f.file, f, loop=loop, executor=executor) 269 | # add delete property 270 | result.delete = f.delete 271 | return result 272 | # IO object was returned directly without wrapper 273 | return wrap(f, f, loop=loop, executor=executor) 274 | 275 | 276 | async def _spooled_temporary_file( 277 | max_size=0, 278 | mode="w+b", 279 | buffering=-1, 280 | encoding=None, 281 | newline=None, 282 | suffix=None, 283 | prefix=None, 284 | dir=None, 285 | loop=None, 286 | executor=None, 287 | ): 288 | """Open a spooled temporary file with async interface""" 289 | if loop is None: 290 | loop = asyncio.get_running_loop() 291 | 292 | cb = partial( 293 | syncSpooledTemporaryFile, 294 | max_size=max_size, 295 | mode=mode, 296 | buffering=buffering, 297 | encoding=encoding, 298 | newline=newline, 299 | suffix=suffix, 300 | prefix=prefix, 301 | dir=dir, 302 | ) 303 | 304 | f = await loop.run_in_executor(executor, cb) 305 | 306 | # Single interface provided by SpooledTemporaryFile for all modes 307 | return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor) 308 | 309 | 310 | async def _temporary_directory( 311 | suffix=None, prefix=None, dir=None, loop=None, executor=None 312 | ): 313 | """Async method to open a temporary directory with async interface""" 314 | if loop is None: 315 | loop = asyncio.get_running_loop() 316 | 317 | cb = partial(syncTemporaryDirectory, suffix, prefix, dir) 318 | f = await loop.run_in_executor(executor, cb) 319 | 320 | return AsyncTemporaryDirectory(f, loop=loop, executor=executor) 321 | 322 | 323 | class AiofilesContextManagerTempDir(AiofilesContextManager): 324 | """With returns the directory location, not the object (matching sync lib)""" 325 
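# Entering this context manager yields the directory path as a str rather than the
# wrapper object, mirroring the synchronous tempfile.TemporaryDirectory behaviour.
# Illustrative usage sketch:
#
#     async with aiofiles.tempfile.TemporaryDirectory() as d:
#         ...  # d is the path string; the directory is cleaned up on exit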
| 326 | async def __aenter__(self): 327 | self._obj = await self._coro 328 | return self._obj.name 329 | 330 | 331 | @singledispatch 332 | def wrap(base_io_obj, file, *, loop=None, executor=None): 333 | """Wrap the object with interface based on type of underlying IO""" 334 | 335 | msg = f"Unsupported IO type: {base_io_obj}" 336 | raise TypeError(msg) 337 | 338 | 339 | @wrap.register(TextIOBase) 340 | def _(base_io_obj, file, *, loop=None, executor=None): 341 | return AsyncTextIOWrapper(file, loop=loop, executor=executor) 342 | 343 | 344 | @wrap.register(BufferedWriter) 345 | def _(base_io_obj, file, *, loop=None, executor=None): 346 | return AsyncBufferedIOBase(file, loop=loop, executor=executor) 347 | 348 | 349 | @wrap.register(BufferedReader) 350 | @wrap.register(BufferedRandom) 351 | def _(base_io_obj, file, *, loop=None, executor=None): 352 | return AsyncBufferedReader(file, loop=loop, executor=executor) 353 | 354 | 355 | @wrap.register(FileIO) 356 | def _(base_io_obj, file, *, loop=None, executor=None): 357 | return AsyncFileIO(file, loop=loop, executor=executor) 358 | -------------------------------------------------------------------------------- /src/aiofiles/tempfile/temptypes.py: -------------------------------------------------------------------------------- 1 | """Async wrappers for spooled temp files and temp directory objects""" 2 | 3 | from functools import partial 4 | 5 | from ..base import AsyncBase 6 | from ..threadpool.utils import ( 7 | cond_delegate_to_executor, 8 | delegate_to_executor, 9 | proxy_property_directly, 10 | ) 11 | 12 | 13 | @delegate_to_executor("fileno", "rollover") 14 | @cond_delegate_to_executor( 15 | "close", 16 | "flush", 17 | "isatty", 18 | "read", 19 | "readline", 20 | "readlines", 21 | "seek", 22 | "tell", 23 | "truncate", 24 | ) 25 | @proxy_property_directly("closed", "encoding", "mode", "name", "newlines") 26 | class AsyncSpooledTemporaryFile(AsyncBase): 27 | """Async wrapper for SpooledTemporaryFile class""" 28 | 29 | async def _check(self): 30 | if self._file._rolled: 31 | return 32 | max_size = self._file._max_size 33 | if max_size and self._file.tell() > max_size: 34 | await self.rollover() 35 | 36 | async def write(self, s): 37 | """Implementation to anticipate rollover""" 38 | if self._file._rolled: 39 | cb = partial(self._file.write, s) 40 | return await self._loop.run_in_executor(self._executor, cb) 41 | 42 | file = self._file._file # reference underlying base IO object 43 | rv = file.write(s) 44 | await self._check() 45 | return rv 46 | 47 | async def writelines(self, iterable): 48 | """Implementation to anticipate rollover""" 49 | if self._file._rolled: 50 | cb = partial(self._file.writelines, iterable) 51 | return await self._loop.run_in_executor(self._executor, cb) 52 | 53 | file = self._file._file # reference underlying base IO object 54 | rv = file.writelines(iterable) 55 | await self._check() 56 | return rv 57 | 58 | 59 | @delegate_to_executor("cleanup") 60 | @proxy_property_directly("name") 61 | class AsyncTemporaryDirectory: 62 | """Async wrapper for TemporaryDirectory class""" 63 | 64 | def __init__(self, file, loop, executor): 65 | self._file = file 66 | self._loop = loop 67 | self._executor = executor 68 | 69 | async def close(self): 70 | await self.cleanup() 71 | -------------------------------------------------------------------------------- /src/aiofiles/threadpool/__init__.py: -------------------------------------------------------------------------------- 1 | """Handle files using a thread pool executor.""" 2 | 3 | 
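# The public entry point here is open(), which returns an AiofilesContextManager
# wrapping the _open() coroutine: the blocking built-in open() runs in the loop's
# executor and the resulting IO object is dispatched by wrap() to the matching
# Async* wrapper class. Illustrative usage sketch:
#
#     async with aiofiles.open("data.txt") as f:
#         contents = await f.read()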
import asyncio 4 | import sys 5 | from functools import partial, singledispatch 6 | from io import ( 7 | BufferedIOBase, 8 | BufferedRandom, 9 | BufferedReader, 10 | BufferedWriter, 11 | FileIO, 12 | TextIOBase, 13 | ) 14 | 15 | from ..base import AiofilesContextManager 16 | from .binary import ( 17 | AsyncBufferedIOBase, 18 | AsyncBufferedReader, 19 | AsyncFileIO, 20 | AsyncIndirectBufferedIOBase, 21 | ) 22 | from .text import AsyncTextIndirectIOWrapper, AsyncTextIOWrapper 23 | 24 | sync_open = open 25 | 26 | __all__ = ( 27 | "open", 28 | "stdin", 29 | "stdout", 30 | "stderr", 31 | "stdin_bytes", 32 | "stdout_bytes", 33 | "stderr_bytes", 34 | ) 35 | 36 | 37 | def open( 38 | file, 39 | mode="r", 40 | buffering=-1, 41 | encoding=None, 42 | errors=None, 43 | newline=None, 44 | closefd=True, 45 | opener=None, 46 | *, 47 | loop=None, 48 | executor=None, 49 | ): 50 | return AiofilesContextManager( 51 | _open( 52 | file, 53 | mode=mode, 54 | buffering=buffering, 55 | encoding=encoding, 56 | errors=errors, 57 | newline=newline, 58 | closefd=closefd, 59 | opener=opener, 60 | loop=loop, 61 | executor=executor, 62 | ) 63 | ) 64 | 65 | 66 | async def _open( 67 | file, 68 | mode="r", 69 | buffering=-1, 70 | encoding=None, 71 | errors=None, 72 | newline=None, 73 | closefd=True, 74 | opener=None, 75 | *, 76 | loop=None, 77 | executor=None, 78 | ): 79 | """Open an asyncio file.""" 80 | if loop is None: 81 | loop = asyncio.get_running_loop() 82 | cb = partial( 83 | sync_open, 84 | file, 85 | mode=mode, 86 | buffering=buffering, 87 | encoding=encoding, 88 | errors=errors, 89 | newline=newline, 90 | closefd=closefd, 91 | opener=opener, 92 | ) 93 | f = await loop.run_in_executor(executor, cb) 94 | 95 | return wrap(f, loop=loop, executor=executor) 96 | 97 | 98 | @singledispatch 99 | def wrap(file, *, loop=None, executor=None): 100 | msg = f"Unsupported io type: {file}." 
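# Fallback of the singledispatch: none of the registered overloads below
# (TextIOBase, BufferedWriter/BufferedIOBase, BufferedReader/BufferedRandom,
# FileIO) matched the concrete type of the opened file.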
101 | raise TypeError(msg) 102 | 103 | 104 | @wrap.register(TextIOBase) 105 | def _(file, *, loop=None, executor=None): 106 | return AsyncTextIOWrapper(file, loop=loop, executor=executor) 107 | 108 | 109 | @wrap.register(BufferedWriter) 110 | @wrap.register(BufferedIOBase) 111 | def _(file, *, loop=None, executor=None): 112 | return AsyncBufferedIOBase(file, loop=loop, executor=executor) 113 | 114 | 115 | @wrap.register(BufferedReader) 116 | @wrap.register(BufferedRandom) 117 | def _(file, *, loop=None, executor=None): 118 | return AsyncBufferedReader(file, loop=loop, executor=executor) 119 | 120 | 121 | @wrap.register(FileIO) 122 | def _(file, *, loop=None, executor=None): 123 | return AsyncFileIO(file, loop=loop, executor=executor) 124 | 125 | 126 | stdin = AsyncTextIndirectIOWrapper("sys.stdin", None, None, indirect=lambda: sys.stdin) 127 | stdout = AsyncTextIndirectIOWrapper( 128 | "sys.stdout", None, None, indirect=lambda: sys.stdout 129 | ) 130 | stderr = AsyncTextIndirectIOWrapper( 131 | "sys.stderr", None, None, indirect=lambda: sys.stderr 132 | ) 133 | stdin_bytes = AsyncIndirectBufferedIOBase( 134 | "sys.stdin.buffer", None, None, indirect=lambda: sys.stdin.buffer 135 | ) 136 | stdout_bytes = AsyncIndirectBufferedIOBase( 137 | "sys.stdout.buffer", None, None, indirect=lambda: sys.stdout.buffer 138 | ) 139 | stderr_bytes = AsyncIndirectBufferedIOBase( 140 | "sys.stderr.buffer", None, None, indirect=lambda: sys.stderr.buffer 141 | ) 142 | -------------------------------------------------------------------------------- /src/aiofiles/threadpool/binary.py: -------------------------------------------------------------------------------- 1 | from ..base import AsyncBase, AsyncIndirectBase 2 | from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly 3 | 4 | 5 | @delegate_to_executor( 6 | "close", 7 | "flush", 8 | "isatty", 9 | "read", 10 | "read1", 11 | "readinto", 12 | "readline", 13 | "readlines", 14 | "seek", 15 | "seekable", 16 | "tell", 17 | "truncate", 18 | "writable", 19 | "write", 20 | "writelines", 21 | ) 22 | @proxy_method_directly("detach", "fileno", "readable") 23 | @proxy_property_directly("closed", "raw", "name", "mode") 24 | class AsyncBufferedIOBase(AsyncBase): 25 | """The asyncio executor version of io.BufferedWriter and BufferedIOBase.""" 26 | 27 | 28 | @delegate_to_executor("peek") 29 | class AsyncBufferedReader(AsyncBufferedIOBase): 30 | """The asyncio executor version of io.BufferedReader and Random.""" 31 | 32 | 33 | @delegate_to_executor( 34 | "close", 35 | "flush", 36 | "isatty", 37 | "read", 38 | "readall", 39 | "readinto", 40 | "readline", 41 | "readlines", 42 | "seek", 43 | "seekable", 44 | "tell", 45 | "truncate", 46 | "writable", 47 | "write", 48 | "writelines", 49 | ) 50 | @proxy_method_directly("fileno", "readable") 51 | @proxy_property_directly("closed", "name", "mode") 52 | class AsyncFileIO(AsyncBase): 53 | """The asyncio executor version of io.FileIO.""" 54 | 55 | 56 | @delegate_to_executor( 57 | "close", 58 | "flush", 59 | "isatty", 60 | "read", 61 | "read1", 62 | "readinto", 63 | "readline", 64 | "readlines", 65 | "seek", 66 | "seekable", 67 | "tell", 68 | "truncate", 69 | "writable", 70 | "write", 71 | "writelines", 72 | ) 73 | @proxy_method_directly("detach", "fileno", "readable") 74 | @proxy_property_directly("closed", "raw", "name", "mode") 75 | class AsyncIndirectBufferedIOBase(AsyncIndirectBase): 76 | """The indirect asyncio executor version of io.BufferedWriter and BufferedIOBase.""" 77 | 78 | 79 | 
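# Like AsyncIndirectBufferedIOBase above, the indirect classes below re-resolve the
# underlying stream on every access (AsyncIndirectBase._file calls self._indirect()),
# which is what keeps the module-level stdin/stdout/stderr wrappers working when the
# sys.std* objects are replaced at runtime, e.g. under pytest's capsys.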
@delegate_to_executor("peek") 80 | class AsyncIndirectBufferedReader(AsyncIndirectBufferedIOBase): 81 | """The indirect asyncio executor version of io.BufferedReader and Random.""" 82 | 83 | 84 | @delegate_to_executor( 85 | "close", 86 | "flush", 87 | "isatty", 88 | "read", 89 | "readall", 90 | "readinto", 91 | "readline", 92 | "readlines", 93 | "seek", 94 | "seekable", 95 | "tell", 96 | "truncate", 97 | "writable", 98 | "write", 99 | "writelines", 100 | ) 101 | @proxy_method_directly("fileno", "readable") 102 | @proxy_property_directly("closed", "name", "mode") 103 | class AsyncIndirectFileIO(AsyncIndirectBase): 104 | """The indirect asyncio executor version of io.FileIO.""" 105 | -------------------------------------------------------------------------------- /src/aiofiles/threadpool/text.py: -------------------------------------------------------------------------------- 1 | from ..base import AsyncBase, AsyncIndirectBase 2 | from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly 3 | 4 | 5 | @delegate_to_executor( 6 | "close", 7 | "flush", 8 | "isatty", 9 | "read", 10 | "readable", 11 | "readline", 12 | "readlines", 13 | "seek", 14 | "seekable", 15 | "tell", 16 | "truncate", 17 | "write", 18 | "writable", 19 | "writelines", 20 | ) 21 | @proxy_method_directly("detach", "fileno", "readable") 22 | @proxy_property_directly( 23 | "buffer", 24 | "closed", 25 | "encoding", 26 | "errors", 27 | "line_buffering", 28 | "newlines", 29 | "name", 30 | "mode", 31 | ) 32 | class AsyncTextIOWrapper(AsyncBase): 33 | """The asyncio executor version of io.TextIOWrapper.""" 34 | 35 | 36 | @delegate_to_executor( 37 | "close", 38 | "flush", 39 | "isatty", 40 | "read", 41 | "readable", 42 | "readline", 43 | "readlines", 44 | "seek", 45 | "seekable", 46 | "tell", 47 | "truncate", 48 | "write", 49 | "writable", 50 | "writelines", 51 | ) 52 | @proxy_method_directly("detach", "fileno", "readable") 53 | @proxy_property_directly( 54 | "buffer", 55 | "closed", 56 | "encoding", 57 | "errors", 58 | "line_buffering", 59 | "newlines", 60 | "name", 61 | "mode", 62 | ) 63 | class AsyncTextIndirectIOWrapper(AsyncIndirectBase): 64 | """The indirect asyncio executor version of io.TextIOWrapper.""" 65 | -------------------------------------------------------------------------------- /src/aiofiles/threadpool/utils.py: -------------------------------------------------------------------------------- 1 | import functools 2 | 3 | 4 | def delegate_to_executor(*attrs): 5 | def cls_builder(cls): 6 | for attr_name in attrs: 7 | setattr(cls, attr_name, _make_delegate_method(attr_name)) 8 | return cls 9 | 10 | return cls_builder 11 | 12 | 13 | def proxy_method_directly(*attrs): 14 | def cls_builder(cls): 15 | for attr_name in attrs: 16 | setattr(cls, attr_name, _make_proxy_method(attr_name)) 17 | return cls 18 | 19 | return cls_builder 20 | 21 | 22 | def proxy_property_directly(*attrs): 23 | def cls_builder(cls): 24 | for attr_name in attrs: 25 | setattr(cls, attr_name, _make_proxy_property(attr_name)) 26 | return cls 27 | 28 | return cls_builder 29 | 30 | 31 | def cond_delegate_to_executor(*attrs): 32 | def cls_builder(cls): 33 | for attr_name in attrs: 34 | setattr(cls, attr_name, _make_cond_delegate_method(attr_name)) 35 | return cls 36 | 37 | return cls_builder 38 | 39 | 40 | def _make_delegate_method(attr_name): 41 | async def method(self, *args, **kwargs): 42 | cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) 43 | return await self._loop.run_in_executor(self._executor, cb) 44 
| 45 | return method 46 | 47 | 48 | def _make_proxy_method(attr_name): 49 | def method(self, *args, **kwargs): 50 | return getattr(self._file, attr_name)(*args, **kwargs) 51 | 52 | return method 53 | 54 | 55 | def _make_proxy_property(attr_name): 56 | def proxy_property(self): 57 | return getattr(self._file, attr_name) 58 | 59 | return property(proxy_property) 60 | 61 | 62 | def _make_cond_delegate_method(attr_name): 63 | """For spooled temp files, delegate only if rolled to file object""" 64 | 65 | async def method(self, *args, **kwargs): 66 | if self._file._rolled: 67 | cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) 68 | return await self._loop.run_in_executor(self._executor, cb) 69 | return getattr(self._file, attr_name)(*args, **kwargs) 70 | 71 | return method 72 | -------------------------------------------------------------------------------- /tests/resources/multiline_file.txt: -------------------------------------------------------------------------------- 1 | line 1 2 | line 2 3 | line 3 4 | line 4 -------------------------------------------------------------------------------- /tests/resources/test_file1.txt: -------------------------------------------------------------------------------- 1 | 0123456789 -------------------------------------------------------------------------------- /tests/test_os.py: -------------------------------------------------------------------------------- 1 | """Tests for asyncio's os module.""" 2 | 3 | import asyncio 4 | import os 5 | import platform 6 | from os import stat 7 | from os.path import dirname, exists, isdir, join 8 | from pathlib import Path 9 | 10 | import pytest 11 | 12 | import aiofiles.os 13 | 14 | 15 | async def test_stat(): 16 | """Test the stat call.""" 17 | filename = join(dirname(__file__), "resources", "test_file1.txt") 18 | 19 | stat_res = await aiofiles.os.stat(filename) 20 | 21 | assert stat_res.st_size == 10 22 | 23 | 24 | @pytest.mark.skipif(platform.system() == "Windows", reason="No statvfs on Windows") 25 | async def test_statvfs(): 26 | """Test the statvfs call.""" 27 | 28 | statvfs_res = await aiofiles.os.statvfs("/") 29 | 30 | assert statvfs_res.f_bsize == os.statvfs("/").f_bsize 31 | 32 | 33 | async def test_remove(): 34 | """Test the remove call.""" 35 | filename = join(dirname(__file__), "resources", "test_file2.txt") 36 | with open(filename, "w") as f: 37 | f.write("Test file for remove call") 38 | 39 | assert exists(filename) 40 | await aiofiles.os.remove(filename) 41 | assert exists(filename) is False 42 | 43 | 44 | async def test_unlink(): 45 | """Test the unlink call.""" 46 | filename = join(dirname(__file__), "resources", "test_file2.txt") 47 | with open(filename, "w") as f: 48 | f.write("Test file for unlink call") 49 | 50 | assert exists(filename) 51 | await aiofiles.os.unlink(filename) 52 | assert exists(filename) is False 53 | 54 | 55 | async def test_mkdir_and_rmdir(): 56 | """Test the mkdir and rmdir call.""" 57 | directory = join(dirname(__file__), "resources", "test_dir") 58 | await aiofiles.os.mkdir(directory) 59 | assert isdir(directory) 60 | await aiofiles.os.rmdir(directory) 61 | assert exists(directory) is False 62 | 63 | 64 | async def test_rename(): 65 | """Test the rename call.""" 66 | old_filename = join(dirname(__file__), "resources", "test_file1.txt") 67 | new_filename = join(dirname(__file__), "resources", "test_file2.txt") 68 | await aiofiles.os.rename(old_filename, new_filename) 69 | assert exists(old_filename) is False and exists(new_filename) 70 | await 
aiofiles.os.rename(new_filename, old_filename) 71 | assert exists(old_filename) and exists(new_filename) is False 72 | 73 | 74 | async def test_renames(): 75 | """Test the renames call.""" 76 | old_filename = join(dirname(__file__), "resources", "test_file1.txt") 77 | new_filename = join( 78 | dirname(__file__), "resources", "subdirectory", "test_file2.txt" 79 | ) 80 | await aiofiles.os.renames(old_filename, new_filename) 81 | assert exists(old_filename) is False and exists(new_filename) 82 | await aiofiles.os.renames(new_filename, old_filename) 83 | assert ( 84 | exists(old_filename) 85 | and exists(new_filename) is False 86 | and exists(dirname(new_filename)) is False 87 | ) 88 | 89 | 90 | async def test_replace(): 91 | """Test the replace call.""" 92 | old_filename = join(dirname(__file__), "resources", "test_file1.txt") 93 | new_filename = join(dirname(__file__), "resources", "test_file2.txt") 94 | 95 | await aiofiles.os.replace(old_filename, new_filename) 96 | assert exists(old_filename) is False and exists(new_filename) 97 | await aiofiles.os.replace(new_filename, old_filename) 98 | assert exists(old_filename) and exists(new_filename) is False 99 | 100 | with open(new_filename, "w") as f: 101 | f.write("Test file") 102 | assert exists(old_filename) and exists(new_filename) 103 | 104 | await aiofiles.os.replace(old_filename, new_filename) 105 | assert exists(old_filename) is False and exists(new_filename) 106 | await aiofiles.os.replace(new_filename, old_filename) 107 | assert exists(old_filename) and exists(new_filename) is False 108 | 109 | 110 | @pytest.mark.skipif( 111 | "2.4" < platform.release() < "2.6.33", 112 | reason="sendfile() syscall doesn't allow file->file", 113 | ) 114 | @pytest.mark.skipif( 115 | platform.system() in ("Darwin", "Windows"), 116 | reason="sendfile() doesn't work on mac and Win", 117 | ) 118 | async def test_sendfile_file(tmpdir): 119 | """Test the sendfile functionality, file-to-file.""" 120 | filename = join(dirname(__file__), "resources", "test_file1.txt") 121 | tmp_filename = tmpdir.join("tmp.bin") 122 | 123 | with open(filename) as f: 124 | contents = f.read() 125 | 126 | input_file = await aiofiles.open(filename) 127 | output_file = await aiofiles.open(str(tmp_filename), mode="w+") 128 | 129 | size = (await aiofiles.os.stat(filename)).st_size 130 | 131 | input_fd = input_file.fileno() 132 | output_fd = output_file.fileno() 133 | 134 | await aiofiles.os.sendfile(output_fd, input_fd, 0, size) 135 | 136 | await output_file.seek(0) 137 | 138 | actual_contents = await output_file.read() 139 | actual_size = (await aiofiles.os.stat(str(tmp_filename))).st_size 140 | 141 | assert contents == actual_contents 142 | assert size == actual_size 143 | 144 | 145 | @pytest.mark.skipif( 146 | platform.system() in ("Windows"), reason="sendfile() doesn't work on Win" 147 | ) 148 | async def test_sendfile_socket(unused_tcp_port): 149 | """Test the sendfile functionality, file-to-socket.""" 150 | filename = join(dirname(__file__), "resources", "test_file1.txt") 151 | 152 | with open(filename, mode="rb") as f: 153 | contents = f.read() 154 | 155 | async def serve_file(_, writer): 156 | out_fd = writer.transport.get_extra_info("socket").fileno() 157 | size = (await aiofiles.os.stat(filename)).st_size 158 | in_file = await aiofiles.open(filename) 159 | try: 160 | in_fd = in_file.fileno() 161 | await aiofiles.os.sendfile(out_fd, in_fd, 0, size) 162 | finally: 163 | await in_file.close() 164 | await writer.drain() 165 | writer.close() 166 | 167 | server = await 
asyncio.start_server(serve_file, port=unused_tcp_port) 168 | 169 | reader, writer = await asyncio.open_connection("127.0.0.1", unused_tcp_port) 170 | actual_contents = await reader.read() 171 | writer.close() 172 | 173 | assert contents == actual_contents 174 | server.close() 175 | 176 | await server.wait_closed() 177 | 178 | 179 | async def test_exists(): 180 | """Test path.exists call.""" 181 | filename = join(dirname(__file__), "resources", "test_file1.txt") 182 | result = await aiofiles.os.path.exists(filename) 183 | assert result 184 | 185 | 186 | async def test_isfile(): 187 | """Test path.isfile call.""" 188 | filename = join(dirname(__file__), "resources", "test_file1.txt") 189 | result = await aiofiles.os.path.isfile(filename) 190 | assert result 191 | 192 | 193 | async def test_isdir(): 194 | """Test path.isdir call.""" 195 | filename = join(dirname(__file__), "resources") 196 | result = await aiofiles.os.path.isdir(filename) 197 | assert result 198 | 199 | 200 | async def test_islink(): 201 | """Test the path.islink call.""" 202 | src_filename = join(dirname(__file__), "resources", "test_file1.txt") 203 | dst_filename = join(dirname(__file__), "resources", "test_file2.txt") 204 | await aiofiles.os.symlink(src_filename, dst_filename) 205 | assert await aiofiles.os.path.islink(dst_filename) 206 | await aiofiles.os.remove(dst_filename) 207 | 208 | 209 | async def test_ismount(): 210 | """Test the path.ismount call.""" 211 | filename = join(dirname(__file__), "resources") 212 | assert not await aiofiles.os.path.ismount(filename) 213 | assert await aiofiles.os.path.ismount("/") 214 | 215 | 216 | async def test_getsize(): 217 | """Test path.getsize call.""" 218 | filename = join(dirname(__file__), "resources", "test_file1.txt") 219 | result = await aiofiles.os.path.getsize(filename) 220 | assert result == 10 221 | 222 | 223 | async def test_samefile(): 224 | """Test path.samefile call.""" 225 | filename = join(dirname(__file__), "resources", "test_file1.txt") 226 | result = await aiofiles.os.path.samefile(filename, filename) 227 | assert result 228 | 229 | 230 | async def test_sameopenfile(): 231 | """Test path.samefile call.""" 232 | filename = join(dirname(__file__), "resources", "test_file1.txt") 233 | result = await aiofiles.os.path.samefile(filename, filename) 234 | assert result 235 | 236 | 237 | async def test_getmtime(): 238 | """Test path.getmtime call.""" 239 | filename = join(dirname(__file__), "resources", "test_file1.txt") 240 | result = await aiofiles.os.path.getmtime(filename) 241 | assert result 242 | 243 | 244 | async def test_getatime(): 245 | """Test path.getatime call.""" 246 | filename = join(dirname(__file__), "resources", "test_file1.txt") 247 | result = await aiofiles.os.path.getatime(filename) 248 | assert result 249 | 250 | 251 | async def test_getctime(): 252 | """Test path. 
call.""" 253 | filename = join(dirname(__file__), "resources", "test_file1.txt") 254 | result = await aiofiles.os.path.getctime(filename) 255 | assert result 256 | 257 | 258 | async def test_link(): 259 | """Test the link call.""" 260 | src_filename = join(dirname(__file__), "resources", "test_file1.txt") 261 | dst_filename = join(dirname(__file__), "resources", "test_file2.txt") 262 | initial_src_nlink = stat(src_filename).st_nlink 263 | await aiofiles.os.link(src_filename, dst_filename) 264 | assert ( 265 | exists(src_filename) 266 | and exists(dst_filename) 267 | and (stat(src_filename).st_ino == stat(dst_filename).st_ino) 268 | and (stat(src_filename).st_nlink == initial_src_nlink + 1) 269 | and (stat(dst_filename).st_nlink == 2) 270 | ) 271 | await aiofiles.os.remove(dst_filename) 272 | assert ( 273 | exists(src_filename) 274 | and exists(dst_filename) is False 275 | and (stat(src_filename).st_nlink == initial_src_nlink) 276 | ) 277 | 278 | 279 | async def test_symlink(): 280 | """Test the symlink call.""" 281 | src_filename = join(dirname(__file__), "resources", "test_file1.txt") 282 | dst_filename = join(dirname(__file__), "resources", "test_file2.txt") 283 | await aiofiles.os.symlink(src_filename, dst_filename) 284 | assert ( 285 | exists(src_filename) 286 | and exists(dst_filename) 287 | and stat(src_filename).st_ino == stat(dst_filename).st_ino 288 | ) 289 | await aiofiles.os.remove(dst_filename) 290 | assert exists(src_filename) and exists(dst_filename) is False 291 | 292 | 293 | @pytest.mark.skipif( 294 | platform.system() == "Windows", reason="Doesn't work on Win properly" 295 | ) 296 | async def test_readlink(): 297 | """Test the readlink call.""" 298 | src_filename = join(dirname(__file__), "resources", "test_file1.txt") 299 | dst_filename = join(dirname(__file__), "resources", "test_file2.txt") 300 | await aiofiles.os.symlink(src_filename, dst_filename) 301 | symlinked_path = await aiofiles.os.readlink(dst_filename) 302 | assert src_filename == symlinked_path 303 | await aiofiles.os.remove(dst_filename) 304 | 305 | 306 | async def test_listdir_empty_dir(): 307 | """Test the listdir call when the dir is empty.""" 308 | directory = join(dirname(__file__), "resources", "empty_dir") 309 | await aiofiles.os.mkdir(directory) 310 | dir_list = await aiofiles.os.listdir(directory) 311 | assert dir_list == [] 312 | await aiofiles.os.rmdir(directory) 313 | 314 | 315 | async def test_listdir_dir_with_only_one_file(): 316 | """Test the listdir call when the dir has one file.""" 317 | some_dir = join(dirname(__file__), "resources", "some_dir") 318 | some_file = join(some_dir, "some_file.txt") 319 | await aiofiles.os.mkdir(some_dir) 320 | with open(some_file, "w") as f: 321 | f.write("Test file") 322 | dir_list = await aiofiles.os.listdir(some_dir) 323 | assert "some_file.txt" in dir_list 324 | await aiofiles.os.remove(some_file) 325 | await aiofiles.os.rmdir(some_dir) 326 | 327 | 328 | async def test_listdir_dir_with_only_one_dir(): 329 | """Test the listdir call when the dir has one dir.""" 330 | some_dir = join(dirname(__file__), "resources", "some_dir") 331 | other_dir = join(some_dir, "other_dir") 332 | await aiofiles.os.mkdir(some_dir) 333 | await aiofiles.os.mkdir(other_dir) 334 | dir_list = await aiofiles.os.listdir(some_dir) 335 | assert "other_dir" in dir_list 336 | await aiofiles.os.rmdir(other_dir) 337 | await aiofiles.os.rmdir(some_dir) 338 | 339 | 340 | async def test_listdir_dir_with_multiple_files(): 341 | """Test the listdir call when the dir has multiple files.""" 342 | 
some_dir = join(dirname(__file__), "resources", "some_dir") 343 | some_file = join(some_dir, "some_file.txt") 344 | other_file = join(some_dir, "other_file.txt") 345 | await aiofiles.os.mkdir(some_dir) 346 | with open(some_file, "w") as f: 347 | f.write("Test file") 348 | with open(other_file, "w") as f: 349 | f.write("Test file") 350 | dir_list = await aiofiles.os.listdir(some_dir) 351 | assert "some_file.txt" in dir_list 352 | assert "other_file.txt" in dir_list 353 | await aiofiles.os.remove(some_file) 354 | await aiofiles.os.remove(other_file) 355 | await aiofiles.os.rmdir(some_dir) 356 | 357 | 358 | async def test_listdir_dir_with_a_file_and_a_dir(): 359 | """Test the listdir call when the dir has files and other dirs.""" 360 | some_dir = join(dirname(__file__), "resources", "some_dir") 361 | other_dir = join(some_dir, "other_dir") 362 | some_file = join(some_dir, "some_file.txt") 363 | await aiofiles.os.mkdir(some_dir) 364 | await aiofiles.os.mkdir(other_dir) 365 | with open(some_file, "w") as f: 366 | f.write("Test file") 367 | dir_list = await aiofiles.os.listdir(some_dir) 368 | assert "some_file.txt" in dir_list 369 | assert "other_dir" in dir_list 370 | await aiofiles.os.remove(some_file) 371 | await aiofiles.os.rmdir(other_dir) 372 | await aiofiles.os.rmdir(some_dir) 373 | 374 | 375 | async def test_listdir_non_existing_dir(): 376 | """Test the listdir call when the dir doesn't exist.""" 377 | some_dir = join(dirname(__file__), "resources", "some_dir") 378 | with pytest.raises(FileNotFoundError): 379 | await aiofiles.os.listdir(some_dir) 380 | 381 | 382 | async def test_scantdir_empty_dir(): 383 | """Test the scandir call when the dir is empty.""" 384 | empty_dir = join(dirname(__file__), "resources", "empty_dir") 385 | await aiofiles.os.mkdir(empty_dir) 386 | dir_iterator = await aiofiles.os.scandir(empty_dir) 387 | dir_list = [] 388 | for dir_entity in dir_iterator: 389 | dir_list.append(dir_entity) 390 | assert dir_list == [] 391 | await aiofiles.os.rmdir(empty_dir) 392 | 393 | 394 | async def test_scandir_dir_with_only_one_file(): 395 | """Test the scandir call when the dir has one file.""" 396 | some_dir = join(dirname(__file__), "resources", "some_dir") 397 | some_file = join(some_dir, "some_file.txt") 398 | await aiofiles.os.mkdir(some_dir) 399 | with open(some_file, "w") as f: 400 | f.write("Test file") 401 | dir_iterator = await aiofiles.os.scandir(some_dir) 402 | some_file_entity = next(dir_iterator) 403 | assert some_file_entity.name == "some_file.txt" 404 | await aiofiles.os.remove(some_file) 405 | await aiofiles.os.rmdir(some_dir) 406 | 407 | 408 | async def test_scandir_dir_with_only_one_dir(): 409 | """Test the scandir call when the dir has one dir.""" 410 | some_dir = join(dirname(__file__), "resources", "some_dir") 411 | other_dir = join(some_dir, "other_dir") 412 | await aiofiles.os.mkdir(some_dir) 413 | await aiofiles.os.mkdir(other_dir) 414 | dir_iterator = await aiofiles.os.scandir(some_dir) 415 | other_dir_entity = next(dir_iterator) 416 | assert other_dir_entity.name == "other_dir" 417 | await aiofiles.os.rmdir(other_dir) 418 | await aiofiles.os.rmdir(some_dir) 419 | 420 | 421 | async def test_scandir_non_existing_dir(): 422 | """Test the scandir call when the dir doesn't exist.""" 423 | some_dir = join(dirname(__file__), "resources", "some_dir") 424 | with pytest.raises(FileNotFoundError): 425 | await aiofiles.os.scandir(some_dir) 426 | 427 | 428 | @pytest.mark.skipif(platform.system() == "Windows", reason="Doesn't work on Win") 429 | async def 
test_access(): 430 | temp_file = Path(__file__).parent.joinpath("resources", "os_access_temp.txt") 431 | temp_dir = Path(__file__).parent.joinpath("resources", "os_access_temp") 432 | 433 | # prepare 434 | if temp_file.exists(): 435 | os.remove(temp_file) 436 | assert not temp_file.exists() 437 | temp_file.touch() 438 | 439 | if temp_dir.exists(): 440 | os.rmdir(temp_dir) 441 | assert not temp_dir.exists() 442 | os.mkdir(temp_dir) 443 | 444 | data = [ 445 | # full access 446 | [0o777, os.F_OK, True], 447 | [0o777, os.R_OK, True], 448 | [0o777, os.W_OK, True], 449 | [0o777, os.X_OK, True], 450 | # chmod -x 451 | [0o666, os.F_OK, True], 452 | [0o666, os.R_OK, True], 453 | [0o666, os.W_OK, True], 454 | [0o666, os.X_OK, False], 455 | # chmod -w 456 | [0o444, os.F_OK, True], 457 | [0o444, os.R_OK, True], 458 | [0o444, os.W_OK, False], 459 | [0o444, os.X_OK, False], 460 | # chmod -r 461 | [0o000, os.F_OK, True], 462 | [0o000, os.R_OK, False], 463 | [0o000, os.W_OK, False], 464 | [0o000, os.X_OK, False], 465 | ] 466 | for ch, mode, access in data: 467 | print(f"mode:{mode}, access:{access}") 468 | temp_file.chmod(ch) 469 | temp_dir.chmod(ch) 470 | assert await aiofiles.os.access(temp_file, mode) == access 471 | assert await aiofiles.os.access(temp_dir, mode) == access 472 | 473 | # not exists 474 | os.remove(temp_file) 475 | os.rmdir(temp_dir) 476 | for mode in [os.F_OK, os.R_OK, os.W_OK, os.X_OK]: 477 | print(f"mode:{mode}") 478 | assert not await aiofiles.os.access(temp_file, mode) 479 | assert not await aiofiles.os.access(temp_dir, mode) 480 | 481 | 482 | async def test_getcwd(): 483 | """Test the getcwd call.""" 484 | cwd = await aiofiles.os.getcwd() 485 | assert cwd == os.getcwd() 486 | 487 | 488 | async def test_abspath_given_abspath(): 489 | """Test the abspath call with an absolute path.""" 490 | filename = join(dirname(__file__), "resources", "test_file1.txt") 491 | file_abs_path = await aiofiles.os.path.abspath(filename) 492 | assert file_abs_path == filename 493 | 494 | 495 | async def test_abspath(): 496 | """Test the abspath call.""" 497 | relative_filename = "./tests/resources/test_file1.txt" 498 | abs_filename = join(dirname(__file__), "resources", "test_file1.txt") 499 | result = await aiofiles.os.path.abspath(relative_filename) 500 | assert result == abs_filename 501 | -------------------------------------------------------------------------------- /tests/test_simple.py: -------------------------------------------------------------------------------- 1 | """Simple tests verifying basic functionality.""" 2 | 3 | import asyncio 4 | 5 | from aiofiles import threadpool 6 | 7 | 8 | async def test_serve_small_bin_file_sync(tmpdir, unused_tcp_port): 9 | """Fire up a small simple file server, and fetch a file. 10 | 11 | The file is read into memory synchronously, so this test doesn't actually 12 | test anything except the general test concept. 13 | """ 14 | # First we'll write a small file. 
15 | filename = "test.bin" 16 | file_content = b"0123456789" 17 | file = tmpdir.join(filename) 18 | file.write_binary(file_content) 19 | 20 | async def serve_file(reader, writer): 21 | full_filename = str(file) 22 | with open(full_filename, "rb") as f: 23 | writer.write(f.read()) 24 | writer.close() 25 | 26 | server = await asyncio.start_server(serve_file, port=unused_tcp_port) 27 | 28 | reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) 29 | payload = await reader.read() 30 | 31 | assert payload == file_content 32 | 33 | server.close() 34 | await server.wait_closed() 35 | 36 | 37 | async def test_serve_small_bin_file(tmpdir, unused_tcp_port): 38 | """Fire up a small simple file server, and fetch a file.""" 39 | # First we'll write a small file. 40 | filename = "test.bin" 41 | file_content = b"0123456789" 42 | file = tmpdir.join(filename) 43 | file.write_binary(file_content) 44 | 45 | async def serve_file(reader, writer): 46 | full_filename = str(file) 47 | f = await threadpool.open(full_filename, mode="rb") 48 | writer.write(await f.read()) 49 | await f.close() 50 | writer.close() 51 | 52 | server = await asyncio.start_server(serve_file, port=unused_tcp_port) 53 | 54 | reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) 55 | payload = await reader.read() 56 | 57 | assert payload == file_content 58 | 59 | server.close() 60 | await server.wait_closed() 61 | -------------------------------------------------------------------------------- /tests/test_stdio.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from aiofiles import stderr, stderr_bytes, stdin, stdin_bytes, stdout, stdout_bytes 4 | 5 | 6 | async def test_stdio(capsys): 7 | await stdout.write("hello") 8 | await stderr.write("world") 9 | out, err = capsys.readouterr() 10 | assert out == "hello" 11 | assert err == "world" 12 | with pytest.raises(OSError): 13 | await stdin.read() 14 | 15 | 16 | async def test_stdio_bytes(capsysbinary): 17 | await stdout_bytes.write(b"hello") 18 | await stderr_bytes.write(b"world") 19 | out, err = capsysbinary.readouterr() 20 | assert out == b"hello" 21 | assert err == b"world" 22 | with pytest.raises(OSError): 23 | await stdin_bytes.read() 24 | -------------------------------------------------------------------------------- /tests/test_tempfile.py: -------------------------------------------------------------------------------- 1 | import io 2 | import os 3 | import platform 4 | import sys 5 | 6 | import pytest 7 | 8 | from aiofiles import tempfile 9 | 10 | 11 | @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) 12 | async def test_temporary_file(mode): 13 | """Test temporary file.""" 14 | data = b"Hello World!\n" if "b" in mode else "Hello World!\n" 15 | 16 | async with tempfile.TemporaryFile(mode=mode) as f: 17 | for _ in range(3): 18 | await f.write(data) 19 | 20 | await f.flush() 21 | await f.seek(0) 22 | 23 | async for line in f: 24 | assert line == data 25 | 26 | 27 | @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) 28 | @pytest.mark.skipif( 29 | sys.version_info >= (3, 12), 30 | reason=("3.12+ doesn't support tempfile.NamedTemporaryFile.delete"), 31 | ) 32 | async def test_named_temporary_file(mode): 33 | data = b"Hello World!" if "b" in mode else "Hello World!" 
34 | filename = None 35 | 36 | async with tempfile.NamedTemporaryFile(mode=mode) as f: 37 | await f.write(data) 38 | await f.flush() 39 | await f.seek(0) 40 | assert await f.read() == data 41 | 42 | filename = f.name 43 | assert os.path.exists(filename) 44 | assert os.path.isfile(filename) 45 | assert f.delete 46 | 47 | assert not os.path.exists(filename) 48 | 49 | 50 | @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) 51 | @pytest.mark.skipif( 52 | sys.version_info < (3, 12), 53 | reason=("3.12+ doesn't support tempfile.NamedTemporaryFile.delete"), 54 | ) 55 | async def test_named_temporary_file_312(mode): 56 | data = b"Hello World!" if "b" in mode else "Hello World!" 57 | filename = None 58 | 59 | async with tempfile.NamedTemporaryFile(mode=mode) as f: 60 | await f.write(data) 61 | await f.flush() 62 | await f.seek(0) 63 | assert await f.read() == data 64 | 65 | filename = f.name 66 | assert os.path.exists(filename) 67 | assert os.path.isfile(filename) 68 | 69 | assert not os.path.exists(filename) 70 | 71 | 72 | @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) 73 | @pytest.mark.skipif( 74 | sys.version_info < (3, 12), reason=("3.12+ supports delete_on_close") 75 | ) 76 | async def test_named_temporary_delete_on_close(mode): 77 | data = b"Hello World!" if "b" in mode else "Hello World!" 78 | filename = None 79 | 80 | async with tempfile.NamedTemporaryFile(mode=mode, delete_on_close=True) as f: 81 | await f.write(data) 82 | await f.flush() 83 | await f.close() 84 | 85 | filename = f.name 86 | assert not os.path.exists(filename) 87 | 88 | async with tempfile.NamedTemporaryFile(mode=mode, delete_on_close=False) as f: 89 | await f.write(data) 90 | await f.flush() 91 | await f.close() 92 | 93 | filename = f.name 94 | assert os.path.exists(filename) 95 | 96 | assert not os.path.exists(filename) 97 | 98 | 99 | @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) 100 | async def test_spooled_temporary_file(mode): 101 | """Test spooled temporary file.""" 102 | data = b"Hello World!" if "b" in mode else "Hello World!" 103 | 104 | async with tempfile.SpooledTemporaryFile(max_size=len(data) + 1, mode=mode) as f: 105 | await f.write(data) 106 | await f.flush() 107 | if "b" in mode: 108 | assert type(f._file._file) is io.BytesIO 109 | 110 | await f.write(data) 111 | await f.flush() 112 | if "b" in mode: 113 | assert type(f._file._file) is not io.BytesIO 114 | 115 | await f.seek(0) 116 | assert await f.read() == data + data 117 | 118 | 119 | @pytest.mark.skipif( 120 | platform.system() == "Windows", reason="Doesn't work on Win properly" 121 | ) 122 | @pytest.mark.parametrize( 123 | "test_string, newlines", [("LF\n", "\n"), ("CRLF\r\n", "\r\n")] 124 | ) 125 | async def test_spooled_temporary_file_newlines(test_string, newlines): 126 | """ 127 | Test `newlines` property in spooled temporary file. 
128 | issue https://github.com/Tinche/aiofiles/issues/118 129 | """ 130 | 131 | async with tempfile.SpooledTemporaryFile(mode="w+") as f: 132 | await f.write(test_string) 133 | await f.flush() 134 | await f.seek(0) 135 | 136 | assert f.newlines is None 137 | 138 | await f.read() 139 | 140 | assert f.newlines == newlines 141 | 142 | 143 | @pytest.mark.parametrize("prefix, suffix", [("a", "b"), ("c", "d"), ("e", "f")]) 144 | async def test_temporary_directory(prefix, suffix, tmp_path): 145 | """Test temporary directory.""" 146 | dir_path = None 147 | 148 | async with tempfile.TemporaryDirectory( 149 | suffix=suffix, prefix=prefix, dir=tmp_path 150 | ) as d: 151 | dir_path = d 152 | assert os.path.exists(dir_path) 153 | assert os.path.isdir(dir_path) 154 | assert d[-1] == suffix 155 | assert d.split(os.sep)[-1][0] == prefix 156 | assert not os.path.exists(dir_path) 157 | -------------------------------------------------------------------------------- /tests/threadpool/test_binary.py: -------------------------------------------------------------------------------- 1 | """PEP 0492/Python 3.5+ tests for binary files.""" 2 | 3 | import io 4 | from os.path import dirname, join 5 | 6 | import pytest 7 | 8 | from aiofiles.threadpool import open as aioopen 9 | 10 | 11 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 12 | @pytest.mark.parametrize("buffering", [-1, 0]) 13 | async def test_simple_iteration(mode, buffering): 14 | """Test iterating over lines from a file.""" 15 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 16 | 17 | async with aioopen(filename, mode=mode, buffering=buffering) as file: 18 | # Append mode needs us to seek. 19 | await file.seek(0) 20 | 21 | counter = 1 22 | # The old iteration pattern: 23 | while True: 24 | line = await file.readline() 25 | if not line: 26 | break 27 | assert line.strip() == b"line " + str(counter).encode() 28 | counter += 1 29 | 30 | counter = 1 31 | await file.seek(0) 32 | # The new iteration pattern: 33 | async for line in file: 34 | assert line.strip() == b"line " + str(counter).encode() 35 | counter += 1 36 | 37 | assert file.closed 38 | 39 | 40 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 41 | @pytest.mark.parametrize("buffering", [-1, 0]) 42 | async def test_simple_readlines(mode, buffering): 43 | """Test the readlines functionality.""" 44 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 45 | 46 | with open(filename, mode="rb") as f: 47 | expected = f.readlines() 48 | 49 | async with aioopen(str(filename), mode=mode) as file: 50 | # Append mode needs us to seek. 51 | await file.seek(0) 52 | 53 | actual = await file.readlines() 54 | 55 | assert actual == expected 56 | 57 | 58 | @pytest.mark.parametrize("mode", ["rb+", "wb", "ab"]) 59 | @pytest.mark.parametrize("buffering", [-1, 0]) 60 | async def test_simple_flush(mode, buffering, tmpdir): 61 | """Test flushing to a file.""" 62 | filename = "file.bin" 63 | 64 | full_file = tmpdir.join(filename) 65 | 66 | if "r" in mode: 67 | full_file.ensure() # Read modes want it to already exist. 68 | 69 | async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: 70 | await file.write(b"0") # Shouldn't flush. 
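# With the default buffering (-1) the byte stays in Python's buffer until flush(),
# while buffering=0 writes through to the OS immediately, which is what the
# branch below asserts.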
71 | 72 | if buffering == -1: 73 | assert b"" == full_file.read_binary() 74 | else: 75 | assert b"0" == full_file.read_binary() 76 | 77 | await file.flush() 78 | 79 | assert b"0" == full_file.read_binary() 80 | 81 | 82 | @pytest.mark.parametrize("mode", ["rb+", "wb+", "ab+"]) 83 | async def test_simple_peek(mode, tmpdir): 84 | """Test flushing to a file.""" 85 | filename = "file.bin" 86 | 87 | full_file = tmpdir.join(filename) 88 | full_file.write_binary(b"0123456789") 89 | 90 | async with aioopen(str(full_file), mode=mode) as file: 91 | if "a" in mode: 92 | await file.seek(0) # Rewind for append modes. 93 | 94 | peeked = await file.peek(1) 95 | 96 | # Technically it's OK for the peek to return less bytes than requested. 97 | if peeked: 98 | assert peeked.startswith(b"0") 99 | 100 | read = await file.read(1) 101 | 102 | assert peeked.startswith(read) 103 | 104 | 105 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 106 | @pytest.mark.parametrize("buffering", [-1, 0]) 107 | async def test_simple_read(mode, buffering): 108 | """Just read some bytes from a test file.""" 109 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 110 | async with aioopen(filename, mode=mode, buffering=buffering) as file: 111 | await file.seek(0) # Needed for the append mode. 112 | 113 | actual = await file.read() 114 | 115 | assert b"" == (await file.read()) 116 | assert actual == open(filename, mode="rb").read() 117 | 118 | 119 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 120 | @pytest.mark.parametrize("buffering", [-1, 0]) 121 | async def test_staggered_read(mode, buffering): 122 | """Read bytes repeatedly.""" 123 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 124 | async with aioopen(filename, mode=mode, buffering=buffering) as file: 125 | await file.seek(0) # Needed for the append mode. 
126 | 127 | actual = [] 128 | while True: 129 | byte = await file.read(1) 130 | if byte: 131 | actual.append(byte) 132 | else: 133 | break 134 | 135 | assert b"" == (await file.read()) 136 | 137 | expected = [] 138 | with open(filename, mode="rb") as f: 139 | while True: 140 | byte = f.read(1) 141 | if byte: 142 | expected.append(byte) 143 | else: 144 | break 145 | 146 | assert actual == expected 147 | 148 | 149 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 150 | @pytest.mark.parametrize("buffering", [-1, 0]) 151 | async def test_simple_seek(mode, buffering, tmpdir): 152 | """Test seeking and then reading.""" 153 | filename = "bigfile.bin" 154 | content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE 155 | 156 | full_file = tmpdir.join(filename) 157 | full_file.write_binary(content) 158 | 159 | async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: 160 | await file.seek(4) 161 | 162 | assert b"4" == (await file.read(1)) 163 | 164 | 165 | @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) 166 | @pytest.mark.parametrize("buffering", [-1, 0]) 167 | async def test_simple_close_ctx_mgr_iter(mode, buffering, tmpdir): 168 | """Open a file, read a byte, and close it.""" 169 | filename = "bigfile.bin" 170 | content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE 171 | 172 | full_file = tmpdir.join(filename) 173 | full_file.write_binary(content) 174 | 175 | async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: 176 | assert not file.closed 177 | assert not file._file.closed 178 | 179 | assert file.closed 180 | assert file._file.closed 181 | 182 | 183 | @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) 184 | @pytest.mark.parametrize("buffering", [-1, 0]) 185 | async def test_simple_close_ctx_mgr(mode, buffering, tmpdir): 186 | """Open a file, read a byte, and close it.""" 187 | filename = "bigfile.bin" 188 | content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE 189 | 190 | full_file = tmpdir.join(filename) 191 | full_file.write_binary(content) 192 | 193 | file = await aioopen(str(full_file), mode=mode, buffering=buffering) 194 | assert not file.closed 195 | assert not file._file.closed 196 | 197 | await file.close() 198 | 199 | assert file.closed 200 | assert file._file.closed 201 | 202 | 203 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) 204 | @pytest.mark.parametrize("buffering", [-1, 0]) 205 | async def test_simple_readinto(mode, buffering): 206 | """Test the readinto functionality.""" 207 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 208 | async with aioopen(filename, mode=mode, buffering=buffering) as file: 209 | await file.seek(0) # Needed for the append mode. 210 | 211 | array = bytearray(4) 212 | bytes_read = await file.readinto(array) 213 | 214 | assert bytes_read == 4 215 | assert array == open(filename, mode="rb").read(4) 216 | 217 | 218 | @pytest.mark.parametrize("mode", ["rb+", "wb", "ab+"]) 219 | @pytest.mark.parametrize("buffering", [-1, 0]) 220 | async def test_simple_truncate(mode, buffering, tmpdir): 221 | """Test truncating files.""" 222 | filename = "bigfile.bin" 223 | content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE 224 | 225 | full_file = tmpdir.join(filename) 226 | full_file.write_binary(content) 227 | 228 | async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: 229 | # The append modes want us to seek first. 230 | await file.seek(0) 231 | 232 | if "w" in mode: 233 | # We've just erased the entire file. 
234 |             await file.write(content)
235 |             await file.flush()
236 |             await file.seek(0)
237 | 
238 |         await file.truncate()
239 | 
240 |     assert b"" == full_file.read_binary()
241 | 
242 | 
243 | @pytest.mark.parametrize("mode", ["wb", "rb+", "wb+", "ab", "ab+"])
244 | @pytest.mark.parametrize("buffering", [-1, 0])
245 | async def test_simple_write(mode, buffering, tmpdir):
246 |     """Test writing into a file."""
247 |     filename = "bigfile.bin"
248 |     content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE
249 | 
250 |     full_file = tmpdir.join(filename)
251 | 
252 |     if "r" in mode:
253 |         full_file.ensure()  # Read modes want it to already exist.
254 | 
255 |     async with aioopen(str(full_file), mode=mode, buffering=buffering) as file:
256 |         bytes_written = await file.write(content)
257 | 
258 |     assert bytes_written == len(content)
259 |     assert content == full_file.read_binary()
260 | 
261 | 
262 | async def test_simple_detach(tmpdir):
263 |     """Test detaching for buffered streams."""
264 |     filename = "file.bin"
265 | 
266 |     full_file = tmpdir.join(filename)
267 |     full_file.write_binary(b"0123456789")
268 | 
269 |     with pytest.raises(ValueError):
270 |         async with aioopen(str(full_file), mode="rb") as file:
271 |             raw_file = file.detach()
272 | 
273 |             assert raw_file
274 | 
275 |             with pytest.raises(ValueError):
276 |                 await file.read()
277 | 
278 |     assert b"0123456789" == raw_file.read(10)
279 | 
280 | 
281 | async def test_simple_readall(tmpdir):
282 |     """Test the readall function by reading a large file in.
283 | 
284 |     Only RawIOBase supports readall().
285 |     """
286 |     filename = "bigfile.bin"
287 |     content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE  # Hopefully several reads.
288 | 
289 |     sync_file = tmpdir.join(filename)
290 |     sync_file.write_binary(content)
291 | 
292 |     file = await aioopen(str(sync_file), mode="rb", buffering=0)
293 | 
294 |     actual = await file.readall()
295 | 
296 |     assert actual == content
297 | 
298 |     await file.close()
299 |     assert file.closed
300 | 
301 | 
302 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"])
303 | @pytest.mark.parametrize("buffering", [-1, 0])
304 | async def test_name_property(mode, buffering):
305 |     """Test the name property."""
306 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
307 | 
308 |     async with aioopen(filename, mode=mode, buffering=buffering) as file:
309 |         assert file.name == filename
310 | 
311 |     assert file.closed
312 | 
313 | 
314 | @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"])
315 | @pytest.mark.parametrize("buffering", [-1, 0])
316 | async def test_mode_property(mode, buffering):
317 |     """Test the mode property."""
318 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
319 | 
320 |     async with aioopen(filename, mode=mode, buffering=buffering) as file:
321 |         assert file.mode == mode
322 | 
323 |     assert file.closed
324 | 
--------------------------------------------------------------------------------
/tests/threadpool/test_concurrency.py:
--------------------------------------------------------------------------------
1 | """Test concurrency properties of the implementation."""
2 | 
3 | import asyncio
4 | import time
5 | from os.path import dirname, join
6 | 
7 | import aiofiles.threadpool
8 | 
9 | 
10 | async def test_slow_file(monkeypatch, unused_tcp_port):
11 |     """Monkey patch the sync open to be slow and assert the loop stays responsive."""
12 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
13 | 
14 |     with open(filename, mode="rb") as f:
15 |         contents = f.read()
16 | 
17 |     def new_open(*args, **kwargs):
18 |         time.sleep(1)
19 |         return open(*args, **kwargs)
20 | 
21 |     monkeypatch.setattr(aiofiles.threadpool, "sync_open", value=new_open)
22 | 
23 |     async def serve_file(_, writer):
24 |         file = await aiofiles.threadpool.open(filename, mode="rb")
25 |         try:
26 |             while True:
27 |                 data = await file.read(1)
28 |                 if not data:
29 |                     break
30 |                 writer.write(data)
31 |                 await writer.drain()
32 |             await writer.drain()
33 |         finally:
34 |             writer.close()
35 |             await file.close()
36 | 
37 |     async def return_one(_, writer):
38 |         writer.write(b"1")
39 |         await writer.drain()
40 |         writer.close()
41 | 
42 |     counter = 0
43 | 
44 |     async def spam_client():
45 |         nonlocal counter
46 |         while True:
47 |             r, w = await asyncio.open_connection("127.0.0.1", port=30001)
48 |             assert (await r.read()) == b"1"
49 |             counter += 1
50 |             w.close()
51 |             await asyncio.sleep(0.01)
52 | 
53 |     file_server = await asyncio.start_server(serve_file, port=unused_tcp_port)
54 |     spam_server = await asyncio.start_server(return_one, port=30001)
55 | 
56 |     spam_task = asyncio.ensure_future(spam_client())
57 | 
58 |     reader, writer = await asyncio.open_connection("127.0.0.1", port=unused_tcp_port)
59 | 
60 |     actual_contents = await reader.read()
61 |     writer.close()
62 | 
63 |     await asyncio.sleep(0)
64 | 
65 |     file_server.close()
66 |     spam_server.close()
67 | 
68 |     await file_server.wait_closed()
69 |     await spam_server.wait_closed()
70 | 
71 |     spam_task.cancel()
72 | 
73 |     assert actual_contents == contents
74 |     assert counter > 30
75 | 
--------------------------------------------------------------------------------
/tests/threadpool/test_open.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | from pathlib import Path
3 | 
4 | import pytest
5 | 
6 | from aiofiles.threadpool import open as aioopen
7 | 
8 | RESOURCES_DIR = Path(__file__).parent.parent / "resources"
9 | TEST_FILE = RESOURCES_DIR / "test_file1.txt"
10 | TEST_FILE_CONTENTS = "0123456789"
11 | 
12 | 
13 | @pytest.mark.parametrize("mode", ["r", "rb"])
14 | async def test_file_not_found(mode):
15 |     filename = "non_existent"
16 | 
17 |     try:
18 |         open(filename, mode=mode)
19 |     except Exception as e:
20 |         expected = e
21 | 
22 |     assert expected
23 | 
24 |     try:
25 |         await aioopen(filename, mode=mode)
26 |     except Exception as e:
27 |         actual = e
28 | 
29 |     assert actual
30 | 
31 |     assert actual.errno == expected.errno
32 |     assert str(actual) == str(expected)
33 | 
34 | 
35 | async def test_file_async_context_aexit():
36 |     async with aioopen(TEST_FILE) as fp:
37 |         pass
38 | 
39 |     with pytest.raises(ValueError):
40 |         line = await fp.read()
41 | 
42 |     async with aioopen(TEST_FILE) as fp:
43 |         line = await fp.read()
44 |         assert line == TEST_FILE_CONTENTS
45 | 
46 | 
47 | async def test_filetask_async_context_aexit():
48 |     async def _process_test_file(file_ctx, sleep_time: float = 1.0):
49 |         nonlocal file_ref
50 |         async with file_ctx as fp:
51 |             file_ref = file_ctx._obj
52 |             await asyncio.sleep(sleep_time)
53 |             await fp.read()
54 | 
55 |     cancel_time, sleep_time = 0.1, 10
56 |     assert cancel_time <= (sleep_time / 10)
57 | 
58 |     file_ref = None
59 |     file_ctx = aioopen(TEST_FILE)
60 | 
61 |     task = asyncio.create_task(
62 |         _process_test_file(file_ctx=file_ctx, sleep_time=sleep_time)
63 |     )
64 |     try:
65 |         await asyncio.wait_for(task, timeout=cancel_time)
66 |     except asyncio.TimeoutError:
67 |         assert task.cancelled()
68 | 
69 |     assert file_ref.closed
70 | 
--------------------------------------------------------------------------------
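A minimal sketch, not a file from this repository, of the two usage patterns test_open.py above exercises: awaiting the opener directly, and using it as an async context manager. The example.txt path and the main() wrapper are illustrative only.

    import asyncio

    from aiofiles.threadpool import open as aioopen


    async def main() -> None:
        # Pattern 1: async context manager, as in test_file_async_context_aexit.
        async with aioopen("example.txt", mode="w") as f:
            await f.write("0123456789\n")

        # Pattern 2: await the opener and close explicitly, as the binary
        # test_simple_close_ctx_mgr case does.
        f = await aioopen("example.txt", mode="r")
        try:
            async for line in f:
                assert line.strip() == "0123456789"
        finally:
            await f.close()


    asyncio.run(main())

--------------------------------------------------------------------------------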
/tests/threadpool/test_text.py: -------------------------------------------------------------------------------- 1 | """PEP 0492/Python 3.5+ tests for text files.""" 2 | 3 | import io 4 | from os.path import dirname, join 5 | 6 | import pytest 7 | 8 | from aiofiles.threadpool import open as aioopen 9 | 10 | 11 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) 12 | async def test_simple_iteration(mode): 13 | """Test iterating over lines from a file.""" 14 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 15 | 16 | async with aioopen(filename, mode=mode) as file: 17 | # Append mode needs us to seek. 18 | await file.seek(0) 19 | 20 | counter = 1 21 | 22 | # The old iteration pattern: 23 | while True: 24 | line = await file.readline() 25 | if not line: 26 | break 27 | assert line.strip() == "line " + str(counter) 28 | counter += 1 29 | 30 | await file.seek(0) 31 | counter = 1 32 | 33 | # The new iteration pattern: 34 | async for line in file: 35 | assert line.strip() == "line " + str(counter) 36 | counter += 1 37 | 38 | assert file.closed 39 | 40 | 41 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) 42 | async def test_simple_readlines(mode): 43 | """Test the readlines functionality.""" 44 | filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") 45 | 46 | with open(filename) as f: 47 | expected = f.readlines() 48 | 49 | async with aioopen(filename, mode=mode) as file: 50 | # Append mode needs us to seek. 51 | await file.seek(0) 52 | 53 | actual = await file.readlines() 54 | 55 | assert file.closed 56 | 57 | assert actual == expected 58 | 59 | 60 | @pytest.mark.parametrize("mode", ["r+", "w", "a"]) 61 | async def test_simple_flush(mode, tmpdir): 62 | """Test flushing to a file.""" 63 | filename = "file.bin" 64 | 65 | full_file = tmpdir.join(filename) 66 | 67 | if "r" in mode: 68 | full_file.ensure() # Read modes want it to already exist. 69 | 70 | async with aioopen(str(full_file), mode=mode) as file: 71 | await file.write("0") # Shouldn't flush. 72 | 73 | assert "" == full_file.read_text(encoding="utf8") 74 | 75 | await file.flush() 76 | 77 | assert "0" == full_file.read_text(encoding="utf8") 78 | 79 | assert file.closed 80 | 81 | 82 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) 83 | async def test_simple_read(mode): 84 | """Just read some bytes from a test file.""" 85 | filename = join(dirname(__file__), "..", "resources", "test_file1.txt") 86 | async with aioopen(filename, mode=mode) as file: 87 | await file.seek(0) # Needed for the append mode. 88 | 89 | actual = await file.read() 90 | 91 | assert "" == (await file.read()) 92 | assert actual == open(filename).read() 93 | 94 | assert file.closed 95 | 96 | 97 | @pytest.mark.parametrize("mode", ["w", "a"]) 98 | async def test_simple_read_fail(mode, tmpdir): 99 | """Try reading some bytes and fail.""" 100 | filename = "bigfile.bin" 101 | content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE 102 | 103 | full_file = tmpdir.join(filename) 104 | full_file.write(content) 105 | with pytest.raises(ValueError): 106 | async with aioopen(str(full_file), mode=mode) as file: 107 | await file.seek(0) # Needed for the append mode. 
108 | 109 | await file.read() 110 | 111 | assert file.closed 112 | 113 | 114 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) 115 | async def test_staggered_read(mode): 116 | """Read bytes repeatedly.""" 117 | filename = join(dirname(__file__), "..", "resources", "test_file1.txt") 118 | async with aioopen(filename, mode=mode) as file: 119 | await file.seek(0) # Needed for the append mode. 120 | 121 | actual = [] 122 | while True: 123 | char = await file.read(1) 124 | if char: 125 | actual.append(char) 126 | else: 127 | break 128 | 129 | assert "" == (await file.read()) 130 | 131 | expected = [] 132 | with open(filename) as f: 133 | while True: 134 | char = f.read(1) 135 | if char: 136 | expected.append(char) 137 | else: 138 | break 139 | 140 | assert actual == expected 141 | 142 | assert file.closed 143 | 144 | 145 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) 146 | async def test_simple_seek(mode, tmpdir): 147 | """Test seeking and then reading.""" 148 | filename = "bigfile.bin" 149 | content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE 150 | 151 | full_file = tmpdir.join(filename) 152 | full_file.write(content) 153 | 154 | async with aioopen(str(full_file), mode=mode) as file: 155 | await file.seek(4) 156 | assert "4" == (await file.read(1)) 157 | 158 | assert file.closed 159 | 160 | 161 | @pytest.mark.parametrize("mode", ["w", "r", "r+", "w+", "a", "a+"]) 162 | async def test_simple_close(mode, tmpdir): 163 | """Open a file, read a byte, and close it.""" 164 | filename = "bigfile.bin" 165 | content = "0" * 4 * io.DEFAULT_BUFFER_SIZE 166 | 167 | full_file = tmpdir.join(filename) 168 | full_file.write(content) 169 | 170 | async with aioopen(str(full_file), mode=mode) as file: 171 | assert not file.closed 172 | assert not file._file.closed 173 | 174 | assert file.closed 175 | assert file._file.closed 176 | 177 | 178 | @pytest.mark.parametrize("mode", ["r+", "w", "a+"]) 179 | async def test_simple_truncate(mode, tmpdir): 180 | """Test truncating files.""" 181 | filename = "bigfile.bin" 182 | content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE 183 | 184 | full_file = tmpdir.join(filename) 185 | full_file.write(content) 186 | 187 | async with aioopen(str(full_file), mode=mode) as file: 188 | # The append modes want us to seek first. 189 | await file.seek(0) 190 | 191 | if "w" in mode: 192 | # We've just erased the entire file. 193 | await file.write(content) 194 | await file.flush() 195 | await file.seek(0) 196 | 197 | await file.truncate() 198 | 199 | assert "" == full_file.read() 200 | 201 | 202 | @pytest.mark.parametrize("mode", ["w", "r+", "w+", "a", "a+"]) 203 | async def test_simple_write(mode, tmpdir): 204 | """Test writing into a file.""" 205 | filename = "bigfile.bin" 206 | content = "0" * 4 * io.DEFAULT_BUFFER_SIZE 207 | 208 | full_file = tmpdir.join(filename) 209 | 210 | if "r" in mode: 211 | full_file.ensure() # Read modes want it to already exist. 212 | 213 | async with aioopen(str(full_file), mode=mode) as file: 214 | bytes_written = await file.write(content) 215 | 216 | assert bytes_written == len(content) 217 | assert content == full_file.read() 218 | assert file.closed 219 | 220 | 221 | async def test_simple_detach(tmpdir): 222 | """Test detaching for buffered streams.""" 223 | filename = "file.bin" 224 | 225 | full_file = tmpdir.join(filename) 226 | full_file.write("0123456789") 227 | 228 | with pytest.raises(ValueError): # Close will error out. 
229 |         async with aioopen(str(full_file), mode="r") as file:
230 |             raw_file = file.detach()
231 | 
232 |             assert raw_file
233 | 
234 |             with pytest.raises(ValueError):
235 |                 await file.read()
236 | 
237 |     assert b"0123456789" == raw_file.read(10)
238 | 
239 | 
240 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"])
241 | async def test_simple_iteration_ctx_mgr(mode):
242 |     """Test iterating over lines from a file."""
243 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
244 | 
245 |     async with aioopen(filename, mode=mode) as file:
246 |         assert not file.closed
247 |         await file.seek(0)
248 | 
249 |         counter = 1
250 | 
251 |         async for line in file:
252 |             assert line.strip() == "line " + str(counter)
253 |             counter += 1
254 | 
255 |     assert file.closed
256 | 
257 | 
258 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"])
259 | async def test_name_property(mode):
260 |     """Test the name property."""
261 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
262 | 
263 |     async with aioopen(filename, mode=mode) as file:
264 |         assert file.name == filename
265 | 
266 |     assert file.closed
267 | 
268 | 
269 | @pytest.mark.parametrize("mode", ["r", "r+", "a+"])
270 | async def test_mode_property(mode):
271 |     """Test the mode property."""
272 |     filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")
273 | 
274 |     async with aioopen(filename, mode=mode) as file:
275 |         assert file.mode == mode
276 | 
277 |     assert file.closed
278 | 
--------------------------------------------------------------------------------
/tests/threadpool/test_wrap.py:
--------------------------------------------------------------------------------
1 | from io import FileIO
2 | 
3 | import pytest
4 | 
5 | from aiofiles.threadpool import wrap
6 | 
7 | 
8 | @pytest.mark.parametrize("entity", [int, [1, 2, 3], lambda x: x**x, FileIO])
9 | def test_threadpool_wrapper_negative(entity):
10 |     """Wrapping unsupported entities raises TypeError."""
11 | 
12 |     with pytest.raises(TypeError):
13 |         wrap(entity)
14 | 
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [gh-actions]
2 | python =
3 |     3.9: py39
4 |     3.10: py310
5 |     3.11: py311
6 |     3.12: py312
7 |     3.13: py313, lint
8 |     pypy-3.9: pypy3
9 | 
10 | [tox]
11 | envlist = py39, py310, py311, py312, py313, pypy3, lint
12 | isolated_build = true
13 | skipsdist = true
14 | 
15 | [testenv:lint]
16 | skip_install = true
17 | basepython = python3.13
18 | allowlist_externals =
19 |     make
20 |     pdm
21 | commands =
22 |     pdm install -G lint
23 |     make lint
24 | 
25 | [testenv]
26 | allowlist_externals = pdm
27 | setenv =
28 |     PDM_IGNORE_SAVED_PYTHON="1"
29 | commands =
30 |     pdm install -G test
31 |     coverage run -m pytest tests {posargs}
32 | passenv = CI
33 | package = wheel
34 | wheel_build_env = .pkg
35 | 
36 | [flake8]
37 | max-line-length = 88
38 | 
--------------------------------------------------------------------------------
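A similar minimal sketch, also not a repository file, of the async tempfile and path helpers that tests/test_tempfile.py above exercises. It assumes the public aiofiles.tempfile and aiofiles.ospath modules; the main() wrapper and the literal bytes are illustrative only.

    import asyncio

    from aiofiles import ospath, tempfile


    async def main() -> None:
        async with tempfile.TemporaryDirectory() as d:
            # The directory exists for the duration of the block.
            assert await ospath.exists(d)
            async with tempfile.NamedTemporaryFile("w+b", dir=d) as f:
                await f.write(b"0123456789")
                await f.seek(0)
                assert await f.read(4) == b"0123"
        # It is removed once the block exits.
        assert not await ospath.exists(d)


    asyncio.run(main())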