├── .builds
├── alpine.yml
├── fedora.yml
└── freebsd.yml
├── .codecov.yml
├── .git-blame-ignore-revs
├── .gitattributes
├── .github
└── workflows
│ ├── autodeps.yml
│ ├── check-newsfragment.yml
│ ├── ci.yml
│ └── release.yml
├── .gitignore
├── .pre-commit-config.yaml
├── .readthedocs.yml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── LICENSE.APACHE2
├── LICENSE.MIT
├── MANIFEST.in
├── README.rst
├── ci.sh
├── docs-requirements.in
├── docs-requirements.txt
├── docs
├── Makefile
├── make.bat
├── notes.txt
└── source
│ ├── _static
│ ├── .gitkeep
│ ├── favicon-32.png
│ ├── favicon.svg
│ ├── ornament.svg
│ └── styles.css
│ ├── _templates
│ ├── .gitkeep
│ └── layout.html
│ ├── awesome-trio-libraries.rst
│ ├── code-of-conduct.rst
│ ├── conf.py
│ ├── contributing.rst
│ ├── design.rst
│ ├── glossary.rst
│ ├── history.rst
│ ├── index.rst
│ ├── local_customization.py
│ ├── reference-core.rst
│ ├── reference-core
│ ├── channels-backpressure.py
│ ├── channels-mpmc-broken.py
│ ├── channels-mpmc-fixed.py
│ ├── channels-shutdown.py
│ ├── channels-simple.py
│ ├── contextvar-example.py
│ ├── from-thread-example.py
│ └── thread-contextvars-example.py
│ ├── reference-io.rst
│ ├── reference-lowlevel.rst
│ ├── reference-testing.rst
│ ├── reference-testing
│ ├── across-realtime.out
│ └── across-realtime.py
│ ├── releasing.rst
│ ├── tutorial.rst
│ ├── tutorial
│ ├── echo-client.py
│ ├── echo-server.py
│ ├── tasks-intro.py
│ └── tasks-with-trace.py
│ └── typevars.py
├── logo
├── editable.svg
├── logo-speech-bubble-favicon.svg
├── logo-speech-bubble.svg
├── logo-transparent-no-text.svg
├── logo-transparent-with-text.svg
├── logo-with-background.svg
├── wordmark-transparent.svg
└── wordmark-with-background.svg
├── newsfragments
├── .gitkeep
├── 3232.feature.rst
├── 3248.bugfix.rst
└── README.rst
├── pyproject.toml
├── src
└── trio
│ ├── __init__.py
│ ├── __main__.py
│ ├── _abc.py
│ ├── _channel.py
│ ├── _core
│ ├── __init__.py
│ ├── _asyncgens.py
│ ├── _concat_tb.py
│ ├── _entry_queue.py
│ ├── _exceptions.py
│ ├── _generated_instrumentation.py
│ ├── _generated_io_epoll.py
│ ├── _generated_io_kqueue.py
│ ├── _generated_io_windows.py
│ ├── _generated_run.py
│ ├── _instrumentation.py
│ ├── _io_common.py
│ ├── _io_epoll.py
│ ├── _io_kqueue.py
│ ├── _io_windows.py
│ ├── _ki.py
│ ├── _local.py
│ ├── _mock_clock.py
│ ├── _parking_lot.py
│ ├── _run.py
│ ├── _run_context.py
│ ├── _tests
│ │ ├── __init__.py
│ │ ├── test_asyncgen.py
│ │ ├── test_cancelled.py
│ │ ├── test_exceptiongroup_gc.py
│ │ ├── test_guest_mode.py
│ │ ├── test_instrumentation.py
│ │ ├── test_io.py
│ │ ├── test_ki.py
│ │ ├── test_local.py
│ │ ├── test_mock_clock.py
│ │ ├── test_parking_lot.py
│ │ ├── test_run.py
│ │ ├── test_thread_cache.py
│ │ ├── test_tutil.py
│ │ ├── test_unbounded_queue.py
│ │ ├── test_windows.py
│ │ ├── tutil.py
│ │ └── type_tests
│ │ │ ├── nursery_start.py
│ │ │ └── run.py
│ ├── _thread_cache.py
│ ├── _traps.py
│ ├── _unbounded_queue.py
│ ├── _wakeup_socketpair.py
│ └── _windows_cffi.py
│ ├── _deprecate.py
│ ├── _dtls.py
│ ├── _file_io.py
│ ├── _highlevel_generic.py
│ ├── _highlevel_open_tcp_listeners.py
│ ├── _highlevel_open_tcp_stream.py
│ ├── _highlevel_open_unix_stream.py
│ ├── _highlevel_serve_listeners.py
│ ├── _highlevel_socket.py
│ ├── _highlevel_ssl_helpers.py
│ ├── _path.py
│ ├── _repl.py
│ ├── _signals.py
│ ├── _socket.py
│ ├── _ssl.py
│ ├── _subprocess.py
│ ├── _subprocess_platform
│ ├── __init__.py
│ ├── kqueue.py
│ ├── waitid.py
│ └── windows.py
│ ├── _sync.py
│ ├── _tests
│ ├── __init__.py
│ ├── _check_type_completeness.json
│ ├── astrill-codesigning-cert.cer
│ ├── check_type_completeness.py
│ ├── module_with_deprecations.py
│ ├── pytest_plugin.py
│ ├── test_abc.py
│ ├── test_channel.py
│ ├── test_contextvars.py
│ ├── test_deprecate.py
│ ├── test_deprecate_strict_exception_groups_false.py
│ ├── test_dtls.py
│ ├── test_exports.py
│ ├── test_fakenet.py
│ ├── test_file_io.py
│ ├── test_highlevel_generic.py
│ ├── test_highlevel_open_tcp_listeners.py
│ ├── test_highlevel_open_tcp_stream.py
│ ├── test_highlevel_open_unix_stream.py
│ ├── test_highlevel_serve_listeners.py
│ ├── test_highlevel_socket.py
│ ├── test_highlevel_ssl_helpers.py
│ ├── test_path.py
│ ├── test_repl.py
│ ├── test_scheduler_determinism.py
│ ├── test_signals.py
│ ├── test_socket.py
│ ├── test_ssl.py
│ ├── test_subprocess.py
│ ├── test_sync.py
│ ├── test_testing.py
│ ├── test_testing_raisesgroup.py
│ ├── test_threads.py
│ ├── test_timeouts.py
│ ├── test_tracing.py
│ ├── test_trio.py
│ ├── test_unix_pipes.py
│ ├── test_util.py
│ ├── test_wait_for_object.py
│ ├── test_windows_pipes.py
│ ├── tools
│ │ ├── __init__.py
│ │ ├── test_gen_exports.py
│ │ ├── test_mypy_annotate.py
│ │ └── test_sync_requirements.py
│ └── type_tests
│ │ ├── check_wraps.py
│ │ ├── open_memory_channel.py
│ │ ├── path.py
│ │ ├── raisesgroup.py
│ │ ├── subprocesses.py
│ │ └── task_status.py
│ ├── _threads.py
│ ├── _timeouts.py
│ ├── _tools
│ ├── __init__.py
│ ├── gen_exports.py
│ ├── mypy_annotate.py
│ └── sync_requirements.py
│ ├── _unix_pipes.py
│ ├── _util.py
│ ├── _version.py
│ ├── _wait_for_object.py
│ ├── _windows_pipes.py
│ ├── abc.py
│ ├── from_thread.py
│ ├── lowlevel.py
│ ├── py.typed
│ ├── socket.py
│ ├── testing
│ ├── __init__.py
│ ├── _check_streams.py
│ ├── _checkpoints.py
│ ├── _fake_net.py
│ ├── _memory_streams.py
│ ├── _network.py
│ ├── _raises_group.py
│ ├── _sequencer.py
│ └── _trio_test.py
│ └── to_thread.py
├── test-requirements.in
├── test-requirements.txt
├── tests
├── _trio_check_attrs_aliases.py
└── cython
│ ├── run_test_cython.py
│ └── test_cython.pyx
├── tox.ini
└── zizmor.yml
/.builds/alpine.yml:
--------------------------------------------------------------------------------
1 | image: alpine/latest
2 | packages:
3 | - curl
4 | - gcc
5 | - libffi-dev
6 | - musl-dev
7 | - openssl-dev
8 | - python3-dev
9 | # required to build cryptography
10 | - rust
11 | - cargo
12 | sources:
13 | - https://github.com/python-trio/trio
14 | tasks:
15 | - test: |
16 | python3 -m venv venv
17 | source venv/bin/activate
18 | cd trio
19 | CI_BUILD_ID=$JOB_ID CI_BUILD_URL=$JOB_URL ./ci.sh
20 | environment:
21 | CODECOV_TOKEN: 87cefb17-c44b-4f2f-8b30-1fff5769ce46
22 | JOB_NAME: Alpine
23 |
--------------------------------------------------------------------------------
/.builds/fedora.yml:
--------------------------------------------------------------------------------
1 | image: fedora/rawhide
2 | packages:
3 | - python3-devel
4 | - python3-pip
5 | sources:
6 | - https://github.com/python-trio/trio
7 | tasks:
8 | - test: |
9 | python3 -m venv venv
10 | source venv/bin/activate
11 | cd trio
12 | CI_BUILD_ID=$JOB_ID CI_BUILD_URL=$JOB_URL ./ci.sh
13 | environment:
14 | CODECOV_TOKEN: 87cefb17-c44b-4f2f-8b30-1fff5769ce46
15 | JOB_NAME: Fedora
16 |
--------------------------------------------------------------------------------
/.builds/freebsd.yml:
--------------------------------------------------------------------------------
1 | image: freebsd/latest
2 | packages:
3 | - curl
4 | - python39
5 | - py39-sqlite3
6 | - rust # required to build cryptography
7 | sources:
8 | - https://github.com/python-trio/trio
9 | tasks:
10 | - setup: sudo ln -s /usr/local/bin/bash /bin/bash
11 | - test: |
12 | python3.9 -m venv venv
13 | source venv/bin/activate
14 | cd trio
15 | CI_BUILD_ID=$JOB_ID CI_BUILD_URL=$JOB_URL ./ci.sh
16 | environment:
17 | CODECOV_TOKEN: 87cefb17-c44b-4f2f-8b30-1fff5769ce46
18 | JOB_NAME: FreeBSD
19 |
--------------------------------------------------------------------------------
/.codecov.yml:
--------------------------------------------------------------------------------
1 | # -- repository yaml --
2 |
3 | # Explicitly wait for all jobs to finish, as wait_for_ci prematurely triggers.
4 | # See https://github.com/python-trio/trio/issues/2689
5 | codecov:
6 | notify:
7 | # This number needs to be changed whenever the number of runs in CI is changed.
8 | # Another option is codecov-cli: https://github.com/codecov/codecov-cli#send-notifications
9 | after_n_builds: 31
10 | wait_for_ci: false
11 | notify_error: true # if uploads fail, replace cov comment with a comment with errors.
12 | require_ci_to_pass: false
13 |
14 | # Publicly exposing the token has some small risks from mistakes or malicious actors.
15 | # See https://docs.codecov.com/docs/codecov-tokens for correctly configuring it.
16 | token: 87cefb17-c44b-4f2f-8b30-1fff5769ce46
17 |
18 | # only post PR comment if coverage changes
19 | comment:
20 | require_changes: true
21 |
22 | coverage:
23 | # required range
24 | precision: 5
25 | round: down
26 | range: 100..100
27 | status:
28 | project:
29 | default:
30 | target: 100%
31 | patch:
32 | default:
33 | target: 100% # require patches to be 100%
34 |
--------------------------------------------------------------------------------
/.git-blame-ignore-revs:
--------------------------------------------------------------------------------
1 | # sorting all imports with isort
2 | 933f77b96f0092e1baab4474a9208fc2e379aa32
3 | # enabling ruff's flake8-commas rule
4 | b25c02a94e2defcb0fad32976b02218be1133bdf
5 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # For files generated by trio/_tools/gen_exports.py
2 | trio/_core/_generated* linguist-generated=true
3 | # Treat generated files as binary in git diff
4 | trio/_core/_generated* -diff
5 |
--------------------------------------------------------------------------------
/.github/workflows/autodeps.yml:
--------------------------------------------------------------------------------
1 | name: Autodeps
2 |
3 | on:
4 | workflow_dispatch:
5 | schedule:
6 | - cron: '0 0 1 * *'
7 |
8 | jobs:
9 | Autodeps:
10 | if: github.repository_owner == 'python-trio'
11 | name: Autodeps
12 | timeout-minutes: 10
13 | runs-on: 'ubuntu-latest'
14 | # https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions#changing-github_token-permissions
15 | permissions:
16 | pull-requests: write
17 | issues: write
18 | repository-projects: write
19 | contents: write
20 |
21 | steps:
22 | - name: Checkout
23 | uses: actions/checkout@v4
24 | with:
25 | persist-credentials: true # credentials are needed to push commits
26 | - name: Setup python
27 | uses: actions/setup-python@v5
28 | with:
29 | python-version: "3.9"
30 |
31 | - name: Bump dependencies
32 | run: |
33 | python -m pip install -U pip pre-commit
34 | python -m pip install -r test-requirements.txt
35 | uv pip compile --universal --python-version=3.9 --upgrade test-requirements.in -o test-requirements.txt
36 | uv pip compile --universal --python-version=3.11 --upgrade docs-requirements.in -o docs-requirements.txt
37 | pre-commit autoupdate --jobs 0
38 |
39 | - name: Install new requirements
40 | run: python -m pip install -r test-requirements.txt
41 |
42 | # apply newer versions' formatting
43 | - name: Black
44 | run: black src/trio
45 |
46 | - name: uv
47 | run: |
48 | uv pip compile --universal --python-version=3.9 test-requirements.in -o test-requirements.txt
49 | uv pip compile --universal --python-version=3.11 docs-requirements.in -o docs-requirements.txt
50 |
51 | - name: Commit changes and create automerge PR
52 | env:
53 | GH_TOKEN: ${{ github.token }}
54 | run: |
55 | # setup git repo
56 | git switch --force-create autodeps/bump_from_${GITHUB_SHA:0:6}
57 | git config user.name 'github-actions[bot]'
58 | git config user.email '41898282+github-actions[bot]@users.noreply.github.com'
59 |
60 | if ! git commit -am "Dependency updates"; then
61 | echo "No changes to commit!"
62 | exit 0
63 | fi
64 |
65 | git push --force --set-upstream origin autodeps/bump_from_${GITHUB_SHA:0:6}
66 |
67 | # git push returns before github is ready for a pr, so we poll until success
68 | for BACKOFF in 1 2 4 8 0; do
69 | sleep $BACKOFF
70 | if gh pr create \
71 | --label dependencies --body "" \
72 | --title "Bump dependencies from commit ${GITHUB_SHA:0:6}" \
73 | ; then
74 | break
75 | fi
76 | done
77 |
78 | if [ $BACKOFF -eq 0 ]; then
79 | echo "Could not create the PR"
80 | exit 1
81 | fi
82 |
83 | # gh pr create returns before the pr is ready, so we again poll until success
84 | # https://github.com/cli/cli/issues/2619#issuecomment-1240543096
85 | for BACKOFF in 1 2 4 8 0; do
86 | sleep $BACKOFF
87 | if gh pr merge --auto --squash; then
88 | break
89 | fi
90 | done
91 |
92 | if [ $BACKOFF -eq 0 ]; then
93 | echo "Could not set automerge"
94 | exit 1
95 | fi
96 |
--------------------------------------------------------------------------------
/.github/workflows/check-newsfragment.yml:
--------------------------------------------------------------------------------
1 | name: Check newsfragment
2 |
3 | permissions: {}
4 |
5 | on:
6 | pull_request:
7 | types: [labeled, unlabeled, opened, synchronize]
8 | branches:
9 | - main
10 |
11 | jobs:
12 | check-newsfragment:
13 | if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip newsfragment') }}
14 | runs-on: 'ubuntu-latest'
15 |
16 | steps:
17 | - uses: actions/checkout@v4
18 | with:
19 | fetch-depth: 0
20 | persist-credentials: false
21 |
22 | - name: Check newsfragments
23 | run: |
24 | if git diff --name-only origin/main | grep -v '/_tests/' | grep 'src/trio/'; then
25 | git diff --name-only origin/main | grep 'newsfragments/' || exit 1
26 | fi
27 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | on:
2 | push:
3 | tags:
4 | - v*
5 |
6 | permissions: {}
7 |
8 | # a lot of code taken from https://github.com/pypa/cibuildwheel/blob/main/examples/github-deploy.yml
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - uses: actions/checkout@v4
15 | with:
16 | persist-credentials: false
17 | - uses: actions/setup-python@v5
18 | with:
19 | python-version: "3.9"
20 | - run: python -m pip install build
21 | - run: python -m build
22 |
23 | - uses: actions/upload-artifact@v4
24 | with:
25 | name: trio-dist
26 | path: |
27 | dist/*.tar.gz
28 | dist/*.whl
29 |
30 | pypi-publish:
31 | needs: [build]
32 | name: upload release to PyPI
33 | runs-on: ubuntu-latest
34 | environment:
35 | name: release
36 | url: https://pypi.org/project/trio
37 | permissions:
38 | id-token: write
39 |
40 | steps:
41 | - uses: actions/download-artifact@v4
42 | with:
43 | pattern: trio-*
44 | path: dist
45 | merge-multiple: true
46 |
47 | - name: Publish package distributions to PyPI
48 | uses: pypa/gh-action-pypi-publish@release/v1
49 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # generated by cythonize
2 | tests/cython/test_cython.c
3 |
4 | # In case somebody wants to restore the directory for local testing
5 | notes-to-self/
6 |
7 | # Project-specific generated files
8 | docs/build/
9 |
10 | bench/results/
11 | bench/env/
12 | bench/trio/
13 |
14 | # Byte-compiled / optimized / DLL files / editor temp files
15 | __pycache__/
16 | *.py[cod]
17 | *~
18 | \#*
19 | .#*
20 | *.swp
21 |
22 | # C extensions
23 | *.so
24 |
25 | # Distribution / packaging
26 | .Python
27 | /build/
28 | /develop-eggs/
29 | /dist/
30 | /eggs/
31 | /lib/
32 | /lib64/
33 | /parts/
34 | /sdist/
35 | /var/
36 | *.egg-info/
37 | .installed.cfg
38 | *.egg
39 | /.pybuild
40 | pip-wheel-metadata/
41 |
42 | # Installer logs
43 | pip-log.txt
44 |
45 | # Unit test / coverage reports
46 | htmlcov/
47 | .tox/
48 | .venv/
49 | pyvenv.cfg
50 | .coverage
51 | .coverage.*
52 | .cache
53 | .pytest_cache/
54 | .mypy_cache/
55 | nosetests.xml
56 | coverage.xml
57 |
58 | # Temp file during Mypy processing
59 | mypy_annotate.dat
60 |
61 | # Translations
62 | *.mo
63 |
64 | # Mr Developer
65 | .mr.developer.cfg
66 | .project
67 | .pydevproject
68 |
69 | # Rope
70 | .ropeproject
71 |
72 | # Django stuff:
73 | *.log
74 | *.pot
75 |
76 | # Sphinx documentation
77 | doc/_build/
78 |
79 | # PyCharm
80 | .idea/
81 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | ci:
2 | autofix_prs: true
3 | autoupdate_schedule: weekly
4 | submodules: false
5 | # pip-compile requires internet, regenerate-files may get cache
6 | # issues in CI, so they're run in check.sh
7 | skip: [pip-compile, regenerate-files]
8 |
9 | repos:
10 | - repo: https://github.com/pre-commit/pre-commit-hooks
11 | rev: v5.0.0
12 | hooks:
13 | - id: trailing-whitespace
14 | - id: end-of-file-fixer
15 | - id: check-yaml
16 | - id: check-toml
17 | - id: check-merge-conflict
18 | - id: mixed-line-ending
19 | - id: check-case-conflict
20 | - id: sort-simple-yaml
21 | files: .pre-commit-config.yaml
22 | - repo: https://github.com/psf/black-pre-commit-mirror
23 | rev: 25.1.0
24 | hooks:
25 | - id: black
26 | - repo: https://github.com/astral-sh/ruff-pre-commit
27 | rev: v0.11.11
28 | hooks:
29 | - id: ruff
30 | types: [file]
31 | types_or: [python, pyi, toml]
32 | args: ["--show-fixes"]
33 | - repo: https://github.com/codespell-project/codespell
34 | rev: v2.4.1
35 | hooks:
36 | - id: codespell
37 | additional_dependencies:
38 | # tomli needed on 3.10. tomllib is available in stdlib on 3.11+
39 | - tomli
40 | - repo: https://github.com/adhtruong/mirrors-typos
41 | rev: v1.32.0
42 | hooks:
43 | - id: typos
44 | - repo: https://github.com/sphinx-contrib/sphinx-lint
45 | rev: v1.0.0
46 | hooks:
47 | - id: sphinx-lint
48 | - repo: https://github.com/woodruffw/zizmor-pre-commit
49 | rev: v1.8.0
50 | hooks:
51 | - id: zizmor
52 | - repo: local
53 | hooks:
54 | - id: regenerate-files
55 | name: regenerate generated files
56 | language: python
57 | entry: python src/trio/_tools/gen_exports.py
58 | pass_filenames: false
59 | additional_dependencies: ["astor", "attrs", "black", "ruff"]
60 | files: ^src\/trio\/_core\/(_run|(_i(o_(common|epoll|kqueue|windows)|nstrumentation)))\.py$
61 | - id: sync-test-requirements
62 | name: synchronize test requirements
63 | language: python
64 | entry: python src/trio/_tools/sync_requirements.py
65 | pass_filenames: false
66 | additional_dependencies: ["pyyaml"]
67 | files: ^(test-requirements\.txt)|(\.pre-commit-config\.yaml)$
68 | - repo: https://github.com/astral-sh/uv-pre-commit
69 | rev: 0.7.8
70 | hooks:
71 | # Compile requirements
72 | - id: pip-compile
73 | name: uv pip-compile test-requirements.in
74 | args: [
75 | "--universal",
76 | "--python-version=3.9",
77 | "test-requirements.in",
78 | "-o",
79 | "test-requirements.txt"]
80 | files: ^test-requirements\.(in|txt)$
81 | - id: pip-compile
82 | name: uv pip-compile docs-requirements.in
83 | args: [
84 | "--universal",
85 | "--python-version=3.11",
86 | "docs-requirements.in",
87 | "-o",
88 | "docs-requirements.txt"]
89 | files: ^docs-requirements\.(in|txt)$
90 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # https://docs.readthedocs.io/en/latest/config-file/index.html
2 | version: 2
3 |
4 | formats:
5 | - htmlzip
6 | - epub
7 |
8 | build:
9 | os: "ubuntu-22.04"
10 | tools:
11 | python: "3.11"
12 |
13 | python:
14 | install:
15 | - requirements: docs-requirements.txt
16 | - path: .
17 |
18 | sphinx:
19 | fail_on_warning: true
20 | configuration: docs/source/conf.py
21 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | For the Trio code of conduct, see:
2 | https://trio.readthedocs.io/en/latest/code-of-conduct.html
3 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | For the Trio contributing guide, see:
2 | https://trio.readthedocs.io/en/latest/contributing.html
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | This software is made available under the terms of *either* of the
2 | licenses found in LICENSE.APACHE2 or LICENSE.MIT. Contributions to
3 | Trio are made under the terms of *both* these licenses.
4 |
--------------------------------------------------------------------------------
/LICENSE.MIT:
--------------------------------------------------------------------------------
1 | Copyright Contributors to the Trio project.
2 |
3 | The MIT License (MIT)
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining
6 | a copy of this software and associated documentation files (the
7 | "Software"), to deal in the Software without restriction, including
8 | without limitation the rights to use, copy, modify, merge, publish,
9 | distribute, sublicense, and/or sell copies of the Software, and to
10 | permit persons to whom the Software is furnished to do so, subject to
11 | the following conditions:
12 |
13 | The above copyright notice and this permission notice shall be
14 | included in all copies or substantial portions of the Software.
15 |
16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include .codecov.yml
2 | include check.sh
3 | include ci.sh
4 | include LICENSE LICENSE.MIT LICENSE.APACHE2
5 | include README.rst
6 | include CODE_OF_CONDUCT.md CONTRIBUTING.md
7 | include *-requirements.in
8 | include *-requirements.txt
9 | include src/trio/py.typed
10 | include src/trio/_tests/astrill-codesigning-cert.cer
11 | recursive-include src/trio/_tests/test_ssl_certs *.pem
12 | recursive-include docs *
13 | recursive-include tests *
14 | prune docs/build
15 |
--------------------------------------------------------------------------------
/docs-requirements.in:
--------------------------------------------------------------------------------
1 | # RTD is currently installing 1.5.3, which has a bug in :lineno-match: (??)
2 | # sphinx 5.3 doesn't work with our _NoValue workaround
3 | sphinx >= 6.0
4 | jinja2
5 | # >= is necessary to prevent `uv` from selecting a `Sphinx` version this does not support
6 | sphinx_rtd_theme >= 3
7 | sphinxcontrib-jquery
8 | sphinxcontrib-trio
9 | towncrier
10 | sphinx-codeautolink
11 |
12 | # Trio's own dependencies
13 | cffi; os_name == "nt"
14 | attrs >= 23.2.0
15 | sortedcontainers
16 | idna
17 | outcome
18 | sniffio
19 | exceptiongroup >= 1.0.0rc9
20 |
21 | # See note in test-requirements.in
22 | immutables >= 0.6
23 |
24 | # types used in annotations
25 | pyOpenSSL
26 |
--------------------------------------------------------------------------------
/docs-requirements.txt:
--------------------------------------------------------------------------------
1 | # This file was autogenerated by uv via the following command:
2 | # uv pip compile --universal --python-version=3.11 docs-requirements.in -o docs-requirements.txt
3 | alabaster==1.0.0
4 | # via sphinx
5 | attrs==25.3.0
6 | # via
7 | # -r docs-requirements.in
8 | # outcome
9 | babel==2.17.0
10 | # via sphinx
11 | beautifulsoup4==4.13.4
12 | # via sphinx-codeautolink
13 | certifi==2025.4.26
14 | # via requests
15 | cffi==1.17.1 ; os_name == 'nt' or platform_python_implementation != 'PyPy'
16 | # via
17 | # -r docs-requirements.in
18 | # cryptography
19 | charset-normalizer==3.4.1
20 | # via requests
21 | click==8.1.8
22 | # via towncrier
23 | colorama==0.4.6 ; sys_platform == 'win32'
24 | # via
25 | # click
26 | # sphinx
27 | cryptography==44.0.2
28 | # via pyopenssl
29 | docutils==0.21.2
30 | # via
31 | # sphinx
32 | # sphinx-rtd-theme
33 | exceptiongroup==1.2.2
34 | # via -r docs-requirements.in
35 | idna==3.10
36 | # via
37 | # -r docs-requirements.in
38 | # requests
39 | imagesize==1.4.1
40 | # via sphinx
41 | immutables==0.21
42 | # via -r docs-requirements.in
43 | jinja2==3.1.6
44 | # via
45 | # -r docs-requirements.in
46 | # sphinx
47 | # towncrier
48 | markupsafe==3.0.2
49 | # via jinja2
50 | outcome==1.3.0.post0
51 | # via -r docs-requirements.in
52 | packaging==25.0
53 | # via sphinx
54 | pycparser==2.22 ; os_name == 'nt' or platform_python_implementation != 'PyPy'
55 | # via cffi
56 | pygments==2.19.1
57 | # via sphinx
58 | pyopenssl==25.0.0
59 | # via -r docs-requirements.in
60 | requests==2.32.3
61 | # via sphinx
62 | roman-numerals-py==3.1.0
63 | # via sphinx
64 | sniffio==1.3.1
65 | # via -r docs-requirements.in
66 | snowballstemmer==2.2.0
67 | # via sphinx
68 | sortedcontainers==2.4.0
69 | # via -r docs-requirements.in
70 | soupsieve==2.7
71 | # via beautifulsoup4
72 | sphinx==8.2.3
73 | # via
74 | # -r docs-requirements.in
75 | # sphinx-codeautolink
76 | # sphinx-rtd-theme
77 | # sphinxcontrib-jquery
78 | # sphinxcontrib-trio
79 | sphinx-codeautolink==0.17.4
80 | # via -r docs-requirements.in
81 | sphinx-rtd-theme==3.0.2
82 | # via -r docs-requirements.in
83 | sphinxcontrib-applehelp==2.0.0
84 | # via sphinx
85 | sphinxcontrib-devhelp==2.0.0
86 | # via sphinx
87 | sphinxcontrib-htmlhelp==2.1.0
88 | # via sphinx
89 | sphinxcontrib-jquery==4.1
90 | # via
91 | # -r docs-requirements.in
92 | # sphinx-rtd-theme
93 | sphinxcontrib-jsmath==1.0.1
94 | # via sphinx
95 | sphinxcontrib-qthelp==2.0.0
96 | # via sphinx
97 | sphinxcontrib-serializinghtml==2.0.0
98 | # via sphinx
99 | sphinxcontrib-trio==1.1.2
100 | # via -r docs-requirements.in
101 | towncrier==24.8.0
102 | # via -r docs-requirements.in
103 | typing-extensions==4.13.2
104 | # via
105 | # beautifulsoup4
106 | # pyopenssl
107 | urllib3==2.4.0
108 | # via requests
109 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SPHINXPROJ = Trio
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 | set SPHINXPROJ=Trio
13 |
14 | if "%1" == "" goto help
15 |
16 | %SPHINXBUILD% >NUL 2>NUL
17 | if errorlevel 9009 (
18 | echo.
19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
20 | echo.installed, then set the SPHINXBUILD environment variable to point
21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
22 | echo.may add the Sphinx directory to PATH.
23 | echo.
24 | echo.If you don't have Sphinx installed, grab it from
25 | echo.http://sphinx-doc.org/
26 | exit /b 1
27 | )
28 |
29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
30 | goto end
31 |
32 | :help
33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
34 |
35 | :end
36 | popd
37 |
--------------------------------------------------------------------------------
/docs/notes.txt:
--------------------------------------------------------------------------------
1 | it's possible from extension/configuration modules to get sphinx to
2 | know about new roles and even new autodoc types.
3 |
4 | See curio docs, which cribbed from the python docs and have a link to
5 | them
6 |
7 | and also:
8 |
9 | https://github.com/aio-libs/sphinxcontrib-asyncio/pull/1/files
10 |
11 | which added autodoc hooks to sphinxcontrib-asyncio
12 |
13 |
14 | it looks like there's a table of cross-reference roles in
15 | sphinx/domains/python.py (look for PyXRefRole), which inherits from
16 | sphinx.roles.XRefRole, which has some notes on how to subclass and
17 | change rendering (see 'result_nodes' method)
18 |
19 | so..... it might even be possible to give async functions/methods their
20 | own color :-)
21 |
--------------------------------------------------------------------------------
/docs/source/_static/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/docs/source/_static/.gitkeep
--------------------------------------------------------------------------------
/docs/source/_static/favicon-32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/docs/source/_static/favicon-32.png
--------------------------------------------------------------------------------
/docs/source/_static/styles.css:
--------------------------------------------------------------------------------
1 | /* Make .. deprecation:: blocks visible
2 | * (by default they're entirely unstyled)
3 | */
4 | .deprecated {
5 | background-color: #ffe13b;
6 | }
7 |
8 | /* Make typevar/paramspec names distinguishable from classes. */
9 | .typevarref {
10 | text-decoration: dashed underline;
11 | }
12 |
13 | /* Add a snakey triskelion ornament to
14 | * https://stackoverflow.com/questions/8862344/css-hr-with-ornament/18541258#18541258
15 |  * but only do it to <hr>s in the content box, b/c the RTD popup control panel
16 |  * thingummy also has an <hr> in it, and putting the ornament on that looks
17 | * *really weird*. (In particular, the background color is wrong.)
18 | */
19 | .rst-content hr {
20 | overflow: visible;
21 | }
22 |
23 | .rst-content hr:after {
24 | /* This .svg gets displayed on top of the middle of the hrule. It has a box
25 | * behind the logo that's colored to match the RTD theme body background
26 | * color (#fcfcfc), which hides the middle part of the hrule to make it
27 | * look like there's a gap in it. The size of the box determines the size
28 | * of the gap.
29 | */
30 | background: url('ornament.svg') no-repeat top center;
31 | background-size: contain;
32 | content: "";
33 | display: block;
34 | height: 30px;
35 | position: relative;
36 | top: -15px;
37 | }
38 |
39 | /* Hacks to make the upper-left logo area look nicer */
40 |
41 | .wy-side-nav-search > a {
42 | color: #306998 !important;
43 | }
44 |
45 | /* vertically center version text */
46 | .wy-side-nav-search > a {
47 | display: flex;
48 | align-items: center;
49 | margin: auto;
50 | width: max-content;
51 | }
52 |
53 | .wy-side-nav-search > a img.logo {
54 | margin-left: 0;
55 | margin-right: 5px;
56 | }
57 |
58 | /* Get rid of the weird super dark "Contents" label that wastes vertical space
59 | */
60 | .wy-menu-vertical > p.caption {
61 | display: none !important;
62 | }
63 |
64 | /* I do not like RTD's use of Roboto Slab for headlines. So force it back to
65 | * Lato (or whatever fallback it's using if Lato isn't available for some
66 | * reason). I also experimented with using Montserrat to be extra obnoxiously
67 | * on brand, but honestly you couldn't really tell so there wasn't much point
68 | * in adding page weight for that, and this is going to match the body text
69 | * better. (Montserrat for body text *definitely* didn't look good, alas.)
70 | */
71 | h1, h2, h3, h4, h5, h6, legend, .rst-content .toctree-wrapper p.caption {
72 | font-family: inherit !important;
73 | }
74 |
75 | /* Get rid of the horrible red for literal content */
76 | .rst-content tt.literal, .rst-content tt.literal, .rst-content code.literal {
77 | color: #222 !important;
78 | }
79 |
80 | /* Style the "Need help?" text just underneath the search box */
81 | .trio-help-hint {
82 | line-height: normal;
83 | margin-bottom: 0;
84 | /* font-size: 12px; */
85 | font-size: 80%; /* matches the "Search docs" box */
86 | padding-top: 6px;
87 | color: #306998;
88 | text-align: center;
89 | }
90 |
91 | a.trio-help-hint, .trio-help-hint a:link, .trio-help-hint a:visited {
92 | color: inherit;
93 | /* Like text-decoration: underline, but with a thinner line */
94 | text-decoration: none;
95 | border-bottom: 1px solid;
96 | }
97 |
--------------------------------------------------------------------------------
/docs/source/_templates/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/docs/source/_templates/.gitkeep
--------------------------------------------------------------------------------
/docs/source/_templates/layout.html:
--------------------------------------------------------------------------------
1 | {#
2 | https://stackoverflow.com/questions/25243482/how-to-add-sphinx-generated-index-to-the-sidebar-when-using-read-the-docs-theme
3 | #}
4 | {% extends "!layout.html" %}
5 |
6 | {% block sidebartitle %}
7 |
8 | {# the logo helper function was removed in Sphinx 6 and deprecated since Sphinx 4 #}
9 | {# the master_doc variable was renamed to root_doc in Sphinx 4 (master_doc still exists in later Sphinx versions) #}
10 | {# check sphinx_rtd_theme/layout.html:sidebartitle if this snippet has become outdated #}
11 |
12 |
13 |
14 | {%- set nav_version = version %}
15 | {% if current_version %}
16 | {%- set nav_version = current_version %}
17 | {% endif %}
18 | {# don't show the version on RTD if it's the default #}
19 | {% if nav_version != 'latest' %}
20 | {{ nav_version }}
21 | {% endif %}
22 |
23 |
24 | {% include "searchbox.html" %}
25 |
26 | Need help? Live chat, forum, StackOverflow.
30 | {% endblock %}
31 |
--------------------------------------------------------------------------------
/docs/source/glossary.rst:
--------------------------------------------------------------------------------
1 | :orphan:
2 |
3 | .. _glossary:
4 |
5 | ********
6 | Glossary
7 | ********
8 |
9 | .. glossary::
10 |
11 | asynchronous file object
12 | This is an object with an API identical to a :term:`file object`, with
13 | the exception that all methods that do I/O are async functions.
14 |
15 | The main ways to create an asynchronous file object are by using the
16 | :func:`trio.open_file` function or the :meth:`trio.Path.open`
17 | method. See :ref:`async-file-io` for more details.
18 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | .. Trio documentation master file, created by
2 | sphinx-quickstart on Sat Jan 21 19:11:14 2017.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | =============================================================
7 | Trio: a friendly Python library for async concurrency and I/O
8 | =============================================================
9 |
10 | The Trio project's goal is to produce a production-quality,
11 | `permissively licensed
12 | <https://github.com/python-trio/trio/blob/main/LICENSE>`__,
13 | async/await-native I/O library for Python. Like all async libraries,
14 | its main purpose is to help you write programs that do **multiple
15 | things at the same time** with **parallelized I/O**. A web spider that
16 | wants to fetch lots of pages in parallel, a web server that needs to
17 | juggle lots of downloads and websocket connections at the same time, a
18 | process supervisor monitoring multiple subprocesses... that sort of
19 | thing. Compared to other libraries, Trio attempts to distinguish
20 | itself with an obsessive focus on **usability** and
21 | **correctness**. Concurrency is complicated; we try to make it *easy*
22 | to get things *right*.
23 |
24 | Trio was built from the ground up to take advantage of the `latest
25 | Python features `__, and
26 | draws inspiration from `many sources
27 | <https://github.com/python-trio/trio/wiki/Reading-list>`__, in
28 | particular Dave Beazley's `Curio <https://github.com/dabeaz/curio>`__.
29 | The resulting design is radically simpler than older competitors like
30 | `asyncio <https://docs.python.org/3/library/asyncio.html>`__ and
31 | `Twisted <https://twistedmatrix.com/>`__, yet just as capable. Trio is
32 | the Python I/O library I always wanted; I find it makes building
33 | I/O-oriented programs easier, less error-prone, and just plain more
34 | fun. Perhaps you'll find the same.
35 |
36 | Trio is a mature and well-tested library, though it retains its
37 | “experimental” classification to allow for occasional breaking API
38 | changes as we push toward a 1.0 release. In practice, such changes are
39 | rare and typically minor. It is widely used in production environments,
40 | and we *do* encourage you to use it, but consider `subscribing to issue
41 | #1 `__ to get a warning
42 | and a chance to give feedback about any compatibility-breaking changes.
43 |
44 | Vital statistics:
45 |
46 | * Supported environments: We test on
47 |
48 | - Python: 3.9+ (CPython and PyPy)
49 | - Windows, macOS, Linux (glibc and musl), FreeBSD
50 |
51 | Other environments might also work; give it a try and see.
52 |
53 | * Install: ``python3 -m pip install -U trio`` (or on Windows, maybe
54 | ``py -3 -m pip install -U trio``). No compiler needed.
55 |
56 | * Tutorial and reference manual: https://trio.readthedocs.io
57 |
58 | * Bug tracker and source code: https://github.com/python-trio/trio
59 |
60 | * Real-time chat: https://gitter.im/python-trio/general
61 |
62 | * Discussion forum: https://trio.discourse.group
63 |
64 | * License: MIT or Apache 2, your choice
65 |
66 | * Contributor guide: https://trio.readthedocs.io/en/latest/contributing.html
67 |
68 | * Code of conduct: Contributors are requested to follow our `code of
69 | conduct
70 | `_
71 | in all project spaces.
72 |
73 |
74 | .. toctree::
75 | :maxdepth: 2
76 | :caption: Trio's friendly, yet comprehensive, manual:
77 |
78 | tutorial.rst
79 | awesome-trio-libraries.rst
80 | reference-core.rst
81 | reference-io.rst
82 | reference-testing.rst
83 | reference-lowlevel.rst
84 | design.rst
85 | history.rst
86 | contributing.rst
87 | releasing.rst
88 | code-of-conduct.rst
89 |
90 | ====================
91 | Indices and tables
92 | ====================
93 |
94 | * :ref:`genindex`
95 | * :ref:`modindex`
96 | * :ref:`search`
97 | * :ref:`glossary`
98 |
--------------------------------------------------------------------------------
/docs/source/local_customization.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING
4 |
5 | from docutils.parsers.rst import directives as directives
6 | from sphinx import addnodes
7 | from sphinx.domains.python import PyClasslike
8 | from sphinx.ext.autodoc import (
9 | ClassLevelDocumenter as ClassLevelDocumenter,
10 | FunctionDocumenter as FunctionDocumenter,
11 | MethodDocumenter as MethodDocumenter,
12 | Options as Options,
13 | )
14 |
15 | if TYPE_CHECKING:
16 | from sphinx.addnodes import desc_signature
17 | from sphinx.application import Sphinx
18 |
19 | """
20 |
21 | .. interface:: The nursery interface
22 |
23 | .. attribute:: blahblah
24 |
25 | """
26 |
27 |
28 | class Interface(PyClasslike):
29 | def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]:
30 | signode += addnodes.desc_name(sig, sig)
31 | return sig, ""
32 |
33 | def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
34 | return f"{name_cls[0]} (interface in {modname})"
35 |
36 |
37 | def setup(app: Sphinx) -> None:
38 | app.add_directive_to_domain("py", "interface", Interface)
39 |
--------------------------------------------------------------------------------
/docs/source/reference-core/channels-backpressure.py:
--------------------------------------------------------------------------------
1 | # Simulate a producer that generates values 10x faster than the
2 | # consumer can handle them.
3 |
4 | import trio
5 | import math
6 |
7 |
8 | async def producer(send_channel):
9 | count = 0
10 | while True:
11 | # Pretend that we have to do some work to create this message, and it
12 | # takes 0.1 seconds:
13 | await trio.sleep(0.1)
14 | await send_channel.send(count)
15 | print("Sent message:", count)
16 | count += 1
17 |
18 |
19 | async def consumer(receive_channel):
20 | async for value in receive_channel:
21 | print("Received message:", value)
22 | # Pretend that we have to do some work to handle this message, and it
23 | # takes 1 second
24 | await trio.sleep(1)
25 |
26 |
27 | async def main():
28 | send_channel, receive_channel = trio.open_memory_channel(math.inf)
29 | async with trio.open_nursery() as nursery:
30 | nursery.start_soon(producer, send_channel)
31 | nursery.start_soon(consumer, receive_channel)
32 |
33 |
34 | trio.run(main)
35 |
--------------------------------------------------------------------------------
/docs/source/reference-core/channels-mpmc-broken.py:
--------------------------------------------------------------------------------
1 | # This example usually crashes!
2 |
3 | import trio
4 | import random
5 |
6 |
7 | async def main():
8 | async with trio.open_nursery() as nursery:
9 | send_channel, receive_channel = trio.open_memory_channel(0)
10 | # Start two producers
11 | nursery.start_soon(producer, "A", send_channel)
12 | nursery.start_soon(producer, "B", send_channel)
13 | # And two consumers
14 | nursery.start_soon(consumer, "X", receive_channel)
15 | nursery.start_soon(consumer, "Y", receive_channel)
16 |
17 |
18 | async def producer(name, send_channel):
19 | async with send_channel:
20 | for i in range(3):
21 | await send_channel.send(f"{i} from producer {name}")
22 | # Random sleeps help trigger the problem more reliably
23 | await trio.sleep(random.random())
24 |
25 |
26 | async def consumer(name, receive_channel):
27 | async with receive_channel:
28 | async for value in receive_channel:
29 | print(f"consumer {name} got value {value!r}")
30 | # Random sleeps help trigger the problem more reliably
31 | await trio.sleep(random.random())
32 |
33 |
34 | trio.run(main)
35 |
--------------------------------------------------------------------------------
/docs/source/reference-core/channels-mpmc-fixed.py:
--------------------------------------------------------------------------------
1 | import trio
2 | import random
3 |
4 |
5 | async def main():
6 | async with trio.open_nursery() as nursery:
7 | send_channel, receive_channel = trio.open_memory_channel(0)
8 | async with send_channel, receive_channel:
9 | # Start two producers, giving each its own private clone
10 | nursery.start_soon(producer, "A", send_channel.clone())
11 | nursery.start_soon(producer, "B", send_channel.clone())
12 | # And two consumers, giving each its own private clone
13 | nursery.start_soon(consumer, "X", receive_channel.clone())
14 | nursery.start_soon(consumer, "Y", receive_channel.clone())
15 |
16 |
17 | async def producer(name, send_channel):
18 | async with send_channel:
19 | for i in range(3):
20 | await send_channel.send(f"{i} from producer {name}")
21 | # Random sleeps help trigger the problem more reliably
22 | await trio.sleep(random.random())
23 |
24 |
25 | async def consumer(name, receive_channel):
26 | async with receive_channel:
27 | async for value in receive_channel:
28 | print(f"consumer {name} got value {value!r}")
29 | # Random sleeps help trigger the problem more reliably
30 | await trio.sleep(random.random())
31 |
32 |
33 | trio.run(main)
34 |
--------------------------------------------------------------------------------
/docs/source/reference-core/channels-shutdown.py:
--------------------------------------------------------------------------------
1 | import trio
2 |
3 |
4 | async def main():
5 | async with trio.open_nursery() as nursery:
6 | send_channel, receive_channel = trio.open_memory_channel(0)
7 | nursery.start_soon(producer, send_channel)
8 | nursery.start_soon(consumer, receive_channel)
9 |
10 |
11 | async def producer(send_channel):
12 | async with send_channel:
13 | for i in range(3):
14 | await send_channel.send(f"message {i}")
15 |
16 |
17 | async def consumer(receive_channel):
18 | async with receive_channel:
19 | async for value in receive_channel:
20 | print(f"got value {value!r}")
21 |
22 |
23 | trio.run(main)
24 |
--------------------------------------------------------------------------------
/docs/source/reference-core/channels-simple.py:
--------------------------------------------------------------------------------
1 | import trio
2 |
3 |
4 | async def main():
5 | async with trio.open_nursery() as nursery:
6 | # Open a channel:
7 | send_channel, receive_channel = trio.open_memory_channel(0)
8 | # Start a producer and a consumer, passing one end of the channel to
9 | # each of them:
10 | nursery.start_soon(producer, send_channel)
11 | nursery.start_soon(consumer, receive_channel)
12 |
13 |
14 | async def producer(send_channel):
15 | # Producer sends 3 messages
16 | for i in range(3):
17 | # The producer sends using 'await send_channel.send(...)'
18 | await send_channel.send(f"message {i}")
19 |
20 |
21 | async def consumer(receive_channel):
22 | # The consumer uses an 'async for' loop to receive the values:
23 | async for value in receive_channel:
24 | print(f"got value {value!r}")
25 |
26 |
27 | trio.run(main)
28 |
--------------------------------------------------------------------------------
/docs/source/reference-core/contextvar-example.py:
--------------------------------------------------------------------------------
1 | import random
2 | import trio
3 | import contextvars
4 |
5 | request_info = contextvars.ContextVar("request_info")
6 |
7 |
8 | # Example logging function that tags each line with the request identifier.
9 | def log(msg):
10 | # Read from task-local storage:
11 | request_tag = request_info.get()
12 |
13 | print(f"request {request_tag}: {msg}")
14 |
15 |
16 | # An example "request handler" that does some work itself and also
17 | # spawns some helper tasks to do some concurrent work.
18 | async def handle_request(tag):
19 | # Write to task-local storage:
20 | request_info.set(tag)
21 |
22 | log("Request handler started")
23 | await trio.sleep(random.random())
24 | async with trio.open_nursery() as nursery:
25 | nursery.start_soon(concurrent_helper, "a")
26 | nursery.start_soon(concurrent_helper, "b")
27 | await trio.sleep(random.random())
28 | log("Request received finished")
29 |
30 |
31 | async def concurrent_helper(job):
32 | log(f"Helper task {job} started")
33 | await trio.sleep(random.random())
34 | log(f"Helper task {job} finished")
35 |
36 |
37 | # Spawn several "request handlers" simultaneously, to simulate a
38 | # busy server handling multiple requests at the same time.
39 | async def main():
40 | async with trio.open_nursery() as nursery:
41 | for i in range(3):
42 | nursery.start_soon(handle_request, i)
43 |
44 |
45 | trio.run(main)
46 |
--------------------------------------------------------------------------------
/docs/source/reference-core/from-thread-example.py:
--------------------------------------------------------------------------------
1 | import trio
2 |
3 |
4 | def thread_fn(receive_from_trio, send_to_trio):
5 | while True:
6 | # Since we're in a thread, we can't call methods on Trio
7 | # objects directly -- so we use trio.from_thread to call them.
8 | try:
9 | request = trio.from_thread.run(receive_from_trio.receive)
10 | except trio.EndOfChannel:
11 | trio.from_thread.run(send_to_trio.aclose)
12 | return
13 | else:
14 | response = request + 1
15 | trio.from_thread.run(send_to_trio.send, response)
16 |
17 |
18 | async def main():
19 | send_to_thread, receive_from_trio = trio.open_memory_channel(0)
20 | send_to_trio, receive_from_thread = trio.open_memory_channel(0)
21 |
22 | async with trio.open_nursery() as nursery:
23 | # In a background thread, run:
24 | # thread_fn(receive_from_trio, send_to_trio)
25 | nursery.start_soon(
26 | trio.to_thread.run_sync, thread_fn, receive_from_trio, send_to_trio
27 | )
28 |
29 | # prints "1"
30 | await send_to_thread.send(0)
31 | print(await receive_from_thread.receive())
32 |
33 | # prints "2"
34 | await send_to_thread.send(1)
35 | print(await receive_from_thread.receive())
36 |
37 | # When we close the channel, it signals the thread to exit.
38 | await send_to_thread.aclose()
39 |
40 | # When we exit the nursery, it waits for the background thread to
41 | # exit.
42 |
43 |
44 | trio.run(main)
45 |
--------------------------------------------------------------------------------
/docs/source/reference-core/thread-contextvars-example.py:
--------------------------------------------------------------------------------
1 | import contextvars
2 | import time
3 |
4 | import trio
5 |
6 | request_state = contextvars.ContextVar("request_state")
7 |
8 | # Blocking function that should be run on a thread
9 | # It could be reading or writing files, communicating with a database
10 | # with a driver not compatible with async / await, etc.
11 | def work_in_thread(msg):
12 | # Only use request_state.get() inside the worker thread
13 | state_value = request_state.get()
14 | current_user_id = state_value["current_user_id"]
15 | time.sleep(3) # this would be some blocking call, like reading a file
16 | print(f"Processed user {current_user_id} with message {msg} in a thread worker")
17 | # Modify/mutate the state object, without setting the entire
18 | # contextvar with request_state.set()
19 | state_value["msg"] = msg
20 |
21 |
22 | # An example "request handler" that does some work itself and also
23 | # spawns some helper tasks in threads to execute blocking code.
24 | async def handle_request(current_user_id):
25 | # Write to task-local storage:
26 | current_state = {"current_user_id": current_user_id, "msg": ""}
27 | request_state.set(current_state)
28 |
29 | # Here the current implicit contextvars context will be automatically copied
30 | # inside the worker thread
31 | await trio.to_thread.run_sync(work_in_thread, f"Hello {current_user_id}")
32 | # Extract the value set inside the thread in the same object stored in a contextvar
33 | new_msg = current_state["msg"]
34 | print(
35 | f"New contextvar value from worker thread for user {current_user_id}: {new_msg}"
36 | )
37 |
38 |
39 | # Spawn several "request handlers" simultaneously, to simulate a
40 | # busy server handling multiple requests at the same time.
41 | async def main():
42 | async with trio.open_nursery() as nursery:
43 | for i in range(3):
44 | nursery.start_soon(handle_request, i)
45 |
46 |
47 | trio.run(main)
48 |
--------------------------------------------------------------------------------
/docs/source/reference-testing/across-realtime.out:
--------------------------------------------------------------------------------
1 | Clock where time passes at 100 years per second:
2 |
3 | task2: sleeping for 5 years
4 | task1: sleeping for 1 year
5 | task1: woke up; clock says I've slept 1.0365006048232317 years
6 | task1: sleeping for 1 year, 100 times
7 | task2: woke up; clock says I've slept 5.0572111969813704 years
8 | task2: sleeping for 500 years
9 | task1: slept 104.77677842136472 years total
10 | task2: slept 505.25014589075 years total
11 | Total real time elapsed: 5.053582429885864 seconds
12 |
13 | Clock where time automatically skips past the boring parts:
14 |
15 | task2: sleeping for 5 years
16 | task1: sleeping for 1 year
17 | task1: woke up; clock says I've slept 1.0 years
18 | task1: sleeping for 1 year, 100 times
19 | task2: woke up; clock says I've slept 5.0 years
20 | task2: sleeping for 500 years
21 | task1: slept 101.0 years total
22 | task2: slept 505.0 years total
23 | Total real time elapsed: 0.019298791885375977 seconds
24 |
--------------------------------------------------------------------------------
/docs/source/reference-testing/across-realtime.py:
--------------------------------------------------------------------------------
1 | # across-realtime.py
2 |
3 | import time
4 | import trio
5 | import trio.testing
6 |
7 | YEAR = 365 * 24 * 60 * 60 # seconds
8 |
9 |
10 | async def task1():
11 | start = trio.current_time()
12 |
13 | print("task1: sleeping for 1 year")
14 | await trio.sleep(YEAR)
15 |
16 | duration = trio.current_time() - start
17 | print(f"task1: woke up; clock says I've slept {duration / YEAR} years")
18 |
19 | print("task1: sleeping for 1 year, 100 times")
20 | for _ in range(100):
21 | await trio.sleep(YEAR)
22 |
23 | duration = trio.current_time() - start
24 | print(f"task1: slept {duration / YEAR} years total")
25 |
26 |
27 | async def task2():
28 | start = trio.current_time()
29 |
30 | print("task2: sleeping for 5 years")
31 | await trio.sleep(5 * YEAR)
32 |
33 | duration = trio.current_time() - start
34 | print(f"task2: woke up; clock says I've slept {duration / YEAR} years")
35 |
36 | print("task2: sleeping for 500 years")
37 | await trio.sleep(500 * YEAR)
38 |
39 | duration = trio.current_time() - start
40 | print(f"task2: slept {duration / YEAR} years total")
41 |
42 |
43 | async def main():
44 | async with trio.open_nursery() as nursery:
45 | nursery.start_soon(task1)
46 | nursery.start_soon(task2)
47 |
48 |
49 | def run_example(clock):
50 | real_start = time.perf_counter()
51 | trio.run(main, clock=clock)
52 | real_duration = time.perf_counter() - real_start
53 | print(f"Total real time elapsed: {real_duration} seconds")
54 |
55 |
56 | print("Clock where time passes at 100 years per second:\n")
57 | run_example(trio.testing.MockClock(rate=100 * YEAR))
58 |
59 | print("\nClock where time automatically skips past the boring parts:\n")
60 | run_example(trio.testing.MockClock(autojump_threshold=0))
61 |
--------------------------------------------------------------------------------
/docs/source/releasing.rst:
--------------------------------------------------------------------------------
1 | .. _releasing:
2 |
3 | Preparing a release
4 | -------------------
5 |
6 | Things to do for releasing:
7 |
8 | * announce intent to release on gitter
9 |
10 | * check for open issues / pull requests that really should be in the release
11 |
12 | + come back when these are done
13 |
14 | + … or ignore them and do another release next week
15 |
16 | * check for deprecations "long enough ago" (two months or two releases, whichever is longer)
17 |
18 | + remove affected code
19 |
20 | * Do the actual release changeset
21 |
22 | + bump version number
23 |
24 | - increment as per Semantic Versioning rules
25 |
26 | - remove ``+dev`` tag from version number
27 |
28 | + Run ``towncrier``
29 |
30 | - review history change
31 |
32 |     - ``git rm`` the now outdated newsfragments
33 |
34 | + commit
35 |
36 | * push to your personal repository
37 |
38 | * create pull request to ``python-trio/trio``'s "main" branch
39 |
40 | * verify that all checks succeeded
41 |
42 | * tag with vVERSION, push tag on ``python-trio/trio`` (not on your personal repository)
43 |
44 | * approve the release workflow's publish job
45 |
46 | * update version number in the same pull request
47 |
48 | + add ``+dev`` tag to the end
49 |
50 | * merge the release pull request
51 |
52 | * make a GitHub release (go to the tag and press "Create release from tag")
53 |
54 | + paste in the new content in ``history.rst`` and convert it to markdown: turn the parts under section into ``---``, update links to just be the links, and whatever else is necessary.
55 |
56 | + include anything else that might be pertinent, like a link to the commits between the latest and current release.
57 |
58 | * announce on gitter
59 |
--------------------------------------------------------------------------------
/docs/source/tutorial/echo-client.py:
--------------------------------------------------------------------------------
1 | # echo-client.py
2 |
3 | import sys
4 | import trio
5 |
6 | # arbitrary, but:
7 | # - must be in between 1024 and 65535
8 | # - can't be in use by some other program on your computer
9 | # - must match what we set in our echo server
10 | PORT = 12345
11 |
12 |
13 | async def sender(client_stream):
14 | print("sender: started!")
15 | while True:
16 | data = b"async can sometimes be confusing, but I believe in you!"
17 | print(f"sender: sending {data!r}")
18 | await client_stream.send_all(data)
19 | await trio.sleep(1)
20 |
21 |
22 | async def receiver(client_stream):
23 | print("receiver: started!")
24 | async for data in client_stream:
25 | print(f"receiver: got data {data!r}")
26 | print("receiver: connection closed")
27 | sys.exit()
28 |
29 |
30 | async def parent():
31 | print(f"parent: connecting to 127.0.0.1:{PORT}")
32 | client_stream = await trio.open_tcp_stream("127.0.0.1", PORT)
33 | async with client_stream:
34 | async with trio.open_nursery() as nursery:
35 | print("parent: spawning sender...")
36 | nursery.start_soon(sender, client_stream)
37 |
38 | print("parent: spawning receiver...")
39 | nursery.start_soon(receiver, client_stream)
40 |
41 |
42 | trio.run(parent)
43 |
--------------------------------------------------------------------------------
/docs/source/tutorial/echo-server.py:
--------------------------------------------------------------------------------
1 | # echo-server.py
2 |
3 | import trio
4 | from itertools import count
5 |
6 | # Port is arbitrary, but:
7 | # - must be in between 1024 and 65535
8 | # - can't be in use by some other program on your computer
9 | # - must match what we set in our echo client
10 | PORT = 12345
11 |
12 | CONNECTION_COUNTER = count()
13 |
14 |
15 | async def echo_server(server_stream):
16 | # Assign each connection a unique number to make our debug prints easier
17 | # to understand when there are multiple simultaneous connections.
18 | ident = next(CONNECTION_COUNTER)
19 | print(f"echo_server {ident}: started")
20 | try:
21 | async for data in server_stream:
22 | print(f"echo_server {ident}: received data {data!r}")
23 | await server_stream.send_all(data)
24 | print(f"echo_server {ident}: connection closed")
25 | # FIXME: add discussion of (Base)ExceptionGroup to the tutorial, and use
26 | # exceptiongroup.catch() here. (Not important in this case, but important
27 | # if the server code uses nurseries internally.)
28 | except Exception as exc:
29 | # Unhandled exceptions will propagate into our parent and take
30 | # down the whole program. If the exception is KeyboardInterrupt,
31 | # that's what we want, but otherwise maybe not...
32 | print(f"echo_server {ident}: crashed: {exc!r}")
33 |
34 |
35 | async def main():
36 | await trio.serve_tcp(echo_server, PORT)
37 |
38 |
39 | # We could also just write 'trio.run(trio.serve_tcp, echo_server, PORT)', but real
40 | # programs almost always end up doing other stuff too and then we'd have to go
41 | # back and factor it out into a separate function anyway. So it's simplest to
42 | # just make it a standalone function from the beginning.
43 | trio.run(main)
44 |
--------------------------------------------------------------------------------
/docs/source/tutorial/tasks-intro.py:
--------------------------------------------------------------------------------
1 | # tasks-intro.py
2 |
3 | import trio
4 |
5 |
6 | async def child1():
7 | print(" child1: started! sleeping now...")
8 | await trio.sleep(1)
9 | print(" child1: exiting!")
10 |
11 |
12 | async def child2():
13 | print(" child2: started! sleeping now...")
14 | await trio.sleep(1)
15 | print(" child2: exiting!")
16 |
17 |
18 | async def parent():
19 | print("parent: started!")
20 | async with trio.open_nursery() as nursery:
21 | print("parent: spawning child1...")
22 | nursery.start_soon(child1)
23 |
24 | print("parent: spawning child2...")
25 | nursery.start_soon(child2)
26 |
27 | print("parent: waiting for children to finish...")
28 | # -- we exit the nursery block here --
29 | print("parent: all done!")
30 |
31 |
32 | trio.run(parent)
33 |
--------------------------------------------------------------------------------
/docs/source/tutorial/tasks-with-trace.py:
--------------------------------------------------------------------------------
1 | # tasks-with-trace.py
2 |
3 | import trio
4 |
5 |
6 | async def child1():
7 | print(" child1: started! sleeping now...")
8 | await trio.sleep(1)
9 | print(" child1: exiting!")
10 |
11 |
12 | async def child2():
13 | print(" child2 started! sleeping now...")
14 | await trio.sleep(1)
15 | print(" child2 exiting!")
16 |
17 |
18 | async def parent():
19 | print("parent: started!")
20 | async with trio.open_nursery() as nursery:
21 | print("parent: spawning child1...")
22 | nursery.start_soon(child1)
23 |
24 | print("parent: spawning child2...")
25 | nursery.start_soon(child2)
26 |
27 | print("parent: waiting for children to finish...")
28 | # -- we exit the nursery block here --
29 | print("parent: all done!")
30 |
31 |
32 | class Tracer(trio.abc.Instrument):
33 | def before_run(self):
34 | print("!!! run started")
35 |
36 | def _print_with_task(self, msg, task):
37 | # repr(task) is perhaps more useful than task.name in general,
38 | # but in context of a tutorial the extra noise is unhelpful.
39 | print(f"{msg}: {task.name}")
40 |
41 | def task_spawned(self, task):
42 | self._print_with_task("### new task spawned", task)
43 |
44 | def task_scheduled(self, task):
45 | self._print_with_task("### task scheduled", task)
46 |
47 | def before_task_step(self, task):
48 | self._print_with_task(">>> about to run one step of task", task)
49 |
50 | def after_task_step(self, task):
51 | self._print_with_task("<<< task step finished", task)
52 |
53 | def task_exited(self, task):
54 | self._print_with_task("### task exited", task)
55 |
56 | def before_io_wait(self, timeout):
57 | if timeout:
58 | print(f"### waiting for I/O for up to {timeout} seconds")
59 | else:
60 | print("### doing a quick check for I/O")
61 | self._sleep_time = trio.current_time()
62 |
63 | def after_io_wait(self, timeout):
64 | duration = trio.current_time() - self._sleep_time
65 | print(f"### finished I/O check (took {duration} seconds)")
66 |
67 | def after_run(self):
68 | print("!!! run finished")
69 |
70 |
71 | trio.run(parent, instruments=[Tracer()])
72 |
--------------------------------------------------------------------------------
/docs/source/typevars.py:
--------------------------------------------------------------------------------
1 | """Transform references to typevars to avoid missing reference errors.
2 |
3 | See https://github.com/sphinx-doc/sphinx/issues/7722 also.
4 | """
5 |
6 | from __future__ import annotations
7 |
8 | import re
9 | from pathlib import Path
10 | from typing import TYPE_CHECKING
11 |
12 | import trio
13 | from sphinx.errors import NoUri
14 |
15 | if TYPE_CHECKING:
16 | from sphinx.addnodes import Element, pending_xref
17 | from sphinx.application import Sphinx
18 | from sphinx.environment import BuildEnvironment
19 |
20 |
def identify_typevars(trio_folder: Path) -> None:
    """Record all typevars in trio.

    Scans every ``*.py`` file under ``trio_folder`` for ``TypeVar``,
    ``TypeVarTuple`` and ``ParamSpec`` declarations, filling in
    ``typevars_qualified`` (dotted module path + name -> kind) and
    ``typevars_named`` (bare name -> kind).
    """
    for filename in trio_folder.rglob("*.py"):
        with open(filename, encoding="utf8") as f:
            for line in f:
                # A simple regex should be sufficient to find them all, no need to actually parse.
                match = re.search(
                    r"\b(TypeVar|TypeVarTuple|ParamSpec)\(['\"]([^'\"]+)['\"]",
                    line,
                )
                if match is not None:
                    relative = "trio" / filename.relative_to(trio_folder)
                    relative = relative.with_suffix("")
                    if relative.name == "__init__":  # Package, remove.
                        relative = relative.parent
                    kind = match.group(1)
                    name = match.group(2)
                    typevars_qualified[f'{".".join(relative.parts)}.{name}'] = kind
                    existing = typevars_named.setdefault(name, kind)
                    if existing != kind:
                        # Bug fix: the original passed the values as extra
                        # print() arguments instead of formatting them into
                        # the message.
                        print(f"Mismatch: {name} = {existing}, {kind}")


# All our typevars, so we can suppress reference errors for them.
typevars_qualified: dict[str, str] = {}
typevars_named: dict[str, str] = {}
47 |
48 |
def lookup_reference(
    app: Sphinx,
    env: BuildEnvironment,
    node: pending_xref,
    contnode: Element,
) -> Element | None:
    """Handle missing references.

    Two fix-ups happen here:

    * ``typing_extensions.*`` targets are redirected to the equivalent
      ``typing.*`` stdlib documentation entry.
    * References to typevars recorded by :func:`identify_typevars` are
      redirected to the stdlib docs for their kind (``TypeVar``,
      ``TypeVarTuple`` or ``ParamSpec``), with an adjusted tooltip and a
      CSS class added for restyling.

    Returns the resolved node, or None to let other handlers try.
    """
    # If this is a typing_extensions object, redirect to typing.
    # Most things there are backports, so the stdlib docs should have an entry.
    target: str = node["reftarget"]
    if target.startswith("typing_extensions."):
        new_node = node.copy()
        # Strip the "typing_extensions." prefix (18 characters).
        new_node["reftarget"] = f"typing.{target[18:]}"
        # This fires off this same event, with our new modified node in order to fetch the right
        # URL to use.
        return app.emit_firstresult(  # type: ignore[no-any-return]
            "missing-reference",
            env,
            new_node,
            contnode,
            allowed_exceptions=(NoUri,),
        )

    try:
        typevar_type = typevars_qualified[target]
    except KeyError:
        # Imports might mean the typevar was defined in a different module or something.
        # Fall back to checking just by name.
        dot = target.rfind(".")
        stem = target[dot + 1 :] if dot >= 0 else target
        try:
            typevar_type = typevars_named[stem]
        except KeyError:
            # Let other handlers deal with this name, it's not a typevar.
            return None

    # Found a typevar. Redirect to the stdlib docs for that kind of var.
    new_node = node.copy()
    new_node["reftarget"] = f"typing.{typevar_type}"
    new_node = app.emit_firstresult(
        "missing-reference",
        env,
        new_node,
        contnode,
        allowed_exceptions=(NoUri,),
    )
    if new_node is None:
        # Bug fix: no handler resolved typing.<kind> (e.g. intersphinx not
        # configured); bail out instead of crashing on the subscript below.
        return None
    reftitle = new_node["reftitle"]
    # Is normally "(in Python 3.XX)", make it say typevar/paramspec/etc
    paren = "(" if reftitle.startswith("(") else ""
    new_node["reftitle"] = f"{paren}{typevar_type}, {reftitle.lstrip('(')}"
    # Add a CSS class, for restyling.
    new_node["classes"].append("typevarref")
    return new_node
102 |
103 |
def setup(app: Sphinx) -> None:
    """Sphinx extension entry point: index trio's typevars and hook resolution."""
    # Scan the installed trio package's sources for typevar declarations.
    identify_typevars(Path(trio.__file__).parent)
    # Priority -10: run before the default missing-reference handlers.
    app.connect("missing-reference", lookup_reference, -10)
107 |
--------------------------------------------------------------------------------
/newsfragments/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/newsfragments/.gitkeep
--------------------------------------------------------------------------------
/newsfragments/3232.feature.rst:
--------------------------------------------------------------------------------
1 | :exc:`Cancelled` strings can now display the source and reason for a cancellation. Trio-internal sources of cancellation will set this string, and :meth:`CancelScope.cancel` now has a ``reason`` string parameter that can be used to attach info to any :exc:`Cancelled` to help in debugging.
2 |
--------------------------------------------------------------------------------
/newsfragments/3248.bugfix.rst:
--------------------------------------------------------------------------------
1 | Allow pickling `trio.Cancelled`, as they can show up when you want to pickle something else. This does not rule out pickling other ``NoPublicConstructor`` objects -- create an issue if necessary.
2 |
--------------------------------------------------------------------------------
/newsfragments/README.rst:
--------------------------------------------------------------------------------
1 | This directory collects "newsfragments": short files that each contain
2 | a snippet of ReST-formatted text that will be added to the next
3 | release notes. This should be a description of aspects of the change
4 | (if any) that are relevant to users. (This contrasts with your commit
5 | message and PR description, which are a description of the change as
6 | relevant to people working on the code itself.)
7 |
Each file should be named like ``<ISSUE NUMBER>.<TYPE>.rst``, where
``<ISSUE NUMBER>`` is an issue number, and ``<TYPE>`` is one of:
10 |
11 | * ``headline``: a major new feature we want to highlight for users
12 | * ``breaking``: any breaking changes that happen without a proper
13 | deprecation period (note: deprecations, and removal of previously
14 | deprecated features after an appropriate time, go in the
15 | ``deprecated`` category instead)
16 | * ``feature``: any new feature that doesn't qualify for ``headline``
17 | * ``removal``: removing support for old python versions, or other removals with no deprecation period.
18 | * ``bugfix``
19 | * ``doc``
20 | * ``deprecated``
21 | * ``misc``
22 |
23 | So for example: ``123.headline.rst``, ``456.bugfix.rst``,
24 | ``789.deprecated.rst``
25 |
26 | If your PR fixes an issue, use that number here. If there is no issue,
27 | then after you submit the PR and get the PR number you can add a
28 | newsfragment using that instead.
29 |
30 | Your text can use all the same markup that we use in our Sphinx docs.
31 | For example, you can use double-backticks to mark code snippets, or
32 | single-backticks to link to a function/class/module.
33 |
34 | To check how your formatting looks, the easiest way is to make the PR,
35 | and then after the CI checks run, click on the "Read the Docs build"
36 | details link, and navigate to the release history page.
37 |
--------------------------------------------------------------------------------
/src/trio/__init__.py:
--------------------------------------------------------------------------------
1 | """Trio - A friendly Python library for async concurrency and I/O"""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import TYPE_CHECKING
6 |
7 | # General layout:
8 | #
9 | # trio/_core/... is the self-contained core library. It does various
10 | # shenanigans to export a consistent "core API", but parts of the core API are
11 | # too low-level to be recommended for regular use.
12 | #
13 | # trio/*.py define a set of more usable tools on top of this. They import from
14 | # trio._core and from each other.
15 | #
16 | # This file pulls together the friendly public API, by re-exporting the more
17 | # innocuous bits of the _core API + the higher-level tools from trio/*.py.
18 | #
19 | # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625)
20 | #
21 | # must be imported early to avoid circular import
22 | from ._core import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED # isort: split
23 |
24 | # Submodules imported by default
25 | from . import abc, from_thread, lowlevel, socket, to_thread
26 | from ._channel import (
27 | MemoryChannelStatistics as MemoryChannelStatistics,
28 | MemoryReceiveChannel as MemoryReceiveChannel,
29 | MemorySendChannel as MemorySendChannel,
30 | as_safe_channel as as_safe_channel,
31 | open_memory_channel as open_memory_channel,
32 | )
33 | from ._core import (
34 | BrokenResourceError as BrokenResourceError,
35 | BusyResourceError as BusyResourceError,
36 | Cancelled as Cancelled,
37 | CancelScope as CancelScope,
38 | ClosedResourceError as ClosedResourceError,
39 | EndOfChannel as EndOfChannel,
40 | Nursery as Nursery,
41 | RunFinishedError as RunFinishedError,
42 | TaskStatus as TaskStatus,
43 | TrioInternalError as TrioInternalError,
44 | WouldBlock as WouldBlock,
45 | current_effective_deadline as current_effective_deadline,
46 | current_time as current_time,
47 | open_nursery as open_nursery,
48 | run as run,
49 | )
50 | from ._deprecate import TrioDeprecationWarning as TrioDeprecationWarning
51 | from ._dtls import (
52 | DTLSChannel as DTLSChannel,
53 | DTLSChannelStatistics as DTLSChannelStatistics,
54 | DTLSEndpoint as DTLSEndpoint,
55 | )
56 | from ._file_io import open_file as open_file, wrap_file as wrap_file
57 | from ._highlevel_generic import (
58 | StapledStream as StapledStream,
59 | aclose_forcefully as aclose_forcefully,
60 | )
61 | from ._highlevel_open_tcp_listeners import (
62 | open_tcp_listeners as open_tcp_listeners,
63 | serve_tcp as serve_tcp,
64 | )
65 | from ._highlevel_open_tcp_stream import open_tcp_stream as open_tcp_stream
66 | from ._highlevel_open_unix_stream import open_unix_socket as open_unix_socket
67 | from ._highlevel_serve_listeners import serve_listeners as serve_listeners
68 | from ._highlevel_socket import (
69 | SocketListener as SocketListener,
70 | SocketStream as SocketStream,
71 | )
72 | from ._highlevel_ssl_helpers import (
73 | open_ssl_over_tcp_listeners as open_ssl_over_tcp_listeners,
74 | open_ssl_over_tcp_stream as open_ssl_over_tcp_stream,
75 | serve_ssl_over_tcp as serve_ssl_over_tcp,
76 | )
77 | from ._path import Path as Path, PosixPath as PosixPath, WindowsPath as WindowsPath
78 | from ._signals import open_signal_receiver as open_signal_receiver
79 | from ._ssl import (
80 | NeedHandshakeError as NeedHandshakeError,
81 | SSLListener as SSLListener,
82 | SSLStream as SSLStream,
83 | )
84 | from ._subprocess import Process as Process, run_process as run_process
85 | from ._sync import (
86 | CapacityLimiter as CapacityLimiter,
87 | CapacityLimiterStatistics as CapacityLimiterStatistics,
88 | Condition as Condition,
89 | ConditionStatistics as ConditionStatistics,
90 | Event as Event,
91 | EventStatistics as EventStatistics,
92 | Lock as Lock,
93 | LockStatistics as LockStatistics,
94 | Semaphore as Semaphore,
95 | StrictFIFOLock as StrictFIFOLock,
96 | )
97 | from ._timeouts import (
98 | TooSlowError as TooSlowError,
99 | fail_after as fail_after,
100 | fail_at as fail_at,
101 | move_on_after as move_on_after,
102 | move_on_at as move_on_at,
103 | sleep as sleep,
104 | sleep_forever as sleep_forever,
105 | sleep_until as sleep_until,
106 | )
107 | from ._version import __version__ as __version__
108 |
# Not imported by default, but mentioned here so static analysis tools like
# pylint will know that it exists.
if TYPE_CHECKING:
    from . import testing

from . import _deprecate as _deprecate

# No attributes are currently deprecated (empty registry), but the
# machinery stays wired up for future use.
_deprecate.deprecate_attributes(__name__, {})

# Having the public path in .__module__ attributes is important for:
# - exception names in printed tracebacks
# - sphinx :show-inheritance:
# - deprecation warnings
# - pickle
# - probably other stuff
from ._util import fixup_module_metadata

fixup_module_metadata(__name__, globals())
fixup_module_metadata(lowlevel.__name__, lowlevel.__dict__)
fixup_module_metadata(socket.__name__, socket.__dict__)
fixup_module_metadata(abc.__name__, abc.__dict__)
fixup_module_metadata(from_thread.__name__, from_thread.__dict__)
fixup_module_metadata(to_thread.__name__, to_thread.__dict__)
# Remove the helpers so they aren't exposed as trio.* attributes.
del fixup_module_metadata
del TYPE_CHECKING
134 |
--------------------------------------------------------------------------------
/src/trio/__main__.py:
--------------------------------------------------------------------------------
from trio._repl import main

# `python -m trio`: start the trio interactive console, seeding it with
# this module's namespace.
main(locals())
4 |
--------------------------------------------------------------------------------
/src/trio/_core/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | This namespace represents the core functionality that has to be built-in
3 | and deal with private internal data structures. Things in this namespace
4 | are publicly available in either trio, trio.lowlevel, or trio.testing.
5 | """
6 |
7 | import sys
8 |
9 | from ._entry_queue import TrioToken
10 | from ._exceptions import (
11 | BrokenResourceError,
12 | BusyResourceError,
13 | Cancelled,
14 | ClosedResourceError,
15 | EndOfChannel,
16 | RunFinishedError,
17 | TrioInternalError,
18 | WouldBlock,
19 | )
20 | from ._ki import currently_ki_protected, disable_ki_protection, enable_ki_protection
21 | from ._local import RunVar, RunVarToken
22 | from ._mock_clock import MockClock
23 | from ._parking_lot import (
24 | ParkingLot,
25 | ParkingLotStatistics,
26 | add_parking_lot_breaker,
27 | remove_parking_lot_breaker,
28 | )
29 |
30 | # Imports that always exist
31 | from ._run import (
32 | TASK_STATUS_IGNORED,
33 | CancelScope,
34 | Nursery,
35 | RunStatistics,
36 | Task,
37 | TaskStatus,
38 | add_instrument,
39 | checkpoint,
40 | checkpoint_if_cancelled,
41 | current_clock,
42 | current_effective_deadline,
43 | current_root_task,
44 | current_statistics,
45 | current_task,
46 | current_time,
47 | current_trio_token,
48 | in_trio_run,
49 | in_trio_task,
50 | notify_closing,
51 | open_nursery,
52 | remove_instrument,
53 | reschedule,
54 | run,
55 | spawn_system_task,
56 | start_guest_run,
57 | wait_all_tasks_blocked,
58 | wait_readable,
59 | wait_writable,
60 | )
61 | from ._thread_cache import start_thread_soon
62 |
63 | # Has to come after _run to resolve a circular import
64 | from ._traps import (
65 | Abort,
66 | RaiseCancelT,
67 | cancel_shielded_checkpoint,
68 | permanently_detach_coroutine_object,
69 | reattach_detached_coroutine_object,
70 | temporarily_detach_coroutine_object,
71 | wait_task_rescheduled,
72 | )
73 | from ._unbounded_queue import UnboundedQueue, UnboundedQueueStatistics
74 |
75 | # Windows imports
76 | if sys.platform == "win32":
77 | from ._run import (
78 | current_iocp,
79 | monitor_completion_key,
80 | readinto_overlapped,
81 | register_with_iocp,
82 | wait_overlapped,
83 | write_overlapped,
84 | )
85 | # Kqueue imports
86 | elif sys.platform != "linux" and sys.platform != "win32":
87 | from ._run import current_kqueue, monitor_kevent, wait_kevent
88 |
89 | del sys # It would be better to import sys as _sys, but mypy does not understand it
90 |
--------------------------------------------------------------------------------
/src/trio/_core/_concat_tb.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from types import TracebackType
4 |
5 |
6 | # this is used for collapsing single-exception ExceptionGroups when using
7 | # `strict_exception_groups=False`. Once that is retired this function can
8 | # be removed as well.
def concat_tb(
    head: TracebackType | None,
    tail: TracebackType | None,
) -> TracebackType | None:
    """Return a traceback equivalent to ``head`` followed by ``tail``.

    Neither input is mutated: the entries of ``head`` are re-created as
    fresh ``TracebackType`` objects chained onto ``tail``.

    This is used for collapsing single-exception ExceptionGroups when using
    `strict_exception_groups=False`. Once that is retired this function can
    be removed as well.
    """
    # Deliberately iterative: ``head`` might belong to a RecursionError,
    # whose stack is by definition too deep to walk recursively.
    entries: list[TracebackType] = []
    node = head
    while node is not None:
        entries.append(node)
        node = node.tb_next
    # Rebuild the chain back-to-front, so each new entry links to the one
    # constructed just before it (which ends in ``tail``).
    result = tail
    while entries:
        entry = entries.pop()
        result = TracebackType(
            result,
            entry.tb_frame,
            entry.tb_lasti,
            entry.tb_lineno,
        )
    return result
27 |
--------------------------------------------------------------------------------
/src/trio/_core/_generated_instrumentation.py:
--------------------------------------------------------------------------------
1 | # ***********************************************************
2 | # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ******
3 | # *************************************************************
4 | from __future__ import annotations
5 |
6 | from typing import TYPE_CHECKING
7 |
8 | from ._ki import enable_ki_protection
9 | from ._run import GLOBAL_RUN_CONTEXT
10 |
11 | if TYPE_CHECKING:
12 | from ._instrumentation import Instrument
13 |
14 | __all__ = ["add_instrument", "remove_instrument"]
15 |
16 |
@enable_ki_protection
def add_instrument(instrument: Instrument) -> None:
    """Start instrumenting the current run loop with the given instrument.

    Args:
        instrument (trio.abc.Instrument): The instrument to activate.

    If ``instrument`` is already active, does nothing.

    """
    try:
        return GLOBAL_RUN_CONTEXT.runner.instruments.add_instrument(instrument)
    except AttributeError:
        # GLOBAL_RUN_CONTEXT has no ``runner`` attribute when no trio run is
        # active on this thread, so AttributeError means "called from sync
        # context".
        raise RuntimeError("must be called from async context") from None
31 |
32 |
@enable_ki_protection
def remove_instrument(instrument: Instrument) -> None:
    """Stop instrumenting the current run loop with the given instrument.

    Args:
        instrument (trio.abc.Instrument): The instrument to de-activate.

    Raises:
        KeyError: if the instrument is not currently active. This could
            occur either because you never added it, or because you added it
            and then it raised an unhandled exception and was automatically
            deactivated.

    """
    try:
        return GLOBAL_RUN_CONTEXT.runner.instruments.remove_instrument(instrument)
    except AttributeError:
        # ``runner`` is only set on GLOBAL_RUN_CONTEXT while a run is active;
        # translate its absence into a friendlier error.
        raise RuntimeError("must be called from async context") from None
51 |
--------------------------------------------------------------------------------
/src/trio/_core/_generated_io_epoll.py:
--------------------------------------------------------------------------------
1 | # ***********************************************************
2 | # ******* WARNING: AUTOGENERATED! ALL EDITS WILL BE LOST ******
3 | # *************************************************************
4 | from __future__ import annotations
5 |
6 | import sys
7 | from typing import TYPE_CHECKING
8 |
9 | from ._ki import enable_ki_protection
10 | from ._run import GLOBAL_RUN_CONTEXT
11 |
12 | if TYPE_CHECKING:
13 | from .._file_io import _HasFileNo
14 |
15 | assert not TYPE_CHECKING or sys.platform == "linux"
16 |
17 |
18 | __all__ = ["notify_closing", "wait_readable", "wait_writable"]
19 |
20 |
@enable_ki_protection
async def wait_readable(fd: int | _HasFileNo) -> None:
    """Block until the kernel reports that the given object is readable.

    On Unix systems, ``fd`` must either be an integer file descriptor,
    or else an object with a ``.fileno()`` method which returns an
    integer file descriptor. Any kind of file descriptor can be passed,
    though the exact semantics will depend on your kernel. For example,
    this probably won't do anything useful for on-disk files.

    On Windows systems, ``fd`` must either be an integer ``SOCKET``
    handle, or else an object with a ``.fileno()`` method which returns
    an integer ``SOCKET`` handle. File descriptors aren't supported,
    and neither are handles that refer to anything besides a
    ``SOCKET``.

    :raises trio.BusyResourceError:
        if another task is already waiting for the given socket to
        become readable.
    :raises trio.ClosedResourceError:
        if another task calls :func:`notify_closing` while this
        function is still working.
    """
    try:
        return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_readable(fd)
    except AttributeError:
        # No ``runner`` on GLOBAL_RUN_CONTEXT means no active trio run on
        # this thread.
        raise RuntimeError("must be called from async context") from None
48 |
49 |
@enable_ki_protection
async def wait_writable(fd: int | _HasFileNo) -> None:
    """Block until the kernel reports that the given object is writable.

    See `wait_readable` for the definition of ``fd``.

    :raises trio.BusyResourceError:
        if another task is already waiting for the given socket to
        become writable.
    :raises trio.ClosedResourceError:
        if another task calls :func:`notify_closing` while this
        function is still working.
    """
    try:
        return await GLOBAL_RUN_CONTEXT.runner.io_manager.wait_writable(fd)
    except AttributeError:
        # No ``runner`` on GLOBAL_RUN_CONTEXT means no active trio run on
        # this thread.
        raise RuntimeError("must be called from async context") from None
67 |
68 |
@enable_ki_protection
def notify_closing(fd: int | _HasFileNo) -> None:
    """Notify waiters of the given object that it will be closed.

    Call this before closing a file descriptor (on Unix) or socket (on
    Windows). This will cause any `wait_readable` or `wait_writable`
    calls on the given object to immediately wake up and raise
    `~trio.ClosedResourceError`.

    This doesn't actually close the object – you still have to do that
    yourself afterwards. Also, you want to be careful to make sure no
    new tasks start waiting on the object in between when you call this
    and when it's actually closed. So to close something properly, you
    usually want to do these steps in order:

    1. Explicitly mark the object as closed, so that any new attempts
       to use it will abort before they start.
    2. Call `notify_closing` to wake up any already-existing users.
    3. Actually close the object.

    It's also possible to do them in a different order if that's more
    convenient, *but only if* you make sure not to have any checkpoints in
    between the steps. This way they all happen in a single atomic
    step, so other tasks won't be able to tell what order they happened
    in anyway.
    """
    try:
        return GLOBAL_RUN_CONTEXT.runner.io_manager.notify_closing(fd)
    except AttributeError:
        # No ``runner`` on GLOBAL_RUN_CONTEXT means no active trio run on
        # this thread.
        raise RuntimeError("must be called from async context") from None
99 |
--------------------------------------------------------------------------------
/src/trio/_core/_instrumentation.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import logging
4 | import types
5 | from collections import UserDict
6 | from typing import TYPE_CHECKING, TypeVar
7 |
8 | from .._abc import Instrument
9 |
10 | # Used to log exceptions in instruments
11 | INSTRUMENT_LOGGER = logging.getLogger("trio.abc.Instrument")
12 |
13 | if TYPE_CHECKING:
14 | from collections.abc import Sequence
15 |
16 | T = TypeVar("T")
17 |
18 |
# Decorator to mark methods public. This does nothing by itself, but
# trio/_tools/gen_exports.py looks for it.
def _public(fn: T) -> T:
    """No-op marker decorator; gen_exports.py re-exports functions tagged with it."""
    return fn
23 |
24 |
class Instruments(UserDict[str, dict[Instrument, None]]):
    """A collection of `trio.abc.Instrument` organized by hook.

    Instrumentation calls are rather expensive, and we don't want a
    rarely-used instrument (like before_run()) to slow down hot
    operations (like before_task_step()). Thus, we cache the set of
    instruments to be called for each hook, and skip the instrumentation
    call if there's nothing currently installed for that hook.

    The special key ``"_all"`` holds every active instrument, regardless of
    which hooks it overrides. The inner dicts are used as ordered sets
    (values are always None).
    """

    __slots__ = ()

    def __init__(self, incoming: Sequence[Instrument]) -> None:
        """Build the hook cache, registering each instrument in ``incoming``."""
        super().__init__({"_all": {}})
        for instrument in incoming:
            self.add_instrument(instrument)

    @_public
    def add_instrument(self, instrument: Instrument) -> None:
        """Start instrumenting the current run loop with the given instrument.

        Args:
            instrument (trio.abc.Instrument): The instrument to activate.

        If ``instrument`` is already active, does nothing.

        """
        if instrument in self.data["_all"]:
            return
        self.data["_all"][instrument] = None
        try:
            for name in dir(instrument):
                if name.startswith("_"):
                    continue
                try:
                    prototype = getattr(Instrument, name)
                except AttributeError:
                    # Not one of the Instrument hooks; ignore.
                    continue
                impl = getattr(instrument, name)
                if isinstance(impl, types.MethodType) and impl.__func__ is prototype:
                    # Inherited unchanged from _abc.Instrument
                    continue
                self.data.setdefault(name, {})[instrument] = None
        except BaseException:
            # (Was a bare ``except:``; same semantics, idiomatic spelling.)
            # Roll back the partial registration before re-raising.
            self.remove_instrument(instrument)
            raise

    @_public
    def remove_instrument(self, instrument: Instrument) -> None:
        """Stop instrumenting the current run loop with the given instrument.

        Args:
            instrument (trio.abc.Instrument): The instrument to de-activate.

        Raises:
            KeyError: if the instrument is not currently active. This could
                occur either because you never added it, or because you added it
                and then it raised an unhandled exception and was automatically
                deactivated.

        """
        # If instrument isn't present, the KeyError propagates out
        self.data["_all"].pop(instrument)
        # Materialize the items so hook entries can be deleted mid-loop.
        for hookname, instruments in list(self.data.items()):
            if instrument in instruments:
                del instruments[instrument]
                # Drop hooks with no instruments left, so ``hook in self``
                # checks stay cheap and accurate. ("_all" is never hit here
                # because it was popped above, so it survives even if empty.)
                if not instruments:
                    del self.data[hookname]

    def call(
        self,
        hookname: str,
        *args: object,
    ) -> None:
        """Call hookname(*args) on each applicable instrument.

        You must first check whether there are any instruments installed for
        that hook, e.g.::

            if "before_task_step" in instruments:
                instruments.call("before_task_step", task)
        """
        # Copy the keys: an instrument that raises is removed mid-iteration.
        for instrument in list(self.data[hookname]):
            try:
                getattr(instrument, hookname)(*args)
            except BaseException:
                # A misbehaving instrument is disabled rather than crashing
                # the run loop; the exception is logged and swallowed.
                self.remove_instrument(instrument)
                INSTRUMENT_LOGGER.exception(
                    "Exception raised when calling %r on instrument %r. "
                    "Instrument has been disabled.",
                    hookname,
                    instrument,
                )
118 |
--------------------------------------------------------------------------------
/src/trio/_core/_io_common.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import copy
4 | from typing import TYPE_CHECKING
5 |
6 | import outcome
7 |
8 | from .. import _core
9 |
10 | if TYPE_CHECKING:
11 | from ._io_epoll import EpollWaiters
12 | from ._io_windows import AFDWaiters
13 |
14 |
# Utility function shared between _io_epoll and _io_windows
def wake_all(waiters: EpollWaiters | AFDWaiters, exc: BaseException) -> None:
    """Wake every task parked on ``waiters`` with a copy of ``exc``.

    If the calling task is itself one of the waiters, ``exc`` is raised
    directly in the caller (after all other waiters have been woken)
    instead of being rescheduled.
    """
    try:
        me = _core.current_task()
    except RuntimeError:
        # Not running inside a task.
        me = None
    must_raise = False
    for slot in ("read_task", "write_task"):
        waiter = getattr(waiters, slot)
        if waiter is not None:
            if waiter is me:
                must_raise = True
            else:
                # Each task gets its own copy so tracebacks don't collide.
                _core.reschedule(waiter, outcome.Error(copy.copy(exc)))
        setattr(waiters, slot, None)
    if must_raise:
        raise exc
32 |
--------------------------------------------------------------------------------
/src/trio/_core/_local.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import Generic, TypeVar, cast
4 |
5 | # Runvar implementations
6 | import attrs
7 |
8 | from .._util import NoPublicConstructor, final
9 | from . import _run
10 |
11 | T = TypeVar("T")
12 |
13 |
# Sentinel class: distinguishes "no value was supplied" from any real value
# (including None).
@final
class _NoValue: ...
16 |
17 |
@final
@attrs.define(eq=False)
class RunVarToken(Generic[T], metaclass=NoPublicConstructor):
    """Opaque token returned by :meth:`RunVar.set` and consumed by
    :meth:`RunVar.reset` to restore the previous value.
    """

    # The RunVar that produced this token.
    _var: RunVar[T]
    # Value in effect before set(); _NoValue when the variable was unset.
    previous_value: T | type[_NoValue] = _NoValue
    # True once the token has been passed to reset(); tokens are single-use.
    redeemed: bool = attrs.field(default=False, init=False)

    @classmethod
    def _empty(cls, var: RunVar[T]) -> RunVarToken[T]:
        # Token for the "variable had no previous value" case.
        return cls._create(var)
28 |
29 |
@final
@attrs.define(eq=False, repr=False)
class RunVar(Generic[T]):
    """The run-local variant of a context variable.

    :class:`RunVar` objects are similar to context variable objects,
    except that they are shared across a single call to :func:`trio.run`
    rather than a single task.

    """

    _name: str = attrs.field(alias="name")
    _default: T | type[_NoValue] = attrs.field(default=_NoValue, alias="default")

    def get(self, default: T | type[_NoValue] = _NoValue) -> T:
        """Gets the value of this :class:`RunVar` for the current run call.

        Lookup order: the value set during this run, then the ``default``
        argument, then the default given at construction time; otherwise
        raises :exc:`LookupError`.
        """
        try:
            return cast("T", _run.GLOBAL_RUN_CONTEXT.runner._locals[self])
        except AttributeError:
            # No ``runner`` attribute means there is no active trio run.
            raise RuntimeError("Cannot be used outside of a run context") from None
        except KeyError:
            # contextvars consistency
            # `type: ignore` awaiting https://github.com/python/mypy/issues/15553 to be fixed & released
            if default is not _NoValue:
                return default  # type: ignore[return-value]

            if self._default is not _NoValue:
                return self._default  # type: ignore[return-value]

            raise LookupError(self) from None

    def set(self, value: T) -> RunVarToken[T]:
        """Sets the value of this :class:`RunVar` for this current run
        call.

        Returns a :class:`RunVarToken` that :meth:`reset` accepts to
        restore the previous state.
        """
        try:
            old_value = self.get()
        except LookupError:
            token = RunVarToken._empty(self)
        else:
            token = RunVarToken[T]._create(self, old_value)

        # This can't fail, because if we weren't in Trio context then the
        # get() above would have failed.
        _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = value
        return token

    def reset(self, token: RunVarToken[T]) -> None:
        """Resets the value of this :class:`RunVar` to what it was
        previously specified by the token.

        """
        if token is None:
            raise TypeError("token must not be none")

        if token.redeemed:
            raise ValueError("token has already been used")

        if token._var is not self:
            raise ValueError("token is not for us")

        previous = token.previous_value
        try:
            if previous is _NoValue:
                # There was no value before set(); remove our entry entirely.
                _run.GLOBAL_RUN_CONTEXT.runner._locals.pop(self)
            else:
                _run.GLOBAL_RUN_CONTEXT.runner._locals[self] = previous
        except AttributeError:
            raise RuntimeError("Cannot be used outside of a run context") from None

        # Mark the token as used so it can't be redeemed twice.
        token.redeemed = True

    def __repr__(self) -> str:
        # Bug fix: the source as received returned f"" here — the repr text
        # appears to have been stripped (angle brackets lost in transit);
        # restored to a meaningful repr.
        return f"<RunVar name={self._name!r}>"
--------------------------------------------------------------------------------
/src/trio/_core/_run_context.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import threading
4 | from typing import TYPE_CHECKING, Final
5 |
6 | if TYPE_CHECKING:
7 | from ._run import Runner, Task
8 |
9 |
class RunContext(threading.local):
    """Thread-local storage for the machinery of the trio run active on
    this thread.

    The attributes are declared but deliberately not initialized here:
    reading them when no run is active raises AttributeError, which
    callers (e.g. the generated API wrappers) translate into a
    "must be called from async context" RuntimeError.
    """

    runner: Runner
    task: Task


# Single shared instance; threading.local semantics give each thread an
# independent runner/task pair.
GLOBAL_RUN_CONTEXT: Final = RunContext()
16 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/_core/_tests/__init__.py
--------------------------------------------------------------------------------
/src/trio/_core/_tests/test_exceptiongroup_gc.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import gc
4 | import sys
5 | from traceback import extract_tb
6 | from typing import TYPE_CHECKING, Callable, NoReturn
7 |
8 | import pytest
9 |
10 | from .._concat_tb import concat_tb
11 |
12 | if TYPE_CHECKING:
13 | from types import TracebackType
14 |
15 | if sys.version_info < (3, 11):
16 | from exceptiongroup import ExceptionGroup
17 |
18 |
# Helpers that raise from a known call-stack depth, so tests can compare the
# exact traceback entries produced by concat_tb().
def raiser1() -> NoReturn:
    raiser1_2()


def raiser1_2() -> NoReturn:
    raiser1_3()


def raiser1_3() -> NoReturn:
    # Three frames deep: raiser1 -> raiser1_2 -> raiser1_3.
    raise ValueError("raiser1_string")


def raiser2() -> NoReturn:
    raiser2_2()


def raiser2_2() -> NoReturn:
    # Two frames deep: raiser2 -> raiser2_2.
    raise KeyError("raiser2_string")
37 |
38 |
def get_exc(raiser: Callable[[], NoReturn]) -> Exception:
    """Call ``raiser`` and return the Exception instance it raises."""
    try:
        raiser()
    except Exception as caught:
        return caught
    # ``raiser`` is annotated NoReturn, so reaching this line means the
    # helper itself is broken.
    raise AssertionError("raiser should always raise")  # pragma: no cover
45 |
46 |
def get_tb(raiser: Callable[[], NoReturn]) -> TracebackType | None:
    """Return just the traceback of the exception that ``raiser`` raises."""
    return get_exc(raiser).__traceback__
49 |
50 |
def test_concat_tb() -> None:
    """concat_tb() should append one traceback onto another without mutating
    either input, and treat None as the empty traceback."""
    tb1 = get_tb(raiser1)
    tb2 = get_tb(raiser2)

    # These return a list of (filename, lineno, fn name, text) tuples
    # https://docs.python.org/3/library/traceback.html#traceback.extract_tb
    entries1 = extract_tb(tb1)
    entries2 = extract_tb(tb2)

    tb12 = concat_tb(tb1, tb2)
    assert extract_tb(tb12) == entries1 + entries2

    # Concatenation is order-sensitive, not symmetric.
    tb21 = concat_tb(tb2, tb1)
    assert extract_tb(tb21) == entries2 + entries1

    # Check degenerate cases
    assert extract_tb(concat_tb(None, tb1)) == entries1
    assert extract_tb(concat_tb(tb1, None)) == entries1
    assert concat_tb(None, None) is None

    # Make sure the original tracebacks didn't get mutated by mistake
    assert extract_tb(get_tb(raiser1)) == entries1
    assert extract_tb(get_tb(raiser2)) == entries2
74 |
75 |
76 | # Unclear if this can still fail, removing the `del` from _concat_tb.copy_tb does not seem
77 | # to trigger it (on a platform where the `del` is executed)
@pytest.mark.skipif(
    sys.implementation.name != "cpython",
    reason="Only makes sense with refcounting GC",
)
def test_ExceptionGroup_catch_doesnt_create_cyclic_garbage() -> None:
    """Raising and catching an ExceptionGroup must not leave reference
    cycles behind for the cycle collector."""
    # https://github.com/python-trio/trio/pull/2063
    gc.collect()
    old_flags = gc.get_debug()

    def make_multi() -> NoReturn:
        raise ExceptionGroup("", [get_exc(raiser1), get_exc(raiser2)])

    try:
        # With DEBUG_SAVEALL, every object the cycle collector would free is
        # diverted into gc.garbage instead — so an empty gc.garbage after
        # collect() proves no cycles were created.
        gc.set_debug(gc.DEBUG_SAVEALL)
        with pytest.raises(ExceptionGroup) as excinfo:
            # covers ~~MultiErrorCatcher.__exit__ and~~ _concat_tb.copy_tb
            # TODO: is the above comment true anymore? as this no longer uses MultiError.catch
            raise make_multi()
        for exc in excinfo.value.exceptions:
            assert isinstance(exc, (ValueError, KeyError))
        gc.collect()
        assert not gc.garbage
    finally:
        # Restore the GC debug flags and drop anything SAVEALL captured, so
        # later tests start from a clean slate.
        gc.set_debug(old_flags)
        gc.garbage.clear()
103 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/test_local.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from trio import run
4 | from trio.lowlevel import RunVar, RunVarToken
5 |
6 | from ... import _core
7 |
8 |
9 | # scary runvar tests
def test_runvar_smoketest() -> None:
    """Basic RunVar behavior: repr, defaults, get/set, and per-run isolation."""
    t1 = RunVar[str]("test1")
    t2 = RunVar[str]("test2", default="catfish")

    # The repr names the variable. (This previously compared against "",
    # which no repr can ever return — the angle-bracketed text had been
    # stripped from the expected string.)
    assert repr(t1) == "<RunVar name='test1'>"

    async def first_check() -> None:
        # Unset and no default -> LookupError.
        with pytest.raises(LookupError):
            t1.get()

        t1.set("swordfish")
        assert t1.get() == "swordfish"
        assert t2.get() == "catfish"
        # A call-site default overrides the constructor default while unset.
        assert t2.get(default="eel") == "eel"

        # Once set, the stored value wins over any default.
        t2.set("goldfish")
        assert t2.get() == "goldfish"
        assert t2.get(default="tuna") == "goldfish"

    async def second_check() -> None:
        # A fresh run() starts from scratch: nothing set in first_check leaks.
        with pytest.raises(LookupError):
            t1.get()

        assert t2.get() == "catfish"

    run(first_check)
    run(second_check)
37 |
38 |
def test_runvar_resetting() -> None:
    """RunVar.reset() token semantics: type checking, single use, and
    variable/token pairing."""
    t1 = RunVar[str]("test1")
    t2 = RunVar[str]("test2", default="dogfish")
    t3 = RunVar[str]("test3")

    async def reset_check() -> None:
        # Resetting with the token from the first set() returns the
        # variable to "unset".
        token = t1.set("moonfish")
        assert t1.get() == "moonfish"
        t1.reset(token)

        # reset() insists on a real token object.
        with pytest.raises(TypeError):
            t1.reset(None)  # type: ignore[arg-type]

        with pytest.raises(LookupError):
            t1.get()

        # With a constructor default, reset falls back to that default.
        token2 = t2.set("catdogfish")
        assert t2.get() == "catdogfish"
        t2.reset(token2)
        assert t2.get() == "dogfish"

        # Tokens are single-use...
        with pytest.raises(ValueError, match=r"^token has already been used$"):
            t2.reset(token2)

        token3 = t3.set("basculin")
        assert t3.get() == "basculin"

        # ...and bound to the variable that issued them.
        with pytest.raises(ValueError, match=r"^token is not for us$"):
            t1.reset(token3)

    run(reset_check)
70 |
71 |
def test_runvar_sync() -> None:
    """RunVar state is run-scoped, not task-scoped: a set() or reset() in
    one task is immediately visible to every other task in the same run."""
    t1 = RunVar[str]("test1")

    async def sync_check() -> None:
        async def task1() -> None:
            t1.set("plaice")
            assert t1.get() == "plaice"

        async def task2(tok: RunVarToken[str]) -> None:
            # Resetting with the parent's "cod" token unwinds back past
            # task1's "plaice" as well — the var becomes unset.
            t1.reset(tok)

            with pytest.raises(LookupError):
                t1.get()

            t1.set("haddock")

        async with _core.open_nursery() as n:
            token = t1.set("cod")
            assert t1.get() == "cod"

            n.start_soon(task1)
            await _core.wait_all_tasks_blocked()
            # task1's set is visible here, in the parent task.
            assert t1.get() == "plaice"

            n.start_soon(task2, token)
            await _core.wait_all_tasks_blocked()
            assert t1.get() == "haddock"

    run(sync_check)
101 |
102 |
def test_accessing_runvar_outside_run_call_fails() -> None:
    """Every RunVar operation requires an active trio run."""
    t1 = RunVar[str]("test1")

    with pytest.raises(RuntimeError):
        t1.set("asdf")

    with pytest.raises(RuntimeError):
        t1.get()

    async def get_token() -> RunVarToken[str]:
        return t1.set("ok")

    token = run(get_token)

    # A token minted inside a run can't be used after that run finished.
    with pytest.raises(RuntimeError):
        t1.reset(token)
119 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/test_tutil.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from .tutil import check_sequence_matches
4 |
5 |
def test_check_sequence_matches() -> None:
    """check_sequence_matches: exact matches and {}-groups pass; anything
    out of place raises AssertionError."""
    # Sequences that satisfy their template.
    for seq, template in [
        ([1, 2, 3], [1, 2, 3]),
        ([1, 2, 3, 4], [1, {2, 3}, 4]),
        ([1, 3, 2, 4], [1, {2, 3}, 4]),
    ]:
        check_sequence_matches(seq, template)

    # Sequences that violate their template.
    for seq, template in [
        ([1, 3, 2], [1, 2, 3]),
        ([1, 2, 4, 3], [1, {2, 3}, 4]),
    ]:
        with pytest.raises(AssertionError):
            check_sequence_matches(seq, template)
14 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/test_unbounded_queue.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import itertools
4 |
5 | import pytest
6 |
7 | from ... import _core
8 | from ...testing import assert_checkpoints, wait_all_tasks_blocked
9 |
# UnboundedQueue is deprecated (it carries a @deprecated decorator that
# emits TrioDeprecationWarning); silence that warning for this whole module
# so the tests can still exercise it.
pytestmark = pytest.mark.filterwarnings(
    "ignore:.*UnboundedQueue:trio.TrioDeprecationWarning",
)
13 |
14 |
async def test_UnboundedQueue_basic() -> None:
    """Smoke test: put/get_batch round trip, emptiness, and statistics."""
    q: _core.UnboundedQueue[str | int | None] = _core.UnboundedQueue()
    q.put_nowait("hi")
    assert await q.get_batch() == ["hi"]
    # A drained queue refuses a non-blocking batch read.
    with pytest.raises(_core.WouldBlock):
        q.get_batch_nowait()
    q.put_nowait(1)
    q.put_nowait(2)
    q.put_nowait(3)
    # A single batch drains everything, in FIFO order.
    assert q.get_batch_nowait() == [1, 2, 3]

    assert q.empty()
    assert q.qsize() == 0
    q.put_nowait(None)
    assert not q.empty()
    assert q.qsize() == 1

    stats = q.statistics()
    assert stats.qsize == 1
    assert stats.tasks_waiting == 0

    # smoke test
    repr(q)
38 |
39 |
async def test_UnboundedQueue_blocking() -> None:
    """Both consumer styles (explicit get_batch loop, async-for) observe the
    same batch boundaries: items queued while the consumer was elsewhere
    arrive together in the next batch."""
    record = []
    q = _core.UnboundedQueue[int]()

    async def get_batch_consumer() -> None:
        while True:
            batch = await q.get_batch()
            assert batch
            record.append(batch)

    async def aiter_consumer() -> None:
        async for batch in q:
            assert batch
            record.append(batch)

    for consumer in (get_batch_consumer, aiter_consumer):
        record.clear()
        async with _core.open_nursery() as nursery:
            nursery.start_soon(consumer)
            await _core.wait_all_tasks_blocked()
            stats = q.statistics()
            assert stats.qsize == 0
            assert stats.tasks_waiting == 1
            # Two puts before the consumer is scheduled -> one 2-item batch...
            q.put_nowait(10)
            q.put_nowait(11)
            await _core.wait_all_tasks_blocked()
            # ...then a lone put -> its own batch.
            q.put_nowait(12)
            await _core.wait_all_tasks_blocked()
            assert record == [[10, 11], [12]]
            nursery.cancel_scope.cancel()
70 |
71 |
async def test_UnboundedQueue_fairness() -> None:
    """Blocked readers take priority over opportunistic get_batch_nowait()
    callers, and multiple blocked readers are served in rotation."""
    q = _core.UnboundedQueue[int]()

    # If there's no-one else around, we can put stuff in and take it out
    # again, no problem
    q.put_nowait(1)
    q.put_nowait(2)
    assert q.get_batch_nowait() == [1, 2]

    result = None

    async def get_batch(q: _core.UnboundedQueue[int]) -> None:
        nonlocal result
        result = await q.get_batch()

    # But if someone else is waiting to read, then they get dibs
    async with _core.open_nursery() as nursery:
        nursery.start_soon(get_batch, q)
        await _core.wait_all_tasks_blocked()
        q.put_nowait(3)
        q.put_nowait(4)
        # The put was handed to the parked reader, so nowait sees nothing.
        with pytest.raises(_core.WouldBlock):
            q.get_batch_nowait()
    assert result == [3, 4]

    # If two tasks are trying to read, they alternate
    record = []

    async def reader(name: str) -> None:
        while True:
            record.append((name, await q.get_batch()))

    async with _core.open_nursery() as nursery:
        nursery.start_soon(reader, "a")
        await _core.wait_all_tasks_blocked()
        nursery.start_soon(reader, "b")
        await _core.wait_all_tasks_blocked()

        for i in range(20):
            q.put_nowait(i)
            await _core.wait_all_tasks_blocked()

        nursery.cancel_scope.cancel()

    # Each single-item batch went to "a" and "b" alternately.
    assert record == list(zip(itertools.cycle("ab"), [[i] for i in range(20)]))
117 |
118 |
async def test_UnboundedQueue_trivial_yields() -> None:
    """get_batch and async iteration must execute a full checkpoint even on
    the fast path where data is already available."""
    q = _core.UnboundedQueue[None]()

    q.put_nowait(None)
    with assert_checkpoints():
        await q.get_batch()

    q.put_nowait(None)
    with assert_checkpoints():
        async for _ in q:  # pragma: no branch
            break
130 |
131 |
async def test_UnboundedQueue_no_spurious_wakeups() -> None:
    # If we have two tasks waiting, and put two items into the queue... then
    # only one task wakes up
    record = []

    async def getter(q: _core.UnboundedQueue[int], i: int) -> None:
        got = await q.get_batch()
        record.append((i, got))

    async with _core.open_nursery() as nursery:
        q = _core.UnboundedQueue[int]()
        nursery.start_soon(getter, q, 1)
        await wait_all_tasks_blocked()
        nursery.start_soon(getter, q, 2)
        await wait_all_tasks_blocked()

        for i in range(10):
            q.put_nowait(i)
            await wait_all_tasks_blocked()

        # The first waiter got everything in one batch; the second waiter was
        # never woken at all.
        assert record == [(1, list(range(10)))]

        nursery.cancel_scope.cancel()
155 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/tutil.py:
--------------------------------------------------------------------------------
1 | # Utilities for testing
2 | from __future__ import annotations
3 |
4 | import asyncio
5 | import gc
6 | import os
7 | import socket as stdlib_socket
8 | import sys
9 | import warnings
10 | from contextlib import closing, contextmanager
11 | from typing import TYPE_CHECKING, TypeVar
12 |
13 | import pytest
14 |
15 | # See trio/_tests/pytest_plugin.py for the other half of this
16 | from trio._tests.pytest_plugin import RUN_SLOW
17 |
18 | if TYPE_CHECKING:
19 | from collections.abc import Generator, Iterable, Sequence
20 |
# Mark for tests that only run when pytest is given --run-slow.
slow = pytest.mark.skipif(not RUN_SLOW, reason="use --run-slow to run slow tests")

T = TypeVar("T")

# Probe IPv6 support once, at import time. Socket *creation* and *binding*
# can fail independently, so we record two separate flags.
try:
    s = stdlib_socket.socket(stdlib_socket.AF_INET6, stdlib_socket.SOCK_STREAM, 0)
except OSError:  # pragma: no cover
    # Some systems don't even support creating an IPv6 socket, let alone
    # binding it. (ex: Linux with 'ipv6.disable=1' in the kernel command line)
    # We don't have any of those in our CI, and there's nothing that gets
    # tested _only_ if can_create_ipv6 = False, so we'll just no-cover this.
    can_create_ipv6 = False
    can_bind_ipv6 = False
else:
    can_create_ipv6 = True
    with s:
        try:
            s.bind(("::1", 0))
        except OSError:  # pragma: no cover # since support for 3.7 was removed
            can_bind_ipv6 = False
        else:
            can_bind_ipv6 = True

creates_ipv6 = pytest.mark.skipif(not can_create_ipv6, reason="need IPv6")
binds_ipv6 = pytest.mark.skipif(not can_bind_ipv6, reason="need IPv6")
46 |
47 |
def gc_collect_harder() -> None:
    """Run several GC passes so that objects with noisy ``__del__`` methods
    (e.g. unawaited coroutines) are really gone before the test continues,
    keeping their warnings out of unrelated tests.

    On PyPy, executing a ``__del__`` (to print its warning) can resurrect
    the object for a collection cycle, so one ``gc.collect()`` is not
    always enough — repeat a handful of times.
    """
    for _attempt in range(5):
        gc.collect()
60 |
61 |
62 | # Some of our tests need to leak coroutines, and thus trigger the
63 | # "RuntimeWarning: coroutine '...' was never awaited" message. This context
64 | # manager should be used anywhere this happens to hide those messages, because
65 | # when expected they're clutter.
@contextmanager
def ignore_coroutine_never_awaited_warnings() -> Generator[None, None, None]:
    """Suppress "coroutine '...' was never awaited" RuntimeWarnings.

    Some tests deliberately leak coroutines; wrap them in this so the
    expected warnings don't clutter the output. On exit we force a hard GC
    pass so the leaked coroutines' ``__del__`` methods fire while the
    filter is still active.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore", message="coroutine '.*' was never awaited")
        try:
            yield
        finally:
            # Make sure to trigger any coroutine __del__ methods now, before
            # we leave the context manager.
            gc_collect_harder()
76 |
77 |
def _noop(*args: object, **kwargs: object) -> None:
    # Accepts any arguments and does nothing; a stand-in callback.
    pass  # pragma: no cover
80 |
81 |
@contextmanager
def restore_unraisablehook() -> Generator[None, None, None]:
    """Temporarily reset ``sys.unraisablehook`` to the interpreter default,
    putting whatever hook was installed back on exit."""
    saved = sys.unraisablehook
    sys.unraisablehook = sys.__unraisablehook__
    try:
        yield
    finally:
        sys.unraisablehook = saved
89 |
90 |
def check_sequence_matches(seq: Sequence[T], template: Iterable[T | set[T]]) -> None:
    """Assert that *seq* matches *template*, where a set in the template
    stands for a run of elements that may appear in any order.

    Example: the sequences [1, 2.1, 2.2, 3] and [1, 2.2, 2.1, 3] are both
    matched by the template [1, {2.1, 2.2}, 3].
    """
    pos = 0
    for expected in template:
        group = expected if isinstance(expected, set) else {expected}
        window = set(seq[pos : pos + len(group)])
        assert window == group
        pos += len(window)
103 |
104 |
# https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=246350
# NOTE(review): release[:4] is compared lexicographically ("12.1" < "12.2"),
# which is fine for the 12.x range this targets but would misorder a
# hypothetical single-digit major release — confirm if that ever matters.
skip_if_fbsd_pipes_broken = pytest.mark.skipif(
    sys.platform != "win32"  # prevent mypy from complaining about missing uname
    and hasattr(os, "uname")
    and os.uname().sysname == "FreeBSD"
    and os.uname().release[:4] < "12.2",
    reason="hangs on FreeBSD 12.1 and earlier, due to FreeBSD bug #246350",
)
113 |
114 |
def create_asyncio_future_in_new_loop() -> asyncio.Future[object]:
    """Return an asyncio Future created on a brand-new event loop; the loop
    is closed before the future is returned."""
    loop = asyncio.new_event_loop()
    try:
        return loop.create_future()
    finally:
        loop.close()
118 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/type_tests/nursery_start.py:
--------------------------------------------------------------------------------
1 | """Test variadic generic typing for Nursery.start[_soon]()."""
2 |
3 | from typing import TYPE_CHECKING
4 |
5 | from trio import TASK_STATUS_IGNORED, Nursery, TaskStatus
6 |
7 | if TYPE_CHECKING:
8 | from collections.abc import Awaitable, Callable
9 |
10 |
# Dummy task signatures exercised by the checks below; the bodies are
# irrelevant — only the parameter shapes matter to the type checker.
async def task_0() -> None: ...


async def task_1a(value: int) -> None: ...


async def task_1b(value: str) -> None: ...


async def task_2a(a: int, b: str) -> None: ...


async def task_2b(a: str, b: int) -> None: ...


async def task_2c(a: str, b: int, optional: bool = False) -> None: ...


async def task_requires_kw(a: int, *, b: bool) -> None: ...


# Startable variants: they accept the keyword-only ``task_status`` argument
# that Nursery.start() supplies.
async def task_startable_1(
    a: str,
    *,
    task_status: TaskStatus[bool] = TASK_STATUS_IGNORED,
) -> None: ...


async def task_startable_2(
    a: str,
    b: float,
    *,
    task_status: TaskStatus[bool] = TASK_STATUS_IGNORED,
) -> None: ...


async def task_requires_start(*, task_status: TaskStatus[str]) -> None:
    """Check a function requiring start() to be used."""


async def task_pos_or_kw(value: str, task_status: TaskStatus[int]) -> None:
    """Check a function which doesn't use the *-syntax works."""
53 |
54 |
def check_start_soon(nursery: Nursery) -> None:
    """start_soon() functionality."""
    # Calls marked ``# type: ignore`` are ones the type checker is expected
    # to REJECT (wrong arity or argument types); unmarked calls must be
    # accepted. Do not "fix" the marked lines — the mismatch is the test.
    nursery.start_soon(task_0)
    nursery.start_soon(task_1a)  # type: ignore
    nursery.start_soon(task_2b)  # type: ignore

    nursery.start_soon(task_0, 45)  # type: ignore
    nursery.start_soon(task_1a, 32)
    nursery.start_soon(task_1b, 32)  # type: ignore
    nursery.start_soon(task_1a, "abc")  # type: ignore
    nursery.start_soon(task_1b, "abc")

    nursery.start_soon(task_2b, "abc")  # type: ignore
    nursery.start_soon(task_2a, 38, "46")
    nursery.start_soon(task_2c, "abc", 12, True)

    nursery.start_soon(task_2c, "abc", 12)
    task_2c_cast: Callable[[str, int], Awaitable[object]] = (
        task_2c  # The assignment makes it work.
    )
    nursery.start_soon(task_2c_cast, "abc", 12)

    nursery.start_soon(task_requires_kw, 12, True)  # type: ignore
    # Tasks following the start() API can be made to work.
    nursery.start_soon(task_startable_1, "cdf")
80 |
--------------------------------------------------------------------------------
/src/trio/_core/_tests/type_tests/run.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING, overload
4 |
5 | import trio
6 | from typing_extensions import assert_type
7 |
8 | if TYPE_CHECKING:
9 | from collections.abc import Sequence
10 |
11 |
# These coroutine functions exist only for their signatures; the bodies
# just return dummy values of the annotated types.
async def sleep_sort(values: Sequence[float]) -> list[float]:
    return [1]


async def has_optional(arg: int | None = None) -> int:
    return 5


@overload
async def foo_overloaded(arg: int) -> str: ...


@overload
async def foo_overloaded(arg: str) -> int: ...


# The implementation deliberately swaps types (str in -> int out, int in ->
# str out) to match the overloads above.
async def foo_overloaded(arg: int | str) -> int | str:
    if isinstance(arg, str):
        return 5
    return "hello"
32 |
33 |
# Check trio.run()'s signature inference: varargs typing, the keyword-only
# clock argument, and overload resolution of the async function passed in.
# Lines marked ``# type: ignore`` are calls the type checker must reject.
v = trio.run(
    sleep_sort,
    (1, 3, 5, 2, 4),
    clock=trio.testing.MockClock(autojump_threshold=0),
)
assert_type(v, "list[float]")
trio.run(sleep_sort, ["hi", "there"])  # type: ignore[arg-type]
trio.run(sleep_sort)  # type: ignore[arg-type]

r = trio.run(has_optional)
assert_type(r, int)
r = trio.run(has_optional, 5)
trio.run(has_optional, 7, 8)  # type: ignore[arg-type]
trio.run(has_optional, "hello")  # type: ignore[arg-type]


# Overloads resolve on the argument type.
assert_type(trio.run(foo_overloaded, 5), str)
assert_type(trio.run(foo_overloaded, ""), int)
52 |
--------------------------------------------------------------------------------
/src/trio/_core/_unbounded_queue.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING, Generic, TypeVar
4 |
5 | import attrs
6 |
7 | from .. import _core
8 | from .._deprecate import deprecated
9 | from .._util import final
10 |
11 | T = TypeVar("T")
12 |
13 | if TYPE_CHECKING:
14 | from typing_extensions import Self
15 |
16 |
# attrs.frozen: instances are immutable snapshots of the queue's state.
@attrs.frozen
class UnboundedQueueStatistics:
    """An object containing debugging information.

    Currently, the following fields are defined:

    * ``qsize``: The number of items currently in the queue.
    * ``tasks_waiting``: The number of tasks blocked on this queue's
      :meth:`get_batch` method.

    """

    qsize: int
    tasks_waiting: int
31 |
32 |
@final
class UnboundedQueue(Generic[T]):
    """An unbounded queue suitable for certain unusual forms of inter-task
    communication.

    This class is designed for use as a queue in cases where the producer for
    some reason cannot be subjected to back-pressure, i.e., :meth:`put_nowait`
    has to always succeed. In order to prevent the queue backlog from actually
    growing without bound, the consumer API is modified to dequeue items in
    "batches". If a consumer task processes each batch without yielding, then
    this helps achieve (but does not guarantee) an effective bound on the
    queue's memory use, at the cost of potentially increasing system latencies
    in general. You should generally prefer to use a memory channel
    instead if you can.

    Currently each batch completely empties the queue, but `this may change in
    the future <https://github.com/python-trio/trio/issues/51>`__.

    A :class:`UnboundedQueue` object can be used as an asynchronous iterator,
    where each iteration returns a new batch of items. I.e., these two loops
    are equivalent::

       async for batch in queue:
           ...

       while True:
           obj = await queue.get_batch()
           ...

    """

    @deprecated(
        "0.9.0",
        issue=497,
        thing="trio.lowlevel.UnboundedQueue",
        instead="trio.open_memory_channel(math.inf)",
        use_triodeprecationwarning=True,
    )
    def __init__(self) -> None:
        self._lot = _core.ParkingLot()
        self._data: list[T] = []
        # used to allow handoff from put to the first task in the lot
        self._can_get = False

    def __repr__(self) -> str:
        # BUG FIX: this previously returned an empty f-string (the
        # angle-bracketed text had been stripped), making repr() useless.
        return f"<UnboundedQueue holding {len(self._data)} items>"

    def qsize(self) -> int:
        """Returns the number of items currently in the queue."""
        return len(self._data)

    def empty(self) -> bool:
        """Returns True if the queue is empty, False otherwise.

        There is some subtlety to interpreting this method's return value: see
        `issue #63 <https://github.com/python-trio/trio/issues/63>`__.

        """
        return not self._data

    @_core.enable_ki_protection
    def put_nowait(self, obj: T) -> None:
        """Put an object into the queue, without blocking.

        This always succeeds, because the queue is unbounded. We don't provide
        a blocking ``put`` method, because it would never need to block.

        Args:
          obj (object): The object to enqueue.

        """
        if not self._data:
            assert not self._can_get
            if self._lot:
                # Hand the (first item of the) new batch directly to the
                # longest-waiting parked task.
                self._lot.unpark(count=1)
            else:
                # No-one is waiting; let the next get_batch* call succeed.
                self._can_get = True
        self._data.append(obj)

    def _get_batch_protected(self) -> list[T]:
        # Take everything currently queued as one batch.
        data = self._data.copy()
        self._data.clear()
        self._can_get = False
        return data

    def get_batch_nowait(self) -> list[T]:
        """Attempt to get the next batch from the queue, without blocking.

        Returns:
          list: A list of dequeued items, in order. On a successful call this
              list is always non-empty; if it would be empty we raise
              :exc:`~trio.WouldBlock` instead.

        Raises:
          ~trio.WouldBlock: if the queue is empty.

        """
        if not self._can_get:
            raise _core.WouldBlock
        return self._get_batch_protected()

    async def get_batch(self) -> list[T]:
        """Get the next batch from the queue, blocking as necessary.

        Returns:
          list: A list of dequeued items, in order. This list is always
              non-empty.

        """
        # Both paths below execute a full checkpoint: either we park, or we
        # take the fast path and then explicitly checkpoint.
        await _core.checkpoint_if_cancelled()
        if not self._can_get:
            await self._lot.park()
            return self._get_batch_protected()
        else:
            try:
                return self._get_batch_protected()
            finally:
                await _core.cancel_shielded_checkpoint()

    def statistics(self) -> UnboundedQueueStatistics:
        """Return an :class:`UnboundedQueueStatistics` object containing debugging information."""
        return UnboundedQueueStatistics(
            qsize=len(self._data),
            tasks_waiting=self._lot.statistics().tasks_waiting,
        )

    def __aiter__(self) -> Self:
        return self

    async def __anext__(self) -> list[T]:
        return await self.get_batch()
164 |
--------------------------------------------------------------------------------
/src/trio/_core/_wakeup_socketpair.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import contextlib
4 | import signal
5 | import socket
6 | import warnings
7 |
8 | from .. import _core
9 | from .._util import is_main_thread
10 |
11 |
class WakeupSocketpair:
    """A connected socket pair used to wake the event loop from other
    threads or signal handlers: sending a byte on ``write_sock`` makes
    ``wakeup_sock`` readable."""

    def __init__(self) -> None:
        # explicitly typed to please `pyright --verifytypes` without `--ignoreexternal`
        self.wakeup_sock: socket.socket
        self.write_sock: socket.socket

        self.wakeup_sock, self.write_sock = socket.socketpair()
        self.wakeup_sock.setblocking(False)
        self.write_sock.setblocking(False)
        # This somewhat reduces the amount of memory wasted queueing up data
        # for wakeups. With these settings, maximum number of 1-byte sends
        # before getting BlockingIOError:
        # Linux 4.8: 6
        # macOS (darwin 15.5): 1
        # Windows 10: 525347
        # Windows you're weird. (And on Windows setting SNDBUF to 0 makes send
        # blocking, even on non-blocking sockets, so don't do that.)
        self.wakeup_sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1)
        self.write_sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1)
        # On Windows this is a TCP socket so this might matter. On other
        # platforms this fails b/c AF_UNIX sockets aren't actually TCP.
        with contextlib.suppress(OSError):
            self.write_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        # Previous signal.set_wakeup_fd() return value, once we install ours.
        self.old_wakeup_fd: int | None = None

    def wakeup_thread_and_signal_safe(self) -> None:
        """Poke the wakeup socket. A full send buffer (BlockingIOError) just
        means a wakeup is already pending, so it is silently ignored."""
        with contextlib.suppress(BlockingIOError):
            self.write_sock.send(b"\x00")

    async def wait_woken(self) -> None:
        """Sleep until someone writes to ``write_sock``, then drain it."""
        await _core.wait_readable(self.wakeup_sock)
        self.drain()

    def drain(self) -> None:
        # Consume all pending wakeup bytes; the socket is non-blocking, so
        # BlockingIOError signals it's empty.
        try:
            while True:
                self.wakeup_sock.recv(2**16)
        except BlockingIOError:
            pass

    def wakeup_on_signals(self) -> None:
        """Install ``write_sock`` as the interpreter's signal wakeup fd.

        Only possible from the main thread; elsewhere this is a no-op.
        """
        assert self.old_wakeup_fd is None
        if not is_main_thread():
            return
        fd = self.write_sock.fileno()
        self.old_wakeup_fd = signal.set_wakeup_fd(fd, warn_on_full_buffer=False)
        if self.old_wakeup_fd != -1:
            # -1 means no wakeup fd was previously set; anything else
            # suggests another library had already claimed it.
            warnings.warn(
                RuntimeWarning(
                    "It looks like Trio's signal handling code might have "
                    "collided with another library you're using. If you're "
                    "running Trio in guest mode, then this might mean you "
                    "should set host_uses_signal_set_wakeup_fd=True. "
                    "Otherwise, file a bug on Trio and we'll help you figure "
                    "out what's going on.",
                ),
                stacklevel=1,
            )

    def close(self) -> None:
        """Close both sockets and restore any previously-set wakeup fd."""
        self.wakeup_sock.close()
        self.write_sock.close()
        if self.old_wakeup_fd is not None:
            signal.set_wakeup_fd(self.old_wakeup_fd)
76 |
--------------------------------------------------------------------------------
/src/trio/_highlevel_generic.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING, Generic, TypeVar
4 |
5 | import attrs
6 |
7 | import trio
8 | from trio._util import final
9 |
10 | from .abc import AsyncResource, HalfCloseableStream, ReceiveStream, SendStream
11 |
12 | if TYPE_CHECKING:
13 | from typing_extensions import TypeGuard
14 |
15 |
16 | SendStreamT = TypeVar("SendStreamT", bound=SendStream)
17 | ReceiveStreamT = TypeVar("ReceiveStreamT", bound=ReceiveStream)
18 |
19 |
async def aclose_forcefully(resource: AsyncResource) -> None:
    """Immediately close an async resource, skipping any graceful cleanup.

    :class:`~trio.abc.AsyncResource` objects promise that even if their
    :meth:`~trio.abc.AsyncResource.aclose` method is cancelled, the resource
    still ends up closed (just perhaps not gracefully). This helper exploits
    that promise: it invokes ``await resource.aclose()`` inside a cancel
    scope that has already been cancelled, so the close happens without ever
    blocking.

    Most users won't need this, but it can help on cleanup paths that cannot
    afford to block, or when you simply don't care about a graceful shutdown.
    For example, when :class:`~trio.SSLStream` hits an error and cannot
    perform its own graceful close, there's no point in gracefully shutting
    down the underlying transport either, so it calls ``await
    aclose_forcefully(self.transport_stream)``.

    Note that this function is async and acts as a checkpoint, but unlike
    most async functions it cannot block indefinitely (at least, assuming
    the underlying resource object is correctly implemented).

    """
    with trio.CancelScope() as scope:
        scope.cancel(reason="cancelled during aclose_forcefully")
        await resource.aclose()
48 |
49 |
50 | def _is_halfclosable(stream: SendStream) -> TypeGuard[HalfCloseableStream]:
51 | """Check if the stream has a send_eof() method."""
52 | return hasattr(stream, "send_eof")
53 |
54 |
@final
@attrs.define(eq=False, slots=False)
class StapledStream(
    HalfCloseableStream,
    Generic[SendStreamT, ReceiveStreamT],
):
    """This class `staples <https://en.wikipedia.org/wiki/Staple_(fastener)>`__
    together two unidirectional streams to make a single bidirectional stream.

    Args:
      send_stream (~trio.abc.SendStream): The stream to use for sending.
      receive_stream (~trio.abc.ReceiveStream): The stream to use for
          receiving.

    Example:

       A silly way to make a stream that echoes back whatever you write to
       it::

          left, right = trio.testing.memory_stream_pair()
          echo_stream = StapledStream(SocketStream(left), SocketStream(right))
          await echo_stream.send_all(b"x")
          assert await echo_stream.receive_some() == b"x"

    :class:`StapledStream` objects implement the methods in the
    :class:`~trio.abc.HalfCloseableStream` interface. They also have two
    additional public attributes:

    .. attribute:: send_stream

       The underlying :class:`~trio.abc.SendStream`. :meth:`send_all` and
       :meth:`wait_send_all_might_not_block` are delegated to this object.

    .. attribute:: receive_stream

       The underlying :class:`~trio.abc.ReceiveStream`. :meth:`receive_some`
       is delegated to this object.

    """

    send_stream: SendStreamT
    receive_stream: ReceiveStreamT

    async def send_all(self, data: bytes | bytearray | memoryview) -> None:
        """Calls ``self.send_stream.send_all``."""
        return await self.send_stream.send_all(data)

    async def wait_send_all_might_not_block(self) -> None:
        """Calls ``self.send_stream.wait_send_all_might_not_block``."""
        return await self.send_stream.wait_send_all_might_not_block()

    async def send_eof(self) -> None:
        """Shuts down the send side of the stream.

        If :meth:`self.send_stream.send_eof()
        <trio.abc.HalfCloseableStream.send_eof>` exists, then this calls it.
        Otherwise, this calls :meth:`self.send_stream.aclose()
        <trio.abc.AsyncResource.aclose>`.
        """
        stream = self.send_stream
        if _is_halfclosable(stream):
            return await stream.send_eof()
        else:
            return await stream.aclose()

    # we intentionally accept more types from the caller than we support returning
    async def receive_some(self, max_bytes: int | None = None) -> bytes:
        """Calls ``self.receive_stream.receive_some``."""
        return await self.receive_stream.receive_some(max_bytes)

    async def aclose(self) -> None:
        """Calls ``aclose`` on both underlying streams."""
        # try/finally so the receive side gets closed even if closing the
        # send side raises.
        try:
            await self.send_stream.aclose()
        finally:
            await self.receive_stream.aclose()
130 |
--------------------------------------------------------------------------------
/src/trio/_highlevel_open_unix_stream.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import os
4 | from contextlib import contextmanager
5 | from typing import TYPE_CHECKING, Protocol, TypeVar
6 |
7 | import trio
8 | from trio.socket import SOCK_STREAM, socket
9 |
10 | if TYPE_CHECKING:
11 | from collections.abc import Generator
12 |
13 |
class Closable(Protocol):
    # Structural type: anything with a synchronous close() method.
    def close(self) -> None: ...


# Bound TypeVar so close_on_error() yields the same type it was given.
CloseT = TypeVar("CloseT", bound=Closable)
19 |
20 |
21 | try:
22 | from trio.socket import AF_UNIX
23 |
24 | has_unix = True
25 | except ImportError:
26 | has_unix = False
27 |
28 |
@contextmanager
def close_on_error(obj: CloseT) -> Generator[CloseT, None, None]:
    """Context manager that yields ``obj`` and calls ``obj.close()`` if the
    body raises.

    Any exception -- including ``BaseException`` subclasses such as
    cancellation -- triggers the close and is then re-raised.
    """
    try:
        yield obj
    except BaseException:
        obj.close()
        raise
36 |
37 |
async def open_unix_socket(
    filename: str | bytes | os.PathLike[str] | os.PathLike[bytes],
) -> trio.SocketStream:
    """Opens a connection to the specified Unix domain socket.

    You must have read/write permission on the specified file to connect.

    Args:
      filename (str or bytes): The filename to open the connection to.

    Returns:
      SocketStream: a :class:`~trio.abc.Stream` connected to the given file.

    Raises:
      OSError: If the socket file could not be connected to.
      RuntimeError: If AF_UNIX sockets are not supported.
    """
    if not has_unix:
        raise RuntimeError("Unix sockets are not supported on this platform")

    # Much simpler than the TCP case: there is exactly one socket type and
    # exactly one address to try.
    sock = socket(AF_UNIX, SOCK_STREAM)
    with close_on_error(sock):
        await sock.connect(os.fspath(filename))
    return trio.SocketStream(sock)
66 |
--------------------------------------------------------------------------------
/src/trio/_repl.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import ast
4 | import contextlib
5 | import inspect
6 | import sys
7 | import types
8 | import warnings
9 | from code import InteractiveConsole
10 |
11 | import outcome
12 |
13 | import trio
14 | import trio.lowlevel
15 | from trio._util import final
16 |
17 |
@final
class TrioInteractiveConsole(InteractiveConsole):
    """An ``InteractiveConsole`` whose statements execute under a Trio run.

    The console itself runs in a worker thread; each compiled statement is
    shipped to the Trio thread via ``trio.from_thread``, which is also what
    makes top-level ``await`` work.
    """

    def __init__(self, repl_locals: dict[str, object] | None = None) -> None:
        super().__init__(locals=repl_locals)
        # Make compile() accept "await" outside an async def.
        self.compile.compiler.flags |= ast.PyCF_ALLOW_TOP_LEVEL_AWAIT

    def runcode(self, code: types.CodeType) -> None:
        # https://github.com/python/typeshed/issues/13768
        func = types.FunctionType(code, self.locals)  # type: ignore[arg-type]
        if inspect.iscoroutinefunction(func):
            result = trio.from_thread.run(outcome.acapture, func)
        else:
            result = trio.from_thread.run_sync(outcome.capture, func)

        if not isinstance(result, outcome.Error):
            return

        error = result.error
        # If it is SystemExit, quit the repl. Otherwise, print the traceback.
        # A SystemExit inside a BaseExceptionGroup probably isn't the user
        # trying to quit the repl, but rather an error in the code, so we
        # don't inspect groups for SystemExit: we just print and return to
        # the REPL.
        if isinstance(error, SystemExit):
            raise error

        # Inline our own version of self.showtraceback that can use
        # outcome.Error.error directly to print clean tracebacks.
        # This also means overriding self.showtraceback does nothing.
        sys.last_type, sys.last_value = type(error), error
        sys.last_traceback = error.__traceback__
        # see https://docs.python.org/3/library/sys.html#sys.last_exc
        if sys.version_info >= (3, 12):
            sys.last_exc = error

        # We always use sys.excepthook, unlike other implementations.
        # This means that overriding self.write also does nothing to tbs.
        sys.excepthook(sys.last_type, sys.last_value, sys.last_traceback)
52 |
53 |
async def run_repl(console: TrioInteractiveConsole) -> None:
    """Runs ``console.interact`` in a worker thread until the user exits."""
    prompt = getattr(sys, "ps1", ">>> ")
    banner = (
        f"trio REPL {sys.version} on {sys.platform}\n"
        'Use "await" directly instead of "trio.run()".\n'
        'Type "help", "copyright", "credits" or "license" '
        "for more information.\n"
        f"{prompt}import trio"
    )
    try:
        await trio.to_thread.run_sync(console.interact, banner)
    finally:
        # The user may leave never-awaited coroutines behind; suppress the
        # RuntimeWarnings they would otherwise emit at shutdown.
        warnings.filterwarnings(
            "ignore",
            message=r"^coroutine .* was never awaited$",
            category=RuntimeWarning,
        )
70 |
71 |
def main(original_locals: dict[str, object]) -> None:
    """Entry point: build the REPL namespace and run the Trio console."""
    # Enable line editing / history when readline is available.
    with contextlib.suppress(ImportError):
        import readline  # noqa: F401

    # Seed the namespace with trio plus the caller's dunder globals.
    repl_locals: dict[str, object] = {"trio": trio}
    for key in (
        "__name__",
        "__package__",
        "__loader__",
        "__spec__",
        "__builtins__",
        "__file__",
    ):
        repl_locals[key] = original_locals[key]

    trio.run(run_repl, TrioInteractiveConsole(repl_locals))
89 |
--------------------------------------------------------------------------------
/src/trio/_subprocess_platform/__init__.py:
--------------------------------------------------------------------------------
1 | # Platform-specific subprocess bits'n'pieces.
2 | from __future__ import annotations
3 |
4 | import os
5 | import sys
6 | from typing import TYPE_CHECKING
7 |
8 | import trio
9 |
10 | from .. import _core, _subprocess
11 | from .._abc import ReceiveStream, SendStream # noqa: TC001
12 |
13 | _wait_child_exiting_error: ImportError | None = None
14 | _create_child_pipe_error: ImportError | None = None
15 |
16 |
if TYPE_CHECKING:
    # internal types for the pipe representations used in type checking only
    class ClosableSendStream(SendStream):
        # A SendStream that can additionally be closed synchronously.
        def close(self) -> None: ...

    class ClosableReceiveStream(ReceiveStream):
        # A ReceiveStream that can additionally be closed synchronously.
        def close(self) -> None: ...
24 |
25 |
# Fallback versions of the functions provided -- implementations
# per OS are imported atop these at the bottom of the module.
async def wait_child_exiting(process: _subprocess.Process) -> None:
    """Block until the child process managed by ``process`` is exiting.

    It is invalid to call this function if the process has already
    been waited on; that is, ``process.returncode`` must be None.

    When this function returns, it indicates that a call to
    :meth:`subprocess.Popen.wait` will immediately be able to
    return the process's exit status. The actual exit status is not
    consumed by this call, since :class:`~subprocess.Popen` wants
    to be able to do that itself.
    """
    # Only reached if the platform-specific import at the bottom of this
    # module failed; chain that ImportError so the cause is visible.
    raise NotImplementedError from _wait_child_exiting_error  # pragma: no cover
41 |
42 |
def create_pipe_to_child_stdin() -> tuple[ClosableSendStream, int]:
    """Create a new pipe suitable for sending data from this
    process to the standard input of a child we're about to spawn.

    Returns:
        A pair ``(trio_end, subprocess_end)`` where ``trio_end`` is a
        :class:`~trio.abc.SendStream` and ``subprocess_end`` is
        something suitable for passing as the ``stdin`` argument of
        :class:`subprocess.Popen`.
    """
    # Fallback; replaced by a platform-specific implementation at the bottom
    # of this module when one is available.
    raise NotImplementedError from _create_child_pipe_error  # pragma: no cover
54 |
55 |
def create_pipe_from_child_output() -> tuple[ClosableReceiveStream, int]:
    """Create a new pipe suitable for receiving data into this
    process from the standard output or error stream of a child
    we're about to spawn.

    Returns:
        A pair ``(trio_end, subprocess_end)`` where ``trio_end`` is a
        :class:`~trio.abc.ReceiveStream` and ``subprocess_end`` is
        something suitable for passing as the ``stdout`` or ``stderr``
        argument of :class:`subprocess.Popen`.
    """
    # Fallback; replaced by a platform-specific implementation at the bottom
    # of this module when one is available.
    raise NotImplementedError from _create_child_pipe_error  # pragma: no cover
68 |
69 |
# Replace the fallback wait_child_exiting with the platform implementation:
# Windows uses process handles, kqueue platforms use NOTE_EXIT, and Linux
# (or anything without kqueue) falls back to a waitid() thread.
try:
    if sys.platform == "win32":
        from .windows import wait_child_exiting
    elif sys.platform != "linux" and (TYPE_CHECKING or hasattr(_core, "wait_kevent")):
        from .kqueue import wait_child_exiting
    else:
        # as it's an exported symbol, noqa'd
        from .waitid import wait_child_exiting  # noqa: F401
except ImportError as ex:  # pragma: no cover
    # Remember the failure so the fallback's NotImplementedError can chain it.
    _wait_child_exiting_error = ex

# Replace the fallback pipe constructors with per-OS implementations.
try:
    if TYPE_CHECKING:
        # Not worth type checking these definitions
        pass

    elif os.name == "posix":

        def create_pipe_to_child_stdin() -> tuple[trio.lowlevel.FdStream, int]:
            rfd, wfd = os.pipe()
            return trio.lowlevel.FdStream(wfd), rfd

        def create_pipe_from_child_output() -> tuple[trio.lowlevel.FdStream, int]:
            rfd, wfd = os.pipe()
            return trio.lowlevel.FdStream(rfd), wfd

    elif os.name == "nt":
        import msvcrt

        # This isn't exported or documented, but it's also not
        # underscore-prefixed, and seems kosher to use. The asyncio docs
        # for 3.5 included an example that imported socketpair from
        # windows_utils (before socket.socketpair existed on Windows), and
        # when asyncio.windows_utils.socketpair was removed in 3.7, the
        # removal was mentioned in the release notes.
        from asyncio.windows_utils import pipe as windows_pipe

        from .._windows_pipes import PipeReceiveStream, PipeSendStream

        def create_pipe_to_child_stdin() -> tuple[PipeSendStream, int]:
            # for stdin, we want the write end (our end) to use overlapped I/O
            rh, wh = windows_pipe(overlapped=(False, True))
            return PipeSendStream(wh), msvcrt.open_osfhandle(rh, os.O_RDONLY)

        def create_pipe_from_child_output() -> tuple[PipeReceiveStream, int]:
            # for stdout/err, it's the read end that's overlapped
            rh, wh = windows_pipe(overlapped=(True, False))
            return PipeReceiveStream(rh), msvcrt.open_osfhandle(wh, 0)

    else:  # pragma: no cover
        raise ImportError("pipes not implemented on this platform")

except ImportError as ex:  # pragma: no cover
    # Remember the failure so the fallbacks' NotImplementedError can chain it.
    _create_child_pipe_error = ex
124 |
--------------------------------------------------------------------------------
/src/trio/_subprocess_platform/kqueue.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import select
4 | import sys
5 | from typing import TYPE_CHECKING
6 |
7 | from .. import _core, _subprocess
8 |
9 | assert (sys.platform != "win32" and sys.platform != "linux") or not TYPE_CHECKING
10 |
11 |
async def wait_child_exiting(process: _subprocess.Process) -> None:
    """kqueue implementation: wait for a NOTE_EXIT event on ``process.pid``."""
    kqueue = _core.current_kqueue()
    try:
        from select import KQ_NOTE_EXIT
    except ImportError:  # pragma: no cover
        # pypy doesn't define KQ_NOTE_EXIT:
        # https://bitbucket.org/pypy/pypy/issues/2921/
        # I verified this value against both Darwin and FreeBSD
        KQ_NOTE_EXIT = 0x80000000

    def make_event(flags: int) -> select.kevent:
        # Build a kevent describing exit notification for process.pid.
        return select.kevent(
            process.pid,
            filter=select.KQ_FILTER_PROC,
            flags=flags,
            fflags=KQ_NOTE_EXIT,
        )

    try:
        # Register interest; ONESHOT means the event deregisters on delivery.
        kqueue.control([make_event(select.KQ_EV_ADD | select.KQ_EV_ONESHOT)], 0)
    except ProcessLookupError:  # pragma: no cover
        # This can supposedly happen if the process is in the process
        # of exiting, and it can even be the case that kqueue says the
        # process doesn't exist before waitpid(WNOHANG) says it hasn't
        # exited yet. See the discussion in https://chromium.googlesource.com/
        # chromium/src/base/+/master/process/kill_mac.cc .
        # We haven't actually seen this error occur since we added
        # locking to prevent multiple calls to wait_child_exiting()
        # for the same process simultaneously, but given the explanation
        # in Chromium it seems we should still keep the check.
        return

    def abort(_: _core.RaiseCancelT) -> _core.Abort:
        # On cancellation, drop our kevent registration before aborting.
        kqueue.control([make_event(select.KQ_EV_DELETE)], 0)
        return _core.Abort.SUCCEEDED

    await _core.wait_kevent(process.pid, select.KQ_FILTER_PROC, abort)
49 |
--------------------------------------------------------------------------------
/src/trio/_subprocess_platform/waitid.py:
--------------------------------------------------------------------------------
1 | import errno
2 | import math
3 | import os
4 | import sys
5 | from typing import TYPE_CHECKING
6 |
7 | from .. import _core, _subprocess
8 | from .._sync import CapacityLimiter, Event
9 | from .._threads import to_thread_run_sync
10 |
11 | assert (sys.platform != "win32" and sys.platform != "darwin") or not TYPE_CHECKING
12 |
# Pick an implementation of sync_wait_reapable(): os.waitid when CPython
# exposes it, otherwise call waitid(2) directly through cffi (pypy).
try:
    from os import waitid

    def sync_wait_reapable(pid: int) -> None:
        # WNOWAIT: block until the child is reapable without consuming its
        # exit status, so Popen.wait() can still collect it.
        waitid(os.P_PID, pid, os.WEXITED | os.WNOWAIT)

except ImportError:
    # pypy doesn't define os.waitid so we need to pull it out ourselves
    # using cffi: https://bitbucket.org/pypy/pypy/issues/2922/
    import cffi

    waitid_ffi = cffi.FFI()

    # Believe it or not, siginfo_t starts with fields in the
    # same layout on both Linux and Darwin. The Linux structure
    # is bigger so that's what we use to size `pad`; while
    # there are a few extra fields in there, most of it is
    # true padding which would not be written by the syscall.
    waitid_ffi.cdef(
        """
typedef struct siginfo_s {
    int si_signo;
    int si_errno;
    int si_code;
    int si_pid;
    int si_uid;
    int si_status;
    int pad[26];
} siginfo_t;
int waitid(int idtype, int id, siginfo_t* result, int options);
""",
    )
    waitid_cffi = waitid_ffi.dlopen(None).waitid  # type: ignore[attr-defined]

    def sync_wait_reapable(pid: int) -> None:
        # Numeric constants mirror the platform headers, since os doesn't
        # expose them here.
        P_PID = 1
        WEXITED = 0x00000004
        if sys.platform == "darwin":  # pragma: no cover
            # waitid() is not exposed on Python on Darwin but does
            # work through CFFI; note that we typically won't get
            # here since Darwin also defines kqueue
            WNOWAIT = 0x00000020
        else:
            WNOWAIT = 0x01000000
        result = waitid_ffi.new("siginfo_t *")
        # Retry on EINTR; any other error is raised as OSError.
        while waitid_cffi(P_PID, pid, result, WEXITED | WNOWAIT) < 0:
            got_errno = waitid_ffi.errno
            if got_errno == errno.EINTR:
                continue
            raise OSError(got_errno, os.strerror(got_errno))
63 |
64 |
65 | # adapted from
66 | # https://github.com/python-trio/trio/issues/4#issuecomment-398967572
67 |
68 | waitid_limiter = CapacityLimiter(math.inf)
69 |
70 |
async def _waitid_system_task(pid: int, event: Event) -> None:
    """Spawn a thread that waits for ``pid`` to exit, then wake any tasks
    that were waiting on it by setting ``event``.
    """
    # abandon_on_cancel=True: if this task is cancelled, then we abandon the
    # thread to keep running waitpid in the background. Since this is
    # always run as a system task, this will only happen if the whole
    # call to trio.run is shutting down.

    try:
        await to_thread_run_sync(
            sync_wait_reapable,
            pid,
            abandon_on_cancel=True,
            limiter=waitid_limiter,
        )
    except OSError:
        # If waitid fails, waitpid will fail too, so it still makes
        # sense to wake up the callers of wait_process_exiting(). The
        # most likely reason for this error in practice is a child
        # exiting when wait() is not possible because SIGCHLD is
        # ignored.
        pass
    finally:
        # Always wake waiters, whether waitid succeeded or failed.
        event.set()
96 |
97 |
async def wait_child_exiting(process: "_subprocess.Process") -> None:
    """waitid-based implementation of wait_child_exiting().

    The first call for a process creates an Event and spawns a system task
    (``_waitid_system_task``) that sets the Event when waitid() completes.
    If that first call is cancelled, later calls reuse the stored Event
    rather than spawning another thread for the same pid.
    """
    event = process._wait_for_exit_data
    if event is None:
        event = process._wait_for_exit_data = Event()
        _core.spawn_system_task(_waitid_system_task, process.pid, event)
    assert isinstance(event, Event)
    await event.wait()
114 |
--------------------------------------------------------------------------------
/src/trio/_subprocess_platform/windows.py:
--------------------------------------------------------------------------------
1 | from typing import TYPE_CHECKING
2 |
3 | from .._wait_for_object import WaitForSingleObject
4 |
5 | if TYPE_CHECKING:
6 | from .. import _subprocess
7 |
8 |
async def wait_child_exiting(process: "_subprocess.Process") -> None:
    """Windows implementation: wait on the child's process handle, which is
    signaled when the process exits."""
    # _handle is not in Popen stubs, though it is present on Windows.
    await WaitForSingleObject(int(process._proc._handle))  # type: ignore[attr-defined]
12 |
--------------------------------------------------------------------------------
/src/trio/_tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/_tests/__init__.py
--------------------------------------------------------------------------------
/src/trio/_tests/_check_type_completeness.json:
--------------------------------------------------------------------------------
1 | {
2 | "Darwin": [
3 | "No docstring found for function \"trio._unix_pipes.FdStream.send_all\"",
4 | "No docstring found for function \"trio._unix_pipes.FdStream.wait_send_all_might_not_block\"",
5 | "No docstring found for function \"trio._unix_pipes.FdStream.receive_some\"",
6 | "No docstring found for function \"trio._unix_pipes.FdStream.close\"",
7 | "No docstring found for function \"trio._unix_pipes.FdStream.aclose\"",
8 | "No docstring found for function \"trio._unix_pipes.FdStream.fileno\""
9 | ],
10 | "Linux": [
11 | "No docstring found for class \"trio._core._io_epoll._EpollStatistics\"",
12 | "No docstring found for function \"trio._unix_pipes.FdStream.send_all\"",
13 | "No docstring found for function \"trio._unix_pipes.FdStream.wait_send_all_might_not_block\"",
14 | "No docstring found for function \"trio._unix_pipes.FdStream.receive_some\"",
15 | "No docstring found for function \"trio._unix_pipes.FdStream.close\"",
16 | "No docstring found for function \"trio._unix_pipes.FdStream.aclose\"",
17 | "No docstring found for function \"trio._unix_pipes.FdStream.fileno\""
18 | ],
19 | "Windows": [],
20 | "all": [
21 | "No docstring found for class \"trio.MemoryReceiveChannel\"",
22 | "No docstring found for class \"trio._channel.MemoryReceiveChannel\"",
23 | "No docstring found for class \"trio.MemoryChannelStatistics\"",
24 | "No docstring found for class \"trio._channel.MemoryChannelStatistics\"",
25 | "No docstring found for class \"trio.MemorySendChannel\"",
26 | "No docstring found for class \"trio._channel.MemorySendChannel\"",
27 | "No docstring found for class \"trio._core._run.Task\"",
28 | "No docstring found for class \"trio._socket.SocketType\"",
29 | "No docstring found for function \"trio._highlevel_socket.SocketStream.send_all\"",
30 | "No docstring found for function \"trio._highlevel_socket.SocketStream.wait_send_all_might_not_block\"",
31 | "No docstring found for function \"trio._highlevel_socket.SocketStream.send_eof\"",
32 | "No docstring found for function \"trio._highlevel_socket.SocketStream.receive_some\"",
33 | "No docstring found for function \"trio._highlevel_socket.SocketStream.aclose\"",
34 | "No docstring found for function \"trio._subprocess.HasFileno.fileno\"",
35 | "No docstring found for class \"trio._sync.AsyncContextManagerMixin\"",
36 | "No docstring found for function \"trio._sync._HasAcquireRelease.acquire\"",
37 | "No docstring found for function \"trio._sync._HasAcquireRelease.release\"",
38 | "No docstring found for class \"trio._sync._LockImpl\"",
39 | "No docstring found for class \"trio._core._local._NoValue\"",
40 | "No docstring found for class \"trio._core._local.RunVarToken\"",
41 | "No docstring found for class \"trio.lowlevel.RunVarToken\"",
42 | "No docstring found for class \"trio.lowlevel.Task\"",
43 | "No docstring found for class \"trio.socket.SocketType\"",
44 | "No docstring found for class \"trio.socket.gaierror\"",
45 | "No docstring found for class \"trio.socket.herror\"",
46 | "No docstring found for function \"trio._core._mock_clock.MockClock.start_clock\"",
47 | "No docstring found for function \"trio._core._mock_clock.MockClock.current_time\"",
48 | "No docstring found for function \"trio._core._mock_clock.MockClock.deadline_to_sleep_time\"",
49 | "No docstring found for function \"trio.testing._raises_group._ExceptionInfo.exconly\"",
50 | "No docstring found for function \"trio.testing._raises_group._ExceptionInfo.errisinstance\"",
51 | "No docstring found for function \"trio.testing._raises_group._ExceptionInfo.getrepr\"",
52 | "No docstring found for function \"trio.testing._raises_group.RaisesGroup.expected_type\""
53 | ]
54 | }
55 |
--------------------------------------------------------------------------------
/src/trio/_tests/astrill-codesigning-cert.cer:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/_tests/astrill-codesigning-cert.cer
--------------------------------------------------------------------------------
/src/trio/_tests/module_with_deprecations.py:
--------------------------------------------------------------------------------
# Fixture module for the deprecation tests: runs
# _deprecate.deprecate_attributes() against a real module object.
regular = "hi"

import sys

from .. import _deprecate

_deprecate.deprecate_attributes(
    __name__,
    {
        "dep1": _deprecate.DeprecatedAttribute("value1", "1.1", issue=1),
        "dep2": _deprecate.DeprecatedAttribute(
            "value2",
            "1.2",
            issue=1,
            instead="instead-string",
        ),
    },
)

# Sanity checks at import time: regular attribute access still works, and
# the deprecated names are not real module globals (presumably they are
# served via attribute interception -- see _deprecate for the mechanism).
this_mod = sys.modules[__name__]
assert this_mod.regular == "hi"
assert "dep1" not in globals()
--------------------------------------------------------------------------------
/src/trio/_tests/pytest_plugin.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import inspect
4 | from typing import NoReturn
5 |
6 | import pytest
7 |
8 | from ..testing import MockClock, trio_test
9 |
10 | RUN_SLOW = True
11 | SKIP_OPTIONAL_IMPORTS = False
12 |
13 |
def pytest_addoption(parser: pytest.Parser) -> None:
    """Register trio's extra command-line options with pytest."""
    parser.addoption("--run-slow", action="store_true", help="run slow tests")
    parser.addoption(
        "--skip-optional-imports",
        action="store_true",
        help="skip tests that rely on libraries not required by trio itself",
    )
21 |
22 |
def pytest_configure(config: pytest.Config) -> None:
    """Cache the command-line option values in module globals so test
    helpers can consult them without access to the pytest config."""
    global RUN_SLOW, SKIP_OPTIONAL_IMPORTS
    RUN_SLOW = config.getoption("--run-slow", default=True)
    SKIP_OPTIONAL_IMPORTS = config.getoption("--skip-optional-imports", default=False)
28 |
29 |
@pytest.fixture
def mock_clock() -> MockClock:
    # A fresh, manually-driven MockClock for each test.
    return MockClock()
33 |
34 |
@pytest.fixture
def autojump_clock() -> MockClock:
    # autojump_threshold=0: the clock auto-advances as soon as the run is
    # idle (see MockClock's documentation for the exact semantics).
    return MockClock(autojump_threshold=0)
38 |
39 |
40 | # FIXME: split off into a package (or just make part of Trio's public
41 | # interface?), with config file to enable? and I guess a mark option too; I
42 | # guess it's useful with the class- and file-level marking machinery (where
43 | # the raw @trio_test decorator isn't enough).
@pytest.hookimpl(tryfirst=True)
def pytest_pyfunc_call(pyfuncitem: pytest.Function) -> None:
    # Wrap bare async test functions in trio_test so pytest can call them.
    if inspect.iscoroutinefunction(pyfuncitem.obj):
        pyfuncitem.obj = trio_test(pyfuncitem.obj)
48 |
49 |
def skip_if_optional_else_raise(error: ImportError) -> NoReturn:
    """Skip the calling test module when ``--skip-optional-imports`` was
    given; otherwise re-raise ``error``."""
    if not SKIP_OPTIONAL_IMPORTS:  # pragma: no cover
        raise error
    pytest.skip(error.msg, allow_module_level=True)
55 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_abc.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import attrs
4 | import pytest
5 |
6 | from .. import abc as tabc
7 | from ..lowlevel import Task
8 |
9 |
def test_instrument_implements_hook_methods() -> None:
    """The base Instrument provides a callable no-op for every hook."""
    # Hook name -> the positional arguments that hook accepts.  (Renamed
    # from `attrs` to avoid shadowing the imported attrs package.)
    hook_args: dict[str, tuple[object, ...]] = {
        "before_run": (),
        "after_run": (),
        "task_spawned": (Task,),
        "task_scheduled": (Task,),
        "before_task_step": (Task,),
        "after_task_step": (Task,),
        "task_exited": (Task,),
        "before_io_wait": (3.3,),
        "after_io_wait": (3.3,),
    }

    instrument = tabc.Instrument()

    for name, args in hook_args.items():
        assert hasattr(instrument, name)
        hook = getattr(instrument, name)
        assert callable(hook)
        hook(*args)
30 |
31 |
async def test_AsyncResource_defaults() -> None:
    """The default async-with support calls aclose() exactly once on exit."""

    @attrs.define(slots=False)
    class MyAR(tabc.AsyncResource):
        record: list[str] = attrs.Factory(list)

        async def aclose(self) -> None:
            self.record.append("ac")

    async with MyAR() as resource:
        assert isinstance(resource, MyAR)
        assert resource.record == []

    assert resource.record == ["ac"]
45 |
46 |
def test_abc_generics() -> None:
    """Subclassing a parameterized, slotted generic ABC works."""
    # Pythons below 3.5.2 had a typing.Generic that would throw
    # errors when instantiating or subclassing a parameterized
    # version of a class with any __slots__. This is why RunVar
    # (which has slots) is not generic. This tests that
    # the generic ABCs are fine, because while they are slotted
    # they don't actually define any slots.

    class SlottedChannel(tabc.SendChannel[tabc.Stream]):
        __slots__ = ("x",)

        def send_nowait(self, value: object) -> None:
            raise RuntimeError

        async def send(self, value: object) -> None:
            raise RuntimeError  # pragma: no cover

        def clone(self) -> None:
            raise RuntimeError  # pragma: no cover

        async def aclose(self) -> None:
            pass  # pragma: no cover

    channel = SlottedChannel()
    with pytest.raises(RuntimeError):
        channel.send_nowait(None)
73 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_contextvars.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import contextvars
4 |
5 | from .. import _core
6 |
7 | trio_testing_contextvar: contextvars.ContextVar[str] = contextvars.ContextVar(
8 | "trio_testing_contextvar",
9 | )
10 |
11 |
async def test_contextvars_default() -> None:
    """A child task sees the contextvar value set by its parent."""
    trio_testing_contextvar.set("main")
    record: list[str] = []

    async def child() -> None:
        record.append(trio_testing_contextvar.get())

    async with _core.open_nursery() as nursery:
        nursery.start_soon(child)
    assert record == ["main"]
23 |
24 |
async def test_contextvars_set() -> None:
    """Setting a contextvar in a child task does not leak to the parent."""
    trio_testing_contextvar.set("main")
    record: list[str] = []

    async def child() -> None:
        trio_testing_contextvar.set("child")
        record.append(trio_testing_contextvar.get())

    async with _core.open_nursery() as nursery:
        nursery.start_soon(child)
    assert record == ["child"]
    assert trio_testing_contextvar.get() == "main"
39 |
40 |
async def test_contextvars_copy() -> None:
    """Tasks started via Context.run see the copied context's values."""
    trio_testing_contextvar.set("main")
    context = contextvars.copy_context()
    trio_testing_contextvar.set("second_main")
    record: list[str] = []

    async def child() -> None:
        record.append(trio_testing_contextvar.get())

    async with _core.open_nursery() as nursery:
        # One child is started inside the snapshot, one in the live context.
        context.run(nursery.start_soon, child)
        nursery.start_soon(child)
    assert set(record) == {"main", "second_main"}
    assert trio_testing_contextvar.get() == "second_main"
57 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_deprecate_strict_exception_groups_false.py:
--------------------------------------------------------------------------------
1 | from collections.abc import Awaitable, Callable
2 |
3 | import pytest
4 |
5 | import trio
6 |
7 |
async def test_deprecation_warning_open_nursery() -> None:
    # strict_exception_groups=False must warn exactly once; passing True or
    # omitting the argument must not warn.
    with pytest.warns(
        trio.TrioDeprecationWarning,
        match="strict_exception_groups=False",
    ) as record:
        async with trio.open_nursery(strict_exception_groups=False):
            ...
    assert len(record) == 1
    async with trio.open_nursery(strict_exception_groups=True):
        ...
    async with trio.open_nursery():
        ...
20 |
21 |
def test_deprecation_warning_run() -> None:
    """trio.run(strict_exception_groups=False) warns once per loose scope."""

    async def foo() -> None: ...

    async def foo_nursery() -> None:
        # this should not raise a warning, even if it's implied loose
        async with trio.open_nursery():
            ...

    async def foo_loose_nursery() -> None:
        # this should raise a warning, even if specifying the parameter is redundant
        async with trio.open_nursery(strict_exception_groups=False):
            ...

    def check(fun: Callable[[], Awaitable[None]], expected_warnings: int) -> None:
        with pytest.warns(
            trio.TrioDeprecationWarning,
            match="strict_exception_groups=False",
        ) as record:
            trio.run(fun, strict_exception_groups=False)
        assert len(record) == expected_warnings

    check(foo, 1)
    check(foo_nursery, 1)
    check(foo_loose_nursery, 2)
46 |
47 |
def test_deprecation_warning_start_guest_run() -> None:
    # "The simplest possible "host" loop."
    from .._core._tests.test_guest_mode import trivial_guest_run

    async def trio_return(in_host: object) -> str:
        await trio.lowlevel.checkpoint()
        return "ok"

    # Guest mode should go through the same deprecation path as trio.run.
    with pytest.warns(
        trio.TrioDeprecationWarning,
        match="strict_exception_groups=False",
    ) as record:
        trivial_guest_run(
            trio_return,
            strict_exception_groups=False,
        )
    assert len(record) == 1
65 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_highlevel_generic.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import NoReturn
4 |
5 | import attrs
6 | import pytest
7 |
8 | from .._highlevel_generic import StapledStream
9 | from ..abc import ReceiveStream, SendStream
10 |
11 |
@attrs.define(slots=False)
class RecordSendStream(SendStream):
    """SendStream test double that records every call (and its arguments)
    into ``record`` instead of doing any I/O."""

    record: list[str | tuple[str, object]] = attrs.Factory(list)

    async def send_all(self, data: object) -> None:
        self.record.append(("send_all", data))

    async def wait_send_all_might_not_block(self) -> None:
        self.record.append("wait_send_all_might_not_block")

    async def aclose(self) -> None:
        self.record.append("aclose")
24 |
25 |
@attrs.define(slots=False)
class RecordReceiveStream(ReceiveStream):
    """ReceiveStream test double that records every call into ``record``
    and always returns empty bytes from receive_some."""

    record: list[str | tuple[str, int | None]] = attrs.Factory(list)

    async def receive_some(self, max_bytes: int | None = None) -> bytes:
        self.record.append(("receive_some", max_bytes))
        return b""

    async def aclose(self) -> None:
        self.record.append("aclose")
36 |
37 |
async def test_StapledStream() -> None:
    # Exercise every StapledStream method against recording stubs.
    send_stream = RecordSendStream()
    receive_stream = RecordReceiveStream()
    stapled = StapledStream(send_stream, receive_stream)

    assert stapled.send_stream is send_stream
    assert stapled.receive_stream is receive_stream

    # Send-side methods delegate to send_stream only.
    await stapled.send_all(b"foo")
    await stapled.wait_send_all_might_not_block()
    assert send_stream.record == [
        ("send_all", b"foo"),
        "wait_send_all_might_not_block",
    ]
    send_stream.record.clear()

    # With no send_eof on the underlying stream, send_eof() falls back to
    # aclose().
    await stapled.send_eof()
    assert send_stream.record == ["aclose"]
    send_stream.record.clear()

    async def fake_send_eof() -> None:
        send_stream.record.append("send_eof")

    # Once the underlying stream grows a send_eof, it is used instead.
    send_stream.send_eof = fake_send_eof  # type: ignore[attr-defined]
    await stapled.send_eof()
    assert send_stream.record == ["send_eof"]

    send_stream.record.clear()
    assert receive_stream.record == []

    # Receive side delegates to receive_stream only.
    await stapled.receive_some(1234)
    assert receive_stream.record == [("receive_some", 1234)]
    assert send_stream.record == []
    receive_stream.record.clear()

    # aclose() closes both halves.
    await stapled.aclose()
    assert receive_stream.record == ["aclose"]
    assert send_stream.record == ["aclose"]
76 |
77 |
async def test_StapledStream_with_erroring_close() -> None:
    # Make sure that if one of the aclose methods errors out, then the other
    # one still gets called.
    class BrokenSendStream(RecordSendStream):
        async def aclose(self) -> NoReturn:
            await super().aclose()
            raise ValueError("send error")

    class BrokenReceiveStream(RecordReceiveStream):
        async def aclose(self) -> NoReturn:
            await super().aclose()
            raise ValueError("recv error")

    stapled = StapledStream(BrokenSendStream(), BrokenReceiveStream())

    # One of the two errors propagates, with the other attached as __context__.
    with pytest.raises(ValueError, match=r"^(send|recv) error$") as excinfo:
        await stapled.aclose()
    assert isinstance(excinfo.value.__context__, ValueError)

    # Both aclose methods ran despite the errors.
    assert stapled.send_stream.record == ["aclose"]
    assert stapled.receive_stream.record == ["aclose"]
99 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_highlevel_open_unix_stream.py:
--------------------------------------------------------------------------------
1 | import os
2 | import socket
3 | import sys
4 | import tempfile
5 | from typing import TYPE_CHECKING
6 |
7 | import pytest
8 |
9 | from trio import Path, open_unix_socket
10 | from trio._highlevel_open_unix_stream import close_on_error
11 |
# This module only makes sense on unix; tell type checkers to skip it on Windows.
assert not TYPE_CHECKING or sys.platform != "win32"

# Decorator for tests that require AF_UNIX socket support.
skip_if_not_unix = pytest.mark.skipif(
    not hasattr(socket, "AF_UNIX"),
    reason="Needs unix socket support",
)
18 |
19 |
@skip_if_not_unix
def test_close_on_error() -> None:
    """close_on_error() must close the resource only when the body raises."""

    class Closeable:
        closed = False

        def close(self) -> None:
            self.closed = True

    # No exception: the resource stays open.
    with close_on_error(Closeable()) as resource:
        pass
    assert not resource.closed

    # The exception propagates, and the resource is closed on the way out.
    with pytest.raises(RuntimeError):
        with close_on_error(Closeable()) as resource:
            raise RuntimeError
    assert resource.closed
36 |
37 |
@skip_if_not_unix
@pytest.mark.parametrize("filename", [4, 4.5])
async def test_open_with_bad_filename_type(filename: float) -> None:
    # Filenames that are not str/os.PathLike should be rejected with TypeError.
    with pytest.raises(TypeError):
        await open_unix_socket(filename)  # type: ignore[arg-type]
43 |
44 |
@skip_if_not_unix
async def test_open_bad_socket() -> None:
    # mktemp is marked as insecure, but that's okay, we don't want the file to
    # exist
    name = tempfile.mktemp()
    # Connecting to a nonexistent path fails with FileNotFoundError.
    with pytest.raises(FileNotFoundError):
        await open_unix_socket(name)
52 |
53 |
@skip_if_not_unix
async def test_open_unix_socket() -> None:
    # open_unix_socket accepts the address as either a trio.Path or a str.
    for name_type in [Path, str]:
        name = tempfile.mktemp()
        serv_sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        with serv_sock:
            serv_sock.bind(name)
            try:
                serv_sock.listen(1)

                # The actual function we're testing
                unix_socket = await open_unix_socket(name_type(name))

                # Round-trip some data through the connected stream in
                # both directions.
                async with unix_socket:
                    client, _ = serv_sock.accept()
                    with client:
                        await unix_socket.send_all(b"test")
                        assert client.recv(2048) == b"test"

                        client.sendall(b"response")
                        received = await unix_socket.receive_some(2048)
                        assert received == b"response"
            finally:
                # bind() created a filesystem entry; always remove it.
                os.unlink(name)
78 |
79 |
@pytest.mark.skipif(hasattr(socket, "AF_UNIX"), reason="Test for non-unix platforms")
async def test_error_on_no_unix() -> None:
    # On platforms without AF_UNIX, open_unix_socket raises RuntimeError
    # up front rather than attempting to connect.
    with pytest.raises(
        RuntimeError,
        match=r"^Unix sockets are not supported on this platform$",
    ):
        await open_unix_socket("")
87 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_scheduler_determinism.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING
4 |
5 | import trio
6 |
7 | if TYPE_CHECKING:
8 | import pytest
9 |
10 |
async def scheduler_trace() -> tuple[tuple[str, int], ...]:
    """Returns a scheduler-dependent value we can use to check determinism."""
    events: list[tuple[str, int]] = []

    async def tracer(name: str) -> None:
        # Hit the scheduler 50 times, logging which task ran at each step.
        for step in range(50):
            events.append((name, step))
            await trio.lowlevel.checkpoint()

    # Five concurrent tasks; their interleaving is the "trace".
    async with trio.open_nursery() as nursery:
        for task_number in range(5):
            nursery.start_soon(tracer, str(task_number))

    return tuple(events)
25 |
26 |
def test_the_trio_scheduler_is_not_deterministic() -> None:
    # At least, not yet. See https://github.com/python-trio/trio/issues/32
    # Ten runs should produce ten distinct interleavings.
    traces = [trio.run(scheduler_trace) for _ in range(10)]
    assert len(set(traces)) == len(traces)
31 |
32 |
def test_the_trio_scheduler_is_deterministic_if_seeded(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    # With deterministic scheduling enabled and the internal RNG reseeded
    # identically before each run, every run produces the same trace.
    monkeypatch.setattr(trio._core._run, "_ALLOW_DETERMINISTIC_SCHEDULING", True)
    traces = []
    for _ in range(10):
        # Save and restore the global RNG state so other tests are unaffected.
        state = trio._core._run._r.getstate()
        try:
            trio._core._run._r.seed(0)
            traces.append(trio.run(scheduler_trace))
        finally:
            trio._core._run._r.setstate(state)

    assert len(traces) == 10
    assert len(set(traces)) == 1
48 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_tracing.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from typing import TYPE_CHECKING
4 |
5 | import trio
6 |
7 | if TYPE_CHECKING:
8 | from collections.abc import AsyncGenerator
9 |
10 |
async def coro1(event: trio.Event) -> None:
    # Innermost frame of the await stack: signal readiness, then park forever.
    event.set()
    await trio.sleep_forever()
14 |
15 |
async def coro2(event: trio.Event) -> None:
    # Middle frame of the await stack.
    await coro1(event)
18 |
19 |
async def coro3(event: trio.Event) -> None:
    # Outermost frame of the await stack.
    await coro2(event)
22 |
23 |
async def coro2_async_gen(event: trio.Event) -> AsyncGenerator[None, None]:
    # mypy does not like `yield await trio.lowlevel.checkpoint()` - but that
    # should be equivalent to splitting the statement
    await trio.lowlevel.checkpoint()
    yield
    # The test cancels while we are parked inside coro1 (sleep_forever), so
    # nothing after this await ever runs.
    await coro1(event)
    yield  # pragma: no cover
    await trio.lowlevel.checkpoint()  # pragma: no cover
    yield  # pragma: no cover
33 |
34 |
async def coro3_async_gen(event: trio.Event) -> None:
    # Drive the async generator so its frame appears in the await stack.
    async for _ in coro2_async_gen(event):
        pass
38 |
39 |
async def test_task_iter_await_frames() -> None:
    """Task.iter_await_frames yields frames from outermost to innermost."""
    async with trio.open_nursery() as nursery:
        started = trio.Event()
        nursery.start_soon(coro3, started)
        await started.wait()

        (child,) = nursery.child_tasks

        names = [frame.f_code.co_name for frame, _ in child.iter_await_frames()]
        assert names[:3] == ["coro3", "coro2", "coro1"]

        nursery.cancel_scope.cancel()
55 |
56 |
async def test_task_iter_await_frames_async_gen() -> None:
    """Task.iter_await_frames also walks through async-generator frames."""
    async with trio.open_nursery() as nursery:
        started = trio.Event()
        nursery.start_soon(coro3_async_gen, started)
        await started.wait()

        (child,) = nursery.child_tasks

        names = [frame.f_code.co_name for frame, _ in child.iter_await_frames()]
        assert names[:3] == ["coro3_async_gen", "coro2_async_gen", "coro1"]

        nursery.cancel_scope.cancel()
72 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_trio.py:
--------------------------------------------------------------------------------
def test_trio_import() -> None:
    """Importing trio from a cold module cache must succeed."""
    import sys

    # Evict trio and all of its submodules from the module cache first.
    cached = [name for name in sys.modules if name.startswith("trio")]
    for name in cached:
        del sys.modules[name]

    import trio  # noqa: F401
9 |
--------------------------------------------------------------------------------
/src/trio/_tests/test_windows_pipes.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import sys
4 | from typing import TYPE_CHECKING
5 |
6 | import pytest
7 |
8 | from .. import _core
9 | from ..testing import check_one_way_stream, wait_all_tasks_blocked
10 |
11 | # Mark all the tests in this file as being windows-only
12 | pytestmark = pytest.mark.skipif(sys.platform != "win32", reason="windows only")
13 |
14 | assert ( # Skip type checking when not on Windows
15 | sys.platform == "win32" or not TYPE_CHECKING
16 | )
17 |
18 | if sys.platform == "win32":
19 | from asyncio.windows_utils import pipe
20 |
21 | from .._core._windows_cffi import _handle, kernel32
22 | from .._windows_pipes import PipeReceiveStream, PipeSendStream
23 |
24 |
async def make_pipe() -> tuple[PipeSendStream, PipeReceiveStream]:
    """Makes a new pair of pipes."""
    # pipe() returns (read handle, write handle); wrap each in a trio stream.
    (r, w) = pipe()
    return PipeSendStream(w), PipeReceiveStream(r)
29 |
30 |
def test_pipe_typecheck() -> None:
    # The pipe stream constructors require integer handles.
    with pytest.raises(TypeError):
        PipeSendStream(1.0)  # type: ignore[arg-type]
    with pytest.raises(TypeError):
        PipeReceiveStream(None)  # type: ignore[arg-type]
36 |
37 |
async def test_pipe_error_on_close() -> None:
    # Make sure we correctly handle a failure from kernel32.CloseHandle
    r, w = pipe()

    send_stream = PipeSendStream(w)
    receive_stream = PipeReceiveStream(r)

    # Close the raw handles out from under the streams, so the streams'
    # own aclose() calls hit "handle is invalid".
    assert kernel32.CloseHandle(_handle(r))
    assert kernel32.CloseHandle(_handle(w))

    with pytest.raises(OSError, match=r"^\[WinError 6\] The handle is invalid$"):
        await send_stream.aclose()
    with pytest.raises(OSError, match=r"^\[WinError 6\] The handle is invalid$"):
        await receive_stream.aclose()
52 |
53 |
async def test_pipes_combined() -> None:
    """Push several megabytes through a pipe and verify it all arrives."""
    send_stream, receive_stream = await make_pipe()
    chunk_size = 2**20
    repeat = 3

    async def producer() -> None:
        async with send_stream:
            payload = bytearray(chunk_size)
            for _ in range(repeat):
                await send_stream.send_all(payload)

    async def consumer() -> None:
        async with receive_stream:
            await wait_all_tasks_blocked()
            seen = 0
            while True:
                # 5000 is chosen because it doesn't evenly divide 2**20
                chunk = await receive_stream.receive_some(5000)
                if not chunk:
                    break
                seen += len(chunk)

            assert seen == chunk_size * repeat

    async with _core.open_nursery() as nursery:
        nursery.start_soon(producer)
        nursery.start_soon(consumer)
81 |
82 |
async def test_async_with() -> None:
    # Exiting the async-with block closes both streams...
    w, r = await make_pipe()
    async with w, r:
        pass

    # ...so any further use raises ClosedResourceError.
    with pytest.raises(_core.ClosedResourceError):
        await w.send_all(b"")
    with pytest.raises(_core.ClosedResourceError):
        await r.receive_some(10)
92 |
93 |
async def test_close_during_write() -> None:
    # Closing the pipe while another task is blocked in send_all should
    # abort that send_all with ClosedResourceError.
    w, _r = await make_pipe()
    async with _core.open_nursery() as nursery:

        async def write_forever() -> None:
            with pytest.raises(_core.ClosedResourceError) as excinfo:
                while True:
                    await w.send_all(b"x" * 4096)
            assert "another task" in str(excinfo.value)

        nursery.start_soon(write_forever)
        # Give the writer time to fill the pipe buffer and block.
        await wait_all_tasks_blocked(0.1)
        await w.aclose()
107 |
108 |
async def test_pipe_fully() -> None:
    """Run the generic one-way-stream test suite over a pipe pair."""
    # passing make_clogged_pipe tests wait_send_all_might_not_block, and we
    # can't implement that on Windows
    await check_one_way_stream(make_pipe, None)
113 |
--------------------------------------------------------------------------------
/src/trio/_tests/tools/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/_tests/tools/__init__.py
--------------------------------------------------------------------------------
/src/trio/_tests/tools/test_mypy_annotate.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import io
4 | import sys
5 | from typing import TYPE_CHECKING
6 |
7 | import pytest
8 |
9 | from trio._tools.mypy_annotate import Result, export, main, process_line
10 |
11 | if TYPE_CHECKING:
12 | from pathlib import Path
13 |
14 |
@pytest.mark.parametrize(
    ("src", "expected"),
    [
        # Lines that aren't Mypy report lines parse to None.
        ("", None),
        ("a regular line\n", None),
        # A note with a start column but no end position.
        (
            "package\\filename.py:42:8: note: Some info\n",
            Result(
                kind="notice",
                filename="package\\filename.py",
                start_line=42,
                start_col=8,
                end_line=None,
                end_col=None,
                message=" Some info",
            ),
        ),
        # An error carrying both start and end positions.
        (
            "package/filename.py:42:1:46:3: error: Type error here [code]\n",
            Result(
                kind="error",
                filename="package/filename.py",
                start_line=42,
                start_col=1,
                end_line=46,
                end_col=3,
                message=" Type error here [code]",
            ),
        ),
        # A warning with only a line number.
        (
            "package/module.py:87: warn: Bad code\n",
            Result(
                kind="warning",
                filename="package/module.py",
                start_line=87,
                message=" Bad code",
            ),
        ),
    ],
    ids=["blank", "normal", "note-wcol", "error-wend", "warn-lineonly"],
)
def test_processing(src: str, expected: Result | None) -> None:
    # process_line parses a single line of Mypy output (or returns None).
    result = process_line(src)
    assert result == expected
59 |
60 |
def test_export(capsys: pytest.CaptureFixture[str]) -> None:
    # export() should print one GitHub annotation per Result, with the
    # platform list folded into the title.
    results = {
        Result(
            kind="notice",
            filename="package\\filename.py",
            start_line=42,
            start_col=8,
            end_line=None,
            end_col=None,
            message=" Some info",
        ): ["Windows", "Mac"],
        Result(
            kind="error",
            filename="package/filename.py",
            start_line=42,
            start_col=1,
            end_line=46,
            end_col=3,
            message=" Type error here [code]",
        ): ["Linux", "Mac"],
        Result(
            kind="warning",
            filename="package/module.py",
            start_line=87,
            message=" Bad code",
        ): ["Linux"],
    }
    export(results)
    std = capsys.readouterr()
    assert std.err == ""
    # Each annotation's position fields mirror the optional columns above.
    assert std.out == (
        "::notice file=package\\filename.py,line=42,col=8,"
        "title=Mypy-Windows+Mac::package\\filename.py:(42:8): Some info"
        "\n"
        "::error file=package/filename.py,line=42,col=1,endLine=46,endColumn=3,"
        "title=Mypy-Linux+Mac::package/filename.py:(42:1 - 46:3): Type error here [code]"
        "\n"
        "::warning file=package/module.py,line=87,"
        "title=Mypy-Linux::package/module.py:87: Bad code\n"
    )
101 |
102 |
def test_endtoend(
    tmp_path: Path,
    monkeypatch: pytest.MonkeyPatch,
    capsys: pytest.CaptureFixture[str],
) -> None:
    """Run the tool end-to-end: collect from stdin, then export the dumpfile."""
    inp_text = """\
Mypy begun
trio/core.py:15: error: Bad types here [misc]
trio/package/module.py:48:4:56:18: warn: Missing annotations [no-untyped-def]
Found 3 errors in 29 files
"""
    result_file = tmp_path / "dump.dat"
    assert not result_file.exists()
    # Patch stdin via the context object so the patch is undone as soon as
    # the block exits (patching through the outer `monkeypatch` would only
    # be undone at test teardown, leaking into the export run below).
    with monkeypatch.context() as patch_context:
        patch_context.setattr(sys, "stdin", io.StringIO(inp_text))

        # Collection pass: parse stdin and dump results for "SomePlatform".
        main(["--dumpfile", str(result_file), "--platform", "SomePlatform"])

    std = capsys.readouterr()
    assert std.err == ""
    assert std.out == inp_text  # Echos the original.

    assert result_file.exists()

    # Export pass: no --platform, so the dumpfile is printed as annotations.
    main(["--dumpfile", str(result_file)])

    std = capsys.readouterr()
    assert std.err == ""
    assert std.out == (
        "::error file=trio/core.py,line=15,title=Mypy-SomePlatform::trio/core.py:15: Bad types here [misc]\n"
        "::warning file=trio/package/module.py,line=48,col=4,endLine=56,endColumn=18,"
        "title=Mypy-SomePlatform::trio/package/module.py:(48:4 - 56:18): Missing "
        "annotations [no-untyped-def]\n"
    )
141 |
--------------------------------------------------------------------------------
/src/trio/_tests/tools/test_sync_requirements.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from pathlib import Path
4 | from typing import TYPE_CHECKING
5 |
6 | from trio._tests.pytest_plugin import skip_if_optional_else_raise
7 |
8 | # imports in gen_exports that are not in `install_requires` in requirements
9 | try:
10 | import yaml # noqa: F401
11 | except ImportError as error:
12 | skip_if_optional_else_raise(error)
13 |
14 | from trio._tools.sync_requirements import (
15 | update_requirements,
16 | yield_pre_commit_version_data,
17 | )
18 |
19 | if TYPE_CHECKING:
20 | from pathlib import Path
21 |
22 |
def test_yield_pre_commit_version_data() -> None:
    # Repo entries yield (name, rev) with any leading "v" stripped from the
    # rev; entries missing "repo"/"rev" keys are skipped.
    text = """
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.0
  - repo: https://github.com/psf/black-pre-commit-mirror
    rev: 25.1.0
  - bad: data
"""
    results = tuple(yield_pre_commit_version_data(text))
    assert results == (
        ("ruff-pre-commit", "0.11.0"),
        ("black-pre-commit-mirror", "25.1.0"),
    )
37 |
38 |
def test_update_requirements(
    tmp_path: Path,
) -> None:
    # Pinned versions present in the mapping are rewritten; comments,
    # non-pin lines, unknown packages, and ";" markers are preserved.
    requirements_file = tmp_path / "requirements.txt"
    assert not requirements_file.exists()
    requirements_file.write_text(
        """# comment
 # also comment but spaces line start
waffles are delicious no equals
black==3.1.4 ; specific version thingy
mypy==1.15.0
ruff==1.2.5
# required by soupy cat""",
        encoding="utf-8",
    )
    assert update_requirements(requirements_file, {"black": "3.1.5", "ruff": "1.2.7"})
    assert (
        requirements_file.read_text(encoding="utf-8")
        == """# comment
 # also comment but spaces line start
waffles are delicious no equals
black==3.1.5 ; specific version thingy
mypy==1.15.0
ruff==1.2.7
# required by soupy cat"""
    )
65 |
66 |
def test_update_requirements_no_changes(
    tmp_path: Path,
) -> None:
    # When every pin already matches, the file is untouched and the
    # function reports no change.
    requirements_file = tmp_path / "requirements.txt"
    assert not requirements_file.exists()
    original = """# comment
 # also comment but spaces line start
waffles are delicious no equals
black==3.1.4 ; specific version thingy
mypy==1.15.0
ruff==1.2.5
# required by soupy cat"""
    requirements_file.write_text(original, encoding="utf-8")
    assert not update_requirements(
        requirements_file, {"black": "3.1.4", "ruff": "1.2.5"}
    )
    assert requirements_file.read_text(encoding="utf-8") == original
84 |
--------------------------------------------------------------------------------
/src/trio/_tests/type_tests/check_wraps.py:
--------------------------------------------------------------------------------
1 | # https://github.com/python-trio/trio/issues/2775#issuecomment-1702267589
2 | # (except platform independent...)
3 | import trio
4 | from typing_extensions import assert_type
5 |
6 |
async def fn(s: trio.SocketStream) -> None:
    # Regression check: the wrapped sendto still returns int to the checker.
    result = await s.socket.sendto(b"a", "h")
    assert_type(result, int)
10 |
--------------------------------------------------------------------------------
/src/trio/_tests/type_tests/open_memory_channel.py:
--------------------------------------------------------------------------------
1 | # https://github.com/python-trio/trio/issues/2873
2 | import trio
3 |
4 | s, r = trio.open_memory_channel[int](0)
5 |
--------------------------------------------------------------------------------
/src/trio/_tests/type_tests/subprocesses.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | import trio
4 |
5 |
async def test() -> None:
    """Type-only checks for run_process/open_process keyword arguments."""
    # this could test more by using platform checks, but currently this
    # is just regression tests + sanity checks.
    await trio.run_process("python", executable="ls")
    await trio.lowlevel.open_process("python", executable="ls")

    # note: there's no error code on the type ignore as it varies
    # between platforms.
    await trio.run_process("python", capture_stdout=True)
    await trio.lowlevel.open_process("python", capture_stdout=True)  # type: ignore

    if sys.platform != "win32" and sys.version_info >= (3, 9):
        await trio.run_process("python", extra_groups=[5])
        await trio.lowlevel.open_process("python", extra_groups=[5])

        # 3.11+:
        await trio.run_process("python", process_group=5)  # type: ignore
        await trio.lowlevel.open_process("python", process_group=5)  # type: ignore
24 |
--------------------------------------------------------------------------------
/src/trio/_tests/type_tests/task_status.py:
--------------------------------------------------------------------------------
1 | """Check that started() can only be called for TaskStatus[None]."""
2 |
3 | from trio import TaskStatus
4 | from typing_extensions import assert_type
5 |
6 |
def check_status(
    none_status_explicit: TaskStatus[None],
    none_status_implicit: TaskStatus,
    int_status: TaskStatus[int],
) -> None:
    """Type-only assertions for the TaskStatus.started() argument rules."""
    assert_type(none_status_explicit, TaskStatus[None])
    assert_type(none_status_implicit, TaskStatus[None])  # Default typevar
    assert_type(int_status, TaskStatus[int])

    # Omitting the parameter is only allowed for None.
    none_status_explicit.started()
    none_status_implicit.started()
    int_status.started()  # type: ignore

    # Explicit None is allowed.
    none_status_explicit.started(None)
    none_status_implicit.started(None)
    int_status.started(None)  # type: ignore

    # A non-None value only type checks against a matching TaskStatus.
    none_status_explicit.started(42)  # type: ignore
    none_status_implicit.started(42)  # type: ignore
    int_status.started(42)
    int_status.started(True)
30 |
--------------------------------------------------------------------------------
/src/trio/_tools/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/_tools/__init__.py
--------------------------------------------------------------------------------
/src/trio/_tools/mypy_annotate.py:
--------------------------------------------------------------------------------
1 | """Translates Mypy's output into GitHub's error/warning annotation syntax.
2 |
3 | See: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions
4 |
5 | This first is run with Mypy's output piped in, to collect messages in
6 | mypy_annotate.dat. After all platforms run, we run this again, which prints the
7 | messages in GitHub's format but with cross-platform failures deduplicated.
8 | """
9 |
10 | from __future__ import annotations
11 |
12 | import argparse
13 | import pickle
14 | import re
15 | import sys
16 |
17 | import attrs
18 |
19 | # Example: 'package/filename.py:42:1:46:3: error: Type error here [code]'
# Example: 'package/filename.py:42:1:46:3: error: Type error here [code]'
report_re = re.compile(
    r"""
    ([^:]+):                       # Filename (anything but ":")
    ([0-9]+):                      # Line number (start)
    (?:([0-9]+):                   # Optional column number
      (?:([0-9]+):([0-9]+):)?      # then also optionally, 2 more numbers for end columns
    )?
    \s*(error|warn|note):          # Kind, prefixed with space
    (.+)                           # Message
    """,
    re.VERBOSE,
)

# Map Mypy's severity words onto GitHub's annotation command names.
mypy_to_github = {
    "error": "error",
    "warn": "warning",
    "note": "notice",
}
38 |
39 |
@attrs.frozen(kw_only=True)
class Result:
    """Accumulated results, used as a dict key to deduplicate."""

    # Fields parsed from one Mypy report line; the optional position
    # fields are None when Mypy didn't emit them.
    filename: str
    start_line: int
    kind: str  # GitHub annotation kind: "error", "warning", or "notice"
    message: str
    start_col: int | None = None
    end_line: int | None = None
    end_col: int | None = None
51 |
52 |
def process_line(line: str) -> Result | None:
    """Parse one line of Mypy output; None for lines that aren't reports."""
    match = report_re.fullmatch(line.rstrip())
    if match is None:
        return None
    filename, start_line, start_col, end_line, end_col, kind, message = match.groups()

    def maybe_int(text: str | None) -> int | None:
        # Column/end-position groups are optional in the regex.
        return None if text is None else int(text)

    return Result(
        filename=filename,
        start_line=int(start_line),
        start_col=maybe_int(start_col),
        end_line=maybe_int(end_line),
        end_col=maybe_int(end_col),
        kind=mypy_to_github[kind],
        message=message,
    )
67 |
68 |
def export(results: dict[Result, list[str]]) -> None:
    """Display the collected results."""
    for result, platforms in results.items():
        # Build the annotation header and the human-readable location in
        # lockstep, depending on which position fields are present.
        header = f"::{result.kind} file={result.filename},line={result.start_line},"
        if result.start_col is None:
            location = f"{result.start_line}"
        elif result.end_line is not None and result.end_col is not None:
            header += f"col={result.start_col},endLine={result.end_line},endColumn={result.end_col},"
            location = f"({result.start_line}:{result.start_col} - {result.end_line}:{result.end_col})"
        else:
            header += f"col={result.start_col},"
            location = f"({result.start_line}:{result.start_col})"
        print(
            f"{header}title=Mypy-{'+'.join(platforms)}"
            f"::{result.filename}:{location}:{result.message}"
        )
83 |
84 |
def main(argv: list[str]) -> None:
    """Look for error messages, and convert the format.

    With --platform set, Mypy output is read from stdin, echoed through,
    and the parsed results are merged into the dumpfile.  Without
    --platform, the accumulated dumpfile is printed as GitHub annotations.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--dumpfile",
        help="File to write pickled messages to.",
        required=True,
    )
    parser.add_argument(
        "--platform",
        help="OS name, if set Mypy should be piped to stdin.",
        default=None,
    )
    cmd_line = parser.parse_args(argv)

    results: dict[Result, list[str]]
    try:
        # NOTE: pickle.load can execute arbitrary code if the dumpfile is
        # attacker-controlled; it is only ever written by this script in CI.
        with open(cmd_line.dumpfile, "rb") as f:
            results = pickle.load(f)
    except (FileNotFoundError, pickle.UnpicklingError):
        # If we fail to load, assume it's an old result.
        results = {}

    if cmd_line.platform is None:
        # Write out the results.
        export(results)
    else:
        platform: str = cmd_line.platform
        for line in sys.stdin:
            parsed = process_line(line)
            if parsed is not None:
                # Group platforms under identical messages to deduplicate.
                results.setdefault(parsed, []).append(platform)
            sys.stdout.write(line)
        with open(cmd_line.dumpfile, "wb") as f:
            pickle.dump(results, f)
123 |
124 |
125 | if __name__ == "__main__":
126 | main(sys.argv[1:])
127 |
--------------------------------------------------------------------------------
/src/trio/_tools/sync_requirements.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 |
3 | """Sync Requirements - Automatically upgrade test requirements pinned
4 | versions from pre-commit config file."""
5 |
6 | from __future__ import annotations
7 |
8 | import sys
9 | from pathlib import Path
10 | from typing import TYPE_CHECKING
11 |
12 | from yaml import load as load_yaml
13 |
14 | if TYPE_CHECKING:
15 | from collections.abc import Generator
16 |
17 | from yaml import CLoader as _CLoader, Loader as _Loader
18 |
19 | Loader: type[_CLoader | _Loader]
20 |
21 | try:
22 | from yaml import CLoader as Loader
23 | except ImportError:
24 | from yaml import Loader
25 |
26 |
def yield_pre_commit_version_data(
    pre_commit_text: str,
) -> Generator[tuple[str, str], None, None]:
    """Yield (name, rev) tuples from pre-commit config file."""
    config = load_yaml(pre_commit_text, Loader)
    for entry in config["repos"]:
        # Skip malformed entries that lack either key.
        if "repo" in entry and "rev" in entry:
            repo_url = entry["repo"]
            # The repo name is the last path component of the URL.
            repo_name = repo_url.rsplit("/", 1)[-1]
            # Revisions may be tagged "vX.Y.Z"; normalize to "X.Y.Z".
            version = entry["rev"].removeprefix("v")
            yield repo_name, version
39 |
40 |
def update_requirements(
    requirements: Path,
    version_data: dict[str, str],
) -> bool:
    """Return if updated requirements file.

    Update requirements file to match versions in version_data.  Comment
    lines, lines without a ``==`` pin, and packages absent from
    version_data are written back unchanged; ``;`` environment markers
    are preserved.
    """
    changed = False
    old_lines = requirements.read_text(encoding="utf-8").splitlines(True)

    with requirements.open("w", encoding="utf-8") as file:
        for line in old_lines:
            # If comment or not version mark line, ignore.
            if line.startswith("#") or "==" not in line:
                file.write(line)
                continue
            name, rest = line.split("==", 1)
            # Preserve the original line ending: the final line of the file
            # may have no trailing newline, and appending one would make an
            # unchanged pin look "changed".
            ending = "\n" if line.endswith("\n") else ""
            # Maintain extra markers if they exist
            old_version = rest.strip()
            extra = ending
            if ";" in rest:
                # extra keeps the marker (and its original line ending).
                old_version, extra = rest.split(";", 1)
                old_version = old_version.strip()
                extra = " ;" + extra
            version = version_data.get(name)
            # If does not exist, skip
            if version is None:
                file.write(line)
                continue
            # Otherwise might have changed
            new_line = f"{name}=={version}{extra}"
            if new_line != line:
                if not changed:
                    changed = True
                    print("Changed test requirements version to match pre-commit")
                print(f"{name}=={old_version} -> {name}=={version}")
            file.write(new_line)
    return changed
79 |
80 |
if __name__ == "__main__":
    source_root = Path.cwd().absolute()

    # Double-check we found the right directory
    assert (source_root / "LICENSE").exists()
    pre_commit = source_root / ".pre-commit-config.yaml"
    test_requirements = source_root / "test-requirements.txt"

    pre_commit_text = pre_commit.read_text(encoding="utf-8")

    # Get tool versions from pre-commit
    # Get correct names
    # (e.g. "black-pre-commit-mirror" and "ruff-pre-commit" both map onto
    # the plain package names used in test-requirements.txt)
    pre_commit_versions = {
        name.removesuffix("-mirror").removesuffix("-pre-commit"): version
        for name, version in yield_pre_commit_version_data(pre_commit_text)
    }
    changed = update_requirements(test_requirements, pre_commit_versions)
    # Exit code 1 signals "file was modified" to the calling automation.
    sys.exit(int(changed))
99 |
--------------------------------------------------------------------------------
/src/trio/_version.py:
--------------------------------------------------------------------------------
1 | # This file is imported from __init__.py and parsed by setuptools
2 |
3 | __version__ = "0.30.0+dev"
4 |
--------------------------------------------------------------------------------
/src/trio/_wait_for_object.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import math
4 |
5 | import trio
6 |
7 | from ._core._windows_cffi import (
8 | CData,
9 | ErrorCodes,
10 | _handle,
11 | ffi,
12 | handle_array,
13 | kernel32,
14 | raise_winerror,
15 | )
16 |
17 |
async def WaitForSingleObject(obj: int | CData) -> None:
    """Async and cancellable variant of WaitForSingleObject. Windows only.

    Args:
      obj: A Win32 handle, as a Python integer or a CFFI handle object.

    Raises:
      OSError: If the handle is invalid, e.g. when it is already closed.

    """
    # Allow ints or whatever we can convert to a win handle
    handle = _handle(obj)

    # Quick check; we might not even need to spawn a thread. The zero
    # means a zero timeout; this call never blocks. We also exit here
    # if the handle is already closed for some reason.
    retcode = kernel32.WaitForSingleObject(handle, 0)
    if retcode == ErrorCodes.WAIT_FAILED:
        raise_winerror()
    elif retcode != ErrorCodes.WAIT_TIMEOUT:
        # Already signaled; nothing to wait for.
        return

    # Wait for a thread that waits for two handles: the handle plus a handle
    # that we can use to cancel the thread.
    cancel_handle = kernel32.CreateEventA(ffi.NULL, True, False, ffi.NULL)
    try:
        await trio.to_thread.run_sync(
            WaitForMultipleObjects_sync,
            handle,
            cancel_handle,
            abandon_on_cancel=True,
            limiter=trio.CapacityLimiter(math.inf),
        )
    finally:
        # Clean up our cancel handle. In case we get here because this task was
        # cancelled, we also want to set the cancel_handle to stop the thread.
        kernel32.SetEvent(cancel_handle)
        kernel32.CloseHandle(cancel_handle)
55 | kernel32.CloseHandle(cancel_handle)
56 |
57 |
def WaitForMultipleObjects_sync(*handles: int | CData) -> None:
    """Wait for any of the given Windows handles to be signaled."""
    count = len(handles)
    handle_arr = handle_array(count)
    for index, handle in enumerate(handles):
        handle_arr[index] = handle
    infinite_timeout = 0xFFFFFFFF  # INFINITE
    # Blocks until one of the handles is signaled (or the call fails).
    retcode = kernel32.WaitForMultipleObjects(count, handle_arr, False, infinite_timeout)
    if retcode == ErrorCodes.WAIT_FAILED:
        raise_winerror()
68 |
--------------------------------------------------------------------------------
/src/trio/_windows_pipes.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import sys
4 | from typing import TYPE_CHECKING
5 |
6 | from . import _core
7 | from ._abc import ReceiveStream, SendStream
8 | from ._core._windows_cffi import _handle, kernel32, raise_winerror
9 | from ._util import ConflictDetector, final
10 |
11 | assert sys.platform == "win32" or not TYPE_CHECKING
12 |
13 | # XX TODO: don't just make this up based on nothing.
14 | DEFAULT_RECEIVE_SIZE = 65536
15 |
16 |
17 | # See the comments on _unix_pipes._FdHolder for discussion of why we set the
18 | # handle to -1 when it's closed.
class _HandleHolder:
    """Owns a Windows handle, registers it with the IOCP, and guarantees it
    is closed at most once (-1 is the "already closed" sentinel)."""

    def __init__(self, handle: int) -> None:
        # Set the closed sentinel *before* validating, so __del__ is safe to
        # run even if the TypeError below fires mid-construction.
        self.handle = -1
        if not isinstance(handle, int):
            raise TypeError("handle must be an int")
        self.handle = handle
        _core.register_with_iocp(self.handle)

    @property
    def closed(self) -> bool:
        # -1 marks a handle that has already been closed.
        return self.handle == -1

    def close(self) -> None:
        if self.closed:
            return
        # Swap in the sentinel before calling CloseHandle, so a re-entrant or
        # repeated close() is a no-op even if CloseHandle raises.
        raw_handle, self.handle = self.handle, -1
        if not kernel32.CloseHandle(_handle(raw_handle)):
            raise_winerror()

    def __del__(self) -> None:
        self.close()
41 |
42 |
@final
class PipeSendStream(SendStream):
    """Represents a send stream over a Windows named pipe that has been
    opened in OVERLAPPED mode.
    """

    def __init__(self, handle: int) -> None:
        self._handle_holder = _HandleHolder(handle)
        self._conflict_detector = ConflictDetector(
            "another task is currently using this pipe",
        )

    async def send_all(self, data: bytes) -> None:
        with self._conflict_detector:
            holder = self._handle_holder
            if holder.closed:
                raise _core.ClosedResourceError("this pipe is already closed")

            if not data:
                # Nothing to write, but send_all must still be a checkpoint.
                await _core.checkpoint()
                return

            try:
                written = await _core.write_overlapped(holder.handle, data)
            except BrokenPipeError as exc:
                raise _core.BrokenResourceError from exc
            # Per MSDN a (non-nonblocking) overlapped write should always
            # write the whole buffer; double-check rather than silently
            # truncate.
            assert written == len(data)

    async def wait_send_all_might_not_block(self) -> None:
        with self._conflict_detector:
            if self._handle_holder.closed:
                raise _core.ClosedResourceError("This pipe is already closed")

            # Not implemented yet, and probably not needed; just checkpoint.
            await _core.checkpoint()

    def close(self) -> None:
        self._handle_holder.close()

    async def aclose(self) -> None:
        self.close()
        await _core.checkpoint()
87 |
88 |
@final
class PipeReceiveStream(ReceiveStream):
    """Represents a receive stream over a Windows named pipe that has been
    opened in OVERLAPPED mode.
    """

    def __init__(self, handle: int) -> None:
        self._handle_holder = _HandleHolder(handle)
        self._conflict_detector = ConflictDetector(
            "another task is currently using this pipe",
        )

    async def receive_some(self, max_bytes: int | None = None) -> bytes:
        with self._conflict_detector:
            if self._handle_holder.closed:
                raise _core.ClosedResourceError("this pipe is already closed")

            # Default to a fixed receive size; otherwise validate the
            # caller-provided value.
            if max_bytes is None:
                max_bytes = DEFAULT_RECEIVE_SIZE
            else:
                if not isinstance(max_bytes, int):
                    raise TypeError("max_bytes must be integer >= 1")
                if max_bytes < 1:
                    raise ValueError("max_bytes must be integer >= 1")

            buffer = bytearray(max_bytes)
            try:
                size = await _core.readinto_overlapped(
                    self._handle_holder.handle,
                    buffer,
                )
            except BrokenPipeError:
                if self._handle_holder.closed:
                    raise _core.ClosedResourceError(
                        "another task closed this pipe",
                    ) from None

                # Windows raises BrokenPipeError on one end of a pipe
                # whenever the other end closes, regardless of direction.
                # Convert this to the Unix behavior of returning EOF to the
                # reader when the writer closes.
                #
                # And since we're not raising an exception, we have to
                # checkpoint. But readinto_overlapped did raise an exception,
                # so it might not have checkpointed for us. So we have to
                # checkpoint manually.
                await _core.checkpoint()
                return b""
            else:
                # Trim the buffer down to the number of bytes actually read.
                del buffer[size:]
                return buffer

    def close(self) -> None:
        self._handle_holder.close()

    async def aclose(self) -> None:
        self.close()
        await _core.checkpoint()
145 |
--------------------------------------------------------------------------------
/src/trio/abc.py:
--------------------------------------------------------------------------------
1 | # This is a public namespace, so we don't want to expose any non-underscored
2 | # attributes that aren't actually part of our public API. But it's very
3 | # annoying to carefully always use underscored names for module-level
4 | # temporaries, imports, etc. when implementing the module. So we put the
5 | # implementation in an underscored module, and then re-export the public parts
6 | # here.
7 |
8 | # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625)
9 | from ._abc import (
10 | AsyncResource as AsyncResource,
11 | Channel as Channel,
12 | Clock as Clock,
13 | HalfCloseableStream as HalfCloseableStream,
14 | HostnameResolver as HostnameResolver,
15 | Instrument as Instrument,
16 | Listener as Listener,
17 | ReceiveChannel as ReceiveChannel,
18 | ReceiveStream as ReceiveStream,
19 | SendChannel as SendChannel,
20 | SendStream as SendStream,
21 | SocketFactory as SocketFactory,
22 | Stream as Stream,
23 | )
24 |
--------------------------------------------------------------------------------
/src/trio/from_thread.py:
--------------------------------------------------------------------------------
1 | """
2 | This namespace represents special functions that can call back into Trio from
3 | an external thread by means of a Trio Token present in Thread Local Storage
4 | """
5 |
6 | from ._threads import (
7 | from_thread_check_cancelled as check_cancelled,
8 | from_thread_run as run,
9 | from_thread_run_sync as run_sync,
10 | )
11 |
12 | # need to use __all__ for pyright --verifytypes to see re-exports when renaming them
13 | __all__ = ["check_cancelled", "run", "run_sync"]
14 |
--------------------------------------------------------------------------------
/src/trio/lowlevel.py:
--------------------------------------------------------------------------------
1 | """
2 | This namespace represents low-level functionality not intended for daily use,
3 | but useful for extending Trio's functionality.
4 | """
5 |
6 | # imports are renamed with leading underscores to indicate they are not part of the public API
7 |
8 | import select as _select
9 |
# static checkers don't understand it when this is imported as _sys, so it's imported plainly and deleted later
11 | import sys
12 | import typing as _t
13 |
14 | # Generally available symbols
15 | from ._core import (
16 | Abort as Abort,
17 | ParkingLot as ParkingLot,
18 | ParkingLotStatistics as ParkingLotStatistics,
19 | RaiseCancelT as RaiseCancelT,
20 | RunStatistics as RunStatistics,
21 | RunVar as RunVar,
22 | RunVarToken as RunVarToken,
23 | Task as Task,
24 | TrioToken as TrioToken,
25 | UnboundedQueue as UnboundedQueue,
26 | UnboundedQueueStatistics as UnboundedQueueStatistics,
27 | add_instrument as add_instrument,
28 | add_parking_lot_breaker as add_parking_lot_breaker,
29 | cancel_shielded_checkpoint as cancel_shielded_checkpoint,
30 | checkpoint as checkpoint,
31 | checkpoint_if_cancelled as checkpoint_if_cancelled,
32 | current_clock as current_clock,
33 | current_root_task as current_root_task,
34 | current_statistics as current_statistics,
35 | current_task as current_task,
36 | current_trio_token as current_trio_token,
37 | currently_ki_protected as currently_ki_protected,
38 | disable_ki_protection as disable_ki_protection,
39 | enable_ki_protection as enable_ki_protection,
40 | in_trio_run as in_trio_run,
41 | in_trio_task as in_trio_task,
42 | notify_closing as notify_closing,
43 | permanently_detach_coroutine_object as permanently_detach_coroutine_object,
44 | reattach_detached_coroutine_object as reattach_detached_coroutine_object,
45 | remove_instrument as remove_instrument,
46 | remove_parking_lot_breaker as remove_parking_lot_breaker,
47 | reschedule as reschedule,
48 | spawn_system_task as spawn_system_task,
49 | start_guest_run as start_guest_run,
50 | start_thread_soon as start_thread_soon,
51 | temporarily_detach_coroutine_object as temporarily_detach_coroutine_object,
52 | wait_readable as wait_readable,
53 | wait_task_rescheduled as wait_task_rescheduled,
54 | wait_writable as wait_writable,
55 | )
56 | from ._subprocess import open_process as open_process
57 |
58 | # This is the union of a subset of trio/_core/ and some things from trio/*.py.
59 | # See comments in trio/__init__.py for details.
60 |
61 | # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625)
62 |
63 |
64 | if sys.platform == "win32":
65 | # Windows symbols
66 | from ._core import (
67 | current_iocp as current_iocp,
68 | monitor_completion_key as monitor_completion_key,
69 | readinto_overlapped as readinto_overlapped,
70 | register_with_iocp as register_with_iocp,
71 | wait_overlapped as wait_overlapped,
72 | write_overlapped as write_overlapped,
73 | )
74 | from ._wait_for_object import WaitForSingleObject as WaitForSingleObject
75 | else:
76 | # Unix symbols
77 | from ._unix_pipes import FdStream as FdStream
78 |
79 | # Kqueue-specific symbols
80 | if sys.platform != "linux" and (_t.TYPE_CHECKING or not hasattr(_select, "epoll")):
81 | from ._core import (
82 | current_kqueue as current_kqueue,
83 | monitor_kevent as monitor_kevent,
84 | wait_kevent as wait_kevent,
85 | )
86 |
87 | del sys
88 |
--------------------------------------------------------------------------------
/src/trio/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/python-trio/trio/efd785a20721707b52a6e2289a65e25722b30c96/src/trio/py.typed
--------------------------------------------------------------------------------
/src/trio/testing/__init__.py:
--------------------------------------------------------------------------------
1 | # Uses `from x import y as y` for compatibility with `pyright --verifytypes` (#2625)
2 |
3 | from .._core import (
4 | MockClock as MockClock,
5 | wait_all_tasks_blocked as wait_all_tasks_blocked,
6 | )
7 | from .._threads import (
8 | active_thread_count as active_thread_count,
9 | wait_all_threads_completed as wait_all_threads_completed,
10 | )
11 | from .._util import fixup_module_metadata
12 | from ._check_streams import (
13 | check_half_closeable_stream as check_half_closeable_stream,
14 | check_one_way_stream as check_one_way_stream,
15 | check_two_way_stream as check_two_way_stream,
16 | )
17 | from ._checkpoints import (
18 | assert_checkpoints as assert_checkpoints,
19 | assert_no_checkpoints as assert_no_checkpoints,
20 | )
21 | from ._memory_streams import (
22 | MemoryReceiveStream as MemoryReceiveStream,
23 | MemorySendStream as MemorySendStream,
24 | lockstep_stream_one_way_pair as lockstep_stream_one_way_pair,
25 | lockstep_stream_pair as lockstep_stream_pair,
26 | memory_stream_one_way_pair as memory_stream_one_way_pair,
27 | memory_stream_pair as memory_stream_pair,
28 | memory_stream_pump as memory_stream_pump,
29 | )
30 | from ._network import open_stream_to_socket_listener as open_stream_to_socket_listener
31 | from ._raises_group import Matcher as Matcher, RaisesGroup as RaisesGroup
32 | from ._sequencer import Sequencer as Sequencer
33 | from ._trio_test import trio_test as trio_test
34 |
35 | ################################################################
36 |
37 |
38 | fixup_module_metadata(__name__, globals())
39 | del fixup_module_metadata
40 |
--------------------------------------------------------------------------------
/src/trio/testing/_checkpoints.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from contextlib import AbstractContextManager, contextmanager
4 | from typing import TYPE_CHECKING
5 |
6 | from .. import _core
7 |
8 | if TYPE_CHECKING:
9 | from collections.abc import Generator
10 |
11 |
@contextmanager
def _assert_yields_or_not(expected: bool) -> Generator[None, None, None]:
    """Check if checkpoints are executed in a block of code."""
    __tracebackhide__ = True
    task = _core.current_task()
    start_cancel, start_schedule = task._cancel_points, task._schedule_points
    try:
        yield
        # Only reached when the block exits cleanly: a real checkpoint must
        # bump *both* counters, so either one standing still means no full
        # checkpoint happened.
        missed_checkpoint = (
            task._cancel_points == start_cancel
            or task._schedule_points == start_schedule
        )
        if expected and missed_checkpoint:
            raise AssertionError("assert_checkpoints block did not yield!")
    finally:
        # Runs even when the block raised: a forbidden checkpoint is an error
        # regardless of how the block exited.
        yielded = (
            task._cancel_points != start_cancel
            or task._schedule_points != start_schedule
        )
        if not expected and yielded:
            raise AssertionError("assert_no_checkpoints block yielded!")
30 |
31 |
def assert_checkpoints() -> AbstractContextManager[None]:
    """Use as a context manager to check that the code inside the ``with``
    block either exits with an exception or executes at least one
    :ref:`checkpoint <checkpoints>`.

    Raises:
      AssertionError: if no checkpoint was executed.

    Example:
      Check that :func:`trio.sleep` is a checkpoint, even if it doesn't
      block::

         with trio.testing.assert_checkpoints():
             await trio.sleep(0)

    """
    __tracebackhide__ = True
    return _assert_yields_or_not(True)
50 |
51 |
def assert_no_checkpoints() -> AbstractContextManager[None]:
    """Use as a context manager to check that the code inside the ``with``
    block does not execute any :ref:`checkpoints <checkpoints>`.

    Raises:
      AssertionError: if a checkpoint was executed.

    Example:
      Synchronous code never contains any checkpoints, but we can double-check
      that::

         send_channel, receive_channel = trio.open_memory_channel(10)
         with trio.testing.assert_no_checkpoints():
             send_channel.send_nowait(None)

    """
    __tracebackhide__ = True
    return _assert_yields_or_not(False)
70 |
--------------------------------------------------------------------------------
/src/trio/testing/_network.py:
--------------------------------------------------------------------------------
1 | from .. import socket as tsocket
2 | from .._highlevel_socket import SocketListener, SocketStream
3 |
4 |
async def open_stream_to_socket_listener(
    socket_listener: SocketListener,
) -> SocketStream:
    """Connect to the given :class:`~trio.SocketListener`.

    This is particularly useful in tests when you want to let a server pick
    its own port, and then connect to it::

        listeners = await trio.open_tcp_listeners(0)
        client = await trio.testing.open_stream_to_socket_listener(listeners[0])

    Args:
      socket_listener (~trio.SocketListener): The
        :class:`~trio.SocketListener` to connect to.

    Returns:
      SocketStream: a stream connected to the given listener.

    """
    family = socket_listener.socket.family
    sockaddr = socket_listener.socket.getsockname()
    if family in (tsocket.AF_INET, tsocket.AF_INET6):
        # A listener bound to the wildcard address can't be connected to
        # directly; rewrite the host part to the loopback address.
        host, *rest = sockaddr
        if host == "0.0.0.0":
            host = "127.0.0.1"
        if host == "::":
            host = "::1"
        sockaddr = (host, *rest)

    sock = tsocket.socket(family=family)
    await sock.connect(sockaddr)
    return SocketStream(sock)
37 |
--------------------------------------------------------------------------------
/src/trio/testing/_sequencer.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from collections import defaultdict
4 | from contextlib import asynccontextmanager
5 | from typing import TYPE_CHECKING
6 |
7 | import attrs
8 |
9 | from .. import Event, _core, _util
10 |
11 | if TYPE_CHECKING:
12 | from collections.abc import AsyncIterator
13 |
14 |
@_util.final
@attrs.define(eq=False, slots=False)
class Sequencer:
    """A convenience class for forcing code in different tasks to run in an
    explicit linear order.

    Instances of this class implement a ``__call__`` method which returns an
    async context manager. The idea is that you pass a sequence number to
    ``__call__`` to say where this block of code should go in the linear
    sequence. Block 0 starts immediately, and then block N doesn't start until
    block N-1 has finished.

    Example:
      An extremely elaborate way to print the numbers 0-5, in order::

         async def worker1(seq):
             async with seq(0):
                 print(0)
             async with seq(4):
                 print(4)

         async def worker2(seq):
             async with seq(2):
                 print(2)
             async with seq(5):
                 print(5)

         async def worker3(seq):
             async with seq(1):
                 print(1)
             async with seq(3):
                 print(3)

         async def main():
            seq = trio.testing.Sequencer()
            async with trio.open_nursery() as nursery:
                nursery.start_soon(worker1, seq)
                nursery.start_soon(worker2, seq)
                nursery.start_soon(worker3, seq)

    """

    # Maps each position to the Event that releases it; a defaultdict, so
    # waiting on a not-yet-reached position lazily creates its Event.
    _sequence_points: defaultdict[int, Event] = attrs.field(
        factory=lambda: defaultdict(Event),
        init=False,
    )
    # Positions that have already been claimed; reusing one is an error.
    _claimed: set[int] = attrs.field(factory=set, init=False)
    # Set to True once a waiter has been cancelled; all further use raises.
    _broken: bool = attrs.field(default=False, init=False)

    @asynccontextmanager
    async def __call__(self, position: int) -> AsyncIterator[None]:
        if position in self._claimed:
            raise RuntimeError(f"Attempted to reuse sequence point {position}")
        if self._broken:
            raise RuntimeError("sequence broken!")
        self._claimed.add(position)
        if position != 0:
            # Block until our predecessor (position - 1) finishes its block.
            try:
                await self._sequence_points[position].wait()
            except _core.Cancelled:
                # A cancelled waiter would leave every later position hanging
                # forever, so mark the sequence broken and wake all waiters.
                self._broken = True
                for event in self._sequence_points.values():
                    event.set()
                raise RuntimeError(
                    "Sequencer wait cancelled -- sequence broken",
                ) from None
            else:
                # We may have been woken because the sequence broke (see
                # above), not because our turn arrived; re-check before
                # running the block.
                if self._broken:
                    raise RuntimeError("sequence broken!")
        try:
            yield
        finally:
            # Release the next position even if our block raised.
            self._sequence_points[position + 1].set()
88 |
--------------------------------------------------------------------------------
/src/trio/testing/_trio_test.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from functools import partial, wraps
4 | from typing import TYPE_CHECKING, TypeVar
5 |
6 | from .. import _core
7 | from ..abc import Clock, Instrument
8 |
9 | if TYPE_CHECKING:
10 | from collections.abc import Awaitable, Callable
11 |
12 | from typing_extensions import ParamSpec
13 |
14 | ArgsT = ParamSpec("ArgsT")
15 |
16 |
17 | RetT = TypeVar("RetT")
18 |
19 |
def trio_test(fn: Callable[ArgsT, Awaitable[RetT]]) -> Callable[ArgsT, RetT]:
    """Converts an async test function to be synchronous, running via Trio.

    Usage::

        @trio_test
        async def test_whatever():
            await ...

    If a pytest fixture is passed in that subclasses the :class:`~trio.abc.Clock` or
    :class:`~trio.abc.Instrument` ABCs, then those are passed to :meth:`trio.run()`.
    """

    @wraps(fn)
    def wrapper(*args: ArgsT.args, **kwargs: ArgsT.kwargs) -> RetT:
        __tracebackhide__ = True
        # Pull Clock/Instrument fixtures out of the keyword arguments; note
        # they are still forwarded to fn unchanged below.
        clocks = [value for value in kwargs.values() if isinstance(value, Clock)]
        instruments = [
            value for value in kwargs.values() if isinstance(value, Instrument)
        ]
        if len(clocks) > 1:
            raise ValueError("too many clocks spoil the broth!")
        clock = clocks[0] if clocks else None
        return _core.run(
            partial(fn, *args, **kwargs),
            clock=clock,
            instruments=instruments,
        )

    return wrapper
51 |
--------------------------------------------------------------------------------
/src/trio/to_thread.py:
--------------------------------------------------------------------------------
1 | from ._threads import current_default_thread_limiter, to_thread_run_sync as run_sync
2 |
3 | # need to use __all__ for pyright --verifytypes to see re-exports when renaming them
4 | __all__ = ["current_default_thread_limiter", "run_sync"]
5 |
--------------------------------------------------------------------------------
/test-requirements.in:
--------------------------------------------------------------------------------
1 | # For tests
2 | pytest >= 5.0 # for faulthandler in core
3 | coverage >= 7.2.5
4 | async_generator >= 1.9
5 | pyright
6 | pyOpenSSL >= 22.0.0 # for the ssl + DTLS tests
7 | trustme # for the ssl + DTLS tests
8 | pylint # for pylint finding all symbols tests
9 | jedi; implementation_name == "cpython" # for jedi code completion tests
10 | cryptography>=41.0.0 # cryptography<41 segfaults on pypy3.10
11 |
12 | # Tools
13 | black; implementation_name == "cpython"
14 | mypy # Would use mypy[faster-cache], but orjson has build issues on pypy
15 | orjson; implementation_name == "cpython" and python_version < "3.14" # orjson does not yet install on 3.14
16 | ruff >= 0.8.0
17 | astor # code generation
18 | uv >= 0.2.24
19 | codespell
20 | pre-commit
21 |
22 | # https://github.com/python-trio/trio/pull/654#issuecomment-420518745
23 | mypy-extensions
24 | typing-extensions
25 | types-cffi
26 | types-pyOpenSSL
27 | # annotations in doc files
28 | types-docutils
29 | sphinx
30 | # sync-requirements
31 | types-PyYAML
32 |
33 | # Trio's own dependencies
34 | cffi; os_name == "nt"
35 | attrs >= 23.2.0
36 | sortedcontainers
37 | idna
38 | outcome
39 | sniffio
40 | # 1.2.1 fixes types
41 | exceptiongroup >= 1.2.1; python_version < "3.11"
42 |
--------------------------------------------------------------------------------
/tests/_trio_check_attrs_aliases.py:
--------------------------------------------------------------------------------
1 | """Plugins are executed by Pytest before test modules.
2 |
3 | We use this to monkeypatch attrs.field(), so that we can detect if aliases are used for test_exports.
4 | """
5 |
6 | from typing import Any
7 |
8 | import attrs
9 |
10 | orig_field = attrs.field
11 |
12 |
def field(**kwargs: Any) -> Any:
    """Drop-in wrapper for attrs.field() that records the original keyword
    arguments under the "trio_original_args" metadata key.

    Note: if the caller passes metadata= explicitly, the saved copy shares
    that same dict, which also gains the "trio_original_args" entry.
    """
    saved_args = dict(kwargs)
    kwargs.setdefault("metadata", {})["trio_original_args"] = saved_args
    return orig_field(**kwargs)
18 |
19 |
20 | # Mark it as being ours, so the test knows it can actually run.
21 | field.trio_modded = True # type: ignore
22 | attrs.field = field
23 |
--------------------------------------------------------------------------------
/tests/cython/run_test_cython.py:
--------------------------------------------------------------------------------
1 | from .test_cython import invoke_main_entry_point
2 |
3 | invoke_main_entry_point()
4 |
--------------------------------------------------------------------------------
/tests/cython/test_cython.pyx:
--------------------------------------------------------------------------------
1 | # cython: language_level=3
2 | import trio
3 |
4 | # the output of the prints are not currently checked, we only check
5 | # if the program can be compiled and doesn't crash when run.
6 |
7 | # The content of the program can easily be extended if there's other behaviour
8 | # that might be likely to be problematic for cython.
async def foo() -> None:
    # Minimal async task; the printed output is not checked, we only care
    # that scheduling it through a nursery works when compiled with cython.
    print('.')
11 |
async def trio_main() -> None:
    # Exercise a plain await, then concurrent task spawning.
    print('hello...')
    await trio.sleep(1)
    print(' world !')

    # Spawn three concurrent copies of foo via nursery.start_soon.
    async with trio.open_nursery() as nursery:
        for _ in range(3):
            nursery.start_soon(foo)
21 |
def invoke_main_entry_point():
    # Synchronous entry point: runs the async trio_main under trio's scheduler.
    trio.run(trio_main)
24 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py{39,310,311,312,313,314,py310}
3 | labels =
4 | check = typing, gen_exports, type_completeness, pip_compile
5 | cython = py39-cython2,py39-cython,py311-cython2,py313-cython
6 |
7 | # TODO:
8 | # * environment to check coverage
9 | # * replace ci.sh
10 | # * --verbose --durations=10
11 | # * -p _trio_check_attrs_aliases
12 | # * mypy cache
13 | # * LSP
14 | # * apport
15 | # * use tox in CI
16 | # * switch to nox?
17 | # * move to pyproject.toml?
18 | # * this means conditional deps need to be replaced
19 |
20 | # protip: install tox-uv for faster venv generation
21 |
22 | [testenv]
23 | description = "Base environment for running tests depending on python version."
24 | # use wheels instead of sdist, significantly faster install
25 | package = wheel
26 | wheel_build_env = .pkg
27 | deps =
28 | hypothesis: hypothesis
29 | -r test-requirements.txt
30 | set_env =
31 | slow: TOX_RUN_SLOW = '--run-slow'
32 | commands =
33 | pytest {env:TOX_RUN_SLOW:} {posargs}
34 |
35 | [testenv:no_test_requirements]
description = "Run tests without optional test-requirements, to see we don't accidentally depend on a library not specified in deps."
37 | deps =
38 | pytest
39 | commands =
40 | pytest --skip-optional-imports {posargs}
41 |
42 | [testenv:docs]
43 | description = "Build documentation into docs/build."
44 | deps =
45 | -r docs-requirements.txt
46 | # base_python synced with .readthedocs.yml
47 | # To avoid syncing we can make RTD call the tox environment
48 | base_python = 3.11
49 | commands =
50 | sphinx-build {posargs:--fresh-env} docs/source docs/build
51 |
52 | [testenv:py39-cython2,py39-cython,py311-cython2,py313-cython]
53 | description = "Run cython tests."
54 | deps =
55 | # cython 3.1.0 broke stuff https://github.com/cython/cython/issues/6865
56 | cython: cython
57 | cython2: cython<3
58 | setuptools ; python_version >= '3.12'
59 | commands_pre =
60 | python --version
61 | cython --version
62 | cythonize --inplace -X linetrace=True tests/cython/test_cython.pyx
63 | commands =
64 | python -m tests.cython.run_test_cython
65 |
66 | [testenv:cov-cython]
67 | deps =
68 | setuptools
69 | cython
70 | set_env =
71 | CFLAGS= -DCYTHON_TRACE_NOGIL=1
72 | allowlist_externals =
73 | sed
74 | cp
75 | commands_pre =
76 | python --version
77 | cython --version
78 | cp pyproject.toml {temp_dir}/
79 | sed -i "s/plugins\ =\ \\[\\]/plugins = [\"Cython.Coverage\"]/" {temp_dir}/pyproject.toml
80 | cythonize --inplace -X linetrace=True tests/cython/test_cython.pyx
81 | commands =
82 | coverage run -m tests.cython.run_test_cython --rcfile={temp_dir}/pyproject.toml
83 | coverage combine
84 | coverage report
85 |
86 | [testenv:gen_exports]
87 | description = "Run gen_exports.py, regenerating code for public API wrappers."
88 | deps =
89 | -r test-requirements.txt
90 | base_python = 3.13
91 | commands =
92 | python ./src/trio/_tools/gen_exports.py --test
93 |
94 | [testenv:pip_compile]
95 | description = "Run pre-commit job pip-compile"
96 | base_python = 3.13
97 | commands =
98 | pre-commit run pip-compile --all-files
99 |
100 | # TODO: allow specifying e.g. typing-3.11 to run with --python[-]version=3.11
101 | [testenv:typing]
102 | description = "Run type checks: mypy on all platforms, and pyright on `src/trio[/_core]/_tests/type_tests/`."
103 | deps =
104 | -r test-requirements.txt
105 | exceptiongroup
106 | base_python = 3.13
107 | set_env =
108 | PYRIGHT_PYTHON_IGNORE_WARNINGS=1
109 | commands =
110 | # use mypy_annotate if running in CI? if not, should remove it
111 | mypy --platform linux
112 | mypy --platform darwin
113 | mypy --platform win32
114 |
115 | pyright src/trio/_tests/type_tests
116 | pyright src/trio/_core/_tests/type_tests
117 |
118 | [testenv:type_completeness]
119 | description = "Check type completeness, using our wrapper around pyright --verifytypes."
120 | deps =
121 | -r test-requirements.txt
122 | exceptiongroup
123 | base_python = 3.13
124 | set_env =
125 | PYRIGHT_PYTHON_IGNORE_WARNINGS=1
126 | commands =
127 | python src/trio/_tests/check_type_completeness.py
128 |
--------------------------------------------------------------------------------
/zizmor.yml:
--------------------------------------------------------------------------------
1 | rules:
2 | unpinned-uses:
3 | config:
4 | policies:
5 | # TODO: use the default policies
6 | "*": any
7 |
--------------------------------------------------------------------------------