├── .python-version ├── src └── pyleak │ ├── py.typed │ ├── __init__.py │ ├── utils.py │ ├── pytest_plugin.py │ ├── threads.py │ ├── base.py │ ├── combined.py │ ├── tasks.py │ └── eventloop.py ├── tests ├── conftest.py ├── test_plugin.py ├── test_event_loop_blocking.py ├── test_task_leaks.py └── test_thread_leaks.py ├── .github └── workflows │ ├── ci.yml │ └── release.yml ├── pyproject.toml ├── .gitignore ├── LICENSE └── README.md /.python-version: -------------------------------------------------------------------------------- 1 | 3.9 2 | -------------------------------------------------------------------------------- /src/pyleak/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | def pytest_configure(config): 2 | config.addinivalue_line( 3 | "markers", 4 | "no_leaks(tasks=True, threads=True, blocking=True): mark test to run only on named environment", 5 | ) 6 | -------------------------------------------------------------------------------- /src/pyleak/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import PyleakExceptionGroup 2 | from .eventloop import EventLoopBlockError, no_event_loop_blocking 3 | from .tasks import TaskLeakError, no_task_leaks 4 | from .threads import ThreadLeakError, no_thread_leaks, DEFAULT_THREAD_NAME_FILTER 5 | 6 | __all__ = [ 7 | "no_task_leaks", 8 | "TaskLeakError", 9 | "no_thread_leaks", 10 | "ThreadLeakError", 11 | "no_event_loop_blocking", 12 | "EventLoopBlockError", 13 | "PyleakExceptionGroup", 14 | "DEFAULT_THREAD_NAME_FILTER", 15 | ] 16 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI - tests 2 
| 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | workflow_dispatch: 11 | 12 | jobs: 13 | tests: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | python-version: 18 | - "3.9" 19 | - "3.10" 20 | - "3.11" 21 | - "3.12" 22 | env: 23 | UV_PYTHON: ${{ matrix.python-version }} 24 | steps: 25 | - uses: actions/checkout@v4 26 | - name: Install uv and set the python version 27 | uses: astral-sh/setup-uv@v5 28 | with: 29 | python-version: ${{ matrix.python-version }} 30 | - name: Install dependencies 31 | run: uv sync 32 | - name: Run tests 33 | run: uv run pytest -svv 34 | -------------------------------------------------------------------------------- /src/pyleak/utils.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import logging 4 | import os 5 | import traceback 6 | from dataclasses import dataclass 7 | 8 | 9 | def setup_logger(name: str = __name__): 10 | logger = logging.getLogger(name) 11 | logger.setLevel(os.getenv("PYLEAK_LOG_LEVEL", "WARNING").upper()) 12 | logger.addHandler(logging.StreamHandler()) 13 | return logger 14 | 15 | 16 | @dataclass 17 | class CallerContext: 18 | filename: str 19 | name: str 20 | lineno: int | None = None 21 | 22 | def __str__(self): 23 | return f"{self.filename}:{self.name}:{self.lineno or '?'}" 24 | 25 | 26 | def find_my_caller(ignore_frames: int = 2) -> CallerContext | None: 27 | """detect using the stack trace""" 28 | 29 | stack = traceback.extract_stack() 30 | 31 | # ignore 2 frames 32 | # 1. the first frame which is `find_my_caller` itself 33 | # 2. 
the second frame if the function that called `find_my_caller` 34 | frame = stack[-ignore_frames - 1] 35 | return CallerContext(filename=frame.filename, name=frame.name, lineno=frame.lineno) 36 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "pyleak" 3 | version = "0.1.14" 4 | description = "Detect leaked asyncio tasks, threads, and event loop blocking in Python. Inspired by Go's goleak" 5 | readme = "README.md" 6 | requires-python = ">=3.9" 7 | dependencies = [ 8 | "exceptiongroup>=1.3.0", 9 | ] 10 | authors = [ 11 | { name = "Deepankar Mahapatro", email = "deepankarmahapatro@gmail.com" }, 12 | ] 13 | keywords = [ 14 | "asyncio", 15 | "testing", 16 | "leak", 17 | "goleak", 18 | "async", 19 | "tasks", 20 | "threads", 21 | "pytest" 22 | ] 23 | 24 | [project.urls] 25 | "Homepage" = "https://github.com/deepankarm/pyleak" 26 | "Repository" = "https://github.com/deepankarm/pyleak" 27 | "Issues" = "https://github.com/deepankarm/pyleak/issues" 28 | 29 | [build-system] 30 | requires = ["hatchling"] 31 | build-backend = "hatchling.build" 32 | 33 | [dependency-groups] 34 | dev = [ 35 | "aiohttp>=3.12.6", 36 | "fastapi>=0.115.12", 37 | "httpx>=0.28.1", 38 | "pytest>=8.3.5", 39 | "pytest-asyncio>=1.0.0", 40 | "requests>=2.32.3", 41 | "ruff>=0.11.12", 42 | ] 43 | 44 | [project.entry-points."pytest11"] 45 | pyleak = "pyleak.pytest_plugin" 46 | 47 | [tool.pytest.ini_options] 48 | asyncio_mode = "auto" 49 | asyncio_default_fixture_loop_scope = "function" 50 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | workflow_dispatch: 5 | 6 | jobs: 7 | release: 8 | runs-on: ubuntu-latest 9 | permissions: 10 | contents: write 11 | steps: 12 | - uses: 
actions/checkout@v4 13 | with: 14 | fetch-depth: 0 15 | - name: Install uv and set the python version 16 | uses: astral-sh/setup-uv@v5 17 | - name: Install dependencies 18 | run: uv sync 19 | - name: Build the package 20 | run: uv build 21 | - name: Get version 22 | id: version 23 | run: | 24 | VERSION=$(uv version --short) 25 | echo "version=$VERSION" >> $GITHUB_OUTPUT 26 | echo "tag=v$VERSION" >> $GITHUB_OUTPUT 27 | - name: Generate changelog 28 | id: changelog 29 | run: | 30 | # Get commits since last tag 31 | LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") 32 | if [ -z "$LAST_TAG" ]; then 33 | COMMITS=$(git log --oneline --pretty=format:"- %s (%h)" | head -20) 34 | else 35 | COMMITS=$(git log ${LAST_TAG}..HEAD --oneline --pretty=format:"- %s (%h)") 36 | fi 37 | 38 | echo "changelog<<EOF" >> $GITHUB_OUTPUT 39 | echo "$COMMITS" >> $GITHUB_OUTPUT 40 | echo "EOF" >> $GITHUB_OUTPUT 41 | 42 | - name: Create GitHub Release 43 | uses: softprops/action-gh-release@v2 44 | with: 45 | tag_name: ${{ steps.version.outputs.tag }} 46 | name: pyleak ${{ steps.version.outputs.tag }} 47 | body: ${{ steps.changelog.outputs.changelog }} 48 | files: | 49 | dist/*.whl 50 | dist/*.tar.gz 51 | draft: false 52 | prerelease: false 53 | env: 54 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 55 | - name: Publish the package 56 | run: uv publish 57 | env: 58 | UV_PUBLISH_TOKEN: ${{ secrets.PYPI_TOKEN }} 59 | -------------------------------------------------------------------------------- /tests/test_plugin.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import threading 3 | import time 4 | 5 | import pytest 6 | 7 | from pyleak import PyleakExceptionGroup 8 | 9 | 10 | @pytest.mark.no_leaks 11 | def test_sync_no_leaks(): 12 | """Test sync function with no leaks""" 13 | pass 14 | 15 | 16 | @pytest.mark.no_leaks 17 | @pytest.mark.asyncio 18 | async def test_async_no_leaks(): 19 | """Test async function with no leaks""" 20 | 
await asyncio.sleep(0.01) 21 | 22 | 23 | @pytest.mark.no_leaks("threads") 24 | def test_sync_thread_only(): 25 | """Test sync with thread detection only""" 26 | pass 27 | 28 | 29 | @pytest.mark.xfail(raises=PyleakExceptionGroup) 30 | @pytest.mark.no_leaks 31 | @pytest.mark.asyncio 32 | async def test_task_leak_detected(): 33 | """This test should fail due to task leak""" 34 | asyncio.create_task(asyncio.sleep(10)) # Intentional leak 35 | 36 | 37 | @pytest.mark.xfail(raises=PyleakExceptionGroup) 38 | @pytest.mark.no_leaks 39 | @pytest.mark.asyncio 40 | async def test_thread_leak_detected(): 41 | """This test should fail due to thread leak""" 42 | threading.Thread(target=lambda: time.sleep(10)).start() # Intentional leak 43 | 44 | 45 | @pytest.mark.xfail(raises=PyleakExceptionGroup) 46 | @pytest.mark.no_leaks 47 | @pytest.mark.asyncio 48 | async def test_blocking_detected(): 49 | """This test should fail due to blocking""" 50 | time.sleep(0.5) # Intentional blocking 51 | 52 | 53 | @pytest.mark.xfail(raises=PyleakExceptionGroup) 54 | @pytest.mark.no_leaks 55 | def test_sync_thread_leak_detected(): 56 | """This test should fail due to thread leak""" 57 | threading.Thread(target=lambda: time.sleep(10)).start() # Intentional leak 58 | 59 | 60 | @pytest.mark.no_leaks(tasks=True, threads=False, blocking=False) 61 | @pytest.mark.asyncio 62 | async def test_task_leak_detected_no_blocking(): 63 | """This test should pass as we only capture tasks""" 64 | await asyncio.create_task(asyncio.sleep(0.1)) # no tasks leak 65 | time.sleep(0.5) # intentionally block the event loop 66 | -------------------------------------------------------------------------------- /src/pyleak/pytest_plugin.py: -------------------------------------------------------------------------------- 1 | """ 2 | PyLeak pytest plugin for detecting leaked tasks, threads, and event loop blocking. 3 | 4 | This plugin automatically wraps tests with pyleak detectors based on pytest markers. 
5 | """ 6 | 7 | from __future__ import annotations 8 | 9 | import asyncio 10 | 11 | import pytest 12 | 13 | from pyleak.combined import CombinedLeakDetector, PyLeakConfig 14 | from pyleak.utils import CallerContext 15 | 16 | 17 | def should_monitor_test(item: pytest.Function) -> PyLeakConfig | None: 18 | """Check if test should be monitored and return config""" 19 | marker = item.get_closest_marker("no_leaks") 20 | if not marker: 21 | return None 22 | 23 | marker_args = {} 24 | if marker.args: 25 | for arg in marker.args: 26 | if arg == "tasks": 27 | marker_args["tasks"] = True 28 | elif arg == "threads": 29 | marker_args["threads"] = True 30 | elif arg == "blocking": 31 | marker_args["blocking"] = True 32 | elif arg == "all": 33 | marker_args.update({"tasks": True, "threads": True, "blocking": True}) 34 | 35 | if marker.kwargs: 36 | marker_args.update(marker.kwargs) 37 | 38 | if not marker_args: 39 | marker_args = {"tasks": True, "threads": True, "blocking": True} 40 | 41 | return PyLeakConfig.from_marker_args(marker_args) 42 | 43 | 44 | @pytest.hookimpl(hookwrapper=True) 45 | def pytest_runtest_call(item: pytest.Function): 46 | """Wrap test execution with leak detection""" 47 | 48 | config = should_monitor_test(item) 49 | if not config: 50 | yield 51 | return 52 | 53 | is_async = asyncio.iscoroutinefunction(item.function) 54 | original_func = item.function 55 | caller_context = CallerContext( 56 | filename=item.fspath.strpath, name=item.name, lineno=None 57 | ) 58 | 59 | if is_async: 60 | 61 | async def async_wrapper(*args, **kwargs): 62 | detector = CombinedLeakDetector( 63 | config=config, is_async=True, caller_context=caller_context 64 | ) 65 | async with detector: 66 | return await original_func(*args, **kwargs) 67 | 68 | item.obj = async_wrapper 69 | else: 70 | 71 | def sync_wrapper(*args, **kwargs): 72 | detector = CombinedLeakDetector( 73 | config=config, is_async=False, caller_context=caller_context 74 | ) 75 | with detector: 76 | return 
original_func(*args, **kwargs) 77 | 78 | item.obj = sync_wrapper 79 | 80 | try: 81 | yield 82 | finally: 83 | item.obj = original_func 84 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to 
include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # UV 98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | #uv.lock 102 | 103 | # poetry 104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 105 | # This is especially recommended for binary packages to ensure reproducibility, and is more 106 | # commonly ignored for libraries. 107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 108 | #poetry.lock 109 | 110 | # pdm 111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 112 | #pdm.lock 113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 114 | # in version control. 115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control 116 | .pdm.toml 117 | .pdm-python 118 | .pdm-build/ 119 | 120 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 121 | __pypackages__/ 122 | 123 | # Celery stuff 124 | celerybeat-schedule 125 | celerybeat.pid 126 | 127 | # SageMath parsed files 128 | *.sage.py 129 | 130 | # Environments 131 | .env 132 | .venv 133 | env/ 134 | venv/ 135 | ENV/ 136 | env.bak/ 137 | venv.bak/ 138 | 139 | # Spyder project settings 140 | .spyderproject 141 | .spyproject 142 | 143 | # Rope project settings 144 | .ropeproject 145 | 146 | # mkdocs documentation 147 | /site 148 | 149 | # mypy 150 | .mypy_cache/ 151 | .dmypy.json 152 | dmypy.json 153 | 154 | # Pyre type checker 155 | .pyre/ 156 | 157 | # pytype static type analyzer 158 | .pytype/ 159 | 160 | # Cython debug symbols 161 | cython_debug/ 162 | 163 | # PyCharm 164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 166 | # and can be added to the global gitignore or merged into this file. For a more nuclear 167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 168 | #.idea/ 169 | 170 | # Ruff stuff: 171 | .ruff_cache/ 172 | 173 | # PyPI configuration file 174 | .pypirc 175 | 176 | # Repo-to-text 177 | .repo-to-text-settings.yaml 178 | repo-to-text_*.txt 179 | -------------------------------------------------------------------------------- /src/pyleak/threads.py: -------------------------------------------------------------------------------- 1 | """ 2 | Thread Leak Detector 3 | 4 | Detect and handle leaked threads in Python. 
5 | """ 6 | 7 | import logging 8 | import re 9 | import threading 10 | import time 11 | from typing import List, Optional, Set, Union 12 | 13 | from pyleak.base import ( 14 | LeakAction, 15 | LeakError, 16 | _BaseLeakContextManager, 17 | _BaseLeakDetector, 18 | ) 19 | from pyleak.utils import setup_logger 20 | 21 | _logger = setup_logger(__name__) 22 | DEFAULT_THREAD_NAME_FILTER = re.compile(r"^(?!asyncio_\d+$).*") 23 | 24 | 25 | class ThreadLeakError(LeakError): 26 | """Raised when thread leaks are detected and action is set to RAISE.""" 27 | 28 | 29 | class _ThreadLeakDetector(_BaseLeakDetector): 30 | """Core thread leak detection functionality.""" 31 | 32 | def __init__( 33 | self, 34 | action: LeakAction = LeakAction.WARN, 35 | name_filter: Optional[Union[str, re.Pattern]] = DEFAULT_THREAD_NAME_FILTER, 36 | logger: Optional[logging.Logger] = _logger, 37 | exclude_daemon: bool = True, 38 | ): 39 | super().__init__(action, name_filter, logger) 40 | self.exclude_daemon = exclude_daemon 41 | 42 | def _get_resource_name(self, thread: threading.Thread) -> str: 43 | """Get thread name, handling both named and unnamed threads.""" 44 | name = thread.name 45 | return name if name else f"<unnamed-{id(thread)}>" 46 | 47 | def get_running_resources( 48 | self, exclude_current: bool = True 49 | ) -> Set[threading.Thread]: 50 | """Get all currently running threads.""" 51 | threads = set(threading.enumerate()) 52 | 53 | if exclude_current: 54 | current = threading.current_thread() 55 | threads.discard(current) 56 | 57 | # Optionally exclude daemon threads (they're cleaned up automatically) 58 | if self.exclude_daemon: 59 | threads = {t for t in threads if not t.daemon} 60 | 61 | return threads 62 | 63 | def _is_resource_active(self, thread: threading.Thread) -> bool: 64 | """Check if a thread is still active/running.""" 65 | return thread.is_alive() 66 | 67 | @property 68 | def leak_error_class(self) -> type: 69 | """Get the appropriate exception class for thread leaks.""" 70 | return 
ThreadLeakError 71 | 72 | @property 73 | def resource_type(self) -> str: 74 | """Get the human-readable name for threads.""" 75 | return "threads" 76 | 77 | def _handle_cancel_action( 78 | self, leaked_threads: List[threading.Thread], thread_names: List[str] 79 | ) -> None: 80 | """Handle the cancel action for leaked threads.""" 81 | self.logger.warning( 82 | f"Cannot force-stop {len(leaked_threads)} leaked threads: {thread_names}. " 83 | "Consider using thread.join() or proper shutdown mechanisms." 84 | ) 85 | 86 | 87 | class _ThreadLeakContextManager(_BaseLeakContextManager): 88 | """Context manager that can also be used as a decorator.""" 89 | 90 | def __init__( 91 | self, 92 | action: str = "warn", 93 | name_filter: Optional[Union[str, re.Pattern]] = DEFAULT_THREAD_NAME_FILTER, 94 | logger: Optional[logging.Logger] = _logger, 95 | exclude_daemon: bool = True, 96 | grace_period: float = 0.1, 97 | ): 98 | super().__init__(action, name_filter, logger) 99 | self.exclude_daemon = exclude_daemon 100 | self.grace_period = grace_period 101 | 102 | def _create_detector(self) -> _ThreadLeakDetector: 103 | """Create a thread leak detector instance.""" 104 | return _ThreadLeakDetector( 105 | self.action, self.name_filter, self.logger, self.exclude_daemon 106 | ) 107 | 108 | def _wait_for_completion(self) -> None: 109 | """Wait for threads to complete naturally.""" 110 | time.sleep(self.grace_period) 111 | 112 | def __call__(self, func): 113 | """Allow this context manager to be used as a decorator.""" 114 | 115 | def wrapper(*args, **kwargs): 116 | with self: 117 | return func(*args, **kwargs) 118 | 119 | return wrapper 120 | 121 | 122 | def no_thread_leaks( 123 | action: str = "warn", 124 | name_filter: Optional[Union[str, re.Pattern]] = DEFAULT_THREAD_NAME_FILTER, 125 | logger: Optional[logging.Logger] = _logger, 126 | exclude_daemon: bool = True, 127 | grace_period: float = 0.1, 128 | ): 129 | """ 130 | Context manager/decorator that detects thread leaks within its 
scope. 131 | 132 | Args: 133 | action: Action to take when leaks are detected ("warn", "log", "cancel", "raise") 134 | name_filter: Optional filter for thread names (string or regex) 135 | logger: Optional logger instance 136 | exclude_daemon: Whether to exclude daemon threads from detection 137 | grace_period: Time to wait for threads to finish naturally (seconds) 138 | 139 | Example: 140 | # As context manager 141 | with no_thread_leaks(): 142 | threading.Thread(target=some_function).start() 143 | 144 | # As decorator 145 | @no_thread_leaks(action="raise") 146 | def my_function(): 147 | threading.Thread(target=some_work).start() 148 | """ 149 | return _ThreadLeakContextManager( 150 | action, name_filter, logger, exclude_daemon, grace_period 151 | ) 152 | -------------------------------------------------------------------------------- /tests/test_event_loop_blocking.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import time 3 | import warnings 4 | 5 | import httpx 6 | import pytest 7 | import pytest_asyncio 8 | from fastapi import FastAPI 9 | from starlette.testclient import TestClient 10 | 11 | from pyleak import EventLoopBlockError, no_event_loop_blocking 12 | 13 | pytestmark = pytest.mark.asyncio 14 | 15 | 16 | @no_event_loop_blocking(action="warn") 17 | async def bad_sleep_with_warning(): 18 | time.sleep(1) 19 | 20 | 21 | @no_event_loop_blocking(action="raise") 22 | async def bad_sleep_with_exception(): 23 | time.sleep(1) 24 | 25 | 26 | @no_event_loop_blocking(action="warn") 27 | async def good_sleep_with_warning(): 28 | await asyncio.sleep(1) 29 | 30 | 31 | class TestEventLoopBlockingDecorator: 32 | async def test_no_blocking(self): 33 | """Test that no warnings are issued when no event loop blocking is detected.""" 34 | with warnings.catch_warnings(record=True) as w: 35 | warnings.simplefilter("always") 36 | 37 | await good_sleep_with_warning() 38 | assert len(w) == 0 39 | 40 | async def 
test_action_warning(self): 41 | """Test that event loop blocking triggers warnings.""" 42 | with warnings.catch_warnings(record=True) as w: 43 | warnings.simplefilter("always") 44 | 45 | await bad_sleep_with_warning() 46 | assert len(w) == 3 47 | assert issubclass(w[0].category, ResourceWarning) 48 | assert "Event loop blocked for" in str(w[0].message) 49 | assert "Detected 1 event loop blocks" in str(w[1].message) 50 | assert "bad_sleep_with_warning" in str(w[2].message) 51 | assert "time.sleep(1)" in str(w[2].message) 52 | 53 | async def test_action_raise(self): 54 | """Test that event loop blocking triggers exceptions.""" 55 | with pytest.raises(EventLoopBlockError) as exc_info: 56 | await bad_sleep_with_exception() 57 | 58 | assert len(exc_info.value.blocking_events) == 1 59 | blocking_event = exc_info.value.blocking_events[0] 60 | assert blocking_event.block_id == 1 61 | assert blocking_event.duration > 0.0 62 | assert blocking_event.timestamp > 0.0 63 | blocking_stack = blocking_event.format_blocking_stack() 64 | assert "bad_sleep_with_exception" in blocking_stack 65 | assert "time.sleep(1)" in blocking_stack 66 | 67 | 68 | class TestEventLoopBlockingContextManager: 69 | async def test_no_blocking(self): 70 | """Test that no warnings are issued when no event loop blocking is detected.""" 71 | with warnings.catch_warnings(record=True) as w: 72 | warnings.simplefilter("always") 73 | 74 | async with no_event_loop_blocking(action="warn", threshold=0.5): 75 | await asyncio.sleep(1) 76 | 77 | assert len(w) == 0 78 | 79 | async def test_action_warning(self): 80 | """Test that event loop blocking triggers warnings.""" 81 | with warnings.catch_warnings(record=True) as w: 82 | warnings.simplefilter("always") 83 | 84 | async with no_event_loop_blocking(action="warn"): 85 | time.sleep(1) 86 | 87 | assert len(w) == 3 88 | assert issubclass(w[0].category, ResourceWarning) 89 | assert "Event loop blocked for" in str(w[0].message) 90 | assert "Detected 1 event loop blocks" 
in str(w[1].message) 91 | assert "time.sleep(1)" in str(w[2].message) 92 | 93 | async def test_action_raise(self): 94 | """Test that event loop blocking triggers exceptions.""" 95 | with pytest.raises(EventLoopBlockError) as exc_info: 96 | async with no_event_loop_blocking(action="raise"): 97 | time.sleep(1) 98 | 99 | assert len(exc_info.value.blocking_events) == 1 100 | blocking_event = exc_info.value.blocking_events[0] 101 | assert blocking_event.block_id == 1 102 | assert blocking_event.duration > 0.0 103 | assert blocking_event.timestamp > 0.0 104 | blocking_stack = blocking_event.format_blocking_stack() 105 | assert "time.sleep(1)" in blocking_stack 106 | 107 | 108 | @pytest.fixture 109 | def app(): 110 | simple_fastapi_app = FastAPI() 111 | 112 | @simple_fastapi_app.get("/") 113 | async def endpoint(timeout: int = 0): 114 | await asyncio.sleep(timeout) 115 | return {"message": f"Hello World after {timeout} seconds"} 116 | 117 | return simple_fastapi_app 118 | 119 | 120 | @pytest.fixture 121 | def sync_client(app: FastAPI): 122 | return TestClient(app) 123 | 124 | 125 | @pytest_asyncio.fixture 126 | async def async_client(app: FastAPI): 127 | transport = httpx.ASGITransport(app=app) 128 | return httpx.AsyncClient(transport=transport, base_url="http://test") 129 | 130 | 131 | async def my_function_using_sync_client(sync_client: TestClient): 132 | resp = sync_client.get("/", params={"timeout": 5}) 133 | assert resp.status_code == 200 134 | assert resp.json() == {"message": "Hello World after 5 seconds"} 135 | 136 | 137 | async def my_function_using_async_client(async_client: httpx.AsyncClient): 138 | resp = await async_client.get("/", params={"timeout": 5}) 139 | assert resp.status_code == 200 140 | assert resp.json() == {"message": "Hello World after 5 seconds"} 141 | 142 | 143 | class TestEventLoopBlockingWithHTTPRequests: 144 | async def test_sync_client(self, sync_client, async_client): 145 | with warnings.catch_warnings(record=True) as w: 146 | 
warnings.simplefilter("always") 147 | 148 | async with no_event_loop_blocking(action="warn", threshold=0.2): 149 | await my_function_using_sync_client(sync_client) 150 | 151 | assert issubclass(w[0].category, ResourceWarning) 152 | all_messages = "\n".join(str(w[i].message) for i in range(len(w))) 153 | assert "Event loop blocked" in all_messages 154 | assert "my_function_using_sync_client" in all_messages 155 | assert "sync_client.get" in all_messages 156 | 157 | async def test_async_client(self, async_client): 158 | with warnings.catch_warnings(record=True) as w: 159 | warnings.simplefilter("always") 160 | 161 | async with no_event_loop_blocking(action="warn", threshold=0.2): 162 | await my_function_using_async_client(async_client) 163 | 164 | assert len(w) == 0 165 | -------------------------------------------------------------------------------- /src/pyleak/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Base classes for leak detection functionality. 
3 | """ 4 | 5 | import logging 6 | import re 7 | import warnings 8 | from abc import ABC, abstractmethod 9 | from enum import Enum 10 | from typing import Any, List, Optional, Set, Union 11 | 12 | from exceptiongroup import ExceptionGroup 13 | 14 | from pyleak.utils import setup_logger 15 | 16 | _logger = setup_logger(__name__) 17 | 18 | 19 | class LeakAction(str, Enum): 20 | """Actions to take when task leaks are detected.""" 21 | 22 | WARN = "warn" 23 | LOG = "log" 24 | CANCEL = "cancel" 25 | RAISE = "raise" 26 | 27 | 28 | class LeakError(Exception): 29 | """Base exception for leak detection errors.""" 30 | 31 | 32 | class PyleakExceptionGroup(ExceptionGroup, LeakError): 33 | """Combined exception for multiple leak errors.""" 34 | 35 | def __init__(self, message: str, leak_errors: List[LeakError]): 36 | super().__init__(message, leak_errors) 37 | 38 | 39 | class _BaseLeakDetector(ABC): 40 | """Base class for leak detection functionality.""" 41 | 42 | def __init__( 43 | self, 44 | action: LeakAction = LeakAction.WARN, 45 | name_filter: Optional[Union[str, re.Pattern]] = None, 46 | logger: Optional[logging.Logger] = _logger, 47 | ): 48 | self.action = action 49 | self.name_filter = name_filter 50 | self.logger = logger 51 | 52 | def _matches_filter(self, resource_name: str) -> bool: 53 | """Check if resource name matches the filter.""" 54 | if self.name_filter is None: 55 | return True 56 | 57 | if isinstance(self.name_filter, str): 58 | return resource_name == self.name_filter 59 | elif isinstance(self.name_filter, re.Pattern): 60 | return bool(self.name_filter.search(resource_name)) 61 | else: 62 | # Try to compile as regex if it's a string-like pattern 63 | try: 64 | pattern = re.compile(str(self.name_filter)) 65 | return bool(pattern.search(resource_name)) 66 | except re.error: 67 | return resource_name == str(self.name_filter) 68 | 69 | @abstractmethod 70 | def _get_resource_name(self, resource: Any) -> str: 71 | """Get resource name, handling both named and 
unnamed resources.""" 72 | pass 73 | 74 | @abstractmethod 75 | def get_running_resources(self, exclude_current: bool = True) -> Set[Any]: 76 | """Get all currently running resources.""" 77 | pass 78 | 79 | @abstractmethod 80 | def _is_resource_active(self, resource: Any) -> bool: 81 | """Check if a resource is still active/running.""" 82 | pass 83 | 84 | @property 85 | @abstractmethod 86 | def leak_error_class(self) -> type: 87 | """Get the appropriate exception class for this resource type.""" 88 | pass 89 | 90 | @property 91 | @abstractmethod 92 | def resource_type(self) -> str: 93 | """Get the human-readable name for this resource type (e.g., 'tasks', 'threads').""" 94 | pass 95 | 96 | @abstractmethod 97 | def _handle_cancel_action( 98 | self, leaked_resources: List[Any], resource_names: List[str] 99 | ) -> None: 100 | """Handle the cancel action for leaked resources.""" 101 | pass 102 | 103 | def get_leaked_resources(self, initial_resources: Set[Any]) -> List[Any]: 104 | """Find resources that are still running and match the filter.""" 105 | current_resources = self.get_running_resources() 106 | new_resources = current_resources - initial_resources 107 | self.logger.debug( 108 | f"Found {len(new_resources)} new {self.resource_type} before filtering" 109 | ) 110 | 111 | leaked_resources = [] 112 | for resource in new_resources: 113 | if self._is_resource_active(resource): 114 | resource_name = self._get_resource_name(resource) 115 | if self._matches_filter(resource_name): 116 | leaked_resources.append(resource) 117 | 118 | return leaked_resources 119 | 120 | def handle_leaked_resources(self, leaked_resources: List[Any]) -> None: 121 | """Handle detected leaked resources based on the configured action.""" 122 | if not leaked_resources: 123 | return 124 | 125 | resource_names = [self._get_resource_name(r) for r in leaked_resources] 126 | message = f"Detected {len(leaked_resources)} leaked {self.resource_type}: {resource_names}" 127 | if self.action == "warn": 128 
| warnings.warn(message, ResourceWarning, stacklevel=3) 129 | elif self.action == "log": 130 | self.logger.warning(message) 131 | elif self.action == "cancel": 132 | self._handle_cancel_action(leaked_resources, resource_names) 133 | elif self.action == "raise": 134 | raise self.leak_error_class(message) 135 | 136 | 137 | class _BaseLeakContextManager(ABC): 138 | """Base context manager that can also be used as a decorator.""" 139 | 140 | def __init__( 141 | self, 142 | action: str = "warn", 143 | name_filter: Optional[Union[str, re.Pattern]] = None, 144 | logger: Optional[logging.Logger] = _logger, 145 | **kwargs, 146 | ): 147 | self.action = action 148 | self.name_filter = name_filter 149 | self.logger = logger 150 | self.extra_kwargs = kwargs 151 | 152 | @abstractmethod 153 | def _create_detector(self) -> _BaseLeakDetector: 154 | """Create the appropriate detector instance.""" 155 | pass 156 | 157 | @abstractmethod 158 | def _wait_for_completion(self) -> None: 159 | """Wait for resources to complete naturally.""" 160 | pass 161 | 162 | def __enter__(self): 163 | return self._enter_context() 164 | 165 | def __exit__(self, exc_type, exc_val, exc_tb): 166 | return self._exit_context(exc_type, exc_val, exc_tb) 167 | 168 | def _enter_context(self): 169 | """Common enter logic.""" 170 | self.detector = self._create_detector() 171 | self.initial_resources = self.detector.get_running_resources() 172 | self.logger.debug( 173 | f"Detected {len(self.initial_resources)} initial {self.detector.resource_type}" 174 | ) 175 | return self 176 | 177 | def _exit_context(self, exc_type, exc_val, exc_tb): 178 | """Common exit logic.""" 179 | self._wait_for_completion() 180 | leaked_resources = self.detector.get_leaked_resources(self.initial_resources) 181 | self.logger.debug( 182 | f"Detected {len(leaked_resources)} leaked {self.detector.resource_type}" 183 | ) 184 | self.detector.handle_leaked_resources(leaked_resources) 185 | 186 | async def __aenter__(self): 187 | return 
self._enter_context() 188 | 189 | async def __aexit__(self, exc_type, exc_val, exc_tb): 190 | return self._exit_context(exc_type, exc_val, exc_tb) 191 | -------------------------------------------------------------------------------- /src/pyleak/combined.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass, field, fields 4 | from typing import Any 5 | 6 | from pyleak import ( 7 | DEFAULT_THREAD_NAME_FILTER, 8 | EventLoopBlockError, 9 | TaskLeakError, 10 | ThreadLeakError, 11 | no_event_loop_blocking, 12 | no_task_leaks, 13 | no_thread_leaks, 14 | ) 15 | from pyleak.base import PyleakExceptionGroup 16 | from pyleak.utils import CallerContext 17 | 18 | 19 | @dataclass 20 | class PyLeakConfig: 21 | """Configuration for pyleak detection""" 22 | 23 | tasks: bool = field( 24 | default=True, metadata={"description": "Whether to detect task leaks"} 25 | ) 26 | task_action: str = field( 27 | default="raise", 28 | metadata={"description": "Action to take when a task leak is detected"}, 29 | ) 30 | task_name_filter: str | None = field( 31 | default=None, metadata={"description": "Filter to apply to task names"} 32 | ) 33 | enable_task_creation_tracking: bool = field( 34 | default=False, 35 | metadata={"description": "Whether to enable task creation tracking"}, 36 | ) 37 | 38 | threads: bool = field( 39 | default=True, metadata={"description": "Whether to detect thread leaks"} 40 | ) 41 | thread_action: str = field( 42 | default="raise", 43 | metadata={"description": "Action to take when a thread leak is detected"}, 44 | ) 45 | thread_name_filter: str | None = field( 46 | default=DEFAULT_THREAD_NAME_FILTER, 47 | metadata={ 48 | "description": "Filter to apply to thread names (default: exclude asyncio threads)" 49 | }, 50 | ) 51 | exclude_daemon_threads: bool = field( 52 | default=True, metadata={"description": "Whether to exclude daemon threads"} 53 | ) 54 | 55 | 
blocking: bool = field( 56 | default=True, metadata={"description": "Whether to detect event loop blocking"} 57 | ) 58 | blocking_action: str = field( 59 | default="raise", 60 | metadata={ 61 | "description": "Action to take when a blocking event loop is detected" 62 | }, 63 | ) 64 | blocking_threshold: float = field( 65 | default=0.2, 66 | metadata={"description": "Threshold for blocking event loop detection"}, 67 | ) 68 | blocking_check_interval: float = field( 69 | default=0.01, 70 | metadata={"description": "Interval for checking for blocking event loop"}, 71 | ) 72 | 73 | @classmethod 74 | def from_marker_args(cls, marker_args: dict[str, Any]): 75 | config = cls() 76 | config.tasks = marker_args.get("tasks", True) 77 | config.task_action = marker_args.get("task_action", "raise") 78 | config.task_name_filter = marker_args.get("task_name_filter", None) 79 | config.enable_task_creation_tracking = marker_args.get( 80 | "enable_task_creation_tracking", False 81 | ) 82 | config.threads = marker_args.get("threads", True) 83 | config.thread_action = marker_args.get("thread_action", "raise") 84 | config.thread_name_filter = marker_args.get( 85 | "thread_name_filter", DEFAULT_THREAD_NAME_FILTER 86 | ) 87 | config.exclude_daemon_threads = marker_args.get("exclude_daemon_threads", True) 88 | 89 | config.blocking = marker_args.get("blocking", True) 90 | config.blocking_action = marker_args.get("blocking_action", "raise") 91 | config.blocking_threshold = marker_args.get("blocking_threshold", 0.2) 92 | config.blocking_check_interval = marker_args.get( 93 | "blocking_check_interval", 0.01 94 | ) 95 | return config 96 | 97 | def to_markdown_table(self) -> str: 98 | """Generate markdown table from the above args including names and default values""" 99 | markdown = "| Name | Default | Description |\n" 100 | markdown += "|:------|:------|:------|\n" 101 | for f in fields(self): 102 | markdown += ( 103 | f"| {f.name} | {f.default} | {f.metadata.get('description', '')} |\n" 104 | 
) 105 | markdown += "\n" 106 | return markdown 107 | 108 | 109 | class CombinedLeakDetector: 110 | def __init__( 111 | self, 112 | config: PyLeakConfig, 113 | is_async: bool, 114 | caller_context: CallerContext | None = None, 115 | ): 116 | self.config = config 117 | self.is_async = is_async 118 | self.task_detector = None 119 | self.thread_detector = None 120 | self.blocking_detector = None 121 | self.caller_context = caller_context 122 | 123 | async def __aenter__(self): 124 | if self.is_async and self.config.tasks: 125 | self.task_detector = no_task_leaks( 126 | action=self.config.task_action, 127 | name_filter=self.config.task_name_filter, 128 | enable_creation_tracking=self.config.enable_task_creation_tracking, 129 | ) 130 | await self.task_detector.__aenter__() 131 | 132 | if self.is_async and self.config.blocking: 133 | self.blocking_detector = no_event_loop_blocking( 134 | action=self.config.blocking_action, 135 | threshold=self.config.blocking_threshold, 136 | check_interval=self.config.blocking_check_interval, 137 | caller_context=self.caller_context, 138 | ) 139 | self.blocking_detector.__enter__() 140 | 141 | if self.config.threads: 142 | self.thread_detector = no_thread_leaks( 143 | action=self.config.thread_action, 144 | name_filter=self.config.thread_name_filter, 145 | exclude_daemon=self.config.exclude_daemon_threads, 146 | ) 147 | self.thread_detector.__enter__() 148 | 149 | return self 150 | 151 | async def __aexit__(self, exc_type, exc_val, exc_tb): 152 | leak_errors = [] 153 | if self.thread_detector: 154 | try: 155 | self.thread_detector.__exit__(exc_type, exc_val, exc_tb) 156 | except ThreadLeakError as e: 157 | leak_errors.append(e) 158 | 159 | if self.blocking_detector: 160 | try: 161 | self.blocking_detector.__exit__(exc_type, exc_val, exc_tb) 162 | except EventLoopBlockError as e: 163 | leak_errors.append(e) 164 | 165 | if self.task_detector: 166 | try: 167 | await self.task_detector.__aexit__(exc_type, exc_val, exc_tb) 168 | except 
TaskLeakError as e: 169 | leak_errors.append(e) 170 | 171 | if leak_errors: 172 | raise PyleakExceptionGroup( 173 | "PyLeak detected issues:\n" 174 | + "\n\n".join([str(e) for e in leak_errors]), 175 | leak_errors, 176 | ) 177 | 178 | def __enter__(self): 179 | if self.config.threads: 180 | self.thread_detector = no_thread_leaks( 181 | action=self.config.thread_action, 182 | name_filter=self.config.thread_name_filter, 183 | exclude_daemon=self.config.exclude_daemon_threads, 184 | ) 185 | self.thread_detector.__enter__() 186 | 187 | # Ignore `detect_tasks` and `detect_blocking` for sync tests 188 | return self 189 | 190 | def __exit__(self, exc_type, exc_val, exc_tb): 191 | if self.thread_detector: 192 | try: 193 | self.thread_detector.__exit__(exc_type, exc_val, exc_tb) 194 | except ThreadLeakError as e: 195 | raise PyleakExceptionGroup( 196 | "PyLeak detected issues:\n" + "\n\n".join([str(e) for e in [e]]), 197 | [e], 198 | ) 199 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
--------------------------------------------------------------------------------
/tests/test_task_leaks.py:
--------------------------------------------------------------------------------
import asyncio
import re
import uuid
import warnings
from typing import Optional
from unittest.mock import Mock

import pytest
import pytest_asyncio

from pyleak import TaskLeakError, no_task_leaks

# Run every coroutine test in this module under pytest-asyncio.
pytestmark = pytest.mark.asyncio


# NOTE: several assertions below match this file's OWN source text verbatim
# (e.g. "await asyncio.sleep(10) # Long running task" is checked against
# captured stack traces), so do not reformat these helper functions.
async def leaky_function():
    """Function that creates a task but doesn't await it."""

    async def background_task():
        await asyncio.sleep(10) # Long running task

    # Create task but don't await it - this will leak!
    asyncio.create_task(background_task())
    await asyncio.sleep(0.1) # Do some other work


async def well_behaved_function():
    """Function that properly manages its tasks."""

    async def background_task():
        await asyncio.sleep(0.1)

    task = asyncio.create_task(background_task())
    await task # Properly await the task


async def create_named_task(name: str):
    """Creates a named task that will leak."""
    asyncio.create_task(asyncio.sleep(10), name=name)
    await asyncio.sleep(0.1)


async def function_with_exception():
    # Leaks a task AND raises, to check detection still runs on error exits.
    asyncio.create_task(asyncio.sleep(10))
    raise Exception("test")


class TestNoTaskLeaksContextManager:
    """Test no_task_leaks when used as context manager."""

    async def test_no_leaks_detected(self):
        """Test that no warnings are issued when no tasks leak."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            async with no_task_leaks():
                await well_behaved_function()

            assert len(w) == 0

    async def test_action_warn(self):
        """Test that leaked tasks trigger warnings."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            async with no_task_leaks(action="warn"):
                await leaky_function()

            # One summary warning plus one per-task detail warning.
            assert len(w) == 2
            assert issubclass(w[0].category, ResourceWarning)
            assert "leaked asyncio tasks" in str(w[0].message)

            assert issubclass(w[1].category, ResourceWarning)
            assert "await asyncio.sleep(10) # Long running task" in str(w[1].message)

    async def test_action_raise(self):
        """Test that leaked tasks can raise exceptions."""
        with pytest.raises(TaskLeakError, match="leaked asyncio tasks") as e:
            async with no_task_leaks(action="raise"):
                await leaky_function()

        assert "leaked asyncio tasks" in str(e.value)
        assert len(e.value.leaked_tasks) == 1
        assert e.value.leaked_tasks[0].name is not None
        assert e.value.leaked_tasks[0].state == "running"
        assert e.value.leaked_tasks[0].current_stack is not None
        assert e.value.leaked_tasks[0].creation_stack is None # non debug mode
        assert e.value.leaked_tasks[0].task_ref is not None
        assert "await asyncio.sleep(10) # Long running task" in str(e.value)

    async def test_action_cancel(self):
        """Test that leaked tasks can be cancelled."""
        leaked_task: Optional[asyncio.Task] = None

        async def capture_leaked_task():
            nonlocal leaked_task
            leaked_task = asyncio.create_task(asyncio.sleep(10))
            await asyncio.sleep(0.1)

        async with no_task_leaks(action="cancel"):
            await capture_leaked_task()

        # Give time for cancellation to take effect
        await asyncio.sleep(0.01)

        assert leaked_task is not None
        assert leaked_task.cancelled()

    async def test_action_log(self):
        """Test that LOG action uses the logger."""
        mock_logger = Mock()

        async with no_task_leaks(action="log", logger=mock_logger):
            await leaky_function()

        assert mock_logger.warning.call_count == 2
        assert "leaked asyncio tasks" in mock_logger.warning.call_args_list[0][0][0]
        assert (
            "await asyncio.sleep(10) # Long running task"
            in mock_logger.warning.call_args_list[1][0][0]
        )

    async def test_name_filter_exact_match(self):
        """Test filtering tasks by exact name match."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            async with no_task_leaks(action="warn", name_filter="target-task"):
                # Create task with matching name
                await create_named_task("target-task")

                # Create task with different name - should be ignored
                await create_named_task("other-task")

            assert len(w) == 2
            assert "1 leaked asyncio tasks" in str(w[0].message)
            assert "target-task" in str(w[1].message)
            assert "other-task" not in str(w[0].message)

    async def test_name_filter_regex(self):
        """Test filtering tasks using regex patterns."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            some_id = str(uuid.uuid4())
            pattern = re.compile(rf"{some_id}-\d+")
            async with no_task_leaks(action="warn", name_filter=pattern):
                for i in range(1, 10):
                    await create_named_task(f"{some_id}-{i}")

                await create_named_task("manager-1")

            # 1 summary warning + 9 per-task warnings.
            assert len(w) == 10
            all_messages = "\n".join([str(warning.message) for warning in w])
            for i in range(1, 10):
                assert f"{some_id}-{i}" in all_messages
            assert "manager-1" not in all_messages

    async def test_completed_tasks_not_detected(self):
        """Test that completed tasks are not considered leaks."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            async with no_task_leaks():
                # Create and complete a task
                task = asyncio.create_task(asyncio.sleep(0.001))
                await task # Wait for completion

            assert len(w) == 0

    async def test_multiple_leaks(self):
        """Test detection of multiple leaked tasks."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            async with no_task_leaks(action="warn"):
                # Create multiple leaks
                asyncio.create_task(asyncio.sleep(10))
                asyncio.create_task(asyncio.sleep(10))
                asyncio.create_task(asyncio.sleep(10))
                await asyncio.sleep(0.1)

            assert len(w) == 4
            all_messages = "\n".join([str(warning.message) for warning in w])
            assert "3 leaked asyncio tasks" in all_messages

    async def test_invalid_regex_fallback(self):
        """Test that invalid regex falls back to string matching."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")

            # Use invalid regex pattern - should fall back to exact string match
            async with no_task_leaks(action="warn", name_filter="[invalid"):
                await create_named_task("[invalid") # Exact match
                await create_named_task("other-task")

            assert len(w) == 2
            all_messages = "\n".join([str(warning.message) for warning in w])
            assert "[invalid" in all_messages
            assert "other-task" not in all_messages

    async def test_enable_creation_tracking_with_exception(self):
        # Detection must still report leaks when the body raises.
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            with pytest.raises(Exception, match="test"):
                async with no_task_leaks(action="warn", enable_creation_tracking=True):
                    await function_with_exception()

        all_warnings = "\n".join([str(warning.message) for warning in w])
        assert len(w) == 2
        assert "asyncio.create_task(asyncio.sleep(10))" in all_warnings
        assert "test_task_leaks.py" in all_warnings

    async def test_enable_creation_tracking(self):
        """Test that enable_creation_tracking works."""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            async with no_task_leaks(action="warn", enable_creation_tracking=True):
                await leaky_function()

        assert len(w) == 2
        assert "Creation Stack" in str(w[1].message)
        assert "test_task_leaks.py" in str(w[1].message) # this file name
        assert "asyncio.create_task(background_task())" in str(w[1].message)


class TestNoTaskLeaksDecorator:
    """Test no_task_leaks when used as decorator."""

    async def test_no_leaks(self):
        """Test decorator works when no leaks occur."""

        @no_task_leaks()
        async def clean_function():
            await well_behaved_function()

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            await clean_function()
            assert len(w) == 0

    async def test_action_warn(self):
        """Test decorator detects leaks."""

        @no_task_leaks(action="warn")
        async def leaky_decorated():
            await leaky_function()

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            await leaky_decorated()
            assert len(w) == 2
            assert "leaked asyncio tasks" in str(w[0].message)
            assert "await asyncio.sleep(10) # Long running task" in str(w[1].message)

    async def test_action_raise(self):
        """Test that decorator raises exceptions."""

        @no_task_leaks(action="raise", enable_creation_tracking=True)
        async def leaky_decorated():
            await leaky_function()

        with pytest.raises(TaskLeakError, match="leaked asyncio tasks") as e:
            await leaky_decorated()

        assert "leaked asyncio tasks" in str(e.value)
        assert len(e.value.leaked_tasks) == 1
        assert e.value.leaked_tasks[0].name is not None
        assert e.value.leaked_tasks[0].state == "running"
        assert e.value.leaked_tasks[0].current_stack is not None
        assert e.value.leaked_tasks[0].creation_stack is not None
        assert e.value.leaked_tasks[0].task_ref is not None
        assert "await asyncio.sleep(10) # Long running task" in str(e.value)
        assert "Creation Stack" in str(e.value)
        assert "test_task_leaks.py" in str(e.value)
        assert "asyncio.create_task(background_task())" in str(e.value)

    async def test_decorator_with_name_filter(self):
        """Test decorator with name filtering."""

        @no_task_leaks(action="warn", name_filter="filtered-task")
        async def function_with_filtered_leak():
            await create_named_task("filtered-task")
            await create_named_task("unfiltered-task")

        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            await function_with_filtered_leak()

        assert len(w) == 2
        all_messages = "\n".join([str(warning.message) for warning in w])
        assert "filtered-task" in all_messages
        assert "unfiltered-task" not in all_messages


@pytest_asyncio.fixture(autouse=True)
async def cleanup_leaked_tasks():
    """Cleanup any tasks that might have leaked during testing."""
    yield

    # Cancel any remaining tasks to avoid interfering with other tests
    tasks = set([t for t in asyncio.all_tasks() if not t.done()])
    current_task = None
    try:
        current_task = asyncio.current_task()
    except RuntimeError:
        pass

    tasks.discard(current_task)
    for task in tasks:
        if not task.done():
            task.cancel()

    if tasks:
        await asyncio.gather(*tasks, return_exceptions=True)
--------------------------------------------------------------------------------
/src/pyleak/tasks.py:
--------------------------------------------------------------------------------
"""
AsyncIO Task Leak Detector

A Python library for detecting and handling leaked asyncio tasks,
inspired by Go's goleak package.
"""

import asyncio
import logging
import re
import traceback
import warnings
from dataclasses import dataclass
from enum import Enum
from functools import wraps
from typing import List, Optional, Set, Union

from pyleak.base import (
    LeakAction,
    LeakError,
    _BaseLeakContextManager,
    _BaseLeakDetector,
)
from pyleak.utils import setup_logger

_logger = setup_logger(__name__)


class TaskState(str, Enum):
    """State of an asyncio task."""

    RUNNING = "running"
    CANCELLED = "cancelled"
    DONE = "done"


@dataclass
class LeakedTask:
    """Information about a leaked asyncio task."""

    task_id: int
    name: str
    state: TaskState
    current_stack: Optional[List[traceback.FrameSummary]] = None
    creation_stack: Optional[List[traceback.FrameSummary]] = None
    task_ref: Optional[asyncio.Task] = None

    @classmethod
    def from_task(cls, task: asyncio.Task) -> "LeakedTask":
        """Create a LeakedTask object from an asyncio.Task."""
        # Order matters: a cancelled task is also done(), so check cancelled first.
        if task.cancelled():
            state = TaskState.CANCELLED
        elif task.done():
            state = TaskState.DONE
        else:
            state = TaskState.RUNNING

        return cls(
            task_id=id(task),
            name=task.get_name(),
            state=state,
            current_stack=_TaskStackCapture.capture_current_stack(task),
            creation_stack=_TaskStackCapture.get_task_creation_stack(task),
            task_ref=task,
        )

    def format_current_stack(self) -> str:
        """Format the current stack trace as a string."""
        if not self.current_stack:
            return "No current stack available"

        return "".join(traceback.format_list(self.current_stack))

    def format_creation_stack(self) -> str:
        """Format the creation stack trace as a string."""
        if not self.creation_stack:
            return "No creation stack available"

        return "".join(traceback.format_list(self.creation_stack))

    def __str__(self) -> str:
        """String representation of the leaked task."""
        lines = [
            f"Leaked Task: {self.name}",
            f" ID: {self.task_id}",
            f" State: {self.state}",
        ]

        if self.current_stack:
            lines.extend(
                [
                    " Current Stack:",
                    " "
                    + "\n ".join(self.format_current_stack().strip().split("\n")),
                ]
            )

        if self.creation_stack:
            lines.extend(
                [
                    " Creation Stack:",
                    " "
                    + "\n ".join(self.format_creation_stack().strip().split("\n")),
                ]
            )

        return "\n".join(lines)


class TaskLeakError(LeakError):
    """Raised when task leaks are detected and action is set to RAISE."""

    def __init__(self, message: str, leaked_tasks: List[LeakedTask]):
        super().__init__(message)
        self.leaked_tasks = leaked_tasks
        self.task_count = len(leaked_tasks)

    def get_stack_summary(self) -> str:
        """Get a summary of all stack traces."""
        return "\n".join(str(task) for task in self.leaked_tasks)

    def __str__(self) -> str:
        base_msg = super().__str__()
        return f"{base_msg}\n\n{self.get_stack_summary()}"


class _TaskStackCapture:
    """Utility class for capturing task stack traces."""

    @staticmethod
    def capture_current_stack(
        task: asyncio.Task,
    ) -> Optional[List[traceback.FrameSummary]]:
        """Capture current stack of a task using task.get_stack()."""
        try:
            frames = task.get_stack()
            if not frames:
                return None

            stack_summary = []
            for frame in frames:
                try:
                    stack_summary.append(
                        traceback.FrameSummary(
                            filename=frame.f_code.co_filename,
                            lineno=frame.f_lineno,
                            name=frame.f_code.co_name,
                        )
                    )
                except Exception:
                    # Best-effort: skip frames we cannot summarize.
                    continue

            return stack_summary

        except Exception:
            # Task may complete/transition while we inspect it; report "no stack".
            return None

    @staticmethod
    def capture_creation_stack() -> List[traceback.FrameSummary]:
        """Capture current stack as creation stack (call this when creating tasks)."""
        # Drop the last two frames (this helper and its caller's wrapper).
        return traceback.extract_stack()[:-2]

    @staticmethod
    def get_task_creation_stack(
        task: asyncio.Task,
    ) -> Optional[List[traceback.FrameSummary]]:
        """Get the creation stack for a task if available."""
        try:
            # NOTE(review): `_source_traceback` is a private asyncio attribute,
            # populated only in debug mode / via custom tracking - may change
            # across Python versions; guarded accordingly.
            if hasattr(task, "_source_traceback") and task._source_traceback:
                return task._source_traceback
        except Exception:
            pass

        return None


class _TaskLeakDetector(_BaseLeakDetector):
    """Core task leak detection functionality with stack trace support."""

    def _get_resource_name(self, task: asyncio.Task) -> str:
        """Get task name, handling both named and unnamed tasks."""
        name = getattr(task, "_name", None) or task.get_name()
        # NOTE(review): the fallback f"" yields an empty string; it looks like
        # a placeholder (e.g. an <unnamed-task-...> marker) was lost - confirm
        # intended value. get_name() never returns empty in practice, so this
        # branch is effectively unreachable.
        return name if name else f""

    def get_running_resources(self, exclude_current: bool = True) -> Set[asyncio.Task]:
        """Get all currently running tasks."""
        tasks = asyncio.all_tasks()
        if exclude_current:
            try:
                current = asyncio.current_task()
                tasks.discard(current)
            except RuntimeError:
                # No current task (not in async context)
                pass

        return tasks

    def _is_resource_active(self, task: asyncio.Task) -> bool:
        """Check if a task is still active/running."""
        return not task.done()

    def handle_leaked_resources(self, leaked_resources: List[asyncio.Task]) -> None:
        """Handle leaked resources with detailed stack information."""
        if not leaked_resources:
            return

        task_names = [self._get_resource_name(task) for task in leaked_resources]
        leaked_task_infos = [LeakedTask.from_task(task) for task in leaked_resources]
        message = f"Detected {len(leaked_resources)} leaked {self.resource_type}"
        if
self.action == "warn": 211 | warnings.warn(message, ResourceWarning, stacklevel=3) 212 | for task_info in leaked_task_infos: 213 | warnings.warn(str(task_info), ResourceWarning, stacklevel=4) 214 | elif self.action == "log": 215 | self.logger.warning(message) 216 | for task_info in leaked_task_infos: 217 | self.logger.warning(str(task_info)) 218 | elif self.action == "cancel": 219 | self._handle_cancel_action(leaked_resources, task_names) 220 | elif self.action == "raise": 221 | raise TaskLeakError(message, leaked_task_infos) 222 | 223 | @property 224 | def leak_error_class(self) -> type: 225 | """Get the appropriate exception class for task leaks.""" 226 | return TaskLeakError 227 | 228 | @property 229 | def resource_type(self) -> str: 230 | """Get the human-readable name for tasks.""" 231 | return "asyncio tasks" 232 | 233 | def _handle_cancel_action( 234 | self, leaked_tasks: List[asyncio.Task], task_names: List[str] 235 | ) -> None: 236 | """Handle the cancel action for leaked tasks.""" 237 | self.logger.debug(f"Cancelling {len(leaked_tasks)} leaked tasks: {task_names}") 238 | for task in leaked_tasks: 239 | if not task.done(): 240 | task.cancel() 241 | 242 | 243 | class _AsyncTaskLeakContextManager(_BaseLeakContextManager): 244 | """Async context manager that can also be used as a decorator.""" 245 | 246 | def __init__( 247 | self, action, name_filter=None, logger=None, enable_creation_tracking=False 248 | ): 249 | super().__init__(action, name_filter, logger) 250 | self.enable_creation_tracking = enable_creation_tracking 251 | self._original_loop_params = { 252 | "debug": False, 253 | "slow_callback_duration": 0.1, 254 | } 255 | 256 | def _create_detector(self) -> _TaskLeakDetector: 257 | """Create a task leak detector instance.""" 258 | return _TaskLeakDetector(self.action, self.name_filter, self.logger) 259 | 260 | def enable_task_creation_tracking(self): 261 | """Enable automatic tracking of task creation stacks.""" 262 | loop = asyncio.get_running_loop() 
263 | self._original_loop_params["debug"] = loop.get_debug() 264 | self._original_loop_params["slow_callback_duration"] = ( 265 | loop.slow_callback_duration 266 | ) 267 | loop.set_debug(True) 268 | loop.slow_callback_duration = 10 269 | self.logger.debug("Debug mode enabled for task creation tracking") 270 | 271 | def disable_task_creation_tracking(self): 272 | """Disable task creation tracking.""" 273 | loop = asyncio.get_running_loop() 274 | loop.set_debug(self._original_loop_params["debug"]) 275 | loop.slow_callback_duration = self._original_loop_params[ 276 | "slow_callback_duration" 277 | ] 278 | self.logger.debug("Debug mode disabled for task creation tracking") 279 | 280 | async def _wait_for_completion(self) -> None: 281 | """Wait for tasks to complete naturally.""" 282 | await asyncio.sleep(0.01) 283 | 284 | async def __aenter__(self): 285 | if self.enable_creation_tracking: 286 | self.enable_task_creation_tracking() 287 | return self._enter_context() 288 | 289 | async def __aexit__(self, exc_type, exc_val, exc_tb): 290 | await self._wait_for_completion() 291 | leaked_resources = self.detector.get_leaked_resources(self.initial_resources) 292 | self.logger.debug(f"Detected {len(leaked_resources)} leaked asyncio tasks") 293 | self.detector.handle_leaked_resources(leaked_resources) 294 | if self.enable_creation_tracking: 295 | self.disable_task_creation_tracking() 296 | 297 | def __enter__(self): 298 | raise RuntimeError( 299 | "no_task_leaks cannot be used as a sync context manager, please use async with" 300 | ) 301 | 302 | def __call__(self, func): 303 | """Allow this context manager to be used as a decorator.""" 304 | 305 | @wraps(func) 306 | async def wrapper(*args, **kwargs): 307 | async with self: 308 | return await func(*args, **kwargs) 309 | 310 | return wrapper 311 | 312 | 313 | def no_task_leaks( 314 | action: Union[LeakAction, str] = LeakAction.WARN, 315 | name_filter: Optional[Union[str, re.Pattern]] = None, 316 | logger: 
Optional[logging.Logger] = _logger, 317 | *, 318 | enable_creation_tracking: bool = False, 319 | ): 320 | """ 321 | Context manager/decorator that detects task leaks within its scope. 322 | 323 | Args: 324 | action: Action to take when leaks are detected 325 | name_filter: Optional filter for task names (string or regex) 326 | logger: Optional logger instance 327 | enable_creation_tracking: Whether to enable automatic task creation tracking 328 | 329 | Example: 330 | # As context manager 331 | async with no_task_leaks(): 332 | await some_async_function() 333 | 334 | # As decorator 335 | @no_task_leaks 336 | async def my_function(): 337 | await some_async_function() 338 | 339 | # Handle the exception with full stack traces 340 | try: 341 | async with no_task_leaks(action=LeakAction.RAISE): 342 | # Code that leaks tasks 343 | pass 344 | except TaskLeakError as e: 345 | print(f"Found {e.task_count} leaked tasks") 346 | # Cancel leaked tasks 347 | for task_info in e.leaked_tasks: 348 | if task_info.task_ref and not task_info.task_ref.done(): 349 | task_info.task_ref.cancel() 350 | 351 | """ 352 | return _AsyncTaskLeakContextManager( 353 | action, name_filter, logger, enable_creation_tracking 354 | ) 355 | -------------------------------------------------------------------------------- /tests/test_thread_leaks.py: -------------------------------------------------------------------------------- 1 | import asyncio 2 | import re 3 | import threading 4 | import time 5 | import uuid 6 | import warnings 7 | from typing import Optional 8 | from unittest.mock import Mock 9 | 10 | import pytest 11 | 12 | from pyleak import ThreadLeakError, no_thread_leaks 13 | 14 | 15 | def leaky_thread_function(): 16 | """Function that creates a thread but doesn't join it.""" 17 | 18 | def background_work(): 19 | time.sleep(10) # Long running work 20 | 21 | # Create thread but don't join it - this will leak! 
22 | thread = threading.Thread(target=background_work) 23 | thread.start() 24 | time.sleep(0.1) # Do some other work 25 | 26 | 27 | def well_behaved_thread_function(): 28 | """Function that properly manages its threads.""" 29 | 30 | def background_work(): 31 | time.sleep(0.1) 32 | 33 | thread = threading.Thread(target=background_work) 34 | thread.start() 35 | thread.join() # Properly wait for thread 36 | 37 | 38 | def create_named_thread(name: str): 39 | """Creates a named thread that will leak.""" 40 | 41 | def background_work(): 42 | time.sleep(10) 43 | 44 | thread = threading.Thread(target=background_work, name=name) 45 | thread.start() 46 | time.sleep(0.1) 47 | 48 | 49 | def create_daemon_thread(name: str = "daemon-thread"): 50 | """Creates a daemon thread that will be excluded by default.""" 51 | 52 | def background_work(): 53 | time.sleep(10) 54 | 55 | thread = threading.Thread(target=background_work, name=name) 56 | thread.daemon = True 57 | thread.start() 58 | time.sleep(0.1) 59 | 60 | 61 | def regular_sync_function(): 62 | time.sleep(1) 63 | return "success" 64 | 65 | 66 | class TestNoThreadLeaksContextManager: 67 | """Test no_thread_leaks when used as context manager.""" 68 | 69 | def test_no_leaks_detected(self): 70 | """Test that no warnings are issued when no threads leak.""" 71 | with warnings.catch_warnings(record=True) as w: 72 | warnings.simplefilter("always") 73 | 74 | with no_thread_leaks(): 75 | well_behaved_thread_function() 76 | 77 | assert len(w) == 0 78 | 79 | def test_leak_detection_with_warning(self): 80 | """Test that leaked threads trigger warnings.""" 81 | with warnings.catch_warnings(record=True) as w: 82 | warnings.simplefilter("always") 83 | 84 | with no_thread_leaks(action="warn"): 85 | leaky_thread_function() 86 | 87 | assert len(w) == 1 88 | assert issubclass(w[0].category, ResourceWarning) 89 | assert "leaked threads" in str(w[0].message) 90 | 91 | def test_leak_detection_with_exception(self): 92 | """Test that leaked threads can 
raise exceptions.""" 93 | with pytest.raises(ThreadLeakError, match="leaked threads"): 94 | with no_thread_leaks(action="raise"): 95 | leaky_thread_function() 96 | 97 | def test_leak_detection_with_cancel_warning(self): 98 | """Test that CANCEL action warns about inability to stop threads.""" 99 | leaked_thread: Optional[threading.Thread] = None 100 | 101 | def capture_leaked_thread(): 102 | nonlocal leaked_thread 103 | 104 | def background_work(): 105 | time.sleep(10) 106 | 107 | leaked_thread = threading.Thread(target=background_work) 108 | leaked_thread.start() 109 | time.sleep(0.1) 110 | 111 | mock_logger = Mock() 112 | with no_thread_leaks(action="cancel", logger=mock_logger): 113 | capture_leaked_thread() 114 | 115 | # Should warn that threads can't be force-stopped 116 | mock_logger.warning.assert_called_once() 117 | args = mock_logger.warning.call_args[0] 118 | assert "Cannot force-stop" in args[0] 119 | assert "leaked threads" in args[0] 120 | 121 | def test_logging_action(self): 122 | """Test that LOG action uses the logger.""" 123 | mock_logger = Mock() 124 | 125 | with no_thread_leaks(action="log", logger=mock_logger): 126 | leaky_thread_function() 127 | 128 | mock_logger.warning.assert_called_once() 129 | args = mock_logger.warning.call_args[0] 130 | assert "leaked threads" in args[0] 131 | 132 | def test_name_filter_exact_match(self): 133 | """Test filtering threads by exact name match.""" 134 | with warnings.catch_warnings(record=True) as w: 135 | warnings.simplefilter("always") 136 | 137 | with no_thread_leaks(action="warn", name_filter="target-thread"): 138 | # Create thread with matching name 139 | create_named_thread("target-thread") 140 | 141 | # Create thread with different name - should be ignored 142 | create_named_thread("other-thread") 143 | 144 | # Should only warn about the target thread 145 | assert len(w) == 1 146 | message = str(w[0].message) 147 | assert "target-thread" in message 148 | assert "other-thread" not in message 149 | 150 | 
def test_name_filter_regex(self): 151 | """Test filtering threads using regex patterns.""" 152 | with warnings.catch_warnings(record=True) as w: 153 | warnings.simplefilter("always") 154 | 155 | some_id = str(uuid.uuid4())[:8] # Use shorter ID for thread names 156 | pattern = re.compile(rf"{some_id}-\d+") 157 | with no_thread_leaks(action="warn", name_filter=pattern): 158 | # Create matching threads 159 | for i in range(1, 4): # Fewer threads to avoid resource exhaustion 160 | create_named_thread(f"{some_id}-{i}") 161 | 162 | # Create non-matching thread 163 | create_named_thread("manager-1") 164 | 165 | # Should warn about worker threads but not manager 166 | assert len(w) == 1 167 | message = str(w[0].message) 168 | for i in range(1, 4): 169 | assert f"{some_id}-{i}" in message 170 | assert "manager-1" not in message 171 | 172 | def test_exclude_daemon_threads_default(self): 173 | """Test that daemon threads are excluded by default.""" 174 | with warnings.catch_warnings(record=True) as w: 175 | warnings.simplefilter("always") 176 | 177 | with no_thread_leaks(action="warn"): 178 | # Create daemon thread (should be ignored) 179 | create_daemon_thread() 180 | 181 | # Should not warn about daemon threads 182 | assert len(w) == 0 183 | 184 | def test_include_daemon_threads(self): 185 | """Test that daemon threads can be included in detection.""" 186 | with warnings.catch_warnings(record=True) as w: 187 | warnings.simplefilter("always") 188 | 189 | with no_thread_leaks(action="warn", exclude_daemon=False): 190 | # Create daemon thread (should be detected) 191 | create_daemon_thread("test-daemon") 192 | 193 | # Should warn about daemon threads when not excluded 194 | assert len(w) == 1 195 | message = str(w[0].message) 196 | assert "leaked threads" in message 197 | assert "test-daemon" in message 198 | 199 | def test_grace_period_allows_completion(self): 200 | """Test that grace period allows threads to finish naturally.""" 201 | 202 | def quick_work(): 203 | 
time.sleep(0.05) # Very short work 204 | 205 | with warnings.catch_warnings(record=True) as w: 206 | warnings.simplefilter("always") 207 | 208 | with no_thread_leaks(action="warn", grace_period=0.2): 209 | # Start thread that will complete during grace period 210 | thread = threading.Thread(target=quick_work) 211 | thread.start() 212 | # Don't join - let grace period handle it 213 | 214 | # Should not detect leak since thread completed during grace period 215 | assert len(w) == 0 216 | 217 | def test_grace_period_insufficient(self): 218 | """Test that insufficient grace period still detects leaks.""" 219 | 220 | def medium_work(): 221 | time.sleep(0.3) # Work longer than grace period 222 | 223 | with warnings.catch_warnings(record=True) as w: 224 | warnings.simplefilter("always") 225 | 226 | with no_thread_leaks(action="warn", grace_period=0.05): 227 | # Start thread that won't complete during grace period 228 | thread = threading.Thread(target=medium_work) 229 | thread.start() 230 | 231 | # Should detect leak since thread didn't complete in time 232 | assert len(w) == 1 233 | assert "leaked threads" in str(w[0].message) 234 | 235 | def test_multiple_leaks(self): 236 | """Test detection of multiple leaked threads.""" 237 | with warnings.catch_warnings(record=True) as w: 238 | warnings.simplefilter("always") 239 | 240 | with no_thread_leaks(action="warn"): 241 | # Create multiple leaks 242 | for i in range(3): 243 | 244 | def work(): 245 | time.sleep(10) 246 | 247 | thread = threading.Thread(target=work) 248 | thread.start() 249 | time.sleep(0.1) 250 | 251 | assert len(w) == 1 252 | message = str(w[0].message) 253 | assert "3 leaked threads" in message 254 | 255 | def test_completed_threads_not_detected(self): 256 | """Test that completed threads are not considered leaks.""" 257 | with warnings.catch_warnings(record=True) as w: 258 | warnings.simplefilter("always") 259 | 260 | with no_thread_leaks(): 261 | # Create and complete a thread 262 | def quick_work(): 263 | 
time.sleep(0.01) 264 | 265 | thread = threading.Thread(target=quick_work) 266 | thread.start() 267 | thread.join() # Wait for completion 268 | 269 | assert len(w) == 0 270 | 271 | 272 | class TestNoThreadLeaksDecorator: 273 | """Test no_thread_leaks when used as decorator.""" 274 | 275 | def test_decorator_no_leaks(self): 276 | """Test decorator works when no leaks occur.""" 277 | 278 | @no_thread_leaks() 279 | def clean_function(): 280 | well_behaved_thread_function() 281 | 282 | with warnings.catch_warnings(record=True) as w: 283 | warnings.simplefilter("always") 284 | clean_function() 285 | assert len(w) == 0 286 | 287 | def test_decorator_with_leaks(self): 288 | """Test decorator detects leaks.""" 289 | 290 | @no_thread_leaks(action="warn") 291 | def leaky_decorated(): 292 | leaky_thread_function() 293 | 294 | with warnings.catch_warnings(record=True) as w: 295 | warnings.simplefilter("always") 296 | leaky_decorated() 297 | assert len(w) == 1 298 | assert "leaked threads" in str(w[0].message) 299 | 300 | def test_decorator_with_return_value(self): 301 | """Test that decorator preserves return values.""" 302 | 303 | @no_thread_leaks() 304 | def function_with_return(): 305 | well_behaved_thread_function() 306 | return "success" 307 | 308 | result = function_with_return() 309 | assert result == "success" 310 | 311 | def test_decorator_with_arguments(self): 312 | """Test that decorator preserves function arguments.""" 313 | 314 | @no_thread_leaks() 315 | def function_with_args(x, y, z=None): 316 | well_behaved_thread_function() 317 | return x + y + (z or 0) 318 | 319 | result = function_with_args(1, 2, z=3) 320 | assert result == 6 321 | 322 | def test_decorator_with_exception_handling(self): 323 | """Test that decorator properly handles exceptions from wrapped function.""" 324 | 325 | @no_thread_leaks() 326 | def function_that_raises(): 327 | well_behaved_thread_function() 328 | raise ValueError("test error") 329 | 330 | with pytest.raises(ValueError, match="test 
error"): 331 | function_that_raises() 332 | 333 | def test_decorator_with_name_filter(self): 334 | """Test decorator with name filtering.""" 335 | 336 | @no_thread_leaks(action="warn", name_filter="filtered-thread") 337 | def function_with_filtered_leak(): 338 | create_named_thread("filtered-thread") 339 | create_named_thread("unfiltered-thread") 340 | 341 | with warnings.catch_warnings(record=True) as w: 342 | warnings.simplefilter("always") 343 | function_with_filtered_leak() 344 | 345 | assert len(w) == 1 346 | message = str(w[0].message) 347 | assert "filtered-thread" in message 348 | assert "unfiltered-thread" not in message 349 | 350 | @pytest.mark.asyncio 351 | async def test_should_not_detect_asyncio_threads(self): 352 | """Test that asyncio threads created using `asyncio.to_thread` are not detected.""" 353 | 354 | @no_thread_leaks(action="raise") 355 | async def async_function(): 356 | return_value = await asyncio.to_thread(regular_sync_function) 357 | assert return_value == "success" 358 | 359 | await async_function() 360 | 361 | 362 | class TestEdgeCases: 363 | """Test edge cases and error conditions.""" 364 | 365 | def test_empty_name_filter(self): 366 | """Test behavior with empty name filter.""" 367 | with warnings.catch_warnings(record=True) as w: 368 | warnings.simplefilter("always") 369 | 370 | with no_thread_leaks(action="warn", name_filter=""): 371 | leaky_thread_function() 372 | 373 | # Empty string should not match anything 374 | assert len(w) == 0 375 | 376 | def test_invalid_regex_fallback(self): 377 | """Test that invalid regex falls back to string matching.""" 378 | with warnings.catch_warnings(record=True) as w: 379 | warnings.simplefilter("always") 380 | 381 | # Use invalid regex pattern - should fall back to exact string match 382 | with no_thread_leaks(action="warn", name_filter="[invalid"): 383 | create_named_thread("[invalid") # Exact match 384 | create_named_thread("other-thread") 385 | 386 | assert len(w) == 1 387 | message = 
str(w[0].message) 388 | assert "[invalid" in message 389 | assert "other-thread" not in message 390 | 391 | def test_unnamed_threads(self): 392 | """Test detection of unnamed threads.""" 393 | with warnings.catch_warnings(record=True) as w: 394 | warnings.simplefilter("always") 395 | 396 | with no_thread_leaks(action="warn"): 397 | # Create unnamed thread 398 | def work(): 399 | time.sleep(10) 400 | 401 | thread = threading.Thread(target=work) 402 | thread.start() 403 | time.sleep(0.1) 404 | 405 | assert len(w) == 1 406 | message = str(w[0].message) 407 | # Should contain some representation of unnamed thread 408 | assert "leaked threads" in message 409 | 410 | def test_thread_completion_race_condition(self): 411 | """Test that threads completing during detection aren't flagged.""" 412 | 413 | def very_quick_work(): 414 | time.sleep(0.001) # Very short work 415 | 416 | with warnings.catch_warnings(record=True) as w: 417 | warnings.simplefilter("always") 418 | 419 | with no_thread_leaks(grace_period=0.1): 420 | # Start thread that should complete during grace period 421 | thread = threading.Thread(target=very_quick_work) 422 | thread.start() 423 | # Give it time to start and complete 424 | time.sleep(0.05) 425 | 426 | # Should not detect leak since thread completed 427 | assert len(w) == 0 428 | 429 | def test_zero_grace_period(self): 430 | """Test behavior with zero grace period.""" 431 | with warnings.catch_warnings(record=True) as w: 432 | warnings.simplefilter("always") 433 | 434 | with no_thread_leaks(action="warn", grace_period=0.0): 435 | leaky_thread_function() 436 | 437 | # Should still detect leaks even with no grace period 438 | assert len(w) == 1 439 | assert "leaked threads" in str(w[0].message) 440 | 441 | def test_main_thread_excluded(self): 442 | """Test that main thread is excluded from detection.""" 443 | main_thread = threading.current_thread() 444 | 445 | with warnings.catch_warnings(record=True) as w: 446 | warnings.simplefilter("always") 447 | 
448 | with no_thread_leaks(action="warn"): 449 | # Main thread should be excluded automatically 450 | pass 451 | 452 | # Should not warn about main thread 453 | assert len(w) == 0 454 | 455 | 456 | @pytest.fixture(autouse=True) 457 | def cleanup_leaked_threads(): 458 | """Cleanup any threads that might have leaked during testing.""" 459 | initial_threads = set(threading.enumerate()) 460 | 461 | yield 462 | 463 | current_threads = set(threading.enumerate()) 464 | new_threads = current_threads - initial_threads 465 | non_daemon_threads = [t for t in new_threads if t.is_alive() and not t.daemon] 466 | if non_daemon_threads: 467 | # Give threads a chance to finish naturally 468 | time.sleep(0.5) 469 | still_running = [t for t in non_daemon_threads if t.is_alive()] 470 | if still_running: 471 | print(f"{len(still_running)} thread(s) still running after test cleanup:") 472 | for thread in still_running: 473 | print(f"Thread {thread.name} is still running") 474 | # thread.join() 475 | -------------------------------------------------------------------------------- /src/pyleak/eventloop.py: -------------------------------------------------------------------------------- 1 | """ 2 | Event Loop Block Detector with Stack Trace Support 3 | 4 | Detect when the asyncio event loop is blocked by synchronous operations 5 | and capture stack traces showing exactly what's blocking. 
6 | """ 7 | 8 | from __future__ import annotations 9 | 10 | import asyncio 11 | import concurrent.futures 12 | import logging 13 | import os 14 | import sys 15 | import threading 16 | import time 17 | import traceback 18 | from dataclasses import dataclass 19 | from typing import Any, Optional, Set 20 | 21 | from pyleak.base import ( 22 | LeakAction, 23 | LeakError, 24 | _BaseLeakContextManager, 25 | _BaseLeakDetector, 26 | ) 27 | from pyleak.utils import CallerContext, find_my_caller, setup_logger 28 | 29 | _logger = setup_logger(__name__) 30 | _this_file_path = os.path.abspath(__file__) 31 | 32 | 33 | @dataclass 34 | class EventLoopBlock: 35 | """Information about an event loop blocking event.""" 36 | 37 | block_id: int 38 | duration: float 39 | threshold: float 40 | timestamp: float 41 | blocking_stack: list[traceback.FrameSummary] | None = None 42 | 43 | def format_blocking_stack(self) -> str: 44 | """Format the blocking stack trace as a string.""" 45 | if not self.blocking_stack: 46 | return "No blocking stack available" 47 | 48 | return "".join(traceback.format_list(self.blocking_stack)) 49 | 50 | def __str__(self) -> str: 51 | """String representation of the blocking event.""" 52 | lines = [ 53 | f"Event Loop Block: block-{self.block_id}", 54 | f" Duration: {self.duration:.3f}s (threshold: {self.threshold:.3f}s)", 55 | f" Timestamp: {self.timestamp:.3f}", 56 | ] 57 | 58 | if self.blocking_stack: 59 | lines.extend( 60 | [ 61 | " Blocking Stack:", 62 | " " 63 | + "\n ".join(self.format_blocking_stack().strip().split("\n")), 64 | ] 65 | ) 66 | 67 | return "\n".join(lines) 68 | 69 | 70 | class EventLoopBlockError(LeakError): 71 | """Raised when event loop blocking is detected and action is set to RAISE.""" 72 | 73 | def __init__(self, message: str, blocking_events: list[EventLoopBlock]): 74 | super().__init__(message) 75 | self.blocking_events = blocking_events 76 | self.block_count = len(blocking_events) 77 | 78 | def get_block_summary(self) -> str: 79 | """Get 
a summary of all blocking events.""" 80 | return "\n".join(str(block) for block in self.blocking_events) 81 | 82 | def __str__(self) -> str: 83 | base_msg = super().__str__() 84 | return f"{base_msg}\n\n{self.get_block_summary()}" 85 | 86 | 87 | class _ThreadWithException(threading.Thread): 88 | """Thread that raises an exception when it finishes.""" 89 | 90 | def __init__(self, *args, **kwargs): 91 | super().__init__(*args, **kwargs) 92 | self.exception = None 93 | 94 | def run(self): 95 | try: 96 | super().run() 97 | except Exception as e: 98 | self.exception = e 99 | 100 | 101 | class _EventLoopBlockDetector(_BaseLeakDetector): 102 | """Core event loop blocking detection functionality with stack trace support.""" 103 | 104 | def __init__( 105 | self, 106 | action: LeakAction = LeakAction.WARN, 107 | logger: Optional[logging.Logger] = _logger, 108 | *, 109 | threshold: float = 0.1, 110 | check_interval: float = 0.01, 111 | caller_context: CallerContext | None = None, 112 | loop: asyncio.AbstractEventLoop | None = None, 113 | ): 114 | super().__init__(action=action, logger=logger) 115 | self.threshold = threshold 116 | self.check_interval = check_interval 117 | self.caller_context = caller_context 118 | self.loop = loop or asyncio.get_running_loop() 119 | 120 | self.monitoring = False 121 | self.threshold_multiplier = 1 122 | self.block_count = 0 123 | self.total_blocked_time = 0.0 124 | self.monitor_thread: Optional[_ThreadWithException] = None 125 | self.main_thread_id = threading.get_ident() 126 | self.detected_blocks: list[EventLoopBlock] = [] 127 | 128 | def _get_resource_name(self, _: Any) -> str: 129 | """Get block description.""" 130 | return "event loop block" 131 | 132 | def get_running_resources(self, exclude_current: bool = True) -> Set[dict]: 133 | """Get current blocks (returns empty set as we track blocks differently).""" 134 | return set() 135 | 136 | def _is_resource_active(self, block_info: dict) -> bool: 137 | """Check if a block is still active 
(always False as blocks are instantaneous).""" 138 | return False 139 | 140 | @property 141 | def leak_error_class(self) -> type: 142 | """Get the appropriate exception class for event loop blocks.""" 143 | return EventLoopBlockError 144 | 145 | @property 146 | def resource_type(self) -> str: 147 | """Get the human-readable name for event loop blocks.""" 148 | return "event loop blocks" 149 | 150 | def _handle_cancel_action( 151 | self, leaked_resources: list[dict], resource_names: list[str] 152 | ) -> None: 153 | """Handle the cancel action for detected blocks (just warn as blocks can't be cancelled).""" 154 | self.logger.warning( 155 | f"Cannot cancel event loop blocks: {resource_names}. " 156 | "Consider using async alternatives to synchronous operations." 157 | ) 158 | 159 | def _capture_main_thread_stack(self) -> list[traceback.FrameSummary] | None: 160 | """Capture the current stack trace of the main thread.""" 161 | try: 162 | if frame := sys._current_frames().get(self.main_thread_id): 163 | stack = traceback.extract_stack(frame) 164 | if self._matches_caller(stack): 165 | return stack 166 | except Exception as e: 167 | self.logger.debug(f"Failed to capture main thread stack: {e}") 168 | 169 | def _matches_caller(self, stack: list[traceback.FrameSummary]) -> bool: 170 | """Filter the stack to only include frames that have the original file in the filename.""" 171 | if not self.caller_context: 172 | return True 173 | 174 | # If the caller is not in the stack, return False 175 | if not any(frame.filename == self.caller_context.filename for frame in stack): 176 | return False 177 | 178 | # this file might also trigger a block, so let's check if the last frame with `caller_context.filename` 179 | # follows any frame with this filename `_this_file_path`. If so, return False. 
180 | last_caller_context_frame_idx = next( 181 | ( 182 | idx 183 | for idx, frame in reversed(list(enumerate(stack))) 184 | if frame.filename == self.caller_context.filename 185 | ), 186 | None, 187 | ) 188 | if last_caller_context_frame_idx is None: 189 | return False 190 | if any( 191 | frame.filename == _this_file_path 192 | for frame in stack[last_caller_context_frame_idx + 1 :] 193 | ): 194 | return False 195 | 196 | return True 197 | 198 | def start_monitoring(self): 199 | """Start monitoring the event loop for blocks.""" 200 | self.monitoring = True 201 | self.monitor_thread = _ThreadWithException( 202 | target=self._monitor_loop, daemon=True 203 | ) 204 | self.monitor_thread.start() 205 | 206 | def stop_monitoring(self): 207 | """Stop monitoring the event loop.""" 208 | self.monitoring = False 209 | if self.monitor_thread: 210 | self.monitor_thread.join(timeout=self.threshold) 211 | if self.monitor_thread.exception: 212 | raise self.monitor_thread.exception 213 | 214 | def _monitor_loop(self): 215 | """Monitor thread that checks event loop responsiveness.""" 216 | while self.monitoring: 217 | start_time = time.time() 218 | future = asyncio.run_coroutine_threadsafe( 219 | self._ping_event_loop(), self.loop 220 | ) 221 | try: 222 | future.result(timeout=self.threshold * self.threshold_multiplier) 223 | response_time = time.time() - start_time 224 | if response_time > self.threshold: 225 | if blocking_stack := self._capture_main_thread_stack(): 226 | self._add_block(response_time, blocking_stack) 227 | 228 | except concurrent.futures.TimeoutError: 229 | response_time = time.time() - start_time 230 | if blocking_stack := self._capture_main_thread_stack(): 231 | self._add_block(response_time, blocking_stack) 232 | 233 | except concurrent.futures.CancelledError: 234 | break 235 | 236 | except Exception as e: 237 | self.logger.error(f"Event loop monitoring error: {e}", exc_info=True) 238 | 239 | time.sleep(self.check_interval) 240 | 241 | async def 
_ping_event_loop(self):
        """Simple coroutine to test event loop responsiveness."""
        return time.perf_counter()

    def _add_block(
        self,
        duration: float,
        blocking_stack: list[traceback.FrameSummary] | None = None,
    ) -> None:
        """Detect and handle a single blocking event, combining consecutive identical blocks."""
        current_time = time.time()
        if self.detected_blocks and self._stacks_are_same(
            self.detected_blocks[-1].blocking_stack, blocking_stack
        ):
            # Same blocking site as the previous event: treat it as one long
            # block and extend it instead of reporting a duplicate.
            last_block = self.detected_blocks[-1]
            last_block.duration += duration
            last_block.timestamp = current_time
            self.total_blocked_time += duration
        else:
            self.block_count += 1
            self.total_blocked_time += duration
            block_info = EventLoopBlock(
                block_id=self.block_count,
                duration=duration,
                threshold=self.threshold,
                timestamp=current_time,
                blocking_stack=blocking_stack,
            )

            self.detected_blocks.append(block_info)
            # Immediate feedback for warn/log/cancel; "raise" defers to
            # handle_detected_blocks at the end of monitoring.
            self._handle_single_block(block_info)

    def _stacks_are_same(
        self,
        stack1: list[traceback.FrameSummary] | None,
        stack2: list[traceback.FrameSummary] | None,
    ) -> bool:
        """Return True when both stacks have identical frames (same filename, lineno, and name)."""
        if stack1 is None and stack2 is None:
            return True
        if stack1 is None or stack2 is None:
            return False
        if len(stack1) != len(stack2):
            return False

        for frame1, frame2 in zip(stack1, stack2):
            if (
                frame1.filename != frame2.filename
                or frame1.lineno != frame2.lineno
                or frame1.name != frame2.name
            ):
                return False
        return True

    def handle_detected_blocks(self) -> None:
        """Handle all detected blocks at the end of monitoring (similar to handle_leaked_resources).

        Applies the configured action to every recorded block. Raises
        ``EventLoopBlockError`` when ``action`` is ``"raise"`` and at least one
        block was detected.
        """
        if not self.detected_blocks:
            return

        message = f"Detected {len(self.detected_blocks)} event loop blocks"
        if self.action == "warn":
            import warnings

            warnings.warn(message, ResourceWarning, stacklevel=3)
            for block_info in self.detected_blocks:
                warnings.warn(str(block_info), ResourceWarning, stacklevel=4)
        elif self.action == "log":
            self.logger.warning(message)
            for block_info in self.detected_blocks:
                self.logger.warning(str(block_info))
        elif self.action == "cancel":
            # Blocks are historical events, not live resources: "cancel" can
            # only warn about them after the fact.
            self.logger.warning(
                f"{message}. Cannot cancel blocking - consider using async alternatives."
            )
            for block_info in self.detected_blocks:
                self.logger.warning(str(block_info))
        elif self.action == "raise":
            raise EventLoopBlockError(message, self.detected_blocks)

    def _handle_single_block(self, block_info: EventLoopBlock) -> None:
        """Handle a single detected block (immediate response)."""
        message = (
            f"Event loop blocked for {block_info.duration:.3f}s "
            f"(threshold: {block_info.threshold:.3f}s)"
        )

        # For immediate response modes, handle right away
        if self.action == "raise":
            # For raise, we accumulate and raise at the end like tasks
            pass  # Will be handled in handle_detected_blocks
        else:
            # For warn/log/cancel, provide immediate feedback
            if self.action == "warn":
                import warnings

                warnings.warn(message, ResourceWarning, stacklevel=5)
            elif self.action == "log":
                self.logger.warning(message)
            elif self.action == "cancel":
                self.logger.warning(
                    f"{message}. Cannot cancel blocking - consider using async alternatives."
341 | ) 342 | 343 | def get_summary(self) -> dict: 344 | """Get summary of all detected blocks.""" 345 | return { 346 | "total_blocks": self.block_count, 347 | "total_blocked_time": self.total_blocked_time, 348 | } 349 | 350 | 351 | class _EventLoopBlockContextManager(_BaseLeakContextManager): 352 | """Context manager that can also be used as a decorator.""" 353 | 354 | def __init__( 355 | self, 356 | action: LeakAction = LeakAction.WARN, 357 | logger: Optional[logging.Logger] = _logger, 358 | *, 359 | threshold: float = 0.1, 360 | check_interval: float = 0.01, 361 | caller_context: CallerContext | None = None, 362 | loop: asyncio.AbstractEventLoop | None = None, 363 | ): 364 | super().__init__(action=action, logger=logger) 365 | self.threshold = threshold 366 | self.check_interval = check_interval 367 | self.caller_context = caller_context 368 | self.loop = loop 369 | 370 | def _create_detector(self) -> _EventLoopBlockDetector: 371 | """Create an event loop block detector instance.""" 372 | return _EventLoopBlockDetector( 373 | action=self.action, 374 | logger=self.logger, 375 | threshold=self.threshold, 376 | check_interval=self.check_interval, 377 | caller_context=self.caller_context, 378 | loop=self.loop, 379 | ) 380 | 381 | def _wait_for_completion(self) -> None: 382 | """Wait for monitoring to complete (stop the monitor thread).""" 383 | pass 384 | 385 | def __enter__(self): 386 | self.detector = self._create_detector() 387 | self.initial_resources = set() # Not used for event loop monitoring 388 | self.logger.debug( 389 | f"Starting event loop block monitoring for {self.caller_context}" 390 | ) 391 | self.detector.start_monitoring() 392 | return self 393 | 394 | def __exit__(self, *args, **kwargs): 395 | self.detector.stop_monitoring() 396 | self.detector.handle_detected_blocks() 397 | summary = self.detector.get_summary() 398 | if summary["total_blocks"] > 0: 399 | self.logger.warning( 400 | f"Event loop monitoring summary: {summary['total_blocks']} 
block(s), " 401 | f"{summary['total_blocked_time']:.2f}s total blocked time" 402 | ) 403 | else: 404 | self.logger.debug("No event loop blocks detected") 405 | 406 | async def __aenter__(self): 407 | return self.__enter__() 408 | 409 | async def __aexit__(self, *args, **kwargs): 410 | self.__exit__(*args, **kwargs) 411 | 412 | def __call__(self, func): 413 | """Allow this context manager to be used as a decorator.""" 414 | import functools 415 | 416 | if not asyncio.iscoroutinefunction(func): 417 | raise ValueError( 418 | "no_event_loop_blocking can only be used with async functions" 419 | ) 420 | 421 | @functools.wraps(func) 422 | async def wrapper(*args, **kwargs): 423 | with self: 424 | return await func(*args, **kwargs) 425 | 426 | return wrapper 427 | 428 | 429 | def no_event_loop_blocking( 430 | action: LeakAction = LeakAction.WARN, 431 | logger: Optional[logging.Logger] = _logger, 432 | *, 433 | threshold: float = 0.2, 434 | check_interval: float = 0.05, 435 | caller_context: CallerContext | None = None, 436 | ): 437 | """ 438 | Context manager/decorator that detects event loop blocking within its scope. 

    Args:
        action: Action to take when blocking is detected
        logger: Optional logger instance
        threshold: Minimum blocking duration to report (seconds)
        check_interval: How often to check for blocks (seconds)
        caller_context: Caller location used to attribute blocking stacks;
            auto-detected via find_my_caller() when None

    Example:
        # Basic usage
        async def main():
            with no_event_loop_blocking(threshold=0.05):
                time.sleep(0.1)  # This will be detected with stack trace

        # Handle blocking with detailed stack information
        try:
            with no_event_loop_blocking(action="raise"):
                requests.get("https://httpbin.org/delay/1")  # Synchronous HTTP call
        except EventLoopBlockError as e:
            print(e)  # includes per-block durations and blocking stacks

        # As decorator
        @no_event_loop_blocking(action="raise")
        async def my_async_function():
            requests.get("https://example.com")  # Synchronous HTTP call
    """

    if caller_context is None:
        caller_context = find_my_caller()

    return _EventLoopBlockContextManager(
        action=action,
        logger=logger,
        threshold=threshold,
        check_interval=check_interval,
        caller_context=caller_context,
    )
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# pyleak

PyPI
PyPI Downloads

Detect leaked asyncio tasks, threads, and event loop blocking in Python. Inspired by Go's [goleak](https://github.com/uber-go/goleak).
7 | 8 | ## Installation 9 | 10 | ```bash 11 | pip install pyleak 12 | ``` 13 | 14 | ## Quick Start 15 | 16 | ```python 17 | import asyncio 18 | from pyleak import no_task_leaks, no_thread_leaks, no_event_loop_blocking 19 | 20 | # Detect leaked asyncio tasks 21 | async def main(): 22 | async with no_task_leaks(): 23 | asyncio.create_task(asyncio.sleep(10)) # This will be detected 24 | await asyncio.sleep(0.1) 25 | 26 | # Detect leaked threads 27 | def sync_main(): 28 | with no_thread_leaks(): 29 | threading.Thread(target=lambda: time.sleep(10)).start() # This will be detected 30 | 31 | # Detect event loop blocking 32 | async def async_main(): 33 | with no_event_loop_blocking(): 34 | time.sleep(0.5) # This will be detected 35 | ``` 36 | 37 | ## Usage 38 | 39 | ### Context Managers 40 | 41 | All detectors can be used as context managers: 42 | 43 | ```python 44 | # AsyncIO tasks (async context) 45 | async with no_task_leaks(): 46 | # Your async code here 47 | pass 48 | 49 | # Threads (sync context) 50 | with no_thread_leaks(): 51 | # Your threaded code here 52 | pass 53 | 54 | # Event loop blocking (async context only) 55 | async def main(): 56 | with no_event_loop_blocking(): 57 | # Your potentially blocking code here 58 | pass 59 | ``` 60 | 61 | ### Decorators 62 | 63 | All detectors can also be used as decorators: 64 | 65 | ```python 66 | @no_task_leaks() 67 | async def my_async_function(): 68 | # Any leaked tasks will be detected 69 | pass 70 | 71 | @no_thread_leaks() 72 | def my_threaded_function(): 73 | # Any leaked threads will be detected 74 | pass 75 | 76 | @no_event_loop_blocking() 77 | async def my_potentially_blocking_function(): 78 | # Any event loop blocking will be detected 79 | pass 80 | ``` 81 | 82 | ### Get stack trace 83 | 84 | #### From leaked asyncio tasks 85 | 86 | When using `no_task_leaks`, you get detailed stack trace information showing exactly where leaked tasks are executing and where they were created. 
87 | 88 | 89 | ```python 90 | import asyncio 91 | from pyleak import TaskLeakError, no_task_leaks 92 | 93 | async def leaky_function(): 94 | async def background_task(): 95 | print("background task started") 96 | await asyncio.sleep(10) 97 | 98 | print("creating a long running task") 99 | asyncio.create_task(background_task()) 100 | 101 | async def main(): 102 | try: 103 | async with no_task_leaks(action="raise"): 104 | await leaky_function() 105 | except TaskLeakError as e: 106 | print(e) 107 | 108 | if __name__ == "__main__": 109 | asyncio.run(main()) 110 | ``` 111 | 112 | Output: 113 | 114 | ``` 115 | creating a long running task 116 | background task started 117 | Detected 1 leaked asyncio tasks 118 | 119 | Leaked Task: Task-2 120 | ID: 4345977088 121 | State: TaskState.RUNNING 122 | Current Stack: 123 | File "/tmp/example.py", line 9, in background_task 124 | await asyncio.sleep(10) 125 | ``` 126 | 127 | 128 | #### Include creation stack trace 129 | 130 | You can also include the creation stack trace by passing `enable_creation_tracking=True` to `no_task_leaks`. 
131 | 132 | ```python 133 | async def main(): 134 | try: 135 | async with no_task_leaks(action="raise", enable_creation_tracking=True): 136 | await leaky_function() 137 | except TaskLeakError as e: 138 | print(e) 139 | ``` 140 | 141 | Output: 142 | 143 | ``` 144 | creating a long running task 145 | background task started 146 | Detected 1 leaked asyncio tasks 147 | 148 | Leaked Task: Task-2 149 | ID: 4392245504 150 | State: TaskState.RUNNING 151 | Current Stack: 152 | File "/tmp/example.py", line 9, in background_task 153 | await asyncio.sleep(10) 154 | Creation Stack: 155 | File "/tmp/example.py", line 24, in 156 | asyncio.run(main()) 157 | File "/opt/homebrew/.../asyncio/runners.py", line 194, in run 158 | return runner.run(main) 159 | File "/opt/homebrew/.../asyncio/runners.py", line 118, in run 160 | return self._loop.run_until_complete(task) 161 | File "/opt/homebrew/.../asyncio/base_events.py", line 671, in run_until_complete 162 | self.run_forever() 163 | File "/opt/homebrew/.../asyncio/base_events.py", line 638, in run_forever 164 | self._run_once() 165 | File "/opt/homebrew/.../asyncio/base_events.py", line 1971, in _run_once 166 | handle._run() 167 | File "/opt/homebrew/.../asyncio/events.py", line 84, in _run 168 | self._context.run(self._callback, *self._args) 169 | File "/tmp/example.py", line 18, in main 170 | await leaky_function() 171 | File "/tmp/example.py", line 12, in leaky_function 172 | asyncio.create_task(background_task()) 173 | ``` 174 | 175 | `TaskLeakError` has a `leaked_tasks` attribute that contains a list of `LeakedTask` objects including the stack trace details. 176 | 177 | > Note: `enable_creation_tracking` monkey patches `asyncio.create_task` to include the creation stack trace. It is not recommended to be used in production to avoid unnecessary side effects. 
178 | 179 | #### From event loop blocks 180 | 181 | When using `no_event_loop_blocking`, you get detailed stack trace information showing exactly where the event loop is blocked and where the blocking code is executing. 182 | 183 | ```python 184 | import asyncio 185 | import time 186 | 187 | from pyleak import EventLoopBlockError, no_event_loop_blocking 188 | 189 | 190 | async def some_function_with_blocking_code(): 191 | print("starting") 192 | time.sleep(1) 193 | print("done") 194 | 195 | 196 | async def main(): 197 | try: 198 | async with no_event_loop_blocking(action="raise"): 199 | await some_function_with_blocking_code() 200 | except EventLoopBlockError as e: 201 | print(e) 202 | 203 | 204 | if __name__ == "__main__": 205 | asyncio.run(main()) 206 | ``` 207 | 208 | Output: 209 | 210 | ``` 211 | starting 212 | done 213 | Detected 1 event loop blocks 214 | 215 | Event Loop Block: block-1 216 | Duration: 0.605s (threshold: 0.200s) 217 | Timestamp: 1749051796.302 218 | Blocking Stack: 219 | File "/private/tmp/example.py", line 22, in 220 | asyncio.run(main()) 221 | File "/opt/homebrew/.../asyncio/runners.py", line 194, in run 222 | return runner.run(main) 223 | File "/opt/homebrew/.../asyncio/runners.py", line 118, in run 224 | return self._loop.run_until_complete(task) 225 | File "/opt/homebrew/.../asyncio/base_events.py", line 671, in run_until_complete 226 | self.run_forever() 227 | File "/opt/homebrew/.../asyncio/base_events.py", line 638, in run_forever 228 | self._run_once() 229 | File "/opt/homebrew/.../asyncio/base_events.py", line 1971, in _run_once 230 | handle._run() 231 | File "/opt/homebrew/.../asyncio/events.py", line 84, in _run 232 | self._context.run(self._callback, *self._args) 233 | File "/private/tmp/example.py", line 16, in main 234 | await some_function_with_blocking_code() 235 | File "/private/tmp/example.py", line 9, in some_function_with_blocking_code 236 | time.sleep(1) 237 | ``` 238 | 239 | ## Actions 240 | 241 | Control what happens 
when leaks/blocking are detected: 242 | 243 | | Action | AsyncIO Tasks | Threads | Event Loop Blocking | 244 | |--------|---------------|---------|-------------------| 245 | | `"warn"` (default) | ✅ Issues `ResourceWarning` | ✅ Issues `ResourceWarning` | ✅ Issues ResourceWarning | 246 | | `"log"` | ✅ Writes to logger | ✅ Writes to logger | ✅ Writes to logger | 247 | | `"cancel"` | ✅ Cancels leaked tasks | ❌ Warns (can't force-stop) | ❌ Warns (can't cancel) | 248 | | `"raise"` | ✅ Raises `TaskLeakError` | ✅ Raises `ThreadLeakError` | ✅ Raises `EventLoopBlockError` | 249 | 250 | ```python 251 | # Examples 252 | async with no_task_leaks(action="cancel"): # Cancels leaked tasks 253 | pass 254 | 255 | with no_thread_leaks(action="raise"): # Raises exception on thread leaks 256 | pass 257 | 258 | with no_event_loop_blocking(action="log"): # Logs blocking events 259 | pass 260 | ``` 261 | 262 | ## Name Filtering 263 | 264 | Filter detection by resource names (tasks and threads only): 265 | 266 | ```python 267 | import re 268 | 269 | # Exact match 270 | async with no_task_leaks(name_filter="background-worker"): 271 | pass 272 | 273 | with no_thread_leaks(name_filter="worker-thread"): 274 | pass 275 | 276 | # Regex pattern 277 | async with no_task_leaks(name_filter=re.compile(r"worker-\d+")): 278 | pass 279 | 280 | with no_thread_leaks(name_filter=re.compile(r"background-.*")): 281 | pass 282 | ``` 283 | 284 | > Note: Event loop blocking detection doesn't support name filtering. 
285 | 286 | ## Configuration Options 287 | 288 | ### AsyncIO Tasks 289 | ```python 290 | no_task_leaks( 291 | action="warn", # Action to take on detection 292 | name_filter=None, # Filter by task name 293 | logger=None # Custom logger 294 | ) 295 | ``` 296 | 297 | ### Threads 298 | ```python 299 | no_thread_leaks( 300 | action="warn", # Action to take on detection 301 | name_filter=None, # Filter by thread name 302 | logger=None, # Custom logger 303 | exclude_daemon=True, # Exclude daemon threads 304 | ) 305 | ``` 306 | 307 | ### Event Loop Blocking 308 | ```python 309 | no_event_loop_blocking( 310 | action="warn", # Action to take on detection 311 | logger=None, # Custom logger 312 | threshold=0.1, # Minimum blocking time to report (seconds) 313 | check_interval=0.01 # How often to check (seconds) 314 | ) 315 | ``` 316 | 317 | ## Testing 318 | 319 | Perfect for catching issues in tests: 320 | 321 | ```python 322 | import pytest 323 | from pyleak import no_task_leaks, no_thread_leaks, no_event_loop_blocking 324 | 325 | @pytest.mark.asyncio 326 | async def test_no_leaked_tasks(): 327 | async with no_task_leaks(action="raise"): 328 | await my_async_function() 329 | 330 | def test_no_leaked_threads(): 331 | with no_thread_leaks(action="raise"): 332 | my_threaded_function() 333 | 334 | @pytest.mark.asyncio 335 | async def test_no_event_loop_blocking(): 336 | with no_event_loop_blocking(action="raise", threshold=0.1): 337 | await my_potentially_blocking_function() 338 | ``` 339 | 340 | ## Real-World Examples 341 | 342 | ### Detecting Synchronous HTTP Calls in Async Code 343 | 344 | ```python 345 | import httpx 346 | from starlette.testclient import TestClient 347 | 348 | async def test_sync_vs_async_http(): 349 | # This will detect blocking 350 | with no_event_loop_blocking(action="warn"): 351 | response = TestClient(app).get("/endpoint") # Synchronous! 
352 | 353 | # This will not detect blocking 354 | with no_event_loop_blocking(action="warn"): 355 | async with httpx.AsyncClient() as client: 356 | response = await client.get("/endpoint") # Asynchronous! 357 | ``` 358 | 359 | ### Ensuring Proper Resource Cleanup 360 | 361 | ```python 362 | async def test_background_task_cleanup(): 363 | async with no_task_leaks(action="raise"): 364 | # This would fail the test 365 | asyncio.create_task(long_running_task()) 366 | 367 | # This would pass 368 | task = asyncio.create_task(long_running_task()) 369 | task.cancel() 370 | try: 371 | await task 372 | except asyncio.CancelledError: 373 | pass 374 | ``` 375 | 376 | ### Debugging complex task leaks 377 | 378 | ```python 379 | import asyncio 380 | import random 381 | import re 382 | from pyleak import TaskLeakError, no_task_leaks 383 | 384 | async def debug_task_leaks(): 385 | """Example showing how to debug complex task leaks.""" 386 | 387 | async def worker(worker_id: int, sleep_time: int): 388 | print(f"Worker {worker_id} starting") 389 | await asyncio.sleep(sleep_time) # Simulate work 390 | print(f"Worker {worker_id} done") 391 | 392 | async def spawn_workers(): 393 | for i in range(3): 394 | asyncio.create_task(worker(i, random.randint(1, 10)), name=f"worker-{i}") 395 | 396 | try: 397 | async with no_task_leaks( 398 | action="raise", 399 | enable_creation_tracking=True, 400 | name_filter=re.compile(r"worker-\d+"), # Only catch worker tasks 401 | ): 402 | await spawn_workers() 403 | await asyncio.sleep(0.1) # Let workers start 404 | 405 | except TaskLeakError as e: 406 | print(f"\nFound {e.task_count} leaked worker tasks:") 407 | for task_info in e.leaked_tasks: 408 | print(f"\n--- {task_info.name} ---") 409 | print("Currently executing:") 410 | print(task_info.format_current_stack()) 411 | print("Created at:") 412 | print(task_info.format_creation_stack()) 413 | 414 | # Cancel the leaked task 415 | if task_info.task_ref: 416 | task_info.task_ref.cancel() 417 | 418 | 419 | 
if __name__ == "__main__": 420 | asyncio.run(debug_task_leaks()) 421 | 422 | ``` 423 | 424 | 425 |
426 | Toggle to see the output 427 | 428 | ``` 429 | Worker 0 starting 430 | Worker 1 starting 431 | Worker 2 starting 432 | 433 | Found 3 leaked worker tasks: 434 | 435 | --- worker-2 --- 436 | Currently executing: 437 | File "/private/tmp/example.py", line 33, in worker 438 | await asyncio.sleep(sleep_time) # Simulate work 439 | 440 | Created at: 441 | File "/private/tmp/example.py", line 65, in 442 | asyncio.run(debug_task_leaks()) 443 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 444 | return runner.run(main) 445 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 446 | return self._loop.run_until_complete(task) 447 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 448 | self.run_forever() 449 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 450 | self._run_once() 451 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 452 | handle._run() 453 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 454 | self._context.run(self._callback, *self._args) 455 | File "/private/tmp/example.py", line 47, in debug_task_leaks 456 | await spawn_workers() 457 | File "/private/tmp/example.py", line 39, in spawn_workers 458 | asyncio.create_task(worker(i, random.randint(1, 10)), name=f"worker-{i}") 459 | 460 | 461 | --- worker-0 --- 462 | Currently executing: 463 | File "/private/tmp/example.py", line 33, in worker 464 | await asyncio.sleep(sleep_time) # Simulate work 465 | 466 | Created at: 467 | File "/private/tmp/example.py", line 65, in 468 | asyncio.run(debug_task_leaks()) 469 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 470 | return runner.run(main) 471 | File 
"/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 472 | return self._loop.run_until_complete(task) 473 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 474 | self.run_forever() 475 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 476 | self._run_once() 477 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 478 | handle._run() 479 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 480 | self._context.run(self._callback, *self._args) 481 | File "/private/tmp/example.py", line 47, in debug_task_leaks 482 | await spawn_workers() 483 | File "/private/tmp/example.py", line 39, in spawn_workers 484 | asyncio.create_task(worker(i, random.randint(1, 10)), name=f"worker-{i}") 485 | 486 | 487 | --- worker-1 --- 488 | Currently executing: 489 | File "/private/tmp/example.py", line 33, in worker 490 | await asyncio.sleep(sleep_time) # Simulate work 491 | 492 | Created at: 493 | File "/private/tmp/example.py", line 65, in 494 | asyncio.run(debug_task_leaks()) 495 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 496 | return runner.run(main) 497 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 498 | return self._loop.run_until_complete(task) 499 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 500 | self.run_forever() 501 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 502 | self._run_once() 503 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 504 | handle._run() 505 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 506 | 
self._context.run(self._callback, *self._args) 507 | File "/private/tmp/example.py", line 47, in debug_task_leaks 508 | await spawn_workers() 509 | File "/private/tmp/example.py", line 39, in spawn_workers 510 | asyncio.create_task(worker(i, random.randint(1, 10)), name=f"worker-{i}") 511 | ``` 512 | 513 |
514 | 515 | ### Debugging event loop blocking 516 | 517 | ```python 518 | import asyncio 519 | from pyleak import EventLoopBlockError, no_event_loop_blocking 520 | 521 | async def process_user_data(user_id: int): 522 | """Simulates cpu intensive work - contains blocking operations!""" 523 | print(f"Processing user {user_id}...") 524 | return sum(i * i for i in range(100_000_000)) 525 | 526 | async def main(): 527 | try: 528 | async with no_event_loop_blocking(action="raise", threshold=0.5): 529 | user1 = await process_user_data(1) 530 | user2 = await process_user_data(2) 531 | 532 | except EventLoopBlockError as e: 533 | print(f"\n🚨 Found {e.block_count} blocking events:") 534 | print(e) 535 | 536 | if __name__ == "__main__": 537 | asyncio.run(main()) 538 | ``` 539 | 540 | 541 |
542 | Toggle to see the output 543 | 544 | ``` 545 | Processing user 1... 546 | Processing user 2... 547 | 548 | 🚨 Found 5 blocking events: 549 | Detected 5 event loop blocks 550 | 551 | Event Loop Block: block-1 552 | Duration: 1.507s (threshold: 0.500s) 553 | Timestamp: 1749052720.456 554 | Blocking Stack: 555 | File "/private/tmp/example.py", line 36, in 556 | asyncio.run(main()) 557 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 558 | return runner.run(main) 559 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 560 | return self._loop.run_until_complete(task) 561 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 562 | self.run_forever() 563 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 564 | self._run_once() 565 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 566 | handle._run() 567 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 568 | self._context.run(self._callback, *self._args) 569 | File "/private/tmp/example.py", line 27, in main 570 | user1 = await process_user_data(1) 571 | File "/private/tmp/example.py", line 21, in process_user_data 572 | return sum(i * i for i in range(100_000_000)) 573 | File "/private/tmp/example.py", line 21, in 574 | return sum(i * i for i in range(100_000_000)) 575 | Event Loop Block: block-2 576 | Duration: 1.516s (threshold: 0.500s) 577 | Timestamp: 1749052722.054 578 | Blocking Stack: 579 | File "/private/tmp/example.py", line 36, in 580 | asyncio.run(main()) 581 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 582 | return runner.run(main) 583 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 584 | return 
self._loop.run_until_complete(task) 585 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 586 | self.run_forever() 587 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 588 | self._run_once() 589 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 590 | handle._run() 591 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 592 | self._context.run(self._callback, *self._args) 593 | File "/private/tmp/example.py", line 27, in main 594 | user1 = await process_user_data(1) 595 | File "/private/tmp/example.py", line 21, in process_user_data 596 | return sum(i * i for i in range(100_000_000)) 597 | File "/private/tmp/example.py", line 21, in 598 | return sum(i * i for i in range(100_000_000)) 599 | Event Loop Block: block-3 600 | Duration: 1.518s (threshold: 0.500s) 601 | Timestamp: 1749052723.648 602 | Blocking Stack: 603 | File "/private/tmp/example.py", line 36, in 604 | asyncio.run(main()) 605 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 606 | return runner.run(main) 607 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 608 | return self._loop.run_until_complete(task) 609 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 610 | self.run_forever() 611 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 612 | self._run_once() 613 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 614 | handle._run() 615 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 616 | self._context.run(self._callback, *self._args) 617 | File "/private/tmp/example.py", line 28, in main 
618 | user2 = await process_user_data(2) 619 | File "/private/tmp/example.py", line 21, in process_user_data 620 | return sum(i * i for i in range(100_000_000)) 621 | File "/private/tmp/example.py", line 21, in 622 | return sum(i * i for i in range(100_000_000)) 623 | Event Loop Block: block-4 624 | Duration: 1.517s (threshold: 0.500s) 625 | Timestamp: 1749052725.247 626 | Blocking Stack: 627 | File "/private/tmp/example.py", line 36, in 628 | asyncio.run(main()) 629 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 630 | return runner.run(main) 631 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 632 | return self._loop.run_until_complete(task) 633 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 634 | self.run_forever() 635 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 636 | self._run_once() 637 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 638 | handle._run() 639 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 640 | self._context.run(self._callback, *self._args) 641 | File "/private/tmp/example.py", line 28, in main 642 | user2 = await process_user_data(2) 643 | File "/private/tmp/example.py", line 21, in process_user_data 644 | return sum(i * i for i in range(100_000_000)) 645 | File "/private/tmp/example.py", line 21, in 646 | return sum(i * i for i in range(100_000_000)) 647 | Event Loop Block: block-5 648 | Duration: 1.513s (threshold: 0.500s) 649 | Timestamp: 1749052726.839 650 | Blocking Stack: 651 | File "/private/tmp/example.py", line 36, in 652 | asyncio.run(main()) 653 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 194, in run 654 | return runner.run(main) 655 | File 
"/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/runners.py", line 118, in run 656 | return self._loop.run_until_complete(task) 657 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 671, in run_until_complete 658 | self.run_forever() 659 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 638, in run_forever 660 | self._run_once() 661 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/base_events.py", line 1971, in _run_once 662 | handle._run() 663 | File "/opt/homebrew/anaconda3/envs/ffa/lib/python3.12/asyncio/events.py", line 84, in _run 664 | self._context.run(self._callback, *self._args) 665 | File "/private/tmp/example.py", line 28, in main 666 | user2 = await process_user_data(2) 667 | File "/private/tmp/example.py", line 21, in process_user_data 668 | return sum(i * i for i in range(100_000_000)) 669 | File "/private/tmp/example.py", line 21, in 670 | return sum(i * i for i in range(100_000_000)) 671 | ``` 672 |
673 | 674 | 675 | ## Pytest Plugin 676 | 677 | The pytest plugin automatically wraps tests with pyleak detectors based on pytest markers. 678 | 679 | ### Installation 680 | 681 | ```bash 682 | pip install pyleak 683 | ``` 684 | 685 | ### Add the plugin to your pytest configuration 686 | 687 | **pyproject.toml** 688 | 689 | ```toml 690 | [tool.pytest.ini_options] 691 | markers = [ 692 | "no_leaks: detect asyncio task leaks, thread leaks, and event loop blocking" 693 | ] 694 | ``` 695 | 696 | **pytest.ini** 697 | 698 | ```ini 699 | [pytest] 700 | markers = no_leaks: detect asyncio task leaks, thread leaks, and event loop blocking 701 | ``` 702 | 703 | You can also add it to the `conftest.py` file. 704 | 705 | ```python 706 | # conftest.py 707 | import pytest 708 | 709 | def pytest_configure(config): 710 | config.addinivalue_line( 711 | "markers", 712 | "no_leaks: detect asyncio task leaks, thread leaks, and event loop blocking" 713 | ) 714 | ``` 715 | 716 | ### Usage 717 | 718 | ```python 719 | @pytest.mark.no_leaks 720 | @pytest.mark.asyncio 721 | async def test_no_task_leaks(): 722 | asyncio.create_task(asyncio.sleep(10)) 723 | ``` 724 | 725 | ### Selective detection 726 | 727 | By default, all detectors are enabled. You can selectively enable or disable detectors using the `no_leaks` marker.
For example, to only detect task leaks and event loop blocking, you can use the following: 728 | 729 | ```python 730 | @pytest.mark.no_leaks(tasks=True, blocking=True, threads=False) 731 | @pytest.mark.asyncio 732 | async def test_async_no_leaks(): 733 | asyncio.create_task(asyncio.sleep(10)) # This will be detected 734 | time.sleep(0.5) # This will be detected 735 | threading.Thread(target=lambda: time.sleep(10)).start() # This will not be detected 736 | ``` 737 | 738 | #### `no_leaks` marker configuration 739 | 740 | | Name | Default | Description | 741 | |:------|:------|:------| 742 | | tasks | True | Whether to detect task leaks | 743 | | task_action | raise | Action to take when a task leak is detected | 744 | | task_name_filter | None | Filter to apply to task names | 745 | | enable_task_creation_tracking | False | Whether to enable task creation tracking | 746 | | threads | True | Whether to detect thread leaks | 747 | | thread_action | raise | Action to take when a thread leak is detected | 748 | | thread_name_filter | r'^(?!asyncio_\\d+$).*' | Filter to apply to thread names (default: exclude asyncio threads) | 749 | | exclude_daemon_threads | True | Whether to exclude daemon threads | 750 | | blocking | True | Whether to detect event loop blocking | 751 | | blocking_action | raise | Action to take when a blocking event loop is detected | 752 | | blocking_threshold | 0.2 | Threshold for blocking event loop detection | 753 | | blocking_check_interval | 0.01 | Interval for checking for blocking event loop | 754 | 755 | 756 | ## Why Use pyleak? 757 | 758 | **AsyncIO Tasks**: Leaked tasks can cause memory leaks, prevent graceful shutdown, and make debugging difficult. 759 | 760 | **Threads**: Leaked threads consume system resources and can prevent proper application termination. 761 | 762 | **Event Loop Blocking**: Synchronous operations in async code destroy performance and can cause timeouts. 
763 | 764 | `pyleak` helps you catch these issues during development and testing, optionally using a pytest plugin, before they reach production. 765 | 766 | ## Examples 767 | 768 | More examples can be found in the test files: 769 | - [AsyncIO tasks tests](./tests/test_task_leaks.py) 770 | - [Thread tests](./tests/test_thread_leaks.py) 771 | - [Event loop blocking tests](./tests/test_event_loop_blocking.py) 772 | - [Pytest plugin tests](./tests/test_plugin.py) 773 | 774 | --- 775 | 776 | > Disclaimer: Most of the code and tests are written by Claude. 777 | --------------------------------------------------------------------------------