├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── ---bug-report.md
│   │   └── ---feature-request.md
│   ├── dependabot.yml
│   └── workflows
│       ├── build.yml
│       └── build_dist.yml
├── .gitignore
├── .readthedocs.yml
├── LICENSE
├── MAINTAINANCE.md
├── Makefile
├── README.md
├── docker-compose.yml
├── docs
│   ├── _static
│   │   └── images
│   │       └── logo.png
│   ├── api.rst
│   ├── conf.py
│   ├── configuration.rst
│   ├── demo
│   │   └── test_ok.py
│   ├── index.rst
│   ├── news.rst
│   ├── news
│   │   └── template.jinja2
│   └── usage.rst
├── pyproject.toml
├── src
│   └── pytest_memray
│       ├── __init__.py
│       ├── marks.py
│       ├── plugin.py
│       ├── py.typed
│       └── utils.py
├── tests
│   ├── conftest.py
│   ├── test_pytest_memray.py
│   └── test_utils.py
└── tox.ini
/.github/ISSUE_TEMPLATE/---bug-report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: "\U0001F41B Bug Report"
3 | about: "If something isn't working as expected \U0001F914."
4 | ---
5 |
6 | ## Bug Report
7 |
8 | **Current Behavior** A clear and concise description of the behavior.
9 |
10 | **Input Code**
11 |
12 | - REPL or Repo link if applicable:
13 |
14 | ```python
15 | your_code = "here"
16 | ```
17 |
18 | **Expected behavior/code** A clear and concise description of what you expected to
19 | happen (or code).
20 |
21 | **Environment**
22 |
23 | - Python(s): [e.g. python3.8, python3.9, ...]
24 |
25 | **Possible Solution**
26 |
27 |
28 |
29 | **Additional context/Screenshots** Add any other context about the problem here. If
30 | applicable, add screenshots to help explain.
31 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/---feature-request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: "\U0001F680 Feature Request"
3 | about: Suggest an idea for this project
4 | ---
5 |
6 | ## Feature Request
7 |
8 | **Is your feature request related to a problem? Please describe.** A clear and concise
9 | description of what the problem is. Ex. I have an issue when [...]
10 |
11 | **Describe the solution you'd like** A clear and concise description of what you want to
12 | happen. Add any considered drawbacks.
13 |
14 | **Describe alternatives you've considered** A clear and concise description of any
15 | alternative solutions or features you've considered.
16 |
17 | **Teachability, Documentation, Adoption, Migration Strategy** If you can, explain how
18 | users will be able to use this and possibly write out a version the docs. Maybe a
19 | screenshot or design?
20 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "daily"
7 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Run
2 |
3 | on:
4 | push:
5 | pull_request:
6 | branches:
7 | - main
8 | schedule:
9 | # At 12:00 UTC on every day-of-month
10 | - cron: "0 12 */1 * *"
11 |
12 | concurrency:
13 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
14 | cancel-in-progress: true
15 |
16 | jobs:
17 | test:
18 | name: test ${{ matrix.tox_env }}
19 | runs-on: ubuntu-22.04
20 | strategy:
21 | fail-fast: false
22 | matrix:
23 | include:
24 | - { "py": "3.12", "tox_env": "py312-cov" }
25 | - { "py": "3.12", "tox_env": "py312" }
26 | - { "py": "3.11", "tox_env": "py311" }
27 | - { "py": "3.10", "tox_env": "py310" }
28 | - { "py": "3.9", "tox_env": "py39" }
29 | - { "py": "3.8", "tox_env": "py38" }
30 | steps:
31 | - name: setup python for tox
32 | uses: actions/setup-python@v5
33 | with:
34 | python-version: "3.11"
35 | - name: install tox
36 | run: python -m pip install tox
37 | - uses: actions/checkout@v4
38 | - name: setup python for test ${{ matrix.py }}
39 | uses: actions/setup-python@v5
40 | with:
41 | python-version: ${{ matrix.py }}
42 | - name: setup test suite ${{ matrix.tox_env }}
43 | run: tox -vv --notest -e ${{ matrix.tox_env }}
44 | - name: run test suite ${{ matrix.tox_env }}
45 | run: tox --skip-pkg-install -e ${{ matrix.tox_env }}
46 |
47 | check:
48 | name: check ${{ matrix.tox_env }}
49 | runs-on: ubuntu-22.04
50 | strategy:
51 | fail-fast: false
52 | matrix:
53 | tox_env:
54 | - lint
55 | - docs
56 | steps:
57 | - uses: actions/checkout@v4
58 | - name: setup Python 3.11
59 | uses: actions/setup-python@v5
60 | with:
61 | python-version: "3.11"
62 | - name: Install prettier
63 | run: npm install -g prettier
64 | - name: install tox
65 | run: python -m pip install tox
66 | - name: setup test suite for ${{ matrix.tox_env }}
67 | run: tox -vv --notest -e ${{ matrix.tox_env }}
68 | - name: run test suite ${{ matrix.tox_env }}
69 | run: tox --skip-pkg-install -e ${{ matrix.tox_env }}
70 |
--------------------------------------------------------------------------------
/.github/workflows/build_dist.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 |
3 | on:
4 | push:
5 | pull_request:
6 | release:
7 | types:
8 | - published
9 | schedule:
10 | # At 12:00 UTC on every day-of-month
11 | - cron: "0 12 */1 * *"
12 |
13 | concurrency:
14 | group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
15 | cancel-in-progress: true
16 |
17 | jobs:
18 | build_dist:
19 | name: Source and wheel distributions
20 | runs-on: ubuntu-latest
21 | steps:
22 | - uses: actions/checkout@v4
23 |
24 | - name: Build distributions
25 | run: pipx run build[virtualenv] --sdist --wheel
26 |
27 | - uses: actions/upload-artifact@v4
28 | with:
29 | path: dist/*
30 |
31 | upload_pypi:
32 | needs: [build_dist]
33 | runs-on: ubuntu-latest
34 | if: github.event_name == 'release' && github.event.action == 'published'
35 | steps:
36 | - uses: actions/download-artifact@v4
37 | with:
38 | name: artifact
39 | path: dist
40 |
41 | - uses: pypa/gh-action-pypi-publish@v1.12.4
42 | with:
43 | password: ${{ secrets.PYPI_PASSWORD }}
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # IDE stuff
2 |
3 | .idea/*
4 |
5 | # Cmake stuff
6 |
7 | CMakeLists.txt.user
8 | CMakeCache.txt
9 | CMakeFiles
10 | CMakeScripts
11 | Testing
12 | cmake_install.cmake
13 | install_manifest.txt
14 | compile_commands.json
15 | CTestTestfile.cmake
16 | _deps
17 | *.cbp
18 | cmake-build-*/
19 |
20 | # Cython specific files
21 | src/memray/*.cpp
22 | src/memray/_memray_api.h
23 |
24 | # Byte-compiled / optimized / DLL files
25 | __pycache__/
26 | *.py[cod]
27 | *$py.class
28 |
29 | # C extensions
30 | *.so
31 |
32 | # Distribution / packaging
33 | .Python
34 | build/
35 | develop-eggs/
36 | dist/
37 | downloads/
38 | eggs/
39 | .eggs/
40 | lib/
41 | lib64/
42 | parts/
43 | sdist/
44 | var/
45 | wheels/
46 | share/python-wheels/
47 | *.egg-info/
48 | .installed.cfg
49 | *.egg
50 | MANIFEST
51 |
52 | # PyInstaller
53 | # Usually these files are written by a python script from a template
54 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
55 | *.manifest
56 | *.spec
57 |
58 | # Installer logs
59 | pip-log.txt
60 | pip-delete-this-directory.txt
61 |
62 | # Unit test / coverage reports
63 | htmlcov/
64 | .tox/
65 | .nox/
66 | .coverage
67 | .coverage.*
68 | .cache
69 | nosetests.xml
70 | coverage.xml
71 | *.cover
72 | *.py,cover
73 | .hypothesis/
74 | .pytest_cache/
75 | cover/
76 | /src/pytest_memray/_version.py
77 |
78 | # Translations
79 | *.mo
80 | *.pot
81 |
82 | # Django stuff:
83 | *.log
84 | local_settings.py
85 | db.sqlite3
86 | db.sqlite3-journal
87 |
88 | # Flask stuff:
89 | instance/
90 | .webassets-cache
91 |
92 | # Scrapy stuff:
93 | .scrapy
94 |
95 | # Sphinx documentation
96 | docs/_build/
97 | /docs/_draft.rst
98 |
99 | # PyBuilder
100 | .pybuilder/
101 | target/
102 |
103 | # Jupyter Notebook
104 | .ipynb_checkpoints
105 |
106 | # IPython
107 | profile_default/
108 | ipython_config.py
109 |
110 | # pyenv
111 | # For a library or package, you might want to ignore these files since the code is
112 | # intended to run in multiple environments; otherwise, check them in:
113 | # .python-version
114 |
115 | # pipenv
116 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
117 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
118 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
119 | # install all needed dependencies.
120 | #Pipfile.lock
121 |
122 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
123 | __pypackages__/
124 |
125 | # Celery stuff
126 | celerybeat-schedule
127 | celerybeat.pid
128 |
129 | # SageMath parsed files
130 | *.sage.py
131 |
132 | # Environments
133 | .env
134 | .venv
135 | env/
136 | venv/
137 | ENV/
138 | env.bak/
139 | venv.bak/
140 |
141 | # Spyder project settings
142 | .spyderproject
143 | .spyproject
144 |
145 | # Rope project settings
146 | .ropeproject
147 |
148 | # mkdocs documentation
149 | /site
150 |
151 | # mypy
152 | .mypy_cache/
153 | .dmypy.json
154 | dmypy.json
155 |
156 | # Pyre type checker
157 | .pyre/
158 |
159 | # pytype static type analyzer
160 | .pytype/
161 |
162 | # Cython debug symbols
163 | cython_debug/
164 |
165 | # Asv stuff
166 | .asv
167 |
168 | # memray stuff
169 | memray-*
170 |
171 | # Generated files
172 | **/templates/assets/*.js
173 |
174 | # VSCode
175 | .devcontainer
176 | .vscode
177 |
178 | # NodeJS
179 | node_modules/
180 |
--------------------------------------------------------------------------------
/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | build:
3 | os: ubuntu-22.04
4 | tools:
5 | python: "3"
6 | python:
7 | install:
8 | - method: pip
9 | path: .
10 | extra_requirements:
11 | - docs
12 | sphinx:
13 | builder: html
14 | configuration: docs/conf.py
15 | fail_on_warning: true
16 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2022 Bloomberg LP
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/MAINTAINANCE.md:
--------------------------------------------------------------------------------
1 | # Maintenance
2 |
3 | Information useful for the maintainers of the project.
4 |
5 | ## Release process
6 |
7 | 1. Generate the release changelog via:
8 |
9 | ```bash
10 | tox r -e release -- 1.1.0
11 | # or
12 | make gen_news VERSION=1.1.0
13 | ```
14 |
15 | commit it and create a PR.
16 |
17 | 2. After merging the PR from step 1, cut a release on the GitHub release page with the
18 |    same version.
19 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PYTHON ?= python
2 | PRETTIER ?= prettier --no-editorconfig
3 |
4 | # Doc generation variables
5 | UPSTREAM_GIT_REMOTE ?= origin
6 | DOCSBUILDDIR := docs/_build
7 | HTMLDIR := $(DOCSBUILDDIR)/html
8 | PKG_CONFIG_PATH ?= /opt/bb/lib64/pkgconfig
9 | PIP_INSTALL=PKG_CONFIG_PATH="$(PKG_CONFIG_PATH)" $(PYTHON) -m pip install
10 |
11 | markdown_files := $(shell find . -name \*.md -not -path '*/\.*')
12 | python_files := $(shell find . -name \*.py -not -path '*/\.*')
13 | PURELIB=$(shell $(PYTHON) -c 'import sysconfig; print(sysconfig.get_path("purelib"))')
14 | # Use this to inject arbitrary commands before the make targets (e.g. docker)
15 | ENV :=
16 |
17 | .PHONY: dist
18 | dist: ## Generate Python distribution files
19 | $(PYTHON) -m build .
20 |
21 | .PHONY: install-sdist
22 | install-sdist: dist ## Install from source distribution
23 | $(ENV) $(PIP_INSTALL) $(wildcard dist/*.tar.gz)
24 |
25 | .PHONY: test-install
26 | test-install: ## Install with test dependencies
27 | $(ENV) $(PIP_INSTALL) -e .[test]
28 |
29 | .PHONY: check
30 | check:
31 | $(PYTHON) -m pytest -vvv --color=yes $(PYTEST_ARGS) tests
32 |
33 | .PHONY: coverage
34 | coverage: ## Run the test suite, with Python code coverage
35 | $(PYTHON) -m coverage erase
36 | $(PYTHON) -m coverage run -m pytest tests
37 | $(PYTHON) -m coverage combine
38 | $(PYTHON) -m coverage report
39 | $(PYTHON) -m coverage html -d .pytest_cov/htmlcov
40 |
41 | .PHONY: format
42 | format: ## Autoformat all files
43 | $(PYTHON) -m ruff --fix $(python_files)
44 | $(PYTHON) -m black $(python_files)
45 |
46 | .PHONY: lint
47 | lint: ## Lint all files
48 | $(PYTHON) -m ruff check $(python_files)
49 | $(PYTHON) -m black --check --diff $(python_files)
50 | $(PYTHON) -m mypy src/pytest_memray --ignore-missing-imports
51 |
52 | .PHONY: docs
53 | docs: ## Generate documentation
54 | sphinx-build docs docs/_build/html --color -W --keep-going -n -bhtml -b linkcheck -W
55 |
56 | .PHONY: clean
57 | clean: ## Clean any built/generated artifacts
58 | find . | grep -E '(\.o|\.so|\.gcda|\.gcno|\.gcov\.json\.gz)' | xargs rm -rf
59 | find . | grep -E '(__pycache__|\.pyc|\.pyo)' | xargs rm -rf
60 |
61 | .PHONY: gen_news
62 | gen_news:
63 | 	$(PYTHON) -m towncrier build --version $(VERSION) --yes
64 |
65 | .PHONY: help
66 | help: ## Print this message
67 | @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
68 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | # pytest-memray
4 |
5 | [](https://pypi.org/project/pytest-memray)
6 | [](https://pypi.org/project/pytest-memray)
7 | [](https://pypi.org/project/pytest-memray)
8 | [](https://pypistats.org/packages/pytest-memray)
9 | [![License](https://img.shields.io/pypi/l/pytest-memray?style=flat-square)](https://opensource.org/licenses/Apache-2.0)
10 | [](https://github.com/bloomberg/pytest-memray/actions/workflows/build.yml)
11 | 
12 |
13 | pytest-memray is a pytest plugin for easy integration of
14 | [memray](https://github.com/bloomberg/memray).
15 |
16 | ## Installation
17 |
18 | pytest-memray requires Python 3.8 or higher and can be easily installed using most
19 | common Python packaging tools. We recommend installing the latest stable release from
20 | [PyPI](https://pypi.org/project/pytest-memray/) with pip:
21 |
22 | ```shell
23 | pip install pytest-memray
24 | ```
25 |
26 | ## Documentation
27 |
28 | You can find the latest documentation available
29 | [here](https://pytest-memray.readthedocs.io/en/latest/).
30 |
31 | ## Quick introduction
32 |
33 | To use the plugin in a pytest run, simply add `--memray` to the command line invocation:
34 |
35 | ```shell
36 | pytest --memray tests
37 | ```
38 |
39 | After the test suite runs you'll see a memory report printed:
40 |
41 | ```bash
42 | =================================== test session starts ====================================
43 | platform linux -- Python 3.10.4, pytest-7.1.2, pluggy-1.0.0
44 | cachedir: /v/.pytest_cache
45 | rootdir: /w
46 | plugins: memray-1.1.0
47 | collected 2 items
48 |
49 | demo/test_ok.py .M [100%]
50 |
51 | ========================================= FAILURES =========================================
52 | ____________________________________ test_memory_exceed ____________________________________
53 | Test was limited to 100.0KiB but allocated 117.2KiB
54 | ------------------------------------ memray-max-memory -------------------------------------
55 | Test is using 117.2KiB out of limit of 100.0KiB
56 | List of allocations:
57 | - :/w/demo/test_ok.py:17 -> 117.2KiB
58 |
59 | ====================================== MEMRAY REPORT =======================================
60 | Allocations results for demo/test_ok.py::test_memory_exceed
61 |
62 | 📦 Total memory allocated: 117.2KiB
63 | 📏 Total allocations: 30
64 | 📊 Histogram of allocation sizes: |█|
65 | 🥇 Biggest allocating functions:
66 | - :/w/demo/test_ok.py:17 -> 117.2KiB
67 |
68 |
69 | Allocations results for demo/test_ok.py::test_track
70 |
71 | 📦 Total memory allocated: 54.9KiB
72 | 📏 Total allocations: 71
73 | 📊 Histogram of allocation sizes: |█ ▅ |
74 | 🥇 Biggest allocating functions:
75 | - test_track:/w/demo/test_ok.py:12 -> 39.1KiB
76 | - _compile_bytecode::672 -> 7.2KiB
77 | - _call_with_frames_removed::241 -> 4.7KiB
78 | - _call_with_frames_removed::241 -> 1.8KiB
79 | - _is_marked_for_rewrite:/v/lib/python3.10/site-packages/_pytest/assertion/rewrite.py:240 -> 1.1KiB
80 |
81 |
82 | ================================= short test summary info ==================================
83 | MEMORY PROBLEMS demo/test_ok.py::test_memory_exceed
84 | =============================== 1 failed, 1 passed in 0.01s ================================
85 | ```
86 |
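The failing test in the report above comes from the bundled demo suite. A test that caps
its own peak memory looks like this (taken from `docs/demo/test_ok.py` in this repository,
with an added comment):

```python
import pytest


@pytest.mark.limit_memory("100 KB")
def test_memory_exceed():
    # Allocates well over the 100 KB limit, so the plugin fails the test.
    found = [[i] * 1_000 for i in range(15)]
    assert found
```
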
87 | ## Configuration - CLI flags
88 |
89 | - `--memray` - activate memray tracking
90 | - `--most-allocations=MOST_ALLOCATIONS` - show the N tests that allocate most memory
91 | (N=0 for all)
92 | - `--hide-memray-summary` - hide the memray summary at the end of the execution
93 | - `--memray-bin-path` - path where to write the memray binary dumps (by default a
94 | temporary folder)
95 | - `--memray-bin-prefix` - prefix to use for the binary dump (by default a random UUID4
96 | hex)
97 | - `--stacks=STACKS` - Show the N stack entries when showing tracebacks of memory allocations
98 | - `--native` - Show native frames when showing tracebacks of memory allocations (will be slower)
99 | - `--trace-python-allocators` - Record allocations made by the Pymalloc allocator (will be slower)
100 | - `--fail-on-increase` - Fail a test with the `limit_memory` marker if it uses
101 | more memory than its last successful run
102 |
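If you drive pytest programmatically (for example from a small CI or release script), the
same flags can be passed to `pytest.main`. A minimal sketch; the `tests` path and the
`--most-allocations=3` value are purely illustrative:

```python
import sys

import pytest

# Run the suite with memray tracking enabled and report the three tests
# that allocate the most memory (see the flags documented above).
sys.exit(pytest.main(["--memray", "--most-allocations=3", "tests"]))
```
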
103 | ## Configuration - INI
104 |
105 | - `memray(bool)` - activate memray tracking
106 | - `most-allocations(string)` - show the N tests that allocate most memory (N=0 for all)
107 | - `hide_memray_summary(bool)` - hide the memray summary at the end of the execution
108 | - `stacks(int)` - Show the N stack entries when showing tracebacks of memory allocations
109 | - `native(bool)`- Show native frames when showing tracebacks of memory allocations (will be slower)
110 | - `trace_python_allocators(bool)` - Record allocations made by the Pymalloc allocator (will be slower)
111 | - `fail-on-increase(bool)` - Fail a test with the `limit_memory` marker if it
112 | uses more memory than its last successful run
113 |
114 | ## License
115 |
116 | pytest-memray is Apache-2.0 licensed, as found in the [LICENSE](LICENSE) file.
117 |
118 | ## Code of Conduct
119 |
120 | - [Code of Conduct](https://github.com/bloomberg/.github/blob/main/CODE_OF_CONDUCT.md)
121 |
122 | This project has adopted a Code of Conduct. If you have any concerns about the Code, or
123 | behavior which you have experienced in the project, please contact us at
124 | opensource@bloomberg.net.
125 |
126 | ## Security Policy
127 |
128 | - [Security Policy](https://github.com/bloomberg/pytest-memray/security/policy)
129 |
130 | If you believe you have identified a security vulnerability in this project, please send
131 | email to the project team at opensource@bloomberg.net, detailing the suspected issue and
132 | any methods you've found to reproduce it.
133 |
134 | Please do NOT open an issue in the GitHub repository, as we'd prefer to keep
135 | vulnerability reports private until we've had an opportunity to review and address them.
136 |
137 | ## Contributing
138 |
139 | We welcome your contributions to help us improve and extend this project!
140 |
141 | Below you will find some basic steps required to be able to contribute to the project.
142 | If you have any questions about this process or any other aspect of contributing to a
143 | Bloomberg open source project, feel free to email opensource@bloomberg.net, and we'll
144 | get your questions answered as quickly as we can.
145 |
146 | ### Contribution Licensing
147 |
148 | Since this project is distributed under the terms of an [open source license](LICENSE),
149 | contributions that you make are licensed under the same terms. In order for us to be
150 | able to accept your contributions, we will need explicit confirmation from you that you
151 | are able and willing to provide them under these terms, and the mechanism we use to do
152 | this is called a Developer's Certificate of Origin
153 | [(DCO)](https://github.com/bloomberg/.github/blob/main/DCO.md). This is very similar to
154 | the process used by the Linux(R) kernel, Samba, and many other major open source
155 | projects.
156 |
157 | To participate under these terms, all that you must do is include a line like the
158 | following as the last line of the commit message for each commit in your contribution:
159 |
160 | ```git
161 | Signed-Off-By: Random J. Developer
162 | ```
163 |
164 | The simplest way to accomplish this is to add `-s` or `--signoff` to your `git commit`
165 | command.
166 |
167 | You must use your real name (sorry, no pseudonyms, and no anonymous contributions).
168 |
169 | ### Steps
170 |
171 | - Create an Issue, selecting 'Feature Request', and explain the proposed change.
172 | - Follow the guidelines in the issue template presented to you.
173 | - Submit the Issue.
174 | - Submit a Pull Request and link it to the Issue by including "#<issue number>" in the
175 | Pull Request summary.
176 |
177 | ### Development
178 |
179 | The project requires Linux to work. To set up a development environment, use tox (or
180 | the make targets directly). On other operating systems you can run the test suite with
181 | Docker (you can parametrize tox by passing additional arguments at the end):
182 |
183 | ```shell
184 | docker-compose run --rm test tox
185 | ```
186 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3.9"
2 | services:
3 | test:
4 | image: "registry.gitlab.com/python-devs/ci-images:active"
5 | volumes:
6 | - .:/w:z
7 | working_dir: /w
8 | command: ["tox"]
9 |
--------------------------------------------------------------------------------
/docs/_static/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bloomberg/pytest-memray/de620779bb62244d3d91ff050318384a5221ca90/docs/_static/images/logo.png
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | .. module:: pytest_memray
2 |
3 | pytest-memray API
4 | =================
5 |
6 | Types
7 | -----
8 |
9 | .. autoclass:: LeaksFilterFunction()
10 | :members: __call__
11 | :show-inheritance:
12 |
13 | .. autoclass:: Stack()
14 | :members:
15 |
16 | .. autoclass:: StackFrame()
17 | :members:
18 |
19 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | """Sphinx configuration file for pytest-memray documentation."""
2 | from __future__ import annotations
3 |
4 | import sys
5 | from pathlib import Path
6 | from subprocess import check_output
7 |
8 | from sphinx.application import Sphinx
9 | from sphinxcontrib.programoutput import Command
10 |
11 | extensions = [
12 | "sphinx.ext.autodoc",
13 | "sphinx.ext.extlinks",
14 | "sphinx.ext.githubpages",
15 | "sphinx.ext.intersphinx",
16 | "sphinxarg.ext",
17 | "sphinx_inline_tabs",
18 | "sphinxcontrib.programoutput",
19 | ]
20 | exclude_patterns = ["_build", "news/*", "_draft.rst"]
21 | project = "pytest-memray"
22 | author = "Pablo Galindo Salgado"
23 | html_title = project
24 | html_theme = "furo"
25 | html_static_path = ["_static"]
26 | html_logo = "_static/images/logo.png"
27 | html_theme_options = {
28 | "sidebar_hide_name": True,
29 | }
30 | extlinks = {
31 | "user": ("https://github.com/%s", "@%s"),
32 | "issue": ("https://github.com/bloomberg/pytest-memray/issues/%s", "#%s"),
33 | }
34 | programoutput_prompt_template = "$ pytest --memray /w/demo \n{output}"
35 | prev = Command.get_output
36 | here = Path(__file__).parent
37 | linkcheck_allowed_redirects = {
38 | "https://github.com/bloomberg/pytest-memray/issues/.*": "https://github.com/bloomberg/pytest-memray/pull/.*"
39 | }
40 |
41 | # Try to resolve Sphinx references as Python objects by default. This means we
42 | # don't need :func: or :class: etc, which keep docstrings more human readable.
43 | default_role = "py:obj"
44 |
45 | # Automatically link to Python standard library types.
46 | intersphinx_mapping = {
47 | "python": ("https://docs.python.org/3", None),
48 | }
49 |
50 |
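# Scrub machine-specific paths from captured command output: the virtualenv
# prefix becomes "/v" and the docs directory becomes "/w", matching the
# placeholders shown in the rendered examples.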
51 | def _get_output(self):
52 | code, out = prev(self)
53 | out = out.replace(str(Path(sys.executable).parents[1]), "/v")
54 | out = out.replace(str(here), "/w")
55 | return code, out
56 |
57 |
58 | Command.get_output = _get_output
59 |
60 |
61 | def setup(app: Sphinx) -> None:
62 | here = Path(__file__).parent
63 | root, exe = here.parent, Path(sys.executable)
64 | towncrier = exe.with_name(f"towncrier{exe.suffix}")
65 | cmd = [str(towncrier), "build", "--draft", "--version", "NEXT"]
66 | new = check_output(cmd, cwd=root, text=True)
67 | to = root / "docs" / "_draft.rst"
68 | to.write_text("" if "No significant changes" in new else new)
69 |
--------------------------------------------------------------------------------
/docs/configuration.rst:
--------------------------------------------------------------------------------
1 | Configuration
2 | =============
3 |
4 | This plugin provides a clean, minimal set of command line options that are added to pytest.
5 | You can also specify most options in the ``pytest.ini`` file.
6 | The complete list of command line options is:
7 |
8 | .. tab:: Command line options
9 |
10 | ``--memray``
11 | Activate memray tracking.
12 |
13 | ``--most-allocations=MOST_ALLOCATIONS``
14 | Show the N tests that allocate most memory (N=0 for all).
15 |
16 | ``--hide-memray-summary``
17 | Hide the memray summary at the end of the execution.
18 |
19 | ``--memray-bin-path``
20 | Path where to write the memray binary dumps (by default a temporary folder).
21 |
22 | ``--memray-bin-prefix``
23 | Prefix to use for the binary dump (by default a random UUID4 hex)
24 |
25 | ``--stacks=STACKS``
26 | Show the N most recent stack entries when showing tracebacks of memory allocations
27 |
28 | ``--native``
29 | Include native frames when showing tracebacks of memory allocations (will be slower)
30 |
31 | ``--trace-python-allocators``
32 | Record allocations made by the Pymalloc allocator (will be slower)
33 |
34 | ``--fail-on-increase``
35 | Fail a test with the limit_memory marker if it uses more memory than its last successful run
36 |
37 | .. tab:: Config file options
38 |
39 | ``memray(bool)``
40 | Activate memray tracking.
41 |
42 | ``most_allocations(int)``
43 | Show the N tests that allocate most memory (N=0 for all, default=5).
44 |
45 | ``hide_memray_summary(bool)``
46 | Hide the memray summary at the end of the execution.
47 |
48 | ``stacks(int)``
49 | Show the N most recent stack entries when showing tracebacks of memory allocations
50 |
51 | ``native(bool)``
52 | Include native frames when showing tracebacks of memory allocations (will be slower)
53 |
54 | ``trace_python_allocators(bool)``
55 | Record allocations made by the Pymalloc allocator (will be slower)
56 |
57 | ``fail-on-increase(bool)``
58 | Fail a test with the limit_memory marker if it uses more memory than its last successful run
59 |
--------------------------------------------------------------------------------
/docs/demo/test_ok.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import pytest
4 |
5 |
6 | def test_track():
7 | from heapq import heappush
8 |
9 | h = []
10 | for value in range(1):
11 | heappush(h, value)
12 | assert [1] * 5_000
13 |
14 |
15 | @pytest.mark.limit_memory("100 KB")
16 | def test_memory_exceed():
17 | found = [[i] * 1_000 for i in range(15)]
18 | assert found
19 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | pytest-memray
2 | =============
3 |
4 | A pytest plugin for easy integration of ``memray`` in your test suite. It can produce
5 | reports like:
6 |
7 | .. command-output:: env COLUMNS=92 pytest --memray demo
8 | :returncode: 1
9 |
10 | .. toctree::
11 | :maxdepth: 2
12 | :hidden:
13 |
14 | usage
15 | configuration
16 | api
17 | news
18 |
--------------------------------------------------------------------------------
/docs/news.rst:
--------------------------------------------------------------------------------
1 | Release History
2 | ===============
3 |
4 | .. include:: _draft.rst
5 |
6 | .. towncrier release notes start
7 |
8 | v1.7.0 (2024-07-25)
9 | -------------------
10 |
11 | No significant changes.
12 |
13 |
14 | v1.6.0 (2024-04-18)
15 | -------------------
16 |
17 | Features - 1.6.0
18 | ~~~~~~~~~~~~~~~~
19 | - Add a new --fail-on-increase option that fails a test with the ``limit_memory`` marker if it uses more memory than its last successful run. (:issue:`91`)
20 | - Use aggregated capture files, reducing the amount of temporary disk space required in order to run tests. (:issue:`107`)
21 | - Add a new ``current_thread_only`` keyword argument to the ``limit_memory`` and
22 | ``limit_leaks`` markers to ignore all allocations made in threads other than
23 | the one running the test. (:issue:`117`)
24 |
25 | Bug Fixes - 1.6.0
26 | ~~~~~~~~~~~~~~~~~
27 | - Fix the generation of histograms when the tests performed zero-byte allocations. (:issue:`113`)
28 |
29 | v1.5.0 (2023-08-23)
30 | -------------------
31 |
32 | Features - 1.5.0
33 | ~~~~~~~~~~~~~~~~
34 |
35 | - Add a new ``limit_leaks`` marker to check for memory leaks in tests. (:issue:`45`)
36 | - Support passing ``--trace-python-allocators`` to memray to track all Python allocations. (:issue:`78` and :issue:`64`)
37 |
38 | v1.4.1 (2023-06-06)
39 | -------------------
40 |
41 | Bug Fixes - 1.4.1
42 | ~~~~~~~~~~~~~~~~~
43 |
44 | - Fix long test names with xdist (:issue:`68`)
45 |
46 | v1.4.0 (2022-12-02)
47 | -------------------
48 |
49 | Features - 1.4.0
50 | ~~~~~~~~~~~~~~~~
51 | - Allow running tests marked with memray markers without having to provide "--memray" on the command line. (:issue:`57`)
52 | - Add two new options that allow customizing the amount of frames in allocation tracebacks, as well as including hybrid stack traces. (:issue:`58`)
53 |
54 | Bug Fixes - 1.4.0
55 | ~~~~~~~~~~~~~~~~~
56 | - Fix pytest raising ``pytest.PytestReturnNotNoneWarning`` from test decorated with memray markers. (:issue:`60`)
57 |
58 |
59 | v1.3.2 (2022-11-30)
60 | -------------------
61 |
62 | Bug Fixes - 1.3.2
63 | ~~~~~~~~~~~~~~~~~
64 | - Make the plugin compatible with ``pytest-xdist``
65 |
66 | v1.3.1 (2022-11-14)
67 | -------------------
68 |
69 | Bug Fixes - 1.3.1
70 | ~~~~~~~~~~~~~~~~~
71 | - Declare 3.11 support
72 | - Fix incompatibility with the ``flaky`` plugin
73 |
74 | v1.3.0 (2022-08-21)
75 | -------------------
76 |
77 | Features - 1.3.0
78 | ~~~~~~~~~~~~~~~~
79 | - Ensure Python 3.11 support - by :user:`gaborbernat`. (:issue:`18`)
80 |
81 | v1.2.0 (2022-05-26)
82 | -------------------
83 |
84 | Features - 1.2.0
85 | ~~~~~~~~~~~~~~~~
86 | - Allow specifying the prefix used for ``--memray-bin-path`` dumps via the
87 | ``--memray-bin-prefix`` flag (if specified and the file already exists, it will be recreated) -
88 | by :user:`gaborbernat`. (:issue:`28`)
89 |
90 | Improved Documentation - 1.2.0
91 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
92 | - Fix documentation links to point from GitHub Pages to readthedocs.org - by :user:`gaborbernat`. (:issue:`12`)
93 | - Update examples in configuration and add ``--memray-bin-path`` - by :user:`gaborbernat`. (:issue:`26`)
94 | - Fix minimum python version in documentation from 3.7 to 3.8 - by :user:`ChaoticRoman`. (:issue:`30`)
95 |
96 | v1.1.0 (2022-05-17)
97 | -------------------
98 |
99 | Features - 1.1.0
100 | ~~~~~~~~~~~~~~~~
101 | - Report memory limit and allocated memory in longrepr - by :user:`petr-tik`. (:issue:`5`)
102 | - Allow passing ``--memray-bin-path`` argument to the CLI to allow
103 | persisting the binary dumps - by :user:`gaborbernat`. (:issue:`10`)
104 | - Release a pure python wheel - by :user:`gaborbernat`. (:issue:`11`)
105 | - Switch build backend from ``setuptools`` to ``hatchling`` - by :user:`gaborbernat`. (:issue:`12`)
106 |
107 | Bug Fixes - 1.1.0
108 | ~~~~~~~~~~~~~~~~~
109 | - Fix a bug that caused the built-in junit-xml results writer to fail - by :user:`petr-tik`. (:issue:`3`)
110 |
111 | Improved Documentation - 1.1.0
112 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
113 | - Move documentation from Github Pages to readthedocs - by :user:`gaborbernat`. (:issue:`20`)
114 |
115 |
116 | v1.0.0 (2022-04-09)
117 | -------------------
118 |
119 | - Initial release.
120 |
--------------------------------------------------------------------------------
/docs/news/template.jinja2:
--------------------------------------------------------------------------------
1 | {% set top_underline = underlines[0] %}
2 | {% if versiondata.name %}
3 | v{{ versiondata.version }} ({{ versiondata.date }})
4 | {{ top_underline * ((versiondata.version + versiondata.date)|length + 4)}}
5 | {% else %}
6 | {{ versiondata.version }} ({{ versiondata.date }})
7 | {{ top_underline * ((versiondata.version + versiondata.date)|length + 3)}}
8 | {% endif %}
9 |
10 | {% for section, _ in sections.items() %}
11 | {% set underline = underlines[1] %}
12 | {% if sections[section] %}
13 | {% for category, val in definitions.items() if category in sections[section]%}
14 | {{ definitions[category]['name'] }} - {{ versiondata.version }}
15 | {{ underline * ((definitions[category]['name'] + versiondata.version)|length + 3)}}
16 | {% if definitions[category]['showcontent'] %}
17 | {% for text, values in sections[section][category].items() %}
18 | - {{ text }} ({{ values|join(', ') }})
19 | {% endfor %}
20 |
21 | {% else %}
22 | - {{ sections[section][category]['']|join(', ') }}
23 |
24 | {% endif %}
25 | {% if sections[section][category]|length == 0 %}
26 | No significant changes.
27 |
28 | {% else %}
29 | {% endif %}
30 | {% endfor %}
31 | {% else %}
32 | No significant changes.
33 |
34 |
35 | {% endif %}
36 | {% endfor %}
37 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | Usage
2 | =====
3 |
4 | Installation
5 | ~~~~~~~~~~~~
6 |
7 | This plugin can be installed using pip:
8 |
9 |
10 | .. code-block:: shell
11 |
12 | pip install pytest-memray
13 |
14 |
15 | ``pytest-memray`` is a pytest plugin. It is enabled when you pass ``--memray`` to
16 | pytest:
17 |
18 | .. code-block:: shell
19 |
20 | pytest tests/ --memray
21 |
22 | Allocation tracking
23 | ~~~~~~~~~~~~~~~~~~~
24 |
25 | By default, the plugin will track allocations at the high watermark in all tests. This information is
26 | reported after the test run ends:
27 |
28 | .. command-output:: env COLUMNS=92 pytest --memray demo
29 | :returncode: 1
30 |
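No marker is required for this default tracking. For instance, the plain test below
(taken from ``docs/demo/test_ok.py``, with an added comment) still gets an allocation
report in the summary:

.. code-block:: python

    def test_track():
        from heapq import heappush

        # A test without any memray marker: its high-watermark allocations
        # are still recorded and reported.
        h = []
        for value in range(1):
            heappush(h, value)
        assert [1] * 5_000
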
31 | Markers
32 | ~~~~~~~
33 |
34 | This plugin provides `markers <https://docs.pytest.org/en/latest/how-to/mark.html>`__
35 | that can be used to enforce additional checks and validations on tests.
36 |
37 |
38 | .. py:function:: pytest.mark.limit_memory(memory_limit: str, current_thread_only: bool = False)
39 |
40 | Fail the execution of the test if the test allocates more peak memory than allowed.
41 |
42 | When this marker is applied to a test, it will cause the test to fail if the
43 | execution of the test allocates more memory (at the peak/high watermark) than allowed.
44 | It takes a single argument with a string indicating the maximum memory that the test
45 | can allocate.
46 |
47 | The format for the string is ``<NUMBER> ([KMGTP]B|B)``. The marker will raise
48 | ``ValueError`` if the string format cannot be parsed correctly.
49 |
50 | If the optional keyword-only argument ``current_thread_only`` is set to *True*, the
51 | plugin will only track memory allocations made by the current thread and all other
52 | allocations will be ignored.
53 |
54 | .. warning::
55 |
56 | As the Python interpreter has its own
57 | `object allocator <https://docs.python.org/3/c-api/memory.html>`__, it's possible
58 | that memory is not immediately released to the system when objects are deleted,
59 | so tests using this marker may need to give some room to account for this.
60 |
61 | Example of usage:
62 |
63 | .. code-block:: python
64 |
65 | @pytest.mark.limit_memory("24 MB")
66 | def test_foobar():
67 | pass # do some stuff that allocates memory
68 |
69 |
70 | .. py:function:: pytest.mark.limit_leaks(location_limit: str, filter_fn: LeaksFilterFunction | None = None, current_thread_only: bool = False)
71 |
72 | Fail the execution of the test if any call stack in the test leaks more memory than
73 | allowed.
74 |
75 | .. important::
76 | To detect leaks, Memray needs to intercept calls to the Python allocators and
77 | report native call frames. This adds significant overhead and will slow your
78 | tests down.
79 |
80 | When this marker is applied to a test, the plugin will analyze the memory
81 | allocations that are made while the test body runs and not freed by the time the
82 | test body function returns. It groups them by the call stack leading to the
83 | allocation, and sums the amount leaked by each **distinct call stack**. If the total
84 | amount leaked from any particular call stack is greater than the configured limit,
85 | the test will fail.
86 |
87 | .. important::
88 | It's recommended to run your API or code in a loop when utilizing this plugin.
89 | This practice helps in distinguishing genuine leaks from the "noise" generated
90 | by internal caches and other incidental allocations.
91 |
92 | The format for the string is ``<NUMBER> ([KMGTP]B|B)``. The marker will raise
93 | ``ValueError`` if the string format cannot be parsed correctly.
94 |
95 | The marker also takes an optional keyword-only argument ``filter_fn``. This argument
96 | represents a filtering function that will be called once for each distinct call
97 | stack that leaked more memory than allowed. If it returns *True*, leaks from that
98 | location will be included in the final report. If it returns *False*, leaks
99 | associated with the stack it was called with will be ignored. If all leaks are
100 | ignored, the test will not fail. This can be used to discard any known false
101 | positives.
102 |
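For instance, a filter that discards leaks attributed to the interpreter's import
machinery could look like the sketch below; ``do_some_stuff`` stands in for the code
under test and the 64 KB limit is arbitrary:

.. code-block:: python

    import pytest

    from pytest_memray import Stack


    def ignore_importlib(stack: Stack) -> bool:
        # Report only leaks whose call stack never passes through importlib,
        # which caches modules for the lifetime of the process.
        return not any("importlib" in frame.filename for frame in stack.frames)


    @pytest.mark.limit_leaks("64 KB", filter_fn=ignore_importlib)
    def test_does_not_leak():
        for _ in range(100):
            do_some_stuff()
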
103 | If the optional keyword-only argument ``current_thread_only`` is set to *True*, the
104 | plugin will only track memory allocations made by the current thread and all other
105 | allocations will be ignored.
106 |
107 | .. tip::
108 |
109 | You can pass the ``--memray-bin-path`` argument to ``pytest`` to specify
110 | a directory where Memray will store the binary files with the results. You
111 | can then use the ``memray`` CLI to further investigate the allocations and the
112 | leaks using any Memray reporters you'd like. Check `the memray docs
113 | <https://bloomberg.github.io/memray/>`_ for more
114 | information.
115 |
116 | Example of usage:
117 |
118 | .. code-block:: python
119 |
120 | @pytest.mark.limit_leaks("1 MB")
121 | def test_foobar():
122 | # Run the function we're testing in a loop to ensure
123 | # we can differentiate leaks from memory held by
124 | # caches inside the Python interpreter.
125 | for _ in range(100):
126 | do_some_stuff()
127 |
128 | .. warning::
129 | It is **very** challenging to write tests that do not "leak" memory in some way,
130 | due to circumstances beyond your control.
131 |
132 | There are many caches inside the Python interpreter itself. Just a few examples:
133 |
134 | - The `re` module caches compiled regexes.
135 | - The `logging` module caches whether a given log level is active for
136 | a particular logger the first time you try to log something at that level.
137 | - A limited number of objects of certain heavily used types are cached for reuse
138 | so that `object.__new__` does not always need to allocate memory.
139 | - The mapping from bytecode index to line number for each Python function is
140 | cached when it is first needed.
141 |
142 | There are many more such caches. Also, within pytest, any message that you log or
143 | print is captured, so that it can be included in the output if the test fails.
144 |
145 | Memray sees these all as "leaks", because something was allocated while the test
146 | ran and it was not freed by the time the test body finished. We don't know that
147 | it's due to an implementation detail of the interpreter or pytest that the memory
148 | wasn't freed. Moreover, because these caches are implementation details, the
149 | amount of memory allocated, the call stack of the allocation, and even the
150 | allocator that was used can all change from one version to another.
151 |
152 | Because of this, you will almost certainly need to allow some small amount of
153 | leaked memory per call stack, or use the ``filter_fn`` argument to filter out
154 | false-positive leak reports based on the call stack they're associated with.
155 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "hatchling.build"
3 | requires = ["hatchling>=1.12.2", "hatch-vcs>=0.3"]
4 |
5 | [project]
6 | name = "pytest-memray"
7 | description = "A simple plugin to use with pytest"
8 | readme.file = "README.md"
9 | readme.content-type = "text/markdown"
10 | license = "apache-2.0"
11 | urls."Bug Tracker" = "https://github.com/bloomberg/pytest-memray/issues"
12 | urls.Documentation = "https://pytest-memray.readthedocs.io"
13 | urls."Source Code" = "https://github.com/bloomberg/pytest-memray"
14 | authors = [
15 | { name = "Pablo Galindo Salgado", email = "pgalindo3@bloomberg.net" },
16 | ]
17 | maintainers = [
18 | { name = "Pablo Galindo Salgado", email = "pgalindo3@bloomberg.net" },
19 | ]
20 | requires-python = ">=3.8"
21 | dependencies = [
22 | "pytest>=7.2",
23 | "memray>=1.12",
24 | ]
25 | optional-dependencies.docs = [
26 | "furo>=2022.12.7",
27 | "sphinx>=6.1.3",
28 | "sphinx-argparse>=0.4",
29 | "sphinx-inline-tabs>=2022.1.2b11",
30 | "sphinxcontrib-programoutput>=0.17",
31 | "towncrier>=22.12",
32 | ]
33 | optional-dependencies.lint = [
34 | "black==22.12",
35 | "ruff==0.0.272",
36 | "isort==5.11.4",
37 | "mypy==0.991",
38 | ]
39 | optional-dependencies.test = [
40 | "anyio>=4.4.0",
41 | "covdefaults>=2.2.2",
42 | "pytest>=7.2",
43 | "coverage>=7.0.5",
44 | "flaky>=3.7",
45 | "pytest-xdist>=3.1",
46 | ]
47 | dynamic = ["version"]
48 | classifiers = [
49 | "Intended Audience :: Developers",
50 | "License :: OSI Approved :: Apache Software License",
51 | "Operating System :: POSIX :: Linux",
52 | "Programming Language :: Python :: 3.8",
53 | "Programming Language :: Python :: 3.9",
54 | "Programming Language :: Python :: 3.10",
55 | "Programming Language :: Python :: 3.11",
56 | "Programming Language :: Python :: Implementation :: CPython",
57 | "Topic :: Software Development :: Debuggers",
58 | ]
59 |
60 | [project.entry-points.pytest11]
61 | memray = "pytest_memray.plugin"
62 |
63 | [tool.hatch]
64 | build.dev-mode-dirs = ["src"]
65 | build.hooks.vcs.version-file = "src/pytest_memray/_version.py"
66 | build.hooks.vcs.template = "__version__ = \"{version}\"\n"
67 | version.source = "vcs"
68 | build.targets.sdist.include = ["/Makefile", "/tox.ini", "/src", "/tests", "/docs"]
69 |
70 | [tool.black]
71 | line-length = 88
72 |
73 | [tool.coverage]
74 | run.dynamic_context = "test_function"
75 | run.source = ["pytest_memray", "tests"]
76 | run.plugins = ["covdefaults"]
77 | run.parallel = true
78 | report.fail_under = 97
79 | html.show_contexts = true
80 | html.skip_covered = false
81 | paths.source = [
82 | "src",
83 | ".tox*/*/lib/python*/site-packages",
84 | ".tox*/pypy*/site-packages",
85 | ".tox*\\*\\Lib\\site-packages",
86 | "*/src",
87 | "*\\src",
88 | ]
89 |
90 | [tool.mypy]
91 | python_version = "3.8"
92 | show_error_codes = true
93 | strict = true
94 |
95 | [tool.isort]
96 | force_single_line = true
97 | multi_line_output = 3
98 | include_trailing_comma = true
99 | force_grid_wrap = 0
100 | use_parentheses = true
101 | line_length = 88
102 | known_first_party = ["pytest_memray"]
103 |
104 | [tool.towncrier]
105 | name = "pytest-memray"
106 | filename = "docs/news.rst"
107 | directory = "docs/news"
108 | title_format = false
109 | issue_format = ":issue:`{issue}`"
110 | template = "docs/news/template.jinja2"
111 | type = [
112 | { name = "Features", directory = "feature", showcontent = true },
113 | { name = "Deprecations and Removals", directory = "removal", showcontent = true },
114 | { name = "Bug Fixes", directory = "bugfix", showcontent = true },
115 | { name = "Improved Documentation", directory = "doc", showcontent = true },
116 | { name = "Miscellaneous", directory = "misc", showcontent = true },
117 | ]
118 |
119 | [tool.ruff]
120 | ignore = ['E501']
121 | line-length = 95
122 | select = [
123 | 'E',
124 | 'F',
125 | 'W',
126 | ]
127 | isort = {known-first-party = ["pytest_memray"], required-imports = ["from __future__ import annotations"]}
128 |
--------------------------------------------------------------------------------
/src/pytest_memray/__init__.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from ._version import __version__ as __version__
4 | from .marks import LeaksFilterFunction
5 | from .marks import Stack
6 | from .marks import StackFrame
7 |
8 | __all__ = [
9 | "__version__",
10 | "LeaksFilterFunction",
11 | "Stack",
12 | "StackFrame",
13 | ]
14 |
--------------------------------------------------------------------------------
/src/pytest_memray/marks.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from dataclasses import dataclass
4 | from pathlib import Path
5 | from typing import Iterable
6 | from typing import Optional
7 | from typing import Protocol
8 | from typing import Tuple
9 | from typing import cast
10 |
11 | from memray import AllocationRecord
12 | from memray import FileReader
13 | from pytest import Config
14 |
15 | from .utils import parse_memory_string
16 | from .utils import sizeof_fmt
17 | from .utils import value_or_ini
18 |
19 | PytestSection = Tuple[str, str]
20 |
21 |
22 | @dataclass
23 | class StackFrame:
24 | """One frame of a call stack.
25 |
26 | Each frame has attributes to tell you what code was executing.
27 | """
28 |
29 | function: str
30 | """The function being executed, or ``"???"`` if unknown."""
31 |
32 | filename: str
33 | """The source file being executed, or ``"???"`` if unknown."""
34 |
35 | lineno: int
36 | """The line number of the executing line, or ``0`` if unknown."""
37 |
38 |
39 | @dataclass
40 | class Stack:
41 | """The call stack that led to some memory allocation.
42 |
43 | You can inspect the frames which make up the call stack.
44 | """
45 |
46 | frames: Tuple[StackFrame, ...]
47 | """The frames that make up the call stack, most recent first."""
48 |
49 |
50 | class LeaksFilterFunction(Protocol):
51 | """A callable that can decide whether to ignore some memory leaks.
52 |
53 | This can be used to suppress leak reports from locations that are known to
54 | leak. For instance, you might know that objects of a certain type are
55 | cached by the code you're invoking, and so you might want to ignore all
56 | reports of leaked memory allocated below that type's constructor.
57 |
58 | You can provide any callable with the following signature as the
59 | ``filter_fn`` keyword argument for the `.limit_leaks` marker:
60 | """
61 |
62 | def __call__(self, stack: Stack) -> bool:
63 | """Return whether allocations from this stack should be reported.
64 |
65 | Return ``True`` if you want the leak to be reported, or ``False`` if
66 | you want it to be suppressed.
67 | """
68 | ...
69 |
70 |
71 | @dataclass
72 | class _MemoryInfo:
73 | """Type that holds memory-related info for a failed test."""
74 |
75 | max_memory: float
76 | allocations: list[AllocationRecord]
77 | num_stacks: int
78 | native_stacks: bool
79 | total_allocated_memory: int
80 |
81 | @property
82 | def section(self) -> PytestSection:
83 | """Return a tuple in the format expected by section reporters."""
84 | body = _generate_section_text(
85 | self.allocations, self.native_stacks, self.num_stacks
86 | )
87 | return (
88 | "memray-max-memory",
89 | "List of allocations:\n" + body,
90 | )
91 |
92 | @property
93 | def long_repr(self) -> str:
94 | """Generate a longrepr user-facing error message."""
95 | return (
96 | f"Test was limited to {sizeof_fmt(self.max_memory)} "
97 | f"but allocated {sizeof_fmt(self.total_allocated_memory)}"
98 | )
99 |
100 |
101 | @dataclass
102 | class _LeakedInfo:
103 | """Type that holds leaked memory-related info for a failed test."""
104 |
105 | max_memory: float
106 | allocations: list[AllocationRecord]
107 | num_stacks: int
108 | native_stacks: bool
109 |
110 | @property
111 | def section(self) -> PytestSection:
112 | """Return a tuple in the format expected by section reporters."""
113 | body = _generate_section_text(
114 | self.allocations, self.native_stacks, self.num_stacks
115 | )
116 | return (
117 | "memray-leaked-memory",
118 | "List of leaked allocations:\n" + body,
119 | )
120 |
121 | @property
122 | def long_repr(self) -> str:
123 | """Generate a longrepr user-facing error message."""
124 | return (
125 | f"Test was allowed to leak {sizeof_fmt(self.max_memory)} "
126 | "per location but at least one location leaked more"
127 | )
128 |
129 |
130 | @dataclass
131 | class _MoreMemoryInfo:
132 | previous_memory: float
133 | new_memory: float
134 |
135 | @property
136 | def section(self) -> PytestSection:
137 | """Return a tuple in the format expected by section reporters."""
138 | return (
139 | "memray-max-memory",
140 | "Test uses more memory than previous run",
141 | )
142 |
143 | @property
144 | def long_repr(self) -> str:
145 | """Generate a longrepr user-facing error message."""
146 | return (
147 | f"Test previously used {sizeof_fmt(self.previous_memory)} "
148 | f"but now uses {sizeof_fmt(self.new_memory)}"
149 | )
150 |
151 |
152 | def _generate_section_text(
153 | allocations: list[AllocationRecord], native_stacks: bool, num_stacks: int
154 | ) -> str:
155 | text_lines = []
156 | for record in allocations:
157 | size = record.size
158 | stack_trace = (
159 | record.hybrid_stack_trace() if native_stacks else record.stack_trace()
160 | )
161 | if not stack_trace:
162 | continue
163 | padding = " " * 4
164 | text_lines.append(f"{padding}- {sizeof_fmt(size)} allocated here:")
165 | stacks_left = num_stacks
166 | for function, file, line in stack_trace:
167 | if stacks_left <= 0:
168 | text_lines.append(f"{padding*2}...")
169 | break
170 | text_lines.append(f"{padding*2}{function}:{file}:{line}")
171 | stacks_left -= 1
172 |
173 | return "\n".join(text_lines)
174 |
175 |
176 | def _passes_filter(
177 | stack: Iterable[Tuple[str, str, int]], filter_fn: Optional[LeaksFilterFunction]
178 | ) -> bool:
179 | if filter_fn is None:
180 | return True
181 |
182 | frames = tuple(StackFrame(*frame) for frame in stack)
183 | return filter_fn(Stack(frames))
184 |
185 |
186 | def limit_memory(
187 | limit: str,
188 | *,
189 | current_thread_only: bool = False,
190 | _result_file: Path,
191 | _config: Config,
192 | _test_id: str,
193 | ) -> _MemoryInfo | _MoreMemoryInfo | None:
194 | """Limit memory used by the test."""
195 | reader = FileReader(_result_file)
196 | allocations: list[AllocationRecord] = [
197 | record
198 | for record in reader.get_high_watermark_allocation_records(
199 | merge_threads=not current_thread_only
200 | )
201 | if not current_thread_only or record.tid == reader.metadata.main_thread_id
202 | ]
203 | max_memory = parse_memory_string(limit)
204 | total_allocated_memory = sum(record.size for record in allocations)
205 |
206 | if _config.cache is not None:
207 | cache = _config.cache.get(f"memray/{_test_id}", {})
208 | previous = cache.get("total_allocated_memory", float("inf"))
209 | fail_on_increase = cast(bool, value_or_ini(_config, "fail_on_increase"))
210 | if fail_on_increase and total_allocated_memory > previous:
211 | return _MoreMemoryInfo(previous, total_allocated_memory)
212 |
213 | cache["total_allocated_memory"] = total_allocated_memory
214 | _config.cache.set(f"memray/{_test_id}", cache)
215 |
216 | if total_allocated_memory < max_memory:
217 | return None
218 | num_stacks: int = cast(int, value_or_ini(_config, "stacks"))
219 | native_stacks: bool = cast(bool, value_or_ini(_config, "native"))
220 | return _MemoryInfo(
221 | max_memory=max_memory,
222 | allocations=allocations,
223 | num_stacks=num_stacks,
224 | native_stacks=native_stacks,
225 | total_allocated_memory=total_allocated_memory,
226 | )
227 |
228 |
229 | def limit_leaks(
230 | location_limit: str,
231 | *,
232 | filter_fn: Optional[LeaksFilterFunction] = None,
233 | current_thread_only: bool = False,
234 | _result_file: Path,
235 | _config: Config,
236 | _test_id: str,
237 | ) -> _LeakedInfo | None:
238 | reader = FileReader(_result_file)
239 | allocations: list[AllocationRecord] = [
240 | record
241 | for record in reader.get_leaked_allocation_records(
242 | merge_threads=not current_thread_only
243 | )
244 | if not current_thread_only or record.tid == reader.metadata.main_thread_id
245 | ]
246 |
247 | memory_limit = parse_memory_string(location_limit)
248 |
249 | leaked_allocations = list(
250 | allocation
251 | for allocation in allocations
252 | if (
253 | allocation.size >= memory_limit
254 | and _passes_filter(allocation.hybrid_stack_trace(), filter_fn)
255 | )
256 | )
257 |
258 | if not leaked_allocations:
259 | return None
260 |
261 | num_stacks: int = max(cast(int, value_or_ini(_config, "stacks")), 5)
262 | return _LeakedInfo(
263 | max_memory=memory_limit,
264 | allocations=leaked_allocations,
265 | num_stacks=num_stacks,
266 | native_stacks=True,
267 | )
268 |
269 |
270 | __all__ = [
271 | "limit_memory",
272 | "limit_leaks",
273 | "LeaksFilterFunction",
274 | "Stack",
275 | "StackFrame",
276 | ]
277 |
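
The `LeaksFilterFunction` protocol documented above is the shape expected by the `filter_fn` keyword of the `limit_leaks` marker: the callable receives a `Stack` and returns `False` to suppress that leak report. A minimal sketch mirroring the test suite's usage (the frame name being filtered, `populate_cache`, is hypothetical):

```python
# Hypothetical filter for the limit_leaks marker; "populate_cache" is a made-up frame name.
import pytest

from pytest_memray import Stack


def ignore_known_cache(stack: Stack) -> bool:
    # Return False (suppress) for allocations made anywhere below populate_cache().
    return not any(frame.function == "populate_cache" for frame in stack.frames)


@pytest.mark.limit_leaks("2 KB", filter_fn=ignore_known_cache)
def test_does_not_leak_unexpectedly() -> None:
    data = bytearray(1024)  # released when the test returns, so not a leak
    assert len(data) == 1024
```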
--------------------------------------------------------------------------------
/src/pytest_memray/plugin.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import collections
4 | import functools
5 | import inspect
6 | import math
7 | import os
8 | import pickle
9 | import uuid
10 | from contextlib import contextmanager
11 | from dataclasses import dataclass
12 | from itertools import islice
13 | from pathlib import Path
14 | from tempfile import TemporaryDirectory
15 | from typing import Any
16 | from typing import Generator
17 | from typing import Iterable
18 | from typing import List
19 | from typing import Tuple
20 | from typing import cast
21 | from typing import Protocol
22 |
23 | from _pytest.terminal import TerminalReporter
24 | from memray import AllocationRecord
25 | from memray import FileFormat
26 | from memray import FileReader
27 | from memray import Metadata
28 | from memray import Tracker
29 | from pytest import CallInfo
30 | from pytest import CollectReport
31 | from pytest import Config
32 | from pytest import ExitCode
33 | from pytest import Function
34 | from pytest import Item
35 | from pytest import Parser
36 | from pytest import TestReport
37 | from pytest import hookimpl
38 |
39 | from .marks import limit_memory
40 | from .marks import limit_leaks
41 | from .utils import WriteEnabledDirectoryAction
42 | from .utils import positive_int
43 | from .utils import sizeof_fmt
44 | from .utils import value_or_ini
45 |
46 |
47 | class SectionMetadata(Protocol):
48 | long_repr: str
49 | section: Tuple[str, str]
50 |
51 |
52 | class PluginFn(Protocol):
53 | def __call__(
54 | *args: Any,
55 | _result_file: Path,
56 | _config: Config,
57 | _test_id: str,
58 | **kwargs: Any,
59 | ) -> SectionMetadata | None:
60 | ...
61 |
62 |
63 | MARKERS = {
64 | "limit_memory": limit_memory,
65 | "limit_leaks": limit_leaks,
66 | }
67 |
68 | N_TOP_ALLOCS = 5
69 | N_HISTOGRAM_BINS = 5
70 |
71 |
72 | def histogram(
73 | iterable: Iterable[float], low: float, high: float, bins: int
74 | ) -> list[int]:
75 | """Count elements from the iterable into evenly spaced bins
76 |
77 | >>> scores = [82, 85, 90, 91, 70, 87, 45]
78 | >>> histogram(scores, 0, 100, 10)
79 | [0, 0, 0, 0, 1, 0, 0, 1, 3, 2]
80 |
81 | """
82 | step = ((high - low) / bins) or low or 1
83 | dist = collections.Counter((x - low) // step for x in iterable)
84 | return [dist[b] for b in range(bins)]
85 |
86 |
87 | def cli_hist(data: Iterable[float], bins: int, *, log_scale: bool = True) -> str:
88 | bars = " ▁▂▃▄▅▆▇█"
89 | if log_scale:
90 | data = [math.log(number if number else 1) for number in data]
91 | low = min(data)
92 | high = max(data)
93 | data_bins = histogram(data, low=low, high=high, bins=bins)
94 | bar_indexes = (int(elem * (len(bars) - 1) / max(data_bins)) for elem in data_bins)
95 | result = "".join(bars[bar_index] for bar_index in bar_indexes)
96 | return result
97 |
98 |
99 | ResultElement = List[Tuple[object, int]]
100 |
101 |
102 | @dataclass
103 | class Result:
104 | test_id: str
105 | metadata: Metadata
106 | result_file: Path
107 |
108 |
109 | class Manager:
110 | def __init__(self, config: Config) -> None:
111 | self.results: dict[str, Result] = {}
112 | self.config = config
113 | path: Path | None = config.getvalue("memray_bin_path")
114 | self._tmp_dir: None | TemporaryDirectory[str] = None
115 | if path is None:
116 | # Check the MEMRAY_RESULT_PATH environment variable. If this
117 | # is set, it means that we are running in a worker and the main
118 | # process has set it so we'll use it as the directory to store
119 | # the results.
120 | result_path = os.getenv("MEMRAY_RESULT_PATH")
121 | if not result_path:
122 | # We are not running in a worker, so we'll create a temporary
123 | # directory to store the results. Other possible workers will
124 | # use this directory by reading the MEMRAY_RESULT_PATH environment
125 | # variable.
126 | self._tmp_dir = TemporaryDirectory()
127 | os.environ["MEMRAY_RESULT_PATH"] = self._tmp_dir.name
128 | result_path = self._tmp_dir.name
129 | self.result_path: Path = Path(result_path)
130 | else:
131 | self._tmp_dir = None
132 | self.result_path = path
133 | self._bin_prefix = config.getvalue("memray_bin_prefix") or uuid.uuid4().hex
134 | self.result_metadata_path = self.result_path / "metadata"
135 | self.result_metadata_path.mkdir(exist_ok=True, parents=True)
136 |
137 | @hookimpl(hookwrapper=True)
138 | def pytest_unconfigure(self, config: Config) -> Generator[None, None, None]:
139 | yield
140 | if self._tmp_dir is not None:
141 | self._tmp_dir.cleanup()
142 | if os.environ.get("MEMRAY_RESULT_PATH"):
143 | del os.environ["MEMRAY_RESULT_PATH"]
144 |
145 | @hookimpl(hookwrapper=True)
146 | def pytest_pyfunc_call(self, pyfuncitem: Function) -> object | None:
147 | func = pyfuncitem.obj
148 |
149 | markers = {
150 | marker.name
151 | for marker in pyfuncitem.iter_markers()
152 | if marker.name in MARKERS
153 | }
154 |
155 | if not markers and not value_or_ini(self.config, "memray"):
156 | yield
157 | return
158 |
159 | if len(markers) > 1:
160 | raise ValueError("Only one Memray marker can be applied to each test")
161 |
162 | def _build_bin_path() -> Path:
163 | if self._tmp_dir is None and not os.getenv("MEMRAY_RESULT_PATH"):
164 | of_id = pyfuncitem.nodeid.replace("::", "-")
165 | of_id = of_id.replace(os.sep, "-")
166 | name = f"{self._bin_prefix}-{of_id}.bin"
167 | else:
168 | name = f"{uuid.uuid4().hex}.bin"
169 | result_file = self.result_path / name
170 | if self._tmp_dir is None and result_file.exists():
171 | result_file.unlink()
172 | return result_file
173 |
174 | native: bool = bool(value_or_ini(self.config, "native"))
175 | trace_python_allocators: bool = bool(
176 | value_or_ini(self.config, "trace_python_allocators")
177 | )
178 |
179 | if markers and "limit_leaks" in markers:
180 | native = trace_python_allocators = True
181 |
182 | @contextmanager
183 | def memory_reporting() -> Generator[None, None, None]:
184 | # Restore the original function. This is needed because some
185 | # pytest plugins (e.g. flaky) will call our pytest_pyfunc_call
186 | # hook again with whatever is here, which will cause the wrapper
187 | # to be wrapped again.
188 | pyfuncitem.obj = func
189 |
190 | result_file = _build_bin_path()
191 | with Tracker(
192 | result_file,
193 | native_traces=native,
194 | trace_python_allocators=trace_python_allocators,
195 | file_format=FileFormat.AGGREGATED_ALLOCATIONS,
196 | ):
197 | yield
198 |
199 | try:
200 | metadata = FileReader(result_file).metadata
201 | except OSError:
202 | return
203 | result = Result(pyfuncitem.nodeid, metadata, result_file)
204 | metadata_path = (
205 | self.result_metadata_path / result_file.with_suffix(".metadata").name
206 | )
207 | with open(metadata_path, "wb") as file_handler:
208 | pickle.dump(result, file_handler)
209 | self.results[pyfuncitem.nodeid] = result
210 |
211 | @functools.wraps(func)
212 | def wrapper(*args: Any, **kwargs: Any) -> Any:
213 | with memory_reporting():
214 | return func(*args, **kwargs)
215 |
216 | @functools.wraps(func)
217 | async def async_wrapper(*args: Any, **kwargs: Any) -> Any:
218 | with memory_reporting():
219 | return await func(*args, **kwargs)
220 |
221 | if inspect.iscoroutinefunction(func):
222 | pyfuncitem.obj = async_wrapper
223 | else:
224 | pyfuncitem.obj = wrapper
225 |
226 | yield
227 |
228 | @hookimpl(hookwrapper=True)
229 | def pytest_runtest_makereport(
230 | self, item: Item, call: CallInfo[None]
231 | ) -> Generator[None, TestReport | None, TestReport | None]:
232 | outcome = yield
233 | if call.when != "call" or outcome is None:
234 | return None
235 |
236 | report = outcome.get_result()
237 | if report.when != "call" or report.outcome != "passed":
238 | return None
239 |
240 | for marker in item.iter_markers():
241 | maybe_marker_fn = MARKERS.get(marker.name)
242 | if not maybe_marker_fn:
243 | continue
244 | marker_fn: PluginFn = cast(PluginFn, maybe_marker_fn)
245 | result = self.results.get(item.nodeid)
246 | if not result:
247 | continue
248 | res = marker_fn(
249 | *marker.args,
250 | **marker.kwargs,
251 | _result_file=result.result_file,
252 | _config=self.config,
253 | _test_id=item.nodeid,
254 | )
255 | if res:
256 | report.outcome = "failed"
257 | report.longrepr = res.long_repr
258 | report.sections.append(res.section)
259 | outcome.force_result(report)
260 | return None
261 |
262 | @hookimpl(hookwrapper=True, trylast=True)
263 | def pytest_report_teststatus(
264 | self, report: CollectReport | TestReport
265 | ) -> Generator[None, TestReport, None]:
266 | outcome = yield
267 | if report.when != "call" or report.outcome != "failed":
268 | return None
269 |
270 | if any("memray" in section for section, _ in report.sections):
271 | outcome.force_result(("failed", "M", "MEMORY PROBLEMS"))
272 | return None
273 |
274 | @hookimpl
275 | def pytest_terminal_summary(
276 | self, terminalreporter: TerminalReporter, exitstatus: ExitCode
277 | ) -> None:
278 | if value_or_ini(self.config, "hide_memray_summary") or not value_or_ini(
279 | self.config, "memray"
280 | ):
281 | return
282 |
283 | terminalreporter.write_line("")
284 | terminalreporter.write_sep("=", "MEMRAY REPORT")
285 |
286 | if not self.results:
287 | # If there are no results, it is because we are likely running under
288 | # pytest-xdist and the master process is not running the tests. In
289 | # this case, we can retrieve the results from the metadata directory
290 | # instead, which is shared by all workers.
291 | for result_file in self.result_metadata_path.glob("*.metadata"):
292 | result = pickle.loads(result_file.read_bytes())
293 | self.results[result.test_id] = result
294 |
295 | total_sizes = collections.Counter(
296 | {
297 | node_id: result.metadata.peak_memory
298 | for node_id, result in self.results.items()
299 | if result.result_file.exists()
300 | }
301 | )
302 |
303 | max_results = cast(int, value_or_ini(self.config, "most_allocations"))
304 | if max_results == 0:
305 | max_results = len(total_sizes)
306 |
307 | for test_id, total_size in total_sizes.most_common(max_results):
308 | result = self.results[test_id]
309 | reader = FileReader(result.result_file)
310 | func = reader.get_high_watermark_allocation_records
311 | records = list(func(merge_threads=True))
312 | if not records:
313 | continue
314 | self._report_records_for_test(
315 | records,
316 | test_id=test_id,
317 | metadata=reader.metadata,
318 | terminalreporter=terminalreporter,
319 | )
320 | if self._tmp_dir is None:
321 | msg = f"Created {len(total_sizes)} binary dumps at {self.result_path}"
322 | msg += f" with prefix {self._bin_prefix}"
323 | terminalreporter.write_line(msg)
324 |
325 | @staticmethod
326 | def _report_records_for_test(
327 | records: Iterable[AllocationRecord],
328 | test_id: str,
329 | metadata: Metadata,
330 | terminalreporter: TerminalReporter,
331 | ) -> None:
332 | writeln = terminalreporter.write_line
333 | writeln(f"Allocation results for {test_id} at the high watermark")
334 | writeln("")
335 | writeln(f"\t 📦 Total memory allocated: {sizeof_fmt(metadata.peak_memory)}")
336 | writeln(f"\t 📏 Total allocations: {metadata.total_allocations}")
337 | sizes = [allocation.size for allocation in records]
338 | histogram_txt = cli_hist(sizes, bins=min(len(sizes), N_HISTOGRAM_BINS))
339 | writeln(f"\t 📊 Histogram of allocation sizes: |{histogram_txt}|")
340 | writeln("\t 🥇 Biggest allocating functions:")
341 | sorted_records = sorted(records, key=lambda _record: _record.size, reverse=True)
342 | for record in islice(sorted_records, N_TOP_ALLOCS):
343 | stack_trace = record.stack_trace()
344 | if not stack_trace:
345 | continue
346 | (function, file, line), *_ = stack_trace
347 | writeln(f"\t\t- {function}:{file}:{line} -> {sizeof_fmt(record.size)}")
348 | writeln("\n")
349 |
350 |
351 | def pytest_addoption(parser: Parser) -> None:
352 | group = parser.getgroup("memray")
353 | group.addoption(
354 | "--memray",
355 | action="store_true",
356 | default=False,
357 | help="Activate memray tracking",
358 | )
359 | group.addoption(
360 | "--memray-bin-path",
361 | action=WriteEnabledDirectoryAction,
362 | default=None,
363 | help="Path where to write the memray binary dumps (by default a temporary folder)",
364 | )
365 | group.addoption(
366 | "--memray-bin-prefix",
367 | default=None,
368 | help="Prefix to use for the binary dump (by default a random UUID4 hex)",
369 | )
370 | group.addoption(
371 | "--hide-memray-summary",
372 | action="store_true",
373 | default=False,
374 | help="Hide the memray summary at the end of the execution",
375 | )
376 | group.addoption(
377 | "--most-allocations",
378 | type=int,
379 | default=5,
380 | help="Show the N tests that allocate most memory (N=0 for all)",
381 | )
382 | group.addoption(
383 | "--stacks",
384 | type=positive_int,
385 | default=1,
386 | help="Show the N stack entries when showing tracebacks of memory allocations",
387 | )
388 | group.addoption(
389 | "--native",
390 | action="store_true",
391 | default=False,
392 | help="Show native frames when showing tracebacks of memory allocations "
393 | "(will be slower)",
394 | )
395 | group.addoption(
396 | "--trace-python-allocators",
397 | action="store_true",
398 | default=False,
399 | help="Record allocations made by the Pymalloc allocator (will be slower)",
400 | )
401 | group.addoption(
402 | "--fail-on-increase",
403 | action="store_true",
404 | default=False,
405 | help="Fail a test with the limit_memory marker if it uses more memory than its last successful run",
406 | )
407 |
408 | parser.addini("memray", "Activate pytest.ini setting", type="bool")
409 | parser.addini(
410 | "hide_memray_summary",
411 | "Hide the memray summary at the end of the execution",
412 | type="bool",
413 | )
414 | parser.addini(
415 | "stacks",
416 | help="Show the N stack entries when showing tracebacks of memory allocations",
417 | type="string",
418 | )
419 | parser.addini(
420 | "native",
421 | help="Show native frames when showing tracebacks of memory allocations "
422 | "(will be slower)",
423 | type="bool",
424 | )
425 | parser.addini(
426 | "trace_python_allocators",
427 | help="Record allocations made by the Pymalloc allocator (will be slower)",
428 | type="bool",
429 | )
430 | parser.addini(
431 | "fail-on-increase",
432 | help="Fail a test with the limit_memory marker if it uses more memory than its last successful run",
433 | type="bool",
434 | )
435 | help_msg = "Show the N tests that allocate most memory (N=0 for all)"
436 | parser.addini("most_allocations", help_msg)
437 |
438 |
439 | def pytest_configure(config: Config) -> None:
440 | pytest_memray = Manager(config)
441 | config.pluginmanager.register(pytest_memray, "memray_manager")
442 |
443 | for marker, marker_fn in MARKERS.items():
444 | [args, *_] = inspect.getfullargspec(marker_fn)
445 | line = f"{marker}({', '.join(args)}): {marker_fn.__doc__}"
446 | config.addinivalue_line("markers", line)
447 |
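
`histogram` buckets values into evenly spaced bins and `cli_hist` renders those bins as Unicode block characters (log-scaled by default); together they produce the allocation-size bar shown in the terminal summary. A standalone sketch, reusing the values from the `histogram` docstring (the allocation sizes passed to `cli_hist` are arbitrary):

```python
# Standalone sketch of the summary helpers defined in this module.
from pytest_memray.plugin import cli_hist, histogram

scores = [82, 85, 90, 91, 70, 87, 45]
print(histogram(scores, 0, 100, 10))  # [0, 0, 0, 0, 1, 0, 0, 1, 3, 2]

sizes = [1024, 2048, 4096, 512 * 1024]  # arbitrary allocation sizes in bytes
print(cli_hist(sizes, bins=4))  # a 4-character bar, one block character per bin
```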
--------------------------------------------------------------------------------
/src/pytest_memray/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bloomberg/pytest-memray/de620779bb62244d3d91ff050318384a5221ca90/src/pytest_memray/py.typed
--------------------------------------------------------------------------------
/src/pytest_memray/utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import argparse
4 | import os
5 | import re
6 | from argparse import Action
7 | from argparse import ArgumentParser
8 | from argparse import Namespace
9 | from pathlib import Path
10 | from typing import Sequence
11 |
12 | from pytest import Config
13 |
14 |
15 | def sizeof_fmt(num: int | float, suffix: str = "B") -> str:
16 | for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
17 | if abs(num) < 1024.0:
18 | return f"{num:3.1f}{unit}{suffix}"
19 | num /= 1024.0
20 | return f"{num:.1f}{'Yi'}{suffix}"
21 |
22 |
23 | UNIT_REGEXP = re.compile(
24 | r"""
25 |     (?P<quantity>\+?\d*\.\d+|\+?\d+) # A number
26 | \s* # Some optional spaces
27 | (?P<unit>[KMGTP]B|B) # The unit, e.g. KB, MB, B,...
28 | """,
29 | re.VERBOSE | re.IGNORECASE,
30 | )
31 | UNIT_TO_MULTIPLIER = {
32 | "B": 1,
33 | "KB": 1024**1,
34 | "MB": 1024**2,
35 | "GB": 1024**3,
36 | "TB": 1024**4,
37 | "PB": 1024**5,
38 | }
39 |
40 |
41 | def parse_memory_string(mem_str: str) -> float:
42 | match = UNIT_REGEXP.match(mem_str)
43 | if not match:
44 | raise ValueError(f"Invalid memory size format: {mem_str}")
45 | quantity, unit = match.groups()
46 | return float(quantity) * UNIT_TO_MULTIPLIER[unit.upper()]
47 |
48 |
49 | def value_or_ini(config: Config, key: str) -> object:
50 | value = config.getvalue(key)
51 | if value is not None:
52 | return value
53 | try:
54 | return config.getini(key)
55 | except (KeyError, ValueError):
56 | return value
57 |
58 |
59 | class WriteEnabledDirectoryAction(Action):
60 | def __call__(
61 | self,
62 | parser: ArgumentParser,
63 | namespace: Namespace,
64 | values: str | Sequence[str] | None,
65 | option_string: str | None = None,
66 | ) -> None:
67 | assert isinstance(values, str)
68 | folder = Path(values).absolute()
69 | if folder.exists():
70 | if folder.is_dir():
71 | if not os.access(folder, os.W_OK):
72 | parser.error(f"{folder} is read-only")
73 | else:
74 | parser.error(f"{folder} must be a directory")
75 | else:
76 | try:
77 | folder.mkdir(parents=True)
78 | except OSError as exc:
79 | parser.error(f"cannot create directory {folder} due to {exc}")
80 | setattr(namespace, self.dest, folder)
81 |
82 |
83 | def positive_int(value: str) -> int:
84 | the_int = int(value)
85 | if the_int <= 0:
86 | raise argparse.ArgumentTypeError(f"{value} is an invalid positive int value")
87 | return the_int
88 |
89 |
90 | __all__ = [
91 | "WriteEnabledDirectoryAction",
92 | "parse_memory_string",
93 | "sizeof_fmt",
94 | "value_or_ini",
95 | "positive_int",
96 | ]
97 |
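
`parse_memory_string` converts the human-readable limits accepted by the markers into a byte count using 1024-based multipliers, and `sizeof_fmt` formats byte counts back into the binary suffixes used in the reports. A quick sketch:

```python
# Quick sanity checks for the helpers above (1024-based units, binary suffixes).
from pytest_memray.utils import parse_memory_string, sizeof_fmt

assert parse_memory_string("2 KB") == 2 * 1024
assert parse_memory_string("1.5 MB") == 1.5 * 1024**2
assert sizeof_fmt(2048) == "2.0KiB"
```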
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | pytest_plugins = "pytester"
4 |
--------------------------------------------------------------------------------
/tests/test_pytest_memray.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import re
4 | import xml.etree.ElementTree as ET
5 | from types import SimpleNamespace
6 | from unittest.mock import ANY
7 | from unittest.mock import patch
8 |
9 | import pytest
10 | from memray import FileFormat
11 | from memray import Tracker
12 | from pytest import ExitCode
13 | from pytest import Pytester
14 |
15 | from pytest_memray.marks import StackFrame
16 |
17 |
18 | def extract_stacks(test_output: str) -> list[list[StackFrame]]:
19 | ret: list[list[StackFrame]] = []
20 | before_start = True
21 | for line in test_output.splitlines():
22 | if before_start:
23 | if "List of allocations:" in line:
24 | before_start = False
25 | elif "allocated here" in line:
26 | ret.append([])
27 | elif (match := re.match(r"^ {8}([^:]+):(.*):(\d+)$", line)) is not None:
28 | ret[-1].append(
29 | StackFrame(function=match[1], filename=match[2], lineno=int(match[3]))
30 | )
31 |
32 | return ret
33 |
34 |
35 | def test_help_message(pytester: Pytester) -> None:
36 | result = pytester.runpytest("--help")
37 | # fnmatch_lines does an assertion internally
38 | result.stdout.fnmatch_lines(
39 | [
40 | "memray:",
41 | "*--memray*Activate memray tracking",
42 | "*memray (bool)*",
43 | ]
44 | )
45 |
46 |
47 | def test_memray_is_called_when_activated(pytester: Pytester) -> None:
48 | pytester.makepyfile(
49 | """
50 | def test_hello_world():
51 | assert 2 == 1 + 1
52 | """
53 | )
54 |
55 | with patch("pytest_memray.plugin.Tracker") as mock:
56 | result = pytester.runpytest("--memray")
57 |
58 | mock.assert_called_once()
59 | assert result.ret == ExitCode.OK
60 |
61 |
62 | def test_memray_is_not_called_when_not_activated(pytester: Pytester) -> None:
63 | pytester.makepyfile(
64 | """
65 | def test_hello_world():
66 | assert 2 == 1 + 1
67 | """
68 | )
69 |
70 | with patch("pytest_memray.plugin.Tracker") as mock:
71 | result = pytester.runpytest()
72 |
73 | mock.assert_not_called()
74 | assert result.ret == ExitCode.OK
75 |
76 |
77 | @pytest.mark.parametrize(
78 | "size, outcome",
79 | [
80 | (1024 * 5, ExitCode.TESTS_FAILED),
81 | (1024 * 2, ExitCode.TESTS_FAILED),
82 | (1024 * 2 - 1, ExitCode.OK),
83 | (1024 * 1, ExitCode.OK),
84 | ],
85 | )
86 | def test_limit_memory_marker(pytester: Pytester, size: int, outcome: ExitCode) -> None:
87 | pytester.makepyfile(
88 | f"""
89 | import pytest
90 | from memray._test import MemoryAllocator
91 | allocator = MemoryAllocator()
92 |
93 | @pytest.mark.limit_memory("2KB")
94 | def test_memory_alloc_fails():
95 | allocator.valloc({size})
96 | allocator.free()
97 | """
98 | )
99 |
100 | result = pytester.runpytest("--memray")
101 |
102 | assert result.ret == outcome
103 |
104 |
105 | def test_limit_memory_marker_does_work_if_memray_not_passed(
106 | pytester: Pytester,
107 | ) -> None:
108 | pytester.makepyfile(
109 | """
110 | import pytest
111 | from memray._test import MemoryAllocator
112 | allocator = MemoryAllocator()
113 |
114 | @pytest.mark.limit_memory("2KB")
115 | def test_memory_alloc_fails():
116 | allocator.valloc(4*1024)
117 | allocator.free()
118 | """
119 | )
120 |
121 | result = pytester.runpytest()
122 |
123 | assert result.ret == ExitCode.TESTS_FAILED
124 |
125 |
126 | @pytest.mark.parametrize(
127 | "memlimit, mem_to_alloc",
128 | [(5, 100), (10, 200)],
129 | )
130 | def test_memray_with_junit_xml_error_msg(
131 | pytester: Pytester, memlimit: int, mem_to_alloc: int
132 | ):
133 | xml_output_file = pytester.makefile(".xml", "")
134 | pytester.makepyfile(
135 | f"""
136 | import pytest
137 | from memray._test import MemoryAllocator
138 | allocator = MemoryAllocator()
139 |
140 | @pytest.mark.limit_memory("{memlimit}B")
141 | def test_memory_alloc_fails():
142 | allocator.valloc({mem_to_alloc})
143 | allocator.free()
144 | """
145 | )
146 | result = pytester.runpytest("--memray", "--junit-xml", xml_output_file)
147 | assert result.ret == ExitCode.TESTS_FAILED
148 |
149 | expected = f"Test was limited to {memlimit}.0B but allocated {mem_to_alloc}.0B"
150 | root = ET.parse(str(xml_output_file)).getroot()
151 | for testcase in root.iter("testcase"):
152 | failure = testcase.find("failure")
153 | assert expected in failure.text
154 |
155 |
156 | def test_memray_with_junit_xml(pytester: Pytester) -> None:
157 | pytester.makepyfile(
158 | """
159 | import pytest
160 | from memray._test import MemoryAllocator
161 | allocator = MemoryAllocator()
162 |
163 | @pytest.mark.limit_memory("1B")
164 | def test_memory_alloc_fails():
165 | allocator.valloc(1234)
166 | allocator.free()
167 | """
168 | )
169 | path = str(pytester.path / "blech.xml")
170 | result = pytester.runpytest("--memray", "--junit-xml", path)
171 | assert result.ret == ExitCode.TESTS_FAILED
172 |
173 |
174 | @pytest.mark.parametrize("num_stacks", [1, 5, 100])
175 | def test_memray_report_limit_number_stacks(num_stacks: int, pytester: Pytester) -> None:
176 | pytester.makepyfile(
177 | """
178 | import pytest
179 | from memray._test import MemoryAllocator
180 | allocator = MemoryAllocator()
181 |
182 | def rec(n):
183 | if n <= 1:
184 | allocator.valloc(1024*2)
185 | allocator.free()
186 | return None
187 | return rec(n - 1)
188 |
189 |
190 | @pytest.mark.limit_memory("1kb")
191 | def test_foo():
192 | rec(10)
193 | """
194 | )
195 |
196 | result = pytester.runpytest("--memray", f"--stacks={num_stacks}")
197 |
198 | assert result.ret == ExitCode.TESTS_FAILED
199 |
200 | stacks = extract_stacks(result.stdout.str())
201 | valloc_stacks = [stack for stack in stacks if stack[0].function == "valloc"]
202 | (valloc_stack,) = valloc_stacks
203 | num_rec_frames = sum(1 for frame in valloc_stack if frame.function == "rec")
204 | assert num_rec_frames == min(num_stacks - 1, 10)
205 |
206 |
207 | @pytest.mark.parametrize("native", [True, False])
208 | def test_memray_report_native(native: bool, pytester: Pytester) -> None:
209 | pytester.makepyfile(
210 | """
211 | import pytest
212 | from memray._test import MemoryAllocator
213 | allocator = MemoryAllocator()
214 |
215 | @pytest.mark.limit_memory("1kb")
216 | def test_foo():
217 | allocator.valloc(1024*2)
218 | allocator.free()
219 | """
220 | )
221 |
222 | with patch("pytest_memray.plugin.Tracker", wraps=Tracker) as mock:
223 | result = pytester.runpytest("--memray", *(["--native"] if native else []))
224 |
225 | assert result.ret == ExitCode.TESTS_FAILED
226 |
227 | output = result.stdout.str()
228 | mock.assert_called_once_with(
229 | ANY,
230 | native_traces=native,
231 | trace_python_allocators=False,
232 | file_format=FileFormat.AGGREGATED_ALLOCATIONS,
233 | )
234 |
235 | if native:
236 | assert "MemoryAllocator_1" in output
237 | else:
238 | assert "MemoryAllocator_1" not in output
239 |
240 |
241 | @pytest.mark.parametrize("trace_python_allocators", [True, False])
242 | def test_memray_report_python_allocators(
243 | trace_python_allocators: bool, pytester: Pytester
244 | ) -> None:
245 | pytester.makepyfile(
246 | """
247 | import pytest
248 | from memray._test import PymallocMemoryAllocator
249 | from memray._test import PymallocDomain
250 |
251 | allocator = PymallocMemoryAllocator(PymallocDomain.PYMALLOC_OBJECT)
252 |
253 | def allocate_with_pymalloc():
254 | allocator.malloc(256)
255 | allocator.free()
256 |
257 | @pytest.mark.limit_memory("128B")
258 | def test_foo():
259 | allocate_with_pymalloc()
260 | """
261 | )
262 |
263 | with patch("pytest_memray.plugin.Tracker", wraps=Tracker) as mock:
264 | result = pytester.runpytest(
265 | "--memray",
266 | *(["--trace-python-allocators"] if trace_python_allocators else []),
267 | )
268 |
269 | assert result.ret == (
270 | ExitCode.TESTS_FAILED if trace_python_allocators else ExitCode.OK
271 | )
272 |
273 | output = result.stdout.str()
274 | mock.assert_called_once_with(
275 | ANY,
276 | native_traces=False,
277 | trace_python_allocators=trace_python_allocators,
278 | file_format=FileFormat.AGGREGATED_ALLOCATIONS,
279 | )
280 |
281 | if trace_python_allocators:
282 | assert "allocate_with_pymalloc" in output
283 | else:
284 | assert "allocate_with_pymalloc" not in output
285 |
286 |
287 | def test_memray_report(pytester: Pytester) -> None:
288 | pytester.makepyfile(
289 | """
290 | import pytest
291 | from memray._test import MemoryAllocator
292 | allocator = MemoryAllocator()
293 |
294 | def allocating_func1():
295 | allocator.valloc(1024)
296 | allocator.free()
297 |
298 | def allocating_func2():
299 | allocator.valloc(1024*2)
300 | allocator.free()
301 |
302 | def test_foo():
303 | allocating_func1()
304 |
305 | def test_bar():
306 | allocating_func2()
307 | """
308 | )
309 |
310 | result = pytester.runpytest("--memray")
311 |
312 | assert result.ret == ExitCode.OK
313 |
314 | output = result.stdout.str()
315 |
316 | assert "MEMRAY REPORT" in output
317 |
318 | assert "results for test_memray_report.py::test_foo" in output
319 | assert "Total memory allocated: 2.0KiB" in output
320 | assert "valloc:" in output
321 | assert "-> 2.0KiB" in output
322 |
323 | assert "results for test_memray_report.py::test_bar" in output
324 | assert "Total memory allocated: 1.0KiB" in output
325 | assert "valloc:" in output
326 | assert "-> 1.0KiB" in output
327 |
328 |
329 | def test_memray_report_is_not_shown_if_deactivated(pytester: Pytester) -> None:
330 | pytester.makepyfile(
331 | """
332 | import pytest
333 | from memray._test import MemoryAllocator
334 | allocator = MemoryAllocator()
335 |
336 | def allocating_func1():
337 | allocator.valloc(1024)
338 | allocator.free()
339 |
340 | def allocating_func2():
341 | allocator.valloc(1024*2)
342 | allocator.free()
343 |
344 | def test_foo():
345 | allocating_func1()
346 |
347 | def test_bar():
348 | allocating_func2()
349 | """
350 | )
351 |
352 | result = pytester.runpytest("--memray", "--hide-memray-summary")
353 |
354 | assert result.ret == ExitCode.OK
355 |
356 | output = result.stdout.str()
357 |
358 | assert "MEMRAY REPORT" not in output
359 |
360 | assert "results for test_memray_report.py::test_foo" not in output
361 | assert "Total memory allocated: 2.0KiB" not in output
362 | assert "valloc:" not in output
363 | assert "-> 2.0KiB" not in output
364 |
365 | assert "results for test_memray_report.py::test_bar" not in output
366 | assert "Total memory allocated: 1.0KiB" not in output
367 | assert "valloc:" not in output
368 | assert "-> 1.0KiB" not in output
369 |
370 |
371 | def test_memray_report_limit(pytester: Pytester) -> None:
372 | pytester.makepyfile(
373 | """
374 | import pytest
375 | from memray._test import MemoryAllocator
376 | allocator = MemoryAllocator()
377 |
378 | def allocating_func1():
379 | allocator.valloc(1024*1024)
380 | allocator.free()
381 |
382 | def allocating_func2():
383 | allocator.valloc(1024*1024*2)
384 | allocator.free()
385 |
386 | def test_foo():
387 | allocating_func1()
388 |
389 | def test_bar():
390 | allocating_func2()
391 | """
392 | )
393 |
394 | result = pytester.runpytest("--memray", "--most-allocations=1")
395 |
396 | assert result.ret == ExitCode.OK
397 |
398 | output = result.stdout.str()
399 |
400 | assert "results for test_memray_report_limit.py::test_foo" not in output
401 | assert "results for test_memray_report_limit.py::test_bar" in output
402 |
403 |
404 | def test_memray_report_limit_without_limit(pytester: Pytester) -> None:
405 | pytester.makepyfile(
406 | """
407 | import pytest
408 | from memray._test import MemoryAllocator
409 | allocator = MemoryAllocator()
410 |
411 | def allocating_func1():
412 | allocator.valloc(1024)
413 | allocator.free()
414 |
415 | def allocating_func2():
416 | allocator.valloc(1024*2)
417 | allocator.free()
418 |
419 | def test_foo():
420 | allocating_func1()
421 |
422 | def test_bar():
423 | allocating_func2()
424 | """
425 | )
426 |
427 | result = pytester.runpytest("--memray", "--most-allocations=0")
428 |
429 | assert result.ret == ExitCode.OK
430 |
431 | output = result.stdout.str()
432 |
433 | assert "results for test_memray_report_limit_without_limit.py::test_foo" in output
434 | assert "results for test_memray_report_limit_without_limit.py::test_bar" in output
435 |
436 |
437 | def test_failing_tests_are_not_reported(pytester: Pytester) -> None:
438 | pytester.makepyfile(
439 | """
440 | import pytest
441 | from memray._test import MemoryAllocator
442 | allocator = MemoryAllocator()
443 |
444 | def allocating_func1():
445 | allocator.valloc(1024)
446 | allocator.free()
447 |
448 | def allocating_func2():
449 | allocator.valloc(1024*2)
450 | allocator.free()
451 |
452 | def test_foo():
453 | allocating_func1()
454 |
455 | def test_bar():
456 | allocating_func2()
457 | 1/0
458 | """
459 | )
460 |
461 | result = pytester.runpytest("--memray")
462 |
463 | assert result.ret == ExitCode.TESTS_FAILED
464 |
465 | output = result.stdout.str()
466 |
467 | assert "results for test_failing_tests_are_not_reported.py::test_foo" in output
468 | assert "results for test_failing_tests_are_not_reported.py::test_bar" not in output
469 |
470 |
471 | def test_plugin_calls_tests_only_once(pytester: Pytester) -> None:
472 | pytester.makepyfile(
473 | """
474 | counter = 0
475 | def test_hello_world():
476 | global counter
477 | counter += 1
478 | assert counter < 2
479 | """
480 | )
481 |
482 | with patch("pytest_memray.plugin.Tracker") as mock:
483 | result = pytester.runpytest("--memray")
484 |
485 | mock.assert_called_once()
486 | assert result.ret == ExitCode.OK
487 |
488 |
489 | def test_bin_path(pytester: Pytester) -> None:
490 | py = """
491 | import pytest
492 |
493 | def test_a():
494 | assert [1]
495 | @pytest.mark.parametrize('i', [1, 2])
496 | def test_b(i):
497 | assert [2] * i
498 | """
499 | pytester.makepyfile(**{"magic/test_a": py})
500 | dump = pytester.path / "d"
501 | with patch("uuid.uuid4", return_value=SimpleNamespace(hex="H")) as mock:
502 | result = pytester.runpytest("--memray", "--memray-bin-path", str(dump))
503 |
504 | assert result.ret == ExitCode.OK
505 | mock.assert_called_once()
506 |
507 | assert dump.exists()
508 | assert {i.name for i in dump.iterdir()} == {
509 | "H-magic-test_a.py-test_b[2].bin",
510 | "H-magic-test_a.py-test_a.bin",
511 | "H-magic-test_a.py-test_b[1].bin",
512 | "metadata",
513 | }
514 |
515 | output = result.stdout.str()
516 | assert f"Created 3 binary dumps at {dump} with prefix H" in output
517 |
518 |
519 | @pytest.mark.parametrize("override", [True, False])
520 | def test_bin_path_prefix(pytester: Pytester, override: bool) -> None:
521 | py = """
522 | import pytest
523 | def test_t():
524 | assert [1]
525 | """
526 | pytester.makepyfile(test_a=py)
527 |
528 | bin_path = pytester.path / "p-test_a.py-test_t.bin"
529 | if override:
530 | bin_path.write_bytes(b"")
531 |
532 | args = ["--memray", "--memray-bin-path", str(pytester.path)]
533 | args.extend(["--memray-bin-prefix", "p"])
534 | result = pytester.runpytest(*args)
535 | res = list(pytester.path.iterdir())
536 |
537 | assert res
538 |
539 | assert result.ret == ExitCode.OK
540 | assert bin_path.exists()
541 |
542 |
543 | def test_plugin_works_with_the_flaky_plugin(pytester: Pytester) -> None:
544 | pytester.makepyfile(
545 | """
546 | from flaky import flaky
547 |
548 | @flaky
549 | def test_hello_world():
550 | 1/0
551 | """
552 | )
553 |
554 | with patch("pytest_memray.plugin.Tracker") as mock:
555 | result = pytester.runpytest("--memray")
556 |
557 | # Ensure that flaky called our Tracker exactly once per retry (2 times in
558 | # total), and not more often, which would happen if it had incorrectly
559 | # wrapped our plugin and called it multiple times per retry.
560 | assert mock.call_count == 2
561 | assert result.ret == ExitCode.TESTS_FAILED
562 |
563 |
564 | def test_memray_report_with_pytest_xdist(pytester: Pytester) -> None:
565 | pytester.makepyfile(
566 | """
567 | import pytest
568 | from memray._test import MemoryAllocator
569 | allocator = MemoryAllocator()
570 |
571 | def allocating_func1():
572 | allocator.valloc(1024)
573 | allocator.free()
574 |
575 | def allocating_func2():
576 | allocator.valloc(1024*2)
577 | allocator.free()
578 |
579 | @pytest.mark.parametrize("param", [("unused",)], ids=["x" * 1024])
580 | def test_foo(param):
581 | allocating_func1()
582 |
583 | def test_bar():
584 | allocating_func2()
585 | """
586 | )
587 |
588 | result = pytester.runpytest("--memray", "-n", "2")
589 |
590 | assert result.ret == ExitCode.OK
591 |
592 | output = result.stdout.str()
593 |
594 | assert "MEMRAY REPORT" in output
595 |
596 | # We don't check the exact number of memory allocated because pytest-xdist
597 | # can spawn some threads using the `execnet` library which can allocate extra
598 | # memory.
599 | assert "Total memory allocated:" in output
600 |
601 | assert "results for test_memray_report_with_pytest_xdist.py::test_foo" in output
602 | assert "valloc:" in output
603 | assert "-> 2.0KiB" in output
604 |
605 | assert "results for test_memray_report_with_pytest_xdist.py::test_bar" in output
606 | assert "valloc:" in output
607 | assert "-> 1.0KiB" in output
608 |
609 |
610 | @pytest.mark.parametrize(
611 | "size, outcome",
612 | [
613 | (1024 * 20, ExitCode.TESTS_FAILED),
614 | (1024 * 10, ExitCode.TESTS_FAILED),
615 | (1024, ExitCode.OK),
616 | ],
617 | )
618 | def test_limit_memory_marker_with_pytest_xdist(
619 | pytester: Pytester, size: int, outcome: ExitCode
620 | ) -> None:
621 | pytester.makepyfile(
622 | f"""
623 | import pytest
624 | from memray._test import MemoryAllocator
625 | allocator = MemoryAllocator()
626 |
627 | @pytest.mark.limit_memory("10KB")
628 | def test_memory_alloc_fails():
629 | allocator.valloc({size})
630 | allocator.free()
631 |
632 | @pytest.mark.limit_memory("10KB")
633 | def test_memory_alloc_fails_2():
634 | allocator.valloc({size})
635 | allocator.free()
636 | """
637 | )
638 |
639 | result = pytester.runpytest("--memray", "-n", "2")
640 | assert result.ret == outcome
641 |
642 |
643 | def test_memray_does_not_raise_warnings(pytester: Pytester) -> None:
644 | pytester.makepyfile(
645 | """
646 | import pytest
647 | from memray._test import MemoryAllocator
648 | allocator = MemoryAllocator()
649 |
650 | @pytest.mark.limit_memory("1MB")
651 | def test_memory_alloc_fails():
652 | allocator.valloc(1234)
653 | allocator.free()
654 | """
655 | )
656 | result = pytester.runpytest("-Werror", "--memray")
657 | assert result.ret == ExitCode.OK
658 |
659 |
660 | @pytest.mark.parametrize(
661 | "size, outcome",
662 | [
663 | (0, ExitCode.OK),
664 | (1, ExitCode.OK),
665 | (1024 * 1 / 10, ExitCode.OK),
666 | (1024 * 1, ExitCode.TESTS_FAILED),
667 | (1024 * 10, ExitCode.TESTS_FAILED),
668 | ],
669 | )
670 | def test_leak_marker(pytester: Pytester, size: int, outcome: ExitCode) -> None:
671 | pytester.makepyfile(
672 | f"""
673 | import pytest
674 | from memray._test import MemoryAllocator
675 | allocator = MemoryAllocator()
676 | @pytest.mark.limit_leaks("5KB")
677 | def test_memory_alloc_fails():
678 | for _ in range(10):
679 | allocator.valloc({size})
680 | # No free call here
681 | """
682 | )
683 |
684 | result = pytester.runpytest("--memray")
685 |
686 | assert result.ret == outcome
687 |
688 |
689 | @pytest.mark.parametrize(
690 | "size, outcome",
691 | [
692 | (4 * 1024, ExitCode.OK),
693 | (0.4 * 1024 * 1024, ExitCode.OK),
694 | (4 * 1024 * 1024, ExitCode.TESTS_FAILED),
695 | ],
696 | )
697 | def test_leak_marker_in_a_thread(
698 | pytester: Pytester, size: int, outcome: ExitCode
699 | ) -> None:
700 | pytester.makepyfile(
701 | f"""
702 | import pytest
703 | from memray._test import MemoryAllocator
704 | allocator = MemoryAllocator()
705 | import threading
706 | def allocating_func():
707 | for _ in range(10):
708 | allocator.valloc({size})
709 | # No free call here
710 | @pytest.mark.limit_leaks("20MB")
711 | def test_memory_alloc_fails():
712 | t = threading.Thread(target=allocating_func)
713 | t.start()
714 | t.join()
715 | """
716 | )
717 |
718 | result = pytester.runpytest("--memray")
719 | assert result.ret == outcome
720 |
721 |
722 | def test_leak_marker_filtering_function(pytester: Pytester) -> None:
723 | pytester.makepyfile(
724 | """
725 | import pytest
726 | from memray._test import MemoryAllocator
727 | LEAK_SIZE = 1024
728 | allocator = MemoryAllocator()
729 |
730 | def this_should_not_be_there():
731 | allocator.valloc(LEAK_SIZE)
732 | # No free call here
733 |
734 | def filtering_function(stack):
735 | for frame in stack.frames:
736 | if frame.function == "this_should_not_be_there":
737 | return False
738 | return True
739 |
740 | @pytest.mark.limit_leaks("5KB", filter_fn=filtering_function)
741 | def test_memory_alloc_fails():
742 | for _ in range(10):
743 | this_should_not_be_there()
744 | """
745 | )
746 |
747 | result = pytester.runpytest("--memray")
748 |
749 | assert result.ret == ExitCode.OK
750 |
751 |
752 | def test_leak_marker_does_work_if_memray_not_passed(pytester: Pytester) -> None:
753 | pytester.makepyfile(
754 | """
755 | import pytest
756 | from memray._test import MemoryAllocator
757 | allocator = MemoryAllocator()
758 | @pytest.mark.limit_leaks("0B")
759 | def test_memory_alloc_fails():
760 | allocator.valloc(512)
761 | # No free call here
762 | """
763 | )
764 |
765 | result = pytester.runpytest()
766 |
767 | assert result.ret == ExitCode.TESTS_FAILED
768 |
769 |
770 | def test_multiple_markers_are_not_supported(pytester: Pytester) -> None:
771 | pytester.makepyfile(
772 | """
773 | import pytest
774 | @pytest.mark.limit_leaks("0MB")
775 | @pytest.mark.limit_memory("0MB")
776 | def test_bar():
777 | pass
778 | """
779 | )
780 |
781 | result = pytester.runpytest("--memray")
782 | assert result.ret == ExitCode.TESTS_FAILED
783 |
784 | output = result.stdout.str()
785 | assert "Only one Memray marker can be applied to each test" in output
786 |
787 |
788 | def test_multiple_markers_are_not_supported_with_global_marker(
789 | pytester: Pytester,
790 | ) -> None:
791 | pytester.makepyfile(
792 | """
793 | import pytest
794 | pytestmark = pytest.mark.limit_memory("1 MB")
795 | @pytest.mark.limit_leaks("0MB")
796 | def test_bar():
797 | pass
798 | """
799 | )
800 |
801 | result = pytester.runpytest("--memray")
802 | assert result.ret == ExitCode.TESTS_FAILED
803 |
804 | output = result.stdout.str()
805 | assert "Only one Memray marker can be applied to each test" in output
806 |
807 |
808 | def test_fail_on_increase(pytester: Pytester):
809 | pytester.makepyfile(
810 | """
811 | import pytest
812 | from memray._test import MemoryAllocator
813 | allocator = MemoryAllocator()
814 |
815 | @pytest.mark.limit_memory("100MB")
816 | def test_memory_alloc_fails():
817 | allocator.valloc(1024)
818 | allocator.free()
819 | """
820 | )
821 | result = pytester.runpytest("--memray")
822 | assert result.ret == ExitCode.OK
823 | pytester.makepyfile(
824 | """
825 | import pytest
826 | from memray._test import MemoryAllocator
827 | allocator = MemoryAllocator()
828 |
829 | @pytest.mark.limit_memory("100MB")
830 | def test_memory_alloc_fails():
831 | allocator.valloc(1024 * 10)
832 | allocator.free()
833 | """
834 | )
835 | result = pytester.runpytest("--memray", "--fail-on-increase")
836 | assert result.ret == ExitCode.TESTS_FAILED
837 | output = result.stdout.str()
838 | assert "Test uses more memory than previous run" in output
839 | assert "Test previously used 1.0KiB but now uses 10.0KiB" in output
840 |
841 |
842 | def test_fail_on_increase_unset(pytester: Pytester):
843 | pytester.makepyfile(
844 | """
845 | import pytest
846 | from memray._test import MemoryAllocator
847 | allocator = MemoryAllocator()
848 |
849 | @pytest.mark.limit_memory("100MB")
850 | def test_memory_alloc_fails():
851 | allocator.valloc(1024)
852 | allocator.free()
853 | """
854 | )
855 | result = pytester.runpytest("--memray")
856 | assert result.ret == ExitCode.OK
857 | pytester.makepyfile(
858 | """
859 | import pytest
860 | from memray._test import MemoryAllocator
861 | allocator = MemoryAllocator()
862 |
863 | @pytest.mark.limit_memory("100MB")
864 | def test_memory_alloc_fails():
865 | allocator.valloc(1024 * 10)
866 | allocator.free()
867 | """
868 | )
869 | result = pytester.runpytest("--memray")
870 | assert result.ret == ExitCode.OK
871 |
872 |
873 | def test_limit_memory_in_current_thread(pytester: Pytester) -> None:
874 | pytester.makepyfile(
875 | """
876 | import pytest
877 | from memray._test import MemoryAllocator
878 | allocator = MemoryAllocator()
879 | import threading
880 | def allocating_func():
881 | for _ in range(10):
882 | allocator.valloc(1024*5)
883 | # No free call here
884 |
885 | @pytest.mark.limit_memory("5KB", current_thread_only=True)
886 | def test_memory_alloc_fails():
887 | t = threading.Thread(target=allocating_func)
888 | t.start()
889 | t.join()
890 | """
891 | )
892 |
893 | result = pytester.runpytest("--memray")
894 |
895 | assert result.ret == ExitCode.OK
896 |
897 |
898 | def test_leaks_in_current_thread(pytester: Pytester) -> None:
899 | pytester.makepyfile(
900 | """
901 | import pytest
902 | from memray._test import MemoryAllocator
903 | allocator = MemoryAllocator()
904 | import threading
905 | def allocating_func():
906 | for _ in range(10):
907 | allocator.valloc(1024*5)
908 | # No free call here
909 |
910 | @pytest.mark.limit_leaks("5KB", current_thread_only=True)
911 | def test_memory_alloc_fails():
912 | t = threading.Thread(target=allocating_func)
913 | t.start()
914 | t.join()
915 | """
916 | )
917 |
918 | result = pytester.runpytest("--memray")
919 |
920 | assert result.ret == ExitCode.OK
921 |
922 |
923 | def test_running_async_tests_with_anyio(pytester: Pytester) -> None:
924 | xml_output_file = pytester.makefile(".xml", "")
925 | pytester.makepyfile(
926 | """
927 | import pytest
928 | from memray._test import MemoryAllocator
929 | allocator = MemoryAllocator()
930 |
931 | @pytest.fixture
932 | def anyio_backend():
933 | return 'asyncio'
934 |
935 | @pytest.mark.limit_leaks("5KB")
936 | @pytest.mark.anyio
937 | async def test_memory_alloc_fails():
938 | for _ in range(10):
939 | allocator.valloc(1024*10)
940 | # No free call here
941 | """
942 | )
943 |
944 | result = pytester.runpytest("--junit-xml", xml_output_file)
945 |
946 | assert result.ret != ExitCode.OK
947 |
948 | root = ET.parse(str(xml_output_file)).getroot()
949 | for testcase in root.iter("testcase"):
950 | failure = testcase.find("failure")
951 | assert failure.text == (
952 | "Test was allowed to leak 5.0KiB per location"
953 | " but at least one location leaked more"
954 | )
955 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import re
4 | from argparse import ArgumentParser
5 | from argparse import Namespace
6 | from pathlib import Path
7 | from stat import S_IWGRP
8 | from stat import S_IWOTH
9 | from stat import S_IWUSR
10 | from typing import Callable
11 | from typing import NoReturn
12 | from unittest.mock import create_autospec
13 |
14 | import pytest
15 |
16 | from pytest_memray.utils import WriteEnabledDirectoryAction
17 | from pytest_memray.utils import parse_memory_string
18 | from pytest_memray.plugin import cli_hist
19 |
20 |
21 | @pytest.mark.parametrize(
22 | "the_str, expected",
23 | [
24 | ("100 B", 100),
25 | ("100B", 100),
26 | ("100.0B", 100),
27 | ("100.0 B", 100),
28 | ("100 KB", 100 * 1024),
29 | ("3 MB", 3 * 1024**2),
30 | ("3.0 GB", 3 * 1024**3),
31 | ("60.0 TB", 60 * 1024**4),
32 | ("3.14 PB", 3.14 * 1024**5),
33 | ("+100.0B", 100),
34 | ("+100.0 B", 100),
35 | ("+100 KB", 100 * 1024),
36 | ("100 Kb", 100 * 1024),
37 | ("3 Mb", 3 * 1024**2),
38 | ("3.0 Gb", 3 * 1024**3),
39 | ("60.0 Tb", 60 * 1024**4),
40 | ("3.14 Pb", 3.14 * 1024**5),
41 | ("+100.0b", 100),
42 | ("+100.0 b", 100),
43 | ("+100 Kb", 100 * 1024),
44 | ],
45 | )
46 | def test_parse_memory_string(the_str: str, expected: float) -> None:
47 | assert parse_memory_string(the_str) == expected
48 |
49 |
50 | @pytest.mark.parametrize(
51 | "the_str",
52 | [
53 | "Some bad string",
54 | "100.0",
55 | "100",
56 | "100 NB",
57 | "100 K",
58 | "100.0 PK",
59 | "100 PK",
60 | "-100 B",
61 | "-100.0 B",
62 | "+100.0 K",
63 | ],
64 | )
65 | def test_parse_incorrect_memory_string(the_str: str) -> None:
66 | with pytest.raises(ValueError):
67 | parse_memory_string(the_str)
68 |
69 |
70 | WDirCheck = Callable[[Path], Namespace]
71 |
72 |
73 | @pytest.fixture()
74 | def w_dir_check() -> WDirCheck:
75 | def _func(path: Path) -> Namespace:
76 | action = WriteEnabledDirectoryAction(option_strings=["-m"], dest="d")
77 | parser = create_autospec(ArgumentParser)
78 |
79 | def error(message: str) -> NoReturn:
80 | raise ValueError(message)
81 |
82 | parser.error.side_effect = error
83 | namespace = Namespace()
84 | action(parser, namespace, str(path))
85 | return namespace
86 |
87 | return _func
88 |
89 |
90 | def test_write_enabled_dir_ok(w_dir_check: WDirCheck, tmp_path: Path) -> None:
91 | namespace = w_dir_check(tmp_path)
92 | assert namespace.d == tmp_path
93 |
94 |
95 | def test_write_enabled_dir_is_file(w_dir_check: WDirCheck, tmp_path: Path) -> None:
96 | path = tmp_path / "a"
97 | path.write_text("")
98 | exp = f"{path} must be a directory"
99 | with pytest.raises(ValueError, match=re.escape(exp)):
100 | w_dir_check(path)
101 |
102 |
103 | def test_write_enabled_dir_read_only(w_dir_check: WDirCheck, tmp_path: Path) -> None:
104 | path = tmp_path
105 | write = S_IWUSR | S_IWGRP | S_IWOTH
106 | path.chmod(path.stat().st_mode & ~write)
107 | exp = f"{path} is read-only"
108 | try:
109 | with pytest.raises(ValueError, match=re.escape(exp)):
110 | w_dir_check(path)
111 | finally:
112 | path.chmod(path.stat().st_mode | write)
113 |
114 |
115 | def test_write_enabled_dir_cannot_create(
116 | w_dir_check: WDirCheck, tmp_path: Path
117 | ) -> None:
118 | path = tmp_path / "d"
119 | write = S_IWUSR | S_IWGRP | S_IWOTH
120 | tmp_path.chmod(tmp_path.stat().st_mode & ~write)
121 | exp = f"cannot create directory {path} due to [Errno 13] Permission denied:"
122 | try:
123 | with pytest.raises(ValueError, match=re.escape(exp)):
124 | w_dir_check(path)
125 | finally:
126 | tmp_path.chmod(tmp_path.stat().st_mode | write)
127 |
128 |
129 | def test_histogram_with_zero_byte_allocations():
130 | # GIVEN
131 | allocations = [0, 100, 990, 1000, 50000]
132 |
133 | # WHEN
134 | histogram = cli_hist(allocations, bins=5)
135 |
136 | # THEN
137 | assert histogram == "▄ ▄█▄"
138 |
139 |
140 | def test_histogram_with_only_zero_byte_allocations():
141 | # GIVEN
142 | allocations = [0, 0, 0, 0]
143 |
144 | # WHEN
145 | histogram = cli_hist(allocations, bins=5)
146 |
147 | # THEN
148 |     assert histogram == "█    "
149 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist =
3 | py312-cov
4 | py312
5 | py311
6 | py310
7 | py39
8 | py38
9 | docs
10 | lint
11 | requires = tox>=4.2
12 |
13 | [testenv]
14 | description =
15 | Run tests under {basepython}
16 | cov: with coverage
17 | passenv =
18 | CI
19 | PYTEST_*
20 | VIRTUALENV_*
21 | setenv =
22 | COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
23 | VIRTUALENV_NO_SETUPTOOLS = true
24 | VIRTUALENV_NO_WHEEL = true
25 | extras =
26 | test
27 | commands =
28 | make check
29 | allowlist_externals =
30 | make
31 | package = wheel
32 | wheel_build_env = .pkg
33 |
34 | [testenv:py312-cov]
35 | commands =
36 | make coverage
37 |
38 | [testenv:docs]
39 | description = invoke sphinx-build to build the HTML docs
40 | setenv =
41 | VIRTUALENV_NO_SETUPTOOLS = false
42 | VIRTUALENV_NO_WHEEL = false
43 | basepython = python3.10
44 | extras =
45 | docs
46 | commands =
47 | make docs
48 |
49 | [testenv:lint]
50 | description = lint code in {basepython}
51 | basepython = python3.10
52 | extras =
53 | lint
54 | commands =
55 | make lint
56 | whitelist_externals =
57 | make
58 | prettier
59 |
60 | [testenv:release]
61 | description = cut a new release
62 | setenv =
63 | {[testenv:docs]setenv}
64 | basepython = python3.10
65 | skip_install = true
66 | deps =
67 | towncrier>=22.12
68 | commands =
69 | make gen_news VERSION={posargs}
70 |
71 | [testenv:dev]
72 | description = generate a development environment
73 | setenv =
74 | {[testenv:docs]setenv}
75 | basepython = python3.10
76 | extras =
77 | docs
78 | lint
79 | test
80 | commands =
81 | python -c 'import sys; print(sys.executable)'
82 |
--------------------------------------------------------------------------------