├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE │ ├── 0-new-issue.yml │ ├── 1-new-feature.yml │ └── config.yml ├── SECURITY.md ├── dependabot.yml ├── release.yml └── workflows │ ├── check.yaml │ └── release.yaml ├── .gitignore ├── .pre-commit-config.yaml ├── LICENSE ├── README.md ├── pyproject.toml ├── src └── pipdeptree │ ├── __init__.py │ ├── __main__.py │ ├── _cli.py │ ├── _detect_env.py │ ├── _discovery.py │ ├── _freeze.py │ ├── _models │ ├── __init__.py │ ├── dag.py │ └── package.py │ ├── _render │ ├── __init__.py │ ├── freeze.py │ ├── graphviz.py │ ├── json.py │ ├── json_tree.py │ ├── mermaid.py │ └── text.py │ ├── _validate.py │ ├── _warning.py │ └── py.typed ├── tests ├── __init__.py ├── _models │ ├── test_dag.py │ └── test_package.py ├── conftest.py ├── our_types.py ├── render │ ├── test_freeze.py │ ├── test_graphviz.py │ ├── test_json_tree.py │ ├── test_mermaid.py │ ├── test_render.py │ └── test_text.py ├── test_cli.py ├── test_detect_env.py ├── test_discovery.py ├── test_freeze.py ├── test_non_host.py ├── test_pipdeptree.py ├── test_validate.py └── test_warning.py └── tox.toml /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | tidelift: "pypi/pipdeptree" 2 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/0-new-issue.yml: -------------------------------------------------------------------------------- 1 | name: Report a bug 2 | description: File a bug report to help improve pipdeptree 3 | labels: ["bug", "tobeconfirmed"] 4 | 5 | body: 6 | - type: input 7 | attributes: 8 | label: What pipdeptree version are you using? 9 | description: This can be fetched using `pipdeptree --version`. 10 | validations: 11 | required: true 12 | 13 | - type: dropdown 14 | attributes: 15 | label: Are you running pipdeptree in a virtual environment? 
16 | options: 17 | - "Yes" 18 | - "No" 19 | default: 1 20 | validations: 21 | required: true 22 | 23 | - type: textarea 24 | attributes: 25 | label: Describe the problem 26 | description: | 27 | What is the problem you are facing? 28 | 29 | If possible, provide a set of steps to reproduce the error. 30 | validations: 31 | required: true 32 | 33 | - type: markdown 34 | attributes: 35 | value: | 36 | Thanks for taking the time to file an issue! 37 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/1-new-feature.yml: -------------------------------------------------------------------------------- 1 | name: Make a feature request 2 | description: Suggest an enhancement for pipdeptree 3 | labels: ["enhancement", "tobeconfirmed"] 4 | 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: Describe the feature 9 | description: What's the problem that this feature will solve? 10 | validations: 11 | required: true 12 | 13 | - type: markdown 14 | attributes: 15 | value: | 16 | Thanks for taking the time to file a feature request! 17 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/config.yml: -------------------------------------------------------------------------------- 1 | blank_issues_enabled: false 2 | contact_links: 3 | - name: Questions 4 | url: https://github.com/tox-dev/pipdeptree/discussions 5 | about: Need help with pipdeptree? Please ask any questions here. 
6 | -------------------------------------------------------------------------------- /.github/SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | | Version | Supported | 6 | | ------- | ------------------ | 7 | | 2.3.1 + | :white_check_mark: | 8 | | < 2.3.1 | :x: | 9 | 10 | ## Reporting a Vulnerability 11 | 12 | To report a security vulnerability, please use the [Tidelift security contact](https://tidelift.com/security). Tidelift 13 | will coordinate the fix and disclosure. 14 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "github-actions" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | -------------------------------------------------------------------------------- /.github/release.yml: -------------------------------------------------------------------------------- 1 | changelog: 2 | exclude: 3 | authors: 4 | - dependabot 5 | - pre-commit-ci 6 | -------------------------------------------------------------------------------- /.github/workflows/check.yaml: -------------------------------------------------------------------------------- 1 | name: check 2 | on: 3 | workflow_dispatch: 4 | push: 5 | branches: ["main"] 6 | tags-ignore: ["**"] 7 | pull_request: 8 | schedule: 9 | - cron: "0 8 * * *" 10 | 11 | concurrency: 12 | group: check-${{ github.ref }} 13 | cancel-in-progress: true 14 | 15 | jobs: 16 | test: 17 | runs-on: ubuntu-latest 18 | strategy: 19 | fail-fast: false 20 | matrix: 21 | env: 22 | - "pypy-3.10" 23 | - "pypy-3.9" 24 | - "3.13" 25 | - "3.12" 26 | - "3.11" 27 | - "3.10" 28 | - "3.9" 29 | - type 30 | - dev 31 | - pkg_meta 32 | steps: 33 | - name: Install OS dependencies 34 | run: sudo apt-get install graphviz -y 35 | - uses: actions/checkout@v4 36 | 
with: 37 | fetch-depth: 0 38 | - name: Install the latest version of uv 39 | uses: astral-sh/setup-uv@v6 40 | with: 41 | enable-cache: true 42 | cache-dependency-glob: "pyproject.toml" 43 | github-token: ${{ secrets.GITHUB_TOKEN }} 44 | - name: Install tox 45 | run: uv tool install --python-preference only-managed --python 3.13 tox --with tox-uv 46 | - name: Install Python 47 | if: (startsWith(matrix.env, '3.') || startsWith(matrix.env, 'pypy-')) && matrix.env != '3.13' 48 | run: uv python install --python-preference only-managed ${{ matrix.env }} 49 | - name: Setup test suite 50 | run: tox run -vv --notest --skip-missing-interpreters false -e ${{ matrix.env }} 51 | - name: Run test suite 52 | run: tox run --skip-pkg-install -e ${{ matrix.env }} 53 | env: 54 | PYTEST_ADDOPTS: "-vv --durations=20" 55 | DIFF_AGAINST: HEAD 56 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: Release to PyPI 2 | on: 3 | push: 4 | tags: ["*"] 5 | 6 | env: 7 | dists-artifact-name: python-package-distributions 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v4 14 | with: 15 | fetch-depth: 0 16 | - name: Install the latest version of uv 17 | uses: astral-sh/setup-uv@v6 18 | with: 19 | enable-cache: true 20 | cache-dependency-glob: "pyproject.toml" 21 | github-token: ${{ secrets.GITHUB_TOKEN }} 22 | - name: Build package 23 | run: uv build --python 3.13 --python-preference only-managed --sdist --wheel . 
--out-dir dist 24 | - name: Store the distribution packages 25 | uses: actions/upload-artifact@v4 26 | with: 27 | name: ${{ env.dists-artifact-name }} 28 | path: dist/* 29 | 30 | release: 31 | needs: 32 | - build 33 | runs-on: ubuntu-latest 34 | environment: 35 | name: release 36 | url: https://pypi.org/project/pipdeptree/${{ github.ref_name }} 37 | permissions: 38 | id-token: write 39 | steps: 40 | - name: Download all the dists 41 | uses: actions/download-artifact@v4 42 | with: 43 | name: ${{ env.dists-artifact-name }} 44 | path: dist/ 45 | - name: Publish to PyPI 46 | uses: pypa/gh-action-pypi-publish@v1.12.4 47 | with: 48 | attestations: true 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.pyc 2 | *.egg-info 3 | dist/ 4 | .tox/ 5 | /src/pipdeptree/version.py 6 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v5.0.0 4 | hooks: 5 | - id: end-of-file-fixer 6 | - id: trailing-whitespace 7 | - repo: https://github.com/python-jsonschema/check-jsonschema 8 | rev: 0.33.0 9 | hooks: 10 | - id: check-github-workflows 11 | args: ["--verbose"] 12 | - repo: https://github.com/codespell-project/codespell 13 | rev: v2.4.1 14 | hooks: 15 | - id: codespell 16 | additional_dependencies: ["tomli>=2.0.1"] 17 | - repo: https://github.com/tox-dev/tox-toml-fmt 18 | rev: "v1.0.0" 19 | hooks: 20 | - id: tox-toml-fmt 21 | - repo: https://github.com/tox-dev/pyproject-fmt 22 | rev: "v2.6.0" 23 | hooks: 24 | - id: pyproject-fmt 25 | - repo: https://github.com/astral-sh/ruff-pre-commit 26 | rev: "v0.11.12" 27 | hooks: 28 | - id: ruff-format 29 | - id: ruff 30 | args: ["--fix", "--unsafe-fixes", "--exit-non-zero-on-fix"] 31 | - repo: 
https://github.com/rbubley/mirrors-prettier 32 | rev: "v3.5.3" 33 | hooks: 34 | - id: prettier 35 | additional_dependencies: 36 | - prettier@3.3.3 37 | - "@prettier/plugin-xml@3.4.1" 38 | - repo: meta 39 | hooks: 40 | - id: check-hooks-apply 41 | - id: check-useless-excludes 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) The pipdeptree developers 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
21 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # pipdeptree 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/pipdeptree)](https://pypi.org/project/pipdeptree/) 4 | [![Supported Python 5 | versions](https://img.shields.io/pypi/pyversions/pipdeptree.svg)](https://pypi.org/project/pipdeptree/) 6 | [![Downloads](https://static.pepy.tech/badge/pipdeptree/month)](https://pepy.tech/project/pipdeptree) 7 | [![check](https://github.com/tox-dev/pipdeptree/actions/workflows/check.yaml/badge.svg)](https://github.com/tox-dev/pipdeptree/actions/workflows/check.yaml) 8 | [![pre-commit.ci status](https://results.pre-commit.ci/badge/github/tox-dev/pipdeptree/main.svg)](https://results.pre-commit.ci/latest/github/tox-dev/pipdeptree/main) 9 | 10 | `pipdeptree` is a command line utility for displaying the installed python packages in form of a dependency tree. It 11 | works for packages installed globally on a machine as well as in a virtualenv. Since `pip freeze` shows all dependencies 12 | as a flat list, finding out which are the top level packages and which packages do they depend on requires some effort. 13 | It\'s also tedious to resolve conflicting dependencies that could have been installed because older version of `pip` 14 | didn\'t have true dependency resolution[^1]. `pipdeptree` can help here by identifying conflicting dependencies 15 | installed in the environment. 16 | 17 | To some extent, `pipdeptree` is inspired by the `lein deps :tree` command of [Leiningen](http://leiningen.org/). 18 | 19 | ## Installation 20 | 21 | ```bash 22 | pip install pipdeptree 23 | ``` 24 | 25 | ## Running in virtualenvs 26 | 27 | `New in ver. 2.0.0` 28 | 29 | If you want to run pipdeptree in the context of a particular virtualenv, you can specify the `--python` option. Note 30 | that this capability has been recently added in version `2.0.0`. 
31 | 32 | Alternatively, you may also install pipdeptree inside the virtualenv and then run it from there. 33 | 34 | As of version `2.21.0`, you may also pass `--python auto`, where it will attempt to detect your virtual environment and grab the interpreter from there. It will fail if it is unable to detect one. 35 | 36 | ## Usage and examples 37 | 38 | To give you a brief idea, here is the output of `pipdeptree` compared with `pip freeze`: 39 | 40 | ```bash 41 | $ pip freeze 42 | Flask==0.10.1 43 | itsdangerous==0.24 44 | Jinja2==2.11.2 45 | -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy 46 | MarkupSafe==0.22 47 | pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl 48 | Werkzeug==0.11.2 49 | ``` 50 | 51 | And now see what `pipdeptree` outputs, 52 | 53 | ```bash 54 | $ pipdeptree 55 | Warning!!! Possibly conflicting dependencies found: 56 | * Jinja2==2.11.2 57 | - MarkupSafe [required: >=0.23, installed: 0.22] 58 | ------------------------------------------------------------------------ 59 | Flask==0.10.1 60 | - itsdangerous [required: >=0.21, installed: 0.24] 61 | - Jinja2 [required: >=2.4, installed: 2.11.2] 62 | - MarkupSafe [required: >=0.23, installed: 0.22] 63 | - Werkzeug [required: >=0.7, installed: 0.11.2] 64 | Lookupy==0.1 65 | pipdeptree==2.0.0b1 66 | - pip [required: >=6.0.0, installed: 20.1.1] 67 | setuptools==47.1.1 68 | wheel==0.34.2 69 | ``` 70 | 71 | ## Is it possible to find out why a particular package is installed? 72 | 73 | `New in ver. 0.5.0` 74 | 75 | Yes, there\'s a `--reverse` (or simply `-r`) flag for this. To find out which packages depend on a particular 76 | package(s), it can be combined with `--packages` option as follows: 77 | 78 | ```bash 79 | $ pipdeptree --reverse --packages itsdangerous,MarkupSafe 80 | Warning!!! 
Possibly conflicting dependencies found: 81 | * Jinja2==2.11.2 82 | - MarkupSafe [required: >=0.23, installed: 0.22] 83 | ------------------------------------------------------------------------ 84 | itsdangerous==0.24 85 | - Flask==0.10.1 [requires: itsdangerous>=0.21] 86 | MarkupSafe==0.22 87 | - Jinja2==2.11.2 [requires: MarkupSafe>=0.23] 88 | - Flask==0.10.1 [requires: Jinja2>=2.4] 89 | ``` 90 | 91 | ## What\'s with the warning about conflicting dependencies? 92 | 93 | As seen in the above output, `pipdeptree` by default warns about possible conflicting dependencies. Any package that\'s 94 | specified as a dependency of multiple packages with different versions is considered as a conflicting dependency. 95 | Conflicting dependencies are possible if older version of pip\<=20.2 96 | ([without the new resolver](https://github.com/pypa/pip/issues/988)[^2]) was ever used to install dependencies at some 97 | point. The warning is printed to stderr instead of stdout and it can be completely silenced by specifying the 98 | `-w silence` or `--warn silence` option. On the other hand, it can be made more strict with `--warn fail`, in which case 99 | the command will not only print the warnings to stderr but also exit with a non-zero status code. This is useful if you 100 | want to fit this tool into your CI pipeline. 101 | 102 | **Note**: The `--warn` option is added in version `0.6.0`. If you are using an older version, use `--nowarn` flag to 103 | silence the warnings. 104 | 105 | ## Warnings about circular dependencies 106 | 107 | In case any of the packages have circular dependencies (eg. package A depends on package B and package B depends on 108 | package A), then `pipdeptree` will print warnings about that as well. 109 | 110 | ```bash 111 | $ pipdeptree --exclude pip,pipdeptree,setuptools,wheel 112 | Warning!!! 
Cyclic dependencies found: 113 | - CircularDependencyA => CircularDependencyB => CircularDependencyA 114 | - CircularDependencyB => CircularDependencyA => CircularDependencyB 115 | ------------------------------------------------------------------------ 116 | wsgiref==0.1.2 117 | argparse==1.2.1 118 | ``` 119 | 120 | Similar to the warnings about conflicting dependencies, these too are printed to stderr and can be controlled using the 121 | `--warn` option. 122 | 123 | In the above example, you can also see `--exclude` option which is the opposite of `--packages` ie. these packages will 124 | be excluded from the output. 125 | 126 | ## Using pipdeptree to write requirements.txt file 127 | 128 | If you wish to track only top level packages in your `requirements.txt` file, it\'s possible by grep-ing[^3] only the 129 | top-level lines from the output, 130 | 131 | ```bash 132 | $ pipdeptree --warn silence | grep -E '^\w+' 133 | Flask==0.10.1 134 | gnureadline==8.0.0 135 | Lookupy==0.1 136 | pipdeptree==2.0.0b1 137 | setuptools==47.1.1 138 | wheel==0.34.2 139 | ``` 140 | 141 | There is a problem here though - The output doesn\'t mention anything about `Lookupy` being installed as an _editable_ 142 | package (refer to the output of `pip freeze` above) and information about its source is lost. To fix this, `pipdeptree` 143 | must be run with a `-f` or `--freeze` flag. 
144 | 145 | ```bash 146 | $ pipdeptree -f --warn silence | grep -E '^[a-zA-Z0-9\-]+' 147 | Flask==0.10.1 148 | gnureadline==8.0.0 149 | -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy 150 | pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl 151 | setuptools==47.1.1 152 | wheel==0.34.2 153 | 154 | $ pipdeptree -f --warn silence | grep -E '^[a-zA-Z0-9\-]+' > requirements.txt 155 | ``` 156 | 157 | The freeze flag will not prefix child dependencies with hyphens, so you could dump the entire output of `pipdeptree -f` 158 | to the requirements.txt file thus making it human-friendly (due to indentations) as well as pip-friendly. 159 | 160 | ```bash 161 | $ pipdeptree -f | tee locked-requirements.txt 162 | Flask==0.10.1 163 | itsdangerous==0.24 164 | Jinja2==2.11.2 165 | MarkupSafe==0.23 166 | Werkzeug==0.11.2 167 | gnureadline==8.0.0 168 | -e git+git@github.com:naiquevin/lookupy.git@cdbe30c160e1c29802df75e145ea4ad903c05386#egg=Lookupy 169 | pipdeptree @ file:///private/tmp/pipdeptree-2.0.0b1-py3-none-any.whl 170 | pip==20.1.1 171 | setuptools==47.1.1 172 | wheel==0.34.2 173 | ``` 174 | 175 | On confirming that there are no conflicting dependencies, you can even treat this as a \"lock file\" where all packages, 176 | including the transient dependencies will be pinned to their currently installed versions. Note that the 177 | `locked-requirements.txt` file could end up with duplicate entries. Although `pip install` wouldn\'t complain about 178 | that, you can avoid duplicate lines (at the cost of losing indentation) as follows, 179 | 180 | ```bash 181 | $ pipdeptree -f | sed 's/ //g' | sort -u > locked-requirements.txt 182 | ``` 183 | 184 | ## Using pipdeptree with external tools 185 | 186 | `New in ver. 0.5.0` 187 | 188 | It\'s also possible to have `pipdeptree` output json representation of the dependency tree so that it may be used as 189 | input to other external tools. 
190 | 191 | ```bash 192 | $ pipdeptree --json 193 | ``` 194 | 195 | Note that `--json` will output a flat list of all packages with their immediate dependencies. This is not very useful in 196 | itself. To obtain nested json, use `--json-tree` 197 | 198 | `New in ver. 0.11.0` 199 | 200 | ```bash 201 | $ pipdeptree --json-tree 202 | ``` 203 | 204 | ## Visualizing the dependency graph 205 | 206 | The dependency graph can also be visualized using [GraphViz](http://www.graphviz.org/): 207 | 208 | ```bash 209 | $ pipdeptree --graph-output dot > dependencies.dot 210 | $ pipdeptree --graph-output pdf > dependencies.pdf 211 | $ pipdeptree --graph-output png > dependencies.png 212 | $ pipdeptree --graph-output svg > dependencies.svg 213 | ``` 214 | 215 | Note that `graphviz` is an optional dependency that's required only if you want to use `--graph-output`. 216 | 217 | Since version `2.0.0b1`, `--package` and `--reverse` flags are supported for all output formats ie. text, json, 218 | json-tree and graph. 219 | 220 | In earlier versions, `--json`, `--json-tree` and `--graph-output` options override `--package` and `--reverse`. 
221 | 222 | ## Usage 223 | 224 | ```bash 225 | % pipdeptree --help 226 | usage: pipdeptree [-h] [-v] [-w [{silence,suppress,fail}]] [--python PYTHON] [--path PATH] [-p P] [-e P] [-l | -u] [-f] [--encoding E] [-a] [-d D] [-r] [--license] [-j | --json-tree | --mermaid | --graph-output FMT] 227 | 228 | Dependency tree of the installed python packages 229 | 230 | options: 231 | -h, --help show this help message and exit 232 | -v, --version show program's version number and exit 233 | -w [{silence,suppress,fail}], --warn [{silence,suppress,fail}] 234 | warning control: suppress will show warnings but return 0 whether or not they are present; silence will not show warnings at all and always return 0; fail will show warnings and return 1 if any are present (default: 235 | suppress) 236 | 237 | select: 238 | choose what to render 239 | 240 | --python PYTHON Python interpreter to inspect. With "auto", it attempts to detect your virtual environment and fails if it can't. (default: /usr/local/bin/python) 241 | --path PATH Passes a path used to restrict where packages should be looked for (can be used multiple times) (default: None) 242 | -p P, --packages P comma separated list of packages to show - wildcards are supported, like 'somepackage.*' (default: None) 243 | -e P, --exclude P comma separated list of packages to not show - wildcards are supported, like 'somepackage.*'. 
(cannot combine with -p or -a) (default: None) 244 | -l, --local-only if in a virtualenv that has global access do not show globally installed packages (default: False) 245 | -u, --user-only only show installations in the user site dir (default: False) 246 | 247 | render: 248 | choose how to render the dependency tree (by default will use text mode) 249 | 250 | -f, --freeze print names so as to write freeze files (default: False) 251 | --encoding E the encoding to use when writing to the output (default: utf-8) 252 | -a, --all list all deps at top level (text and freeze render only) (default: False) 253 | -d D, --depth D limit the depth of the tree (text and freeze render only) (default: inf) 254 | -r, --reverse render the dependency tree in the reverse fashion ie. the sub-dependencies are listed with the list of packages that need them under them (default: False) 255 | --license list the license(s) of a package (text render only) (default: False) 256 | -j, --json raw JSON - this will yield output that may be used by external tools (default: False) 257 | --json-tree nested JSON - mimics the text format layout (default: False) 258 | --mermaid https://mermaid.js.org flow diagram (default: False) 259 | --graph-output FMT Graphviz rendering with the value being the graphviz output e.g.: dot, jpeg, pdf, png, svg (default: None) 260 | ``` 261 | 262 | ## Known issues 263 | 264 | 1. `pipdeptree` relies on the internal API of `pip`. I fully understand that it\'s a bad idea but it mostly works! On 265 | rare occasions, it breaks when a new version of `pip` is out with backward incompatible changes in internal API. So 266 | beware if you are using this tool in environments in which `pip` version is unpinned, specially automation or CD/CI 267 | pipelines. 268 | 269 | ## Limitations & Alternatives 270 | 271 | `pipdeptree` merely looks at the installed packages in the current environment using pip, constructs the tree, then 272 | outputs it in the specified format. 
If you want to generate the dependency tree without installing the packages, then 273 | you need a dependency resolver. You might want to check alternatives such as 274 | [pipgrip](https://github.com/ddelange/pipgrip) or [poetry](https://github.com/python-poetry/poetry). 275 | 276 | ## License 277 | 278 | MIT (See [LICENSE](./LICENSE)) 279 | 280 | ## Footnotes 281 | 282 | [^1]: 283 | pip version 20.3 has been released in Nov 2020 with the new dependency resolver 284 | (see the [pip release notes](https://pip.pypa.io/en/stable/news/)) 285 | 286 | [^2]: 287 | pip version 20.3 has been released in Nov 2020 with the new dependency resolver 288 | (see the [pip release notes](https://pip.pypa.io/en/stable/news/)) 289 | 290 | [^3]: 291 | If you are on windows (powershell) you can run `pipdeptree --warn silence | Select-String -Pattern '^\w+'` instead 292 | of grep 293 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | build-backend = "hatchling.build" 3 | requires = [ 4 | "hatch-vcs>=0.4", 5 | "hatchling>=1.27", 6 | ] 7 | 8 | [project] 9 | name = "pipdeptree" 10 | description = "Command line utility to show dependency tree of packages." 
11 | readme = "README.md" 12 | keywords = [ 13 | "application", 14 | "cache", 15 | "directory", 16 | "log", 17 | "user", 18 | ] 19 | license = "MIT" 20 | license-files = [ 21 | "LICENSE", 22 | ] 23 | maintainers = [ 24 | { name = "Bernát Gábor", email = "gaborjbernat@gmail.com" }, 25 | { name = "Vineet Naik", email = "naikvin@gmail.com" }, 26 | ] 27 | requires-python = ">=3.9" 28 | classifiers = [ 29 | "Development Status :: 5 - Production/Stable", 30 | "Environment :: Console", 31 | "Intended Audience :: Developers", 32 | "Operating System :: OS Independent", 33 | "Programming Language :: Python", 34 | "Programming Language :: Python :: 3 :: Only", 35 | "Programming Language :: Python :: 3.9", 36 | "Programming Language :: Python :: 3.10", 37 | "Programming Language :: Python :: 3.11", 38 | "Programming Language :: Python :: 3.12", 39 | "Programming Language :: Python :: 3.13", 40 | ] 41 | dynamic = [ 42 | "version", 43 | ] 44 | dependencies = [ 45 | "packaging>=24.1", 46 | "pip>=24.2", 47 | ] 48 | optional-dependencies.graphviz = [ 49 | "graphviz>=0.20.3", 50 | ] 51 | optional-dependencies.test = [ 52 | "covdefaults>=2.3", 53 | "diff-cover>=9.1.1", 54 | "pytest>=8.3.2", 55 | "pytest-cov>=5", 56 | "pytest-mock>=3.14", 57 | "virtualenv>=20.31.1,<21", 58 | ] 59 | urls.Changelog = "https://github.com/tox-dev/pipdeptree/releases" 60 | urls.Documentation = "https://github.com/tox-dev/pipdeptree/blob/main/README.md#pipdeptree" 61 | urls.Homepage = "https://github.com/tox-dev/pipdeptree" 62 | urls.Source = "https://github.com/tox-dev/pipdeptree" 63 | urls.Tracker = "https://github.com/tox-dev/pipdeptree/issues" 64 | scripts.pipdeptree = "pipdeptree.__main__:main" 65 | 66 | [tool.hatch] 67 | build.hooks.vcs.version-file = "src/pipdeptree/version.py" 68 | version.source = "vcs" 69 | 70 | [tool.ruff] 71 | line-length = 120 72 | format.preview = true 73 | format.docstring-code-line-length = 100 74 | format.docstring-code-format = true 75 | lint.select = [ 76 | "ALL", 77 | ] 
78 | lint.ignore = [ 79 | "A005", # Don't care about shadowing builtin modules 80 | "ANN101", # No type annotation for self 81 | "ANN102", # Missing type annotation for `cls` in classmethod 82 | "ANN401", # Dynamically typed expressions (typing.Any) are disallowed in 83 | "COM812", # Conflict with formatter 84 | "CPY", # No copyright statements 85 | "D104", # Missing docstring in public package 86 | "D203", # `one-blank-line-before-class` (D203) and `no-blank-line-before-class` (D211) are incompatible 87 | "D212", # `multi-line-summary-first-line` (D212) and `multi-line-summary-second-line` (D213) are incompatible 88 | "DOC201", # TODO: Read the comment for DOC501 89 | "DOC402", # TODO: Read the comment for DOC501 90 | "DOC501", # TODO: Remove this once ruff supports Sphinx-style doc-strings; see https://github.com/astral-sh/ruff/issues/12434 91 | "INP001", # no implicit namespace 92 | "ISC001", # Conflict with formatter 93 | "S101", # asserts allowed 94 | "S104", # Possible binding to all interface 95 | ] 96 | lint.per-file-ignores."tests/**/*.py" = [ 97 | "D", # don"t care about documentation in tests 98 | "FBT", # don"t care about booleans as positional arguments in tests 99 | "PLC2701", # Private import in tests 100 | "PLR0913", # any number of arguments in tests 101 | "PLR0917", # any number of arguments in tests 102 | "PLR2004", # Magic value used in comparison, consider replacing with a constant variable 103 | "S603", # `subprocess` call: check for execution of untrusted input 104 | ] 105 | lint.isort = { known-first-party = [ 106 | "pipdeptree", 107 | ], required-imports = [ 108 | "from __future__ import annotations", 109 | ] } 110 | lint.preview = true 111 | 112 | [tool.codespell] 113 | builtin = "clear,usage,en-GB_to_en-US" 114 | count = true 115 | quiet-level = 3 116 | ignore-words-list = "master" 117 | 118 | [tool.pyproject-fmt] 119 | max_supported_python = "3.13" 120 | 121 | [tool.coverage] 122 | html.show_contexts = true 123 | html.skip_covered = 
false 124 | paths.source = [ 125 | "src", 126 | ".tox/*/lib/python*/site-packages", 127 | "*/src", 128 | ] 129 | run.parallel = true 130 | run.plugins = [ 131 | "covdefaults", 132 | ] 133 | report.fail_under = 88 134 | subtract_omit = "*/__main__.py" 135 | 136 | [tool.mypy] 137 | show_error_codes = true 138 | strict = true 139 | overrides = [ 140 | { module = [ 141 | "graphviz.*", 142 | "virtualenv.*", 143 | ], ignore_missing_imports = true }, 144 | ] 145 | -------------------------------------------------------------------------------- /src/pipdeptree/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tox-dev/pipdeptree/ab3dc9419bacfa8e7672c0155ce8896e75d41171/src/pipdeptree/__init__.py -------------------------------------------------------------------------------- /src/pipdeptree/__main__.py: -------------------------------------------------------------------------------- 1 | """The main entry point used for CLI.""" 2 | 3 | from __future__ import annotations 4 | 5 | import sys 6 | from typing import TYPE_CHECKING 7 | 8 | from pipdeptree._cli import get_options 9 | from pipdeptree._detect_env import detect_active_interpreter 10 | from pipdeptree._discovery import InterpreterQueryError, get_installed_distributions 11 | from pipdeptree._models import PackageDAG 12 | from pipdeptree._render import render 13 | from pipdeptree._validate import validate 14 | from pipdeptree._warning import WarningPrinter, WarningType, get_warning_printer 15 | 16 | if TYPE_CHECKING: 17 | from collections.abc import Sequence 18 | 19 | 20 | def main(args: Sequence[str] | None = None) -> int | None: 21 | """CLI - The main function called as entry point.""" 22 | options = get_options(args) 23 | 24 | # Warnings are only enabled when using text output. 
25 | is_text_output = not any([options.json, options.json_tree, options.output_format]) 26 | if not is_text_output: 27 | options.warn = WarningType.SILENCE 28 | warning_printer = get_warning_printer() 29 | warning_printer.warning_type = options.warn 30 | 31 | if options.python == "auto": 32 | resolved_path = detect_active_interpreter() 33 | options.python = resolved_path 34 | print(f"(resolved python: {resolved_path})", file=sys.stderr) # noqa: T201 35 | 36 | try: 37 | pkgs = get_installed_distributions( 38 | interpreter=options.python, 39 | supplied_paths=options.path or None, 40 | local_only=options.local_only, 41 | user_only=options.user_only, 42 | ) 43 | except InterpreterQueryError as e: 44 | print(f"Failed to query custom interpreter: {e}", file=sys.stderr) # noqa: T201 45 | return 1 46 | 47 | tree = PackageDAG.from_pkgs(pkgs) 48 | 49 | validate(tree) 50 | 51 | # Reverse the tree (if applicable) before filtering, thus ensuring, that the filter will be applied on ReverseTree 52 | if options.reverse: 53 | tree = tree.reverse() 54 | 55 | show_only = options.packages.split(",") if options.packages else None 56 | exclude = set(options.exclude.split(",")) if options.exclude else None 57 | 58 | if show_only is not None or exclude is not None: 59 | try: 60 | tree = tree.filter_nodes(show_only, exclude) 61 | except ValueError as e: 62 | if warning_printer.should_warn(): 63 | warning_printer.print_single_line(str(e)) 64 | return _determine_return_code(warning_printer) 65 | 66 | render(options, tree) 67 | 68 | return _determine_return_code(warning_printer) 69 | 70 | 71 | def _determine_return_code(warning_printer: WarningPrinter) -> int: 72 | return 1 if warning_printer.has_warned_with_failure() else 0 73 | 74 | 75 | if __name__ == "__main__": 76 | sys.exit(main()) 77 | -------------------------------------------------------------------------------- /src/pipdeptree/_cli.py: -------------------------------------------------------------------------------- 1 | from 
class Options(Namespace):
    """Typed view of the parsed CLI arguments produced by ``build_parser`` / ``get_options``."""

    freeze: bool
    python: str  # interpreter path, or the sentinel "auto" (resolved in __main__)
    path: list[str]
    all: bool
    local_only: bool
    user_only: bool
    warn: WarningType
    reverse: bool
    packages: str  # comma-separated include patterns (-p); wildcards supported
    exclude: str  # comma-separated exclude patterns (-e); wildcards supported
    json: bool
    json_tree: bool
    mermaid: bool
    output_format: str | None  # graphviz output format from --graph-output, if any
    depth: float  # non-negative int from -d, or float("inf") when unlimited
    encoding: str
    license: bool


class _Formatter(ArgumentDefaultsHelpFormatter):
    """Help formatter with a wider layout (help column at 22, 240-char width)."""

    def __init__(self, prog: str) -> None:
        super().__init__(prog, max_help_position=22, width=240)
65 | ), 66 | ) 67 | select.add_argument( 68 | "--path", 69 | help="Passes a path used to restrict where packages should be looked for (can be used multiple times)", 70 | action="append", 71 | ) 72 | select.add_argument( 73 | "-p", 74 | "--packages", 75 | help="comma separated list of packages to show - wildcards are supported, like 'somepackage.*'", 76 | metavar="P", 77 | ) 78 | select.add_argument( 79 | "-e", 80 | "--exclude", 81 | help="comma separated list of packages to not show - wildcards are supported, like 'somepackage.*'. " 82 | "(cannot combine with -p or -a)", 83 | metavar="P", 84 | ) 85 | 86 | scope = select.add_mutually_exclusive_group() 87 | scope.add_argument( 88 | "-l", 89 | "--local-only", 90 | action="store_true", 91 | help="if in a virtualenv that has global access do not show globally installed packages", 92 | ) 93 | scope.add_argument("-u", "--user-only", action="store_true", help="only show installations in the user site dir") 94 | 95 | render = parser.add_argument_group( 96 | title="render", 97 | description="choose how to render the dependency tree (by default will use text mode)", 98 | ) 99 | render.add_argument("-f", "--freeze", action="store_true", help="print names so as to write freeze files") 100 | render.add_argument( 101 | "--encoding", 102 | dest="encoding", 103 | default=sys.stdout.encoding, 104 | help="the encoding to use when writing to the output", 105 | metavar="E", 106 | ) 107 | render.add_argument( 108 | "-a", "--all", action="store_true", help="list all deps at top level (text and freeze render only)" 109 | ) 110 | render.add_argument( 111 | "-d", 112 | "--depth", 113 | type=lambda x: int(x) if x.isdigit() and (int(x) >= 0) else parser.error("Depth must be a number that is >= 0"), 114 | default=float("inf"), 115 | help="limit the depth of the tree (text and freeze render only)", 116 | metavar="D", 117 | ) 118 | render.add_argument( 119 | "-r", 120 | "--reverse", 121 | action="store_true", 122 | default=False, 123 | help=( 
def get_options(args: Sequence[str] | None) -> Options:
    """
    Parse the CLI arguments and reject option combinations argparse cannot express.

    :param args: arguments to parse; ``None`` falls back to ``sys.argv``
    :returns: the parsed namespace, typed as :class:`Options`
    """
    parser = build_parser()
    opts = parser.parse_args(args)

    # These exclusions span different argument groups, so they are validated here
    # rather than with argparse's mutually-exclusive groups. parser.error() exits.
    rejected_combinations = (
        (opts.exclude and (opts.all or opts.packages), "cannot use --exclude with --packages or --all"),
        (opts.license and opts.freeze, "cannot use --license with --freeze"),
        (opts.path and (opts.local_only or opts.user_only), "cannot use --path with --user-only or --local-only"),
    )
    for is_rejected, message in rejected_combinations:
        if is_rejected:
            return parser.error(message)

    return cast("Options", opts)
180 | 181 | This custom action exists because argparse doesn't have support for enums. 182 | 183 | References 184 | ---------- 185 | - https://github.com/python/cpython/issues/69247#issuecomment-1308082792 186 | - https://docs.python.org/3/library/argparse.html#action-classes 187 | 188 | """ 189 | 190 | def __init__( # noqa: PLR0913, PLR0917 191 | self, 192 | option_strings: list[str], 193 | dest: str, 194 | nargs: str | None = None, 195 | const: Any | None = None, 196 | default: Any | None = None, 197 | type: Any | None = None, # noqa: A002 198 | choices: Any | None = None, 199 | required: bool = False, # noqa: FBT001, FBT002 200 | help: str | None = None, # noqa: A002 201 | metavar: str | None = None, 202 | ) -> None: 203 | if not type or not issubclass(type, enum.Enum): 204 | msg = "type must be a subclass of Enum" 205 | raise TypeError(msg) 206 | if not isinstance(default, str): 207 | msg = "default must be defined with a string value" 208 | raise TypeError(msg) 209 | 210 | choices = tuple(e.name.lower() for e in type) 211 | if default not in choices: 212 | msg = "default value should be among the enum choices" 213 | raise ValueError(msg) 214 | 215 | super().__init__( 216 | option_strings=option_strings, 217 | dest=dest, 218 | nargs=nargs, 219 | const=const, 220 | default=default, 221 | type=None, # We return None here so that we default to str. 
def detect_active_interpreter() -> str:
    """
    Attempt to detect a venv, virtualenv, poetry, or conda environment by looking for certain markers.

    Detectors run in priority order: venv/virtualenv (VIRTUAL_ENV), conda (CONDA_PREFIX),
    then poetry. The first detector that yields an existing path wins.

    :returns: path to the detected interpreter, as a string
    :raises SystemExit: with code 1 (after printing to stderr) if no environment is found
    """
    detection_funcs: list[Callable[[], Path | None]] = [
        detect_venv_or_virtualenv_interpreter,
        detect_conda_env_interpreter,
        detect_poetry_env_interpreter,
    ]
    for detect in detection_funcs:
        path = detect()
        if not path:
            # This detector found nothing; try the next one.
            continue
        if not path.exists():
            # NOTE(review): a detector returning a non-existent path aborts the whole
            # search (break, not continue) -- confirm this is intended before changing.
            break
        return str(path)

    print("Unable to detect virtual environment.", file=sys.stderr)  # noqa: T201
    raise SystemExit(1)
36 | env_var = os.environ.get("VIRTUAL_ENV") 37 | if not env_var: 38 | return None 39 | 40 | path = Path(env_var) 41 | path /= determine_bin_dir() 42 | 43 | file_name = determine_interpreter_file_name() 44 | return path / file_name if file_name else None 45 | 46 | 47 | def determine_bin_dir() -> str: 48 | return "Scripts" if os.name == "nt" else "bin" 49 | 50 | 51 | def detect_conda_env_interpreter() -> Path | None: 52 | # Env var mentioned in https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html#saving-environment-variables. 53 | env_var = os.environ.get("CONDA_PREFIX") 54 | if not env_var: 55 | return None 56 | 57 | path = Path(env_var) 58 | 59 | # On POSIX systems, conda adds the python executable to the /bin directory. On Windows, it resides in the parent 60 | # directory of /bin (i.e. the root directory). 61 | # See https://docs.anaconda.com/free/working-with-conda/configurations/python-path/#examples. 62 | if os.name == "posix": # pragma: posix cover 63 | path /= "bin" 64 | 65 | file_name = determine_interpreter_file_name() 66 | 67 | return path / file_name if file_name else None 68 | 69 | 70 | def detect_poetry_env_interpreter() -> Path | None: 71 | # poetry doesn't expose an environment variable like other implementations, so we instead use its CLI to snatch the 72 | # active interpreter. 73 | # See https://python-poetry.org/docs/managing-environments/#displaying-the-environment-information. 
74 | try: 75 | result = subprocess.run( 76 | ("poetry", "env", "info", "--executable"), 77 | check=True, 78 | text=True, 79 | stdout=subprocess.PIPE, 80 | stderr=subprocess.DEVNULL, 81 | ) 82 | except Exception: # noqa: BLE001 83 | return None 84 | 85 | return Path(result.stdout.strip()) 86 | 87 | 88 | def determine_interpreter_file_name() -> str | None: 89 | impl_name_to_file_name_dict = {"CPython": "python", "PyPy": "pypy"} 90 | name = impl_name_to_file_name_dict.get(platform.python_implementation()) 91 | if not name: 92 | return None 93 | if os.name == "nt": # pragma: nt cover 94 | return name + ".exe" 95 | return name 96 | 97 | 98 | __all__ = ["detect_active_interpreter"] 99 | -------------------------------------------------------------------------------- /src/pipdeptree/_discovery.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import ast 4 | import site 5 | import subprocess # noqa: S404 6 | import sys 7 | from importlib.metadata import Distribution, distributions 8 | from pathlib import Path 9 | from typing import TYPE_CHECKING 10 | 11 | from packaging.utils import canonicalize_name 12 | 13 | from pipdeptree._warning import get_warning_printer 14 | 15 | if TYPE_CHECKING: 16 | from collections.abc import Iterable 17 | 18 | 19 | class InterpreterQueryError(Exception): 20 | """A problem occurred while trying to query a custom interpreter.""" 21 | 22 | 23 | def get_installed_distributions( 24 | interpreter: str = sys.executable or "", 25 | supplied_paths: list[str] | None = None, 26 | local_only: bool = False, # noqa: FBT001, FBT002 27 | user_only: bool = False, # noqa: FBT001, FBT002 28 | ) -> list[Distribution]: 29 | """ 30 | Return the distributions installed in the interpreter's environment. 31 | 32 | :raises InterpreterQueryError: If a failure occurred while querying the interpreter. 33 | """ 34 | # sys.path is used by importlib.metadata.PathDistribution and pip by default. 
def query_interpreter_for_paths(interpreter: str, *, local_only: bool = False) -> list[str]:
    """
    Query an interpreter for paths containing distribution metadata.

    :param interpreter: path of the interpreter to run
    :param local_only: restrict the result to paths under the interpreter's ``sys.prefix``
    :raises InterpreterQueryError: If a failure occurred while querying the interpreter.
    :returns: the interpreter's ``sys.path`` entries (possibly filtered)
    """
    # Ask the target interpreter for its own sys.path; with --local-only we only keep
    # entries belonging to that interpreter's environment.
    snippet = (
        "import sys; print([p for p in sys.path if p.startswith(sys.prefix)])"
        if local_only
        else "import sys; print(sys.path)"
    )

    try:
        completed = subprocess.run(  # noqa: S603
            [interpreter, "-c", snippet],
            stdout=subprocess.PIPE,
            stderr=subprocess.DEVNULL,
            check=True,
            text=True,
        )
        # The child printed a Python list literal; literal_eval parses it safely.
        paths = ast.literal_eval(completed.stdout)
    except Exception as e:
        raise InterpreterQueryError(str(e)) from e

    return paths  # type: ignore[no-any-return]
pip's approach is 77 | # to keep track of each package metadata it finds, and if it encounters one again it will simply just ignore it. We 78 | # take it one step further and warn the user that there are duplicate packages in their environment. 79 | # See https://github.com/pypa/pip/blob/7c49d06ea4be4635561f16a524e3842817d1169a/src/pip/_internal/metadata/importlib/_envs.py#L34 80 | seen_dists: dict[str, Distribution] = {} 81 | first_seen_to_already_seen_dists_dict: dict[Distribution, list[Distribution]] = {} 82 | 83 | # We also need to handle invalid metadata, though we can't get paths to invalid distribution metadata directly since 84 | # importlib doesn't expose an API for it. We do have the directory they reside in, so let's use that. 85 | site_dir_with_invalid_metadata: set[str] = set() 86 | 87 | dists = [] 88 | for dist in iterable_dists: 89 | if not has_valid_metadata(dist): 90 | site_dir = str(dist.locate_file("")) 91 | site_dir_with_invalid_metadata.add(site_dir) 92 | continue 93 | normalized_name = canonicalize_name(dist.metadata["Name"]) 94 | if normalized_name not in seen_dists: 95 | seen_dists[normalized_name] = dist 96 | dists.append(dist) 97 | continue 98 | if warning_printer.should_warn(): 99 | already_seen_dists = first_seen_to_already_seen_dists_dict.setdefault(seen_dists[normalized_name], []) 100 | already_seen_dists.append(dist) 101 | 102 | if warning_printer.should_warn(): 103 | if site_dir_with_invalid_metadata: 104 | warning_printer.print_multi_line( 105 | "Missing or invalid metadata found in the following site dirs", 106 | lambda: render_invalid_metadata_text(site_dir_with_invalid_metadata), 107 | ) 108 | if first_seen_to_already_seen_dists_dict: 109 | warning_printer.print_multi_line( 110 | "Duplicate package metadata found", 111 | lambda: render_duplicated_dist_metadata_text(first_seen_to_already_seen_dists_dict), 112 | ignore_fail=True, 113 | ) 114 | 115 | return dists 116 | 117 | 118 | def has_valid_metadata(dist: Distribution) -> bool: 
def render_invalid_metadata_text(site_dirs_with_invalid_metadata: set[str]) -> None:
    """Write each site directory that held missing/invalid metadata to stderr, one per line."""
    for invalid_dir in site_dirs_with_invalid_metadata:
        print(invalid_dir, file=sys.stderr)  # noqa: T201


FirstSeenWithDistsPair = tuple[Distribution, Distribution]


def render_duplicated_dist_metadata_text(
    first_seen_to_already_seen_dists_dict: dict[Distribution, list[Distribution]],
) -> None:
    """
    Write a stderr report of duplicate distributions, grouped by the site entry they live in.

    :param first_seen_to_already_seen_dists_dict: maps the distribution kept for a name to
        every later duplicate found for that same name
    """
    # Re-key the duplicates by the directory each duplicate was discovered in.
    pairs_by_entry: dict[str, list[FirstSeenWithDistsPair]] = {}
    for first_seen, duplicates in first_seen_to_already_seen_dists_dict.items():
        for duplicate in duplicates:
            entry = str(duplicate.locate_file(""))
            pairs_by_entry.setdefault(entry, []).append((first_seen, duplicate))

    for entry, pairs in pairs_by_entry.items():
        print(f'"{entry}"', file=sys.stderr)  # noqa: T201
        for first_seen, duplicate in pairs:
            line = (
                f"  {duplicate.metadata['Name']:<32} {duplicate.version:<16} (using {first_seen.version},"
                f' "{first_seen.locate_file("")}")'
            )
            print(line, file=sys.stderr)  # noqa: T201
def dist_to_frozen_repr(dist: Distribution) -> str:
    """
    Return the frozen requirement repr of a `importlib.metadata.Distribution` object.

    The formatting is delegated to pip's `FrozenRequirement`;
    `PipBaseDistributionAdapter` bridges the importlib distribution to the interface
    pip's `from_dist` expects.
    """
    # Imported lazily: this is a pip-internal module (hence the noqa codes).
    from pip._internal.operations.freeze import FrozenRequirement  # noqa: PLC0415, PLC2701

    adapter = PipBaseDistributionAdapter(dist)
    fr = FrozenRequirement.from_dist(adapter)  # type: ignore[arg-type]

    return str(fr).strip()
def render_invalid_reqs_text(dist_name_to_invalid_reqs_dict: dict[str, list[str]]) -> None:
    """
    Write skipped (invalid) requirement strings to stderr, grouped under their distribution.

    :param dist_name_to_invalid_reqs_dict: maps a distribution name to the invalid
        requirement strings found in its metadata
    """
    for dist_name, invalid_reqs in dist_name_to_invalid_reqs_dict.items():
        # Distribution name on its own line, then one indented line per bad requirement.
        print(dist_name, file=sys.stderr)  # noqa: T201
        for bad_req in invalid_reqs:
            message = f'  Skipping "{bad_req}"'
            print(message, file=sys.stderr)  # noqa: T201
    @classmethod
    def from_pkgs(cls, pkgs: list[Distribution]) -> PackageDAG:
        """
        Build a PackageDAG from a list of installed distributions.

        Each distribution becomes a `DistPackage` key; its requirements become
        `ReqPackage` values, linked back to the corresponding `DistPackage` when the
        requirement is installed.

        :param pkgs: distributions to build the graph from
        :returns: the constructed DAG
        """
        warning_printer = get_warning_printer()
        dist_pkgs = [DistPackage(p) for p in pkgs]
        # Index by canonicalized name for requirement -> installed-package lookups.
        idx = {p.key: p for p in dist_pkgs}
        m: dict[DistPackage, list[ReqPackage]] = {}
        dist_name_to_invalid_reqs_dict: dict[str, list[str]] = {}
        for p in dist_pkgs:
            reqs = []
            requires_iterator = p.requires()
            # Drive the iterator manually: DistPackage.requires() raises
            # InvalidRequirementError lazily per bad requirement string, and we want to
            # collect those while continuing with the remaining requirements.
            while True:
                try:
                    req = next(requires_iterator)
                except InvalidRequirementError as err:
                    # We can't work with invalid requirement strings. Let's warn the user about them.
                    if warning_printer.should_warn():
                        dist_name_to_invalid_reqs_dict.setdefault(p.project_name, []).append(str(err))
                    continue
                except StopIteration:
                    break
                d = idx.get(canonicalize_name(req.name))
                # Distribution.requires only returns the name of requirements in the metadata file, which may not be the
                # same as the name in PyPI. We should try to retain the original package names for requirements.
                # See https://github.com/tox-dev/pipdeptree/issues/242
                req.name = d.project_name if d is not None else req.name
                pkg = ReqPackage(req, d)
                reqs.append(pkg)
            m[p] = reqs

        should_print_warning = warning_printer.should_warn() and dist_name_to_invalid_reqs_dict
        if should_print_warning:
            warning_printer.print_multi_line(
                "Invalid requirement strings found for the following distributions",
                lambda: render_invalid_reqs_text(dist_name_to_invalid_reqs_dict),
            )

        return cls(m)

    def __init__(self, m: dict[DistPackage, list[ReqPackage]]) -> None:
        """
        Initialize the PackageDAG object.

        :param dict m: dict of node objects (refer class docstring)
        :returns: None
        :rtype: NoneType

        """
        self._obj: dict[DistPackage, list[ReqPackage]] = m
        # Secondary index: canonicalized key -> DistPackage, for O(1) parent lookups.
        self._index: dict[str, DistPackage] = {p.key: p for p in list(self._obj)}

    def get_node_as_parent(self, node_key: str) -> DistPackage | None:
        """
        Get the node from the keys of the dict representing the DAG.

        This method is useful if the dict representing the DAG contains different kind of objects in keys and values.
        Use this method to look up a node obj as a parent (from the keys of the dict) given a node key.

        :param node_key: identifier corresponding to key attr of node obj
        :returns: node obj (as present in the keys of the dict), or None if absent

        """
        try:
            return self._index[node_key]
        except KeyError:
            return None
    def filter_nodes(self, include: list[str] | None, exclude: set[str] | None) -> PackageDAG:  # noqa: C901, PLR0912
        """
        Filter nodes in a graph by given parameters.

        If a node is included, then all it's children are also included.

        :param include: list of node keys to include (or None)
        :param exclude: set of node keys to exclude (or None)
        :raises ValueError: If include has node keys that do not exist in the graph
        :returns: filtered version of the graph

        """
        # If neither of the filters are specified, short circuit
        if include is None and exclude is None:
            return self

        # Keep the caller's original (un-normalized) patterns so error messages can echo
        # them back verbatim.
        include_with_casing_preserved: list[str] = []
        if include:
            include_with_casing_preserved = include
            include = [canonicalize_name(i) for i in include]
        exclude = {canonicalize_name(s) for s in exclude} if exclude else set()

        # Check for mutual exclusion of show_only and exclude sets
        # after normalizing the values to lowercase
        # NOTE(review): this guard is a plain `assert` and so is skipped under `python -O`;
        # the CLI already rejects combining -p/-a with -e, but confirm that is the only
        # caller before relying on it.
        if include and exclude:
            assert not (set(include) & exclude)

        # Traverse the graph in a depth first manner and filter the
        # nodes according to `show_only` and `exclude` sets
        stack: deque[DistPackage] = deque()
        m: dict[DistPackage, list[ReqPackage]] = {}
        seen = set()
        matched_includes: set[str] = set()
        for node in self._obj:
            if any(fnmatch(node.key, e) for e in exclude):
                continue
            if include is None:
                stack.append(node)
            else:
                should_append = False
                for i in include:
                    if fnmatch(node.key, i):
                        # Add all patterns that match with the node key. Otherwise if we break, patterns like py* or
                        # pytest* (which both should match "pytest") may cause one pattern to be missed and will
                        # raise an error
                        matched_includes.add(i)
                        should_append = True
                if should_append:
                    stack.append(node)

        while stack:
            n = stack.pop()
            # Children surviving the exclude filter are kept; their own subtrees are
            # pushed for traversal so that included nodes bring in all descendants.
            cldn = [c for c in self._obj[n] if not any(fnmatch(c.key, e) for e in exclude)]
            m[n] = cldn
            seen.add(n.key)
            for c in cldn:
                if c.key not in seen:
                    cld_node = self.get_node_as_parent(c.key)
                    if cld_node:
                        stack.append(cld_node)
                    else:
                        # It means there's no root node corresponding to the child node i.e.
                        # a dependency is missing
                        continue

        # Any include pattern that never matched a node is reported as an error, using the
        # caller's original casing.
        non_existent_includes = [
            i for i in include_with_casing_preserved if canonicalize_name(i) not in matched_includes
        ]
        if non_existent_includes:
            raise ValueError("No packages matched using the following patterns: " + ", ".join(non_existent_includes))

        return self.__class__(m)
    def sort(self) -> PackageDAG:
        """
        Return a copy of the tree whose underlying dict is sorted alphabetically by key.

        Sorting relies on `Package.__lt__` (comparison by canonicalized key) and is
        applied both to the top-level keys and to each child list.

        :returns: Instance of same class with sorted dict

        """
        return self.__class__({k: sorted(v) for k, v in sorted(self._obj.items())})

    # Methods required by the abstract base class Mapping
    def __getitem__(self, arg: DistPackage) -> list[ReqPackage] | None:  # type: ignore[override]
        # NOTE(review): returns None for a missing key instead of raising KeyError as
        # Mapping prescribes -- confirm callers depend on this before "fixing" it.
        return self._obj.get(arg)

    def __iter__(self) -> Iterator[DistPackage]:
        return self._obj.__iter__()

    def __len__(self) -> int:
        return len(self._obj)
264 | 265 | :returns: reverse of the reversed DAG 266 | 267 | """ 268 | m: defaultdict[DistPackage, list[ReqPackage]] = defaultdict(list) 269 | child_keys = {r.key for r in chain.from_iterable(self._obj.values())} 270 | for k, vs in self._obj.items(): 271 | for v in vs: 272 | assert isinstance(v, DistPackage) 273 | node = next((p for p in m if p.key == v.key), v.as_parent_of(None)) 274 | m[node].append(k) 275 | if k.key not in child_keys: 276 | assert isinstance(k, ReqPackage) 277 | assert k.dist is not None 278 | m[k.dist] = [] 279 | return PackageDAG(dict(m)) 280 | 281 | 282 | __all__ = [ 283 | "PackageDAG", 284 | "ReversedPackageDAG", 285 | ] 286 | -------------------------------------------------------------------------------- /src/pipdeptree/_models/package.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from abc import ABC, abstractmethod 4 | from importlib import import_module 5 | from importlib.metadata import Distribution, PackageNotFoundError, metadata, version 6 | from inspect import ismodule 7 | from typing import TYPE_CHECKING 8 | 9 | from packaging.requirements import InvalidRequirement, Requirement 10 | from packaging.utils import canonicalize_name 11 | 12 | from pipdeptree._freeze import dist_to_frozen_repr 13 | 14 | if TYPE_CHECKING: 15 | from collections.abc import Iterator 16 | from importlib.metadata import Distribution 17 | 18 | 19 | class InvalidRequirementError(ValueError): 20 | """ 21 | An invalid requirement string was found. 22 | 23 | When raising an exception, this should provide just the problem requirement string. 
24 | """ 25 | 26 | 27 | class Package(ABC): 28 | """Abstract class for wrappers around objects that pip returns.""" 29 | 30 | UNKNOWN_LICENSE_STR = "(Unknown license)" 31 | 32 | def __init__(self, project_name: str) -> None: 33 | self.project_name = project_name 34 | self.key = canonicalize_name(project_name) 35 | 36 | def licenses(self) -> str: 37 | try: 38 | dist_metadata = metadata(self.key) 39 | except PackageNotFoundError: 40 | return self.UNKNOWN_LICENSE_STR 41 | 42 | if license_str := dist_metadata.get("License-Expression"): 43 | return f"({license_str})" 44 | 45 | license_strs: list[str] = [] 46 | classifiers = dist_metadata.get_all("Classifier", []) 47 | for classifier in classifiers: 48 | line = str(classifier) 49 | if line.startswith("License"): 50 | license_str = line.rsplit(":: ", 1)[-1] 51 | license_strs.append(license_str) 52 | 53 | if len(license_strs) == 0: 54 | return self.UNKNOWN_LICENSE_STR 55 | 56 | return f"({', '.join(license_strs)})" 57 | 58 | @abstractmethod 59 | def render_as_root(self, *, frozen: bool) -> str: 60 | raise NotImplementedError 61 | 62 | @abstractmethod 63 | def render_as_branch(self, *, frozen: bool) -> str: 64 | raise NotImplementedError 65 | 66 | @abstractmethod 67 | def as_dict(self) -> dict[str, str]: 68 | raise NotImplementedError 69 | 70 | def render( 71 | self, 72 | parent: DistPackage | ReqPackage | None = None, 73 | *, 74 | frozen: bool = False, 75 | ) -> str: 76 | render = self.render_as_branch if parent else self.render_as_root 77 | return render(frozen=frozen) 78 | 79 | @staticmethod 80 | def as_frozen_repr(dist: Distribution) -> str: 81 | return dist_to_frozen_repr(dist) 82 | 83 | def __repr__(self) -> str: 84 | return f'<{self.__class__.__name__}("{self.key}")>' 85 | 86 | def __lt__(self, rhs: Package) -> bool: 87 | return self.key < rhs.key 88 | 89 | 90 | class DistPackage(Package): 91 | """ 92 | Wrapper class for importlib.metadata.Distribution instances. 
93 | 94 | :param obj: importlib.metadata.Distribution to wrap over 95 | :param req: optional ReqPackage object to associate this DistPackage with. This is useful for displaying the tree in 96 | reverse 97 | 98 | """ 99 | 100 | def __init__(self, obj: Distribution, req: ReqPackage | None = None) -> None: 101 | super().__init__(obj.metadata["Name"]) 102 | self._obj = obj 103 | self.req = req 104 | 105 | def requires(self) -> Iterator[Requirement]: 106 | """ 107 | Return an iterator of the distribution's required dependencies. 108 | 109 | :raises InvalidRequirementError: If the metadata contains invalid requirement strings. 110 | """ 111 | for r in self._obj.requires or []: 112 | try: 113 | req = Requirement(r) 114 | except InvalidRequirement: 115 | raise InvalidRequirementError(r) from None 116 | if not req.marker or req.marker.evaluate(): 117 | # Make sure that we're either dealing with a dependency that has no environment markers or does but 118 | # are evaluated True against the existing environment (if it's False, it means they cannot be 119 | # installed). "extra" markers are always evaluated False here which is what we want when retrieving 120 | # only required dependencies. 
121 | yield req 122 | 123 | @property 124 | def version(self) -> str: 125 | return self._obj.version 126 | 127 | def unwrap(self) -> Distribution: 128 | """Exposes the internal `importlib.metadata.Distribution` object.""" 129 | return self._obj 130 | 131 | def render_as_root(self, *, frozen: bool) -> str: 132 | return self.as_frozen_repr(self._obj) if frozen else f"{self.project_name}=={self.version}" 133 | 134 | def render_as_branch(self, *, frozen: bool) -> str: 135 | assert self.req is not None 136 | if not frozen: 137 | parent_ver_spec = self.req.version_spec 138 | parent_str = self.req.project_name 139 | if parent_ver_spec: 140 | parent_str += parent_ver_spec 141 | return f"{self.project_name}=={self.version} [requires: {parent_str}]" 142 | return self.render_as_root(frozen=frozen) 143 | 144 | def as_requirement(self) -> ReqPackage: 145 | """Return a ReqPackage representation of this DistPackage.""" 146 | spec = f"{self.project_name}=={self.version}" 147 | return ReqPackage(Requirement(spec), dist=self) 148 | 149 | def as_parent_of(self, req: ReqPackage | None) -> DistPackage: 150 | """ 151 | Return a DistPackage instance associated to a requirement. 152 | 153 | This association is necessary for reversing the PackageDAG. 154 | If `req` is None, and the `req` attribute of the current instance is also None, then the same instance will be 155 | returned. 156 | 157 | :param ReqPackage req: the requirement to associate with 158 | :returns: DistPackage instance 159 | 160 | """ 161 | if req is None and self.req is None: 162 | return self 163 | return self.__class__(self._obj, req) 164 | 165 | def as_dict(self) -> dict[str, str]: 166 | return {"key": self.key, "package_name": self.project_name, "installed_version": self.version} 167 | 168 | 169 | class ReqPackage(Package): 170 | """ 171 | Wrapper class for Requirement instance. 
172 | 173 | :param obj: The `Requirement` instance to wrap over 174 | :param dist: optional `importlib.metadata.Distribution` instance for this requirement 175 | 176 | """ 177 | 178 | UNKNOWN_VERSION = "?" 179 | 180 | def __init__(self, obj: Requirement, dist: DistPackage | None = None) -> None: 181 | super().__init__(obj.name) 182 | self._obj = obj 183 | self.dist = dist 184 | 185 | def render_as_root(self, *, frozen: bool) -> str: 186 | if not frozen: 187 | return f"{self.project_name}=={self.installed_version}" 188 | if self.dist: 189 | return self.as_frozen_repr(self.dist.unwrap()) 190 | return self.project_name 191 | 192 | def render_as_branch(self, *, frozen: bool) -> str: 193 | if not frozen: 194 | req_ver = self.version_spec or "Any" 195 | return f"{self.project_name} [required: {req_ver}, installed: {self.installed_version}]" 196 | return self.render_as_root(frozen=frozen) 197 | 198 | @property 199 | def version_spec(self) -> str | None: 200 | result = None 201 | specs = sorted(map(str, self._obj.specifier), reverse=True) # `reverse` makes '>' prior to '<' 202 | if specs: 203 | result = ",".join(specs) 204 | return result 205 | 206 | @property 207 | def installed_version(self) -> str: 208 | if not self.dist: 209 | try: 210 | return version(self.key) 211 | except PackageNotFoundError: 212 | pass 213 | # Avoid AssertionError with setuptools, see https://github.com/tox-dev/pipdeptree/issues/162 214 | if self.key == "setuptools": 215 | return self.UNKNOWN_VERSION 216 | try: 217 | m = import_module(self.key) 218 | except ImportError: 219 | return self.UNKNOWN_VERSION 220 | else: 221 | v = getattr(m, "__version__", self.UNKNOWN_VERSION) 222 | if ismodule(v): 223 | return getattr(v, "__version__", self.UNKNOWN_VERSION) 224 | return v 225 | return self.dist.version 226 | 227 | def is_conflicting(self) -> bool: 228 | """If installed version conflicts with required version.""" 229 | # unknown installed version is also considered conflicting 230 | if self.is_missing: 
231 | return True 232 | 233 | return not self._obj.specifier.contains(self.installed_version, prereleases=True) 234 | 235 | @property 236 | def is_missing(self) -> bool: 237 | return self.installed_version == self.UNKNOWN_VERSION 238 | 239 | def as_dict(self) -> dict[str, str]: 240 | return { 241 | "key": self.key, 242 | "package_name": self.project_name, 243 | "installed_version": self.installed_version, 244 | "required_version": self.version_spec if self.version_spec is not None else "Any", 245 | } 246 | 247 | 248 | __all__ = [ 249 | "DistPackage", 250 | "ReqPackage", 251 | ] 252 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/__init__.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING 4 | 5 | from .freeze import render_freeze 6 | from .graphviz import render_graphviz 7 | from .json import render_json 8 | from .json_tree import render_json_tree 9 | from .mermaid import render_mermaid 10 | from .text import render_text 11 | 12 | if TYPE_CHECKING: 13 | from pipdeptree._cli import Options 14 | from pipdeptree._models import PackageDAG 15 | 16 | 17 | def render(options: Options, tree: PackageDAG) -> None: 18 | if options.json: 19 | print(render_json(tree)) # noqa: T201 20 | elif options.json_tree: 21 | print(render_json_tree(tree)) # noqa: T201 22 | elif options.mermaid: 23 | print(render_mermaid(tree)) # noqa: T201 24 | elif options.output_format: 25 | render_graphviz(tree, output_format=options.output_format, reverse=options.reverse) 26 | elif options.freeze: 27 | render_freeze(tree, max_depth=options.depth, list_all=options.all) 28 | else: 29 | render_text( 30 | tree, 31 | max_depth=options.depth, 32 | encoding=options.encoding, 33 | list_all=options.all, 34 | include_license=options.license, 35 | ) 36 | 37 | 38 | __all__ = [ 39 | "render", 40 | ] 41 | 
-------------------------------------------------------------------------------- /src/pipdeptree/_render/freeze.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from itertools import chain 4 | from typing import TYPE_CHECKING, Any 5 | 6 | from .text import get_top_level_nodes 7 | 8 | if TYPE_CHECKING: 9 | from pipdeptree._models.dag import PackageDAG 10 | from pipdeptree._models.package import DistPackage, ReqPackage 11 | 12 | 13 | def render_freeze(tree: PackageDAG, *, max_depth: float, list_all: bool = True) -> None: 14 | nodes = get_top_level_nodes(tree, list_all=list_all) 15 | 16 | def aux( 17 | node: DistPackage | ReqPackage, 18 | parent: DistPackage | ReqPackage | None = None, 19 | indent: int = 0, 20 | cur_chain: list[str] | None = None, 21 | depth: int = 0, 22 | ) -> list[Any]: 23 | cur_chain = cur_chain or [] 24 | node_str = node.render(parent, frozen=True) 25 | if parent: 26 | prefix = " " * indent 27 | node_str = prefix + node_str 28 | result = [node_str] 29 | children = [ 30 | aux(c, node, indent=indent + 2, cur_chain=[*cur_chain, c.project_name], depth=depth + 1) 31 | for c in tree.get_children(node.key) 32 | if c.project_name not in cur_chain and depth + 1 <= max_depth 33 | ] 34 | result += list(chain.from_iterable(children)) 35 | return result 36 | 37 | lines = chain.from_iterable([aux(p) for p in nodes]) 38 | print("\n".join(lines)) # noqa: T201 39 | 40 | 41 | __all__ = [ 42 | "render_freeze", 43 | ] 44 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/graphviz.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import os 4 | import sys 5 | from typing import TYPE_CHECKING 6 | 7 | from pipdeptree._models import DistPackage, ReqPackage 8 | 9 | if TYPE_CHECKING: 10 | from pipdeptree._models import PackageDAG 11 | 12 | 13 | def 
dump_graphviz( # noqa: C901 14 | tree: PackageDAG, 15 | output_format: str = "dot", 16 | is_reverse: bool = False, # noqa: FBT001, FBT002 17 | ) -> str | bytes: 18 | """ 19 | Output dependency graph as one of the supported GraphViz output formats. 20 | 21 | :param dict tree: dependency graph 22 | :param string output_format: output format 23 | :param bool is_reverse: reverse or not 24 | :returns: representation of tree in the specified output format 25 | :rtype: str or binary representation depending on the output format 26 | """ 27 | try: 28 | from graphviz import Digraph # noqa: PLC0415 29 | except ImportError as exc: 30 | print( # noqa: T201 31 | "graphviz is not available, but necessary for the output option. Please install it.", 32 | file=sys.stderr, 33 | ) 34 | raise SystemExit(1) from exc 35 | 36 | from graphviz import parameters # noqa: PLC0415 37 | 38 | valid_formats = parameters.FORMATS 39 | 40 | if output_format not in valid_formats: 41 | print(f"{output_format} is not a supported output format.", file=sys.stderr) # noqa: T201 42 | print(f"Supported formats are: {', '.join(sorted(valid_formats))}", file=sys.stderr) # noqa: T201 43 | raise SystemExit(1) 44 | 45 | graph = Digraph(format=output_format) 46 | 47 | if is_reverse: 48 | for dep_rev, parents in tree.items(): 49 | assert isinstance(dep_rev, ReqPackage) 50 | dep_label = f"{dep_rev.project_name}\\n{dep_rev.installed_version}" 51 | graph.node(dep_rev.key, label=dep_label) 52 | for parent in parents: 53 | # req reference of the dep associated with this particular parent package 54 | assert isinstance(parent, DistPackage) 55 | edge_label = (parent.req.version_spec if parent.req is not None else None) or "any" 56 | graph.edge(dep_rev.key, parent.key, label=edge_label) 57 | else: 58 | for pkg, deps in tree.items(): 59 | pkg_label = f"{pkg.project_name}\\n{pkg.version}" 60 | graph.node(pkg.key, label=pkg_label) 61 | for dep in deps: 62 | edge_label = dep.version_spec or "any" 63 | if dep.is_missing: 64 | 
dep_label = f"{dep.project_name}\\n(missing)" 65 | graph.node(dep.key, label=dep_label, style="dashed") 66 | graph.edge(pkg.key, dep.key, style="dashed") 67 | else: 68 | graph.edge(pkg.key, dep.key, label=edge_label) 69 | 70 | # Allow output of dot format, even if GraphViz isn't installed. 71 | if output_format == "dot": 72 | # Emulates graphviz.dot.Dot.__iter__() to force the sorting of graph.body. 73 | # Fixes https://github.com/tox-dev/pipdeptree/issues/188 74 | # That way we can guarantee the output of the dot format is deterministic 75 | # and stable. 76 | return "".join([next(iter(graph)), *sorted(graph.body), graph._tail]) # noqa: SLF001 77 | 78 | # As it's unknown if the selected output format is binary or not, try to 79 | # decode it as UTF8 and only print it out in binary if that's not possible. 80 | try: 81 | return graph.pipe().decode("utf-8") # type: ignore[no-any-return] 82 | except UnicodeDecodeError: 83 | return graph.pipe() # type: ignore[no-any-return] 84 | 85 | 86 | def print_graphviz(dump_output: str | bytes) -> None: 87 | """ 88 | Dump the data generated by GraphViz to stdout. 
89 | 90 | :param dump_output: The output from dump_graphviz 91 | 92 | """ 93 | if hasattr(dump_output, "encode"): 94 | print(dump_output) # noqa: T201 95 | else: 96 | with os.fdopen(sys.stdout.fileno(), "wb") as bytestream: 97 | bytestream.write(dump_output) 98 | 99 | 100 | def render_graphviz(tree: PackageDAG, *, output_format: str, reverse: bool) -> None: 101 | output = dump_graphviz(tree, output_format=output_format, is_reverse=reverse) 102 | print_graphviz(output) 103 | 104 | 105 | __all__ = [ 106 | "render_graphviz", 107 | ] 108 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/json.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from typing import TYPE_CHECKING 5 | 6 | if TYPE_CHECKING: 7 | from pipdeptree._models import PackageDAG 8 | 9 | 10 | def render_json(tree: PackageDAG) -> str: 11 | """ 12 | Convert the tree into a flat json representation. 
13 | 14 | The json repr will be a list of hashes, each hash having 2 fields: 15 | - package 16 | - dependencies: list of dependencies 17 | 18 | :param tree: dependency tree 19 | :returns: JSON representation of the tree 20 | 21 | """ 22 | tree = tree.sort() 23 | return json.dumps( 24 | [{"package": k.as_dict(), "dependencies": [v.as_dict() for v in vs]} for k, vs in tree.items()], 25 | indent=4, 26 | ) 27 | 28 | 29 | __all__ = [ 30 | "render_json", 31 | ] 32 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/json_tree.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import json 4 | from itertools import chain 5 | from typing import TYPE_CHECKING, Any 6 | 7 | from pipdeptree._models import ReqPackage 8 | 9 | if TYPE_CHECKING: 10 | from pipdeptree._models import DistPackage, PackageDAG 11 | 12 | 13 | def render_json_tree(tree: PackageDAG) -> str: 14 | """ 15 | Convert the tree into a nested json representation. 
16 | 17 | The json repr will be a list of hashes, each hash having the following fields: 18 | 19 | - package_name 20 | - key 21 | - required_version 22 | - installed_version 23 | - dependencies: list of dependencies 24 | 25 | :param tree: dependency tree 26 | :returns: json representation of the tree 27 | 28 | """ 29 | tree = tree.sort() 30 | branch_keys = {r.key for r in chain.from_iterable(tree.values())} 31 | nodes = [p for p in tree if p.key not in branch_keys] 32 | 33 | def aux( 34 | node: DistPackage | ReqPackage, 35 | parent: DistPackage | ReqPackage | None = None, 36 | cur_chain: list[str] | None = None, 37 | ) -> dict[str, Any]: 38 | if cur_chain is None: 39 | cur_chain = [node.project_name] 40 | 41 | d: dict[str, str | list[Any] | None] = node.as_dict() # type: ignore[assignment] 42 | if parent: 43 | d["required_version"] = node.version_spec if isinstance(node, ReqPackage) and node.version_spec else "Any" 44 | else: 45 | d["required_version"] = d["installed_version"] 46 | 47 | d["dependencies"] = [ 48 | aux(c, parent=node, cur_chain=[*cur_chain, c.project_name]) 49 | for c in tree.get_children(node.key) 50 | if c.project_name not in cur_chain 51 | ] 52 | 53 | return d 54 | 55 | return json.dumps([aux(p) for p in nodes], indent=4) 56 | 57 | 58 | __all__ = [ 59 | "render_json_tree", 60 | ] 61 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/mermaid.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import itertools as it 4 | from typing import TYPE_CHECKING, Final 5 | 6 | from pipdeptree._models import DistPackage, ReqPackage, ReversedPackageDAG 7 | 8 | if TYPE_CHECKING: 9 | from pipdeptree._models import PackageDAG 10 | 11 | _RESERVED_IDS: Final[frozenset[str]] = frozenset( 12 | [ 13 | "C4Component", 14 | "C4Container", 15 | "C4Deployment", 16 | "C4Dynamic", 17 | "_blank", 18 | "_parent", 19 | "_self", 20 | "_top", 21 | 
"call", 22 | "class", 23 | "classDef", 24 | "click", 25 | "end", 26 | "flowchart", 27 | "flowchart-v2", 28 | "graph", 29 | "interpolate", 30 | "linkStyle", 31 | "style", 32 | "subgraph", 33 | ], 34 | ) 35 | 36 | 37 | def render_mermaid(tree: PackageDAG) -> str: # noqa: C901 38 | """ 39 | Produce a Mermaid flowchart from the dependency graph. 40 | 41 | :param tree: dependency graph 42 | 43 | """ 44 | # List of reserved keywords in Mermaid that cannot be used as node names. 45 | # See: https://github.com/mermaid-js/mermaid/issues/4182#issuecomment-1454787806 46 | 47 | node_ids_map: dict[str, str] = {} 48 | 49 | def mermaid_id(key: str) -> str: 50 | """Return a valid Mermaid node ID from a string.""" 51 | # If we have already seen this key, return the canonical ID. 52 | canonical_id = node_ids_map.get(key) 53 | if canonical_id is not None: 54 | return canonical_id 55 | # If the key is not a reserved keyword, return it as is, and update the map. 56 | if key not in _RESERVED_IDS: 57 | node_ids_map[key] = key 58 | return key 59 | # If the key is a reserved keyword, append a number to it. 60 | for number in it.count(): 61 | new_id = f"{key}_{number}" 62 | if new_id not in node_ids_map: 63 | node_ids_map[key] = new_id 64 | return new_id 65 | raise NotImplementedError 66 | 67 | # Use a sets to avoid duplicate entries. 
68 | nodes: set[str] = set() 69 | edges: set[str] = set() 70 | 71 | if isinstance(tree, ReversedPackageDAG): 72 | for package, reverse_dependencies in tree.items(): 73 | assert isinstance(package, ReqPackage) 74 | package_label = "\\n".join( 75 | (package.project_name, "(missing)" if package.is_missing else package.installed_version), 76 | ) 77 | package_key = mermaid_id(package.key) 78 | nodes.add(f'{package_key}["{package_label}"]') 79 | for reverse_dependency in reverse_dependencies: 80 | assert isinstance(reverse_dependency, DistPackage) 81 | edge_label = ( 82 | reverse_dependency.req.version_spec if reverse_dependency.req is not None else None 83 | ) or "any" 84 | reverse_dependency_key = mermaid_id(reverse_dependency.key) 85 | edges.add(f'{package_key} -- "{edge_label}" --> {reverse_dependency_key}') 86 | else: 87 | for package, dependencies in tree.items(): 88 | package_label = f"{package.project_name}\\n{package.version}" 89 | package_key = mermaid_id(package.key) 90 | nodes.add(f'{package_key}["{package_label}"]') 91 | for dependency in dependencies: 92 | edge_label = dependency.version_spec or "any" 93 | dependency_key = mermaid_id(dependency.key) 94 | if dependency.is_missing: 95 | dependency_label = f"{dependency.project_name}\\n(missing)" 96 | nodes.add(f'{dependency_key}["{dependency_label}"]:::missing') 97 | edges.add(f"{package_key} -.-> {dependency_key}") 98 | else: 99 | edges.add(f'{package_key} -- "{edge_label}" --> {dependency_key}') 100 | 101 | # Produce the Mermaid Markdown. 
102 | lines = [ 103 | "flowchart TD", 104 | "classDef missing stroke-dasharray: 5", 105 | *sorted(nodes), 106 | *sorted(edges), 107 | ] 108 | return "".join(f"{' ' if i else ''}{line}\n" for i, line in enumerate(lines)) 109 | 110 | 111 | __all__ = [ 112 | "render_mermaid", 113 | ] 114 | -------------------------------------------------------------------------------- /src/pipdeptree/_render/text.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from itertools import chain 4 | from typing import TYPE_CHECKING, Any 5 | 6 | if TYPE_CHECKING: 7 | from pipdeptree._models import DistPackage, PackageDAG, ReqPackage 8 | 9 | 10 | def render_text( 11 | tree: PackageDAG, 12 | *, 13 | max_depth: float, 14 | encoding: str, 15 | list_all: bool = True, 16 | include_license: bool = False, 17 | ) -> None: 18 | """ 19 | Print tree as text on console. 20 | 21 | :param tree: the package tree 22 | :param max_depth: the maximum depth of the dependency tree 23 | :param encoding: encoding to use (use "utf-8", "utf-16", "utf-32" for unicode or anything else for legacy output) 24 | :param list_all: whether to list all the pkgs at the root level or only those that are the sub-dependencies 25 | :param include_license: provide license information 26 | :returns: None 27 | 28 | """ 29 | nodes = get_top_level_nodes(tree, list_all=list_all) 30 | 31 | if encoding in {"utf-8", "utf-16", "utf-32"}: 32 | _render_text_with_unicode(tree, nodes, max_depth, include_license) 33 | else: 34 | _render_text_without_unicode(tree, nodes, max_depth, include_license) 35 | 36 | 37 | def get_top_level_nodes(tree: PackageDAG, *, list_all: bool) -> list[DistPackage]: 38 | """ 39 | Get a list of nodes that will appear at the first depth of the dependency tree. 
40 | 41 | :param tree: the package tree 42 | :param list_all: whether to list all the pkgs at the root level or only those that are the sub-dependencies 43 | """ 44 | tree = tree.sort() 45 | nodes = list(tree.keys()) 46 | branch_keys = {r.key for r in chain.from_iterable(tree.values())} 47 | 48 | if not list_all: 49 | nodes = [p for p in nodes if p.key not in branch_keys] 50 | 51 | return nodes 52 | 53 | 54 | def _render_text_with_unicode( 55 | tree: PackageDAG, 56 | nodes: list[DistPackage], 57 | max_depth: float, 58 | include_license: bool, # noqa: FBT001 59 | ) -> None: 60 | def aux( # noqa: PLR0913, PLR0917 61 | node: DistPackage | ReqPackage, 62 | parent: DistPackage | ReqPackage | None = None, 63 | indent: int = 0, 64 | cur_chain: list[str] | None = None, 65 | prefix: str = "", 66 | depth: int = 0, 67 | has_grand_parent: bool = False, # noqa: FBT001, FBT002 68 | is_last_child: bool = False, # noqa: FBT001, FBT002 69 | parent_is_last_child: bool = False, # noqa: FBT001, FBT002 70 | ) -> list[Any]: 71 | cur_chain = cur_chain or [] 72 | node_str = node.render(parent, frozen=False) 73 | next_prefix = "" 74 | next_indent = indent + 2 75 | 76 | if parent: 77 | bullet = "├── " 78 | if is_last_child: 79 | bullet = "└── " 80 | 81 | if has_grand_parent: 82 | next_indent -= 1 83 | if parent_is_last_child: 84 | prefix += " " * (indent + 1 - depth) 85 | else: 86 | prefix += "│" + " " * (indent - depth) 87 | # Without this extra space, bullets will point to the space just before the project name 88 | prefix += " " 89 | next_prefix = prefix 90 | node_str = prefix + bullet + node_str 91 | elif include_license: 92 | node_str += " " + node.licenses() 93 | 94 | result = [node_str] 95 | 96 | children = tree.get_children(node.key) 97 | children_strings = [ 98 | aux( 99 | c, 100 | node, 101 | indent=next_indent, 102 | cur_chain=[*cur_chain, c.project_name], 103 | prefix=next_prefix, 104 | depth=depth + 1, 105 | has_grand_parent=parent is not None, 106 | is_last_child=c is 
children[-1], 107 | parent_is_last_child=is_last_child, 108 | ) 109 | for c in children 110 | if c.project_name not in cur_chain and depth + 1 <= max_depth 111 | ] 112 | 113 | result += list(chain.from_iterable(children_strings)) 114 | return result 115 | 116 | lines = chain.from_iterable([aux(p) for p in nodes]) 117 | print("\n".join(lines)) # noqa: T201 118 | 119 | 120 | def _render_text_without_unicode( 121 | tree: PackageDAG, 122 | nodes: list[DistPackage], 123 | max_depth: float, 124 | include_license: bool, # noqa: FBT001 125 | ) -> None: 126 | def aux( 127 | node: DistPackage | ReqPackage, 128 | parent: DistPackage | ReqPackage | None = None, 129 | indent: int = 0, 130 | cur_chain: list[str] | None = None, 131 | depth: int = 0, 132 | ) -> list[Any]: 133 | cur_chain = cur_chain or [] 134 | node_str = node.render(parent, frozen=False) 135 | if parent: 136 | prefix = " " * indent + "- " 137 | node_str = prefix + node_str 138 | elif include_license: 139 | node_str += " " + node.licenses() 140 | result = [node_str] 141 | children = [ 142 | aux(c, node, indent=indent + 2, cur_chain=[*cur_chain, c.project_name], depth=depth + 1) 143 | for c in tree.get_children(node.key) 144 | if c.project_name not in cur_chain and depth + 1 <= max_depth 145 | ] 146 | result += list(chain.from_iterable(children)) 147 | return result 148 | 149 | lines = chain.from_iterable([aux(p) for p in nodes]) 150 | print("\n".join(lines)) # noqa: T201 151 | 152 | 153 | __all__ = ["get_top_level_nodes", "render_text"] 154 | -------------------------------------------------------------------------------- /src/pipdeptree/_validate.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from collections import defaultdict 5 | from typing import TYPE_CHECKING 6 | 7 | from pipdeptree._warning import get_warning_printer 8 | 9 | if TYPE_CHECKING: 10 | from pipdeptree._models.package import Package 11 | 12 | from 
._models import DistPackage, PackageDAG, ReqPackage 13 | 14 | 15 | def validate(tree: PackageDAG) -> None: 16 | # Before any reversing or filtering, show warnings to console, about possibly conflicting or cyclic deps if found 17 | # and warnings are enabled (i.e. only if output is to be printed to console) 18 | warning_printer = get_warning_printer() 19 | if warning_printer.should_warn(): 20 | conflicts = conflicting_deps(tree) 21 | if conflicts: 22 | warning_printer.print_multi_line( 23 | "Possibly conflicting dependencies found", lambda: render_conflicts_text(conflicts) 24 | ) 25 | 26 | cycles = cyclic_deps(tree) 27 | if cycles: 28 | warning_printer.print_multi_line("Cyclic dependencies found", lambda: render_cycles_text(cycles)) 29 | 30 | 31 | def conflicting_deps(tree: PackageDAG) -> dict[DistPackage, list[ReqPackage]]: 32 | """ 33 | Return dependencies which are not present or conflict with the requirements of other packages. 34 | 35 | e.g. will warn if pkg1 requires pkg2==2.0 and pkg2==1.0 is installed 36 | 37 | :param tree: the requirements tree (dict) 38 | :returns: dict of DistPackage -> list of unsatisfied/unknown ReqPackage 39 | :rtype: dict 40 | 41 | """ 42 | conflicting = defaultdict(list) 43 | for package, requires in tree.items(): 44 | for req in requires: 45 | if req.is_conflicting(): 46 | conflicting[package].append(req) 47 | return conflicting 48 | 49 | 50 | def render_conflicts_text(conflicts: dict[DistPackage, list[ReqPackage]]) -> None: 51 | # Enforce alphabetical order when listing conflicts 52 | pkgs = sorted(conflicts.keys()) 53 | for p in pkgs: 54 | pkg = p.render_as_root(frozen=False) 55 | print(f"* {pkg}", file=sys.stderr) # noqa: T201 56 | for req in conflicts[p]: 57 | req_str = req.render_as_branch(frozen=False) 58 | print(f" - {req_str}", file=sys.stderr) # noqa: T201 59 | 60 | 61 | def cyclic_deps(tree: PackageDAG) -> list[list[Package]]: 62 | """ 63 | Return cyclic dependencies as list of lists. 
64 | 65 | :param tree: package tree/dag 66 | :returns: list of lists, where each list represents a cycle 67 | 68 | """ 69 | 70 | def dfs(root: DistPackage, current: Package, visited: set[str], cdeps: list[Package]) -> bool: 71 | if current.key not in visited: 72 | visited.add(current.key) 73 | current_dist = tree.get_node_as_parent(current.key) 74 | if not current_dist: 75 | return False 76 | 77 | reqs = tree.get(current_dist) 78 | if not reqs: 79 | return False 80 | 81 | for req in reqs: 82 | if dfs(root, req, visited, cdeps): 83 | cdeps.append(current) 84 | return True 85 | elif current.key == root.key: 86 | cdeps.append(current) 87 | return True 88 | return False 89 | 90 | cycles: list[list[Package]] = [] 91 | 92 | for p in tree: 93 | cdeps: list[Package] = [] 94 | visited: set[str] = set() 95 | if dfs(p, p, visited, cdeps): 96 | cdeps.reverse() 97 | cycles.append(cdeps) 98 | 99 | return cycles 100 | 101 | 102 | def render_cycles_text(cycles: list[list[Package]]) -> None: 103 | # List in alphabetical order the dependency that caused the cycle (i.e. 
the second-to-last Package element) 104 | cycles = sorted(cycles, key=lambda c: c[len(c) - 2].key) 105 | for cycle in cycles: 106 | print("*", end=" ", file=sys.stderr) # noqa: T201 107 | 108 | size = len(cycle) - 1 109 | for idx, pkg in enumerate(cycle): 110 | if idx == size: 111 | print(f"{pkg.project_name}", end="", file=sys.stderr) # noqa: T201 112 | else: 113 | print(f"{pkg.project_name} =>", end=" ", file=sys.stderr) # noqa: T201 114 | print(file=sys.stderr) # noqa: T201 115 | 116 | 117 | __all__ = [ 118 | "validate", 119 | ] 120 | -------------------------------------------------------------------------------- /src/pipdeptree/_warning.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from enum import Enum 5 | from typing import Callable 6 | 7 | WarningType = Enum("WarningType", ["SILENCE", "SUPPRESS", "FAIL"]) 8 | 9 | 10 | class WarningPrinter: 11 | """Non-thread safe class that handles printing warning logic.""" 12 | 13 | def __init__(self, warning_type: WarningType = WarningType.SUPPRESS) -> None: 14 | self._warning_type = warning_type 15 | self._has_warned = False 16 | 17 | @property 18 | def warning_type(self) -> WarningType: 19 | return self._warning_type 20 | 21 | @warning_type.setter 22 | def warning_type(self, new_warning_type: WarningType) -> None: 23 | self._warning_type = new_warning_type 24 | 25 | def should_warn(self) -> bool: 26 | return self._warning_type != WarningType.SILENCE 27 | 28 | def has_warned_with_failure(self) -> bool: 29 | return self._has_warned and self.warning_type == WarningType.FAIL 30 | 31 | def print_single_line(self, line: str) -> None: 32 | self._has_warned = True 33 | print(line, file=sys.stderr) # noqa: T201 34 | 35 | def print_multi_line(self, summary: str, print_func: Callable[[], None], ignore_fail: bool = False) -> None: # noqa: FBT001, FBT002 36 | """ 37 | Print a multi-line warning, delegating most of the printing logic 
to the caller. 38 | 39 | :param summary: a summary of the warning 40 | :param print_func: a callback that the caller passes that performs most of the multi-line printing 41 | :param ignore_fail: if True, this warning won't be a fail when `self.warning_type == WarningType.FAIL` 42 | """ 43 | print(f"Warning!!! {summary}:", file=sys.stderr) # noqa: T201 44 | print_func() 45 | if ignore_fail: 46 | print("NOTE: This warning isn't a failure warning.", file=sys.stderr) # noqa: T201 47 | else: 48 | self._has_warned = True 49 | print("-" * 72, file=sys.stderr) # noqa: T201 50 | 51 | 52 | _shared_warning_printer = WarningPrinter() 53 | 54 | 55 | def get_warning_printer() -> WarningPrinter: 56 | """Shared warning printer, representing a module-level singleton object.""" 57 | return _shared_warning_printer 58 | 59 | 60 | __all__ = ["WarningPrinter", "get_warning_printer"] 61 | -------------------------------------------------------------------------------- /src/pipdeptree/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tox-dev/pipdeptree/ab3dc9419bacfa8e7672c0155ce8896e75d41171/src/pipdeptree/py.typed -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tox-dev/pipdeptree/ab3dc9419bacfa8e7672c0155ce8896e75d41171/tests/__init__.py -------------------------------------------------------------------------------- /tests/_models/test_dag.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from itertools import chain 4 | from typing import TYPE_CHECKING, Any, Callable 5 | 6 | import pytest 7 | 8 | from pipdeptree._models import DistPackage, PackageDAG, ReqPackage, ReversedPackageDAG 9 | 10 | if TYPE_CHECKING: 11 | from collections.abc import Iterator 12 | from 
unittest.mock import Mock 13 | 14 | from tests.our_types import MockGraph 15 | 16 | 17 | def test_package_dag_get_node_as_parent(example_dag: PackageDAG) -> None: 18 | node = example_dag.get_node_as_parent("b") 19 | assert node is not None 20 | assert node.key == "b" 21 | node = example_dag.get_node_as_parent("c") 22 | assert node is not None 23 | assert node.key == "c" 24 | 25 | 26 | @pytest.fixture(scope="session") 27 | def t_fnmatch(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> PackageDAG: 28 | graph: MockGraph = { 29 | ("a-a", "1"): [("a-b", []), ("a-c", [])], 30 | ("a-b", "1"): [("a-c", [])], 31 | ("b-a", "1"): [("b-b", [])], 32 | ("b-b", "1"): [("a-b", [])], 33 | } 34 | return PackageDAG.from_pkgs(list(mock_pkgs(graph))) 35 | 36 | 37 | def dag_to_dict(g: PackageDAG) -> dict[str, list[str]]: 38 | return {k.key: [v.key for v in vs] for k, vs in g._obj.items()} # noqa: SLF001 39 | 40 | 41 | def test_package_dag_filter_fnmatch_include_a(t_fnmatch: PackageDAG) -> None: 42 | # test include for a-*in the result we got only a-* nodes 43 | graph = dag_to_dict(t_fnmatch.filter_nodes(["a-*"], None)) 44 | assert graph == {"a-a": ["a-b", "a-c"], "a-b": ["a-c"]} 45 | 46 | 47 | def test_package_dag_filter_fnmatch_include_b(t_fnmatch: PackageDAG) -> None: 48 | # test include for b-*, which has a-b and a-c in tree, but not a-a 49 | # in the result we got the b-* nodes plus the a-b node as child in the tree 50 | graph = dag_to_dict(t_fnmatch.filter_nodes(["b-*"], None)) 51 | assert graph == {"b-a": ["b-b"], "b-b": ["a-b"], "a-b": ["a-c"]} 52 | 53 | 54 | def test_package_dag_filter_fnmatch_exclude_c(t_fnmatch: PackageDAG) -> None: 55 | # test exclude for b-* in the result we got only a-* nodes 56 | graph = dag_to_dict(t_fnmatch.filter_nodes(None, {"b-*"})) 57 | assert graph == {"a-a": ["a-b", "a-c"], "a-b": ["a-c"]} 58 | 59 | 60 | def test_package_dag_filter_fnmatch_exclude_a(t_fnmatch: PackageDAG) -> None: 61 | # test exclude for a-* in the result we got only b-* nodes 
62 | graph = dag_to_dict(t_fnmatch.filter_nodes(None, {"a-*"})) 63 | assert graph == {"b-a": ["b-b"], "b-b": []} 64 | 65 | 66 | def test_package_dag_filter_include_exclude_both_used(t_fnmatch: PackageDAG) -> None: 67 | with pytest.raises(AssertionError): 68 | t_fnmatch.filter_nodes(["a-a", "a-b"], {"a-b"}) 69 | 70 | 71 | def test_package_dag_filter_nonexistent_packages(t_fnmatch: PackageDAG) -> None: 72 | with pytest.raises(ValueError, match="No packages matched using the following patterns: x, y, z"): 73 | t_fnmatch.filter_nodes(["x", "y", "z"], None) 74 | 75 | 76 | def test_package_dag_filter_packages_uses_pep503normalize( 77 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], 78 | ) -> None: 79 | graph: MockGraph = { 80 | ("Pie.Pie", "1"): [], 81 | } 82 | pkgs = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 83 | pkgs = pkgs.filter_nodes(["Pie.Pie"], None) 84 | assert len(pkgs) == 1 85 | assert pkgs.get_node_as_parent("pie-pie") is not None 86 | 87 | pkgs = pkgs.filter_nodes(None, {"Pie.Pie"}) 88 | assert len(pkgs) == 0 89 | 90 | 91 | def test_package_dag_reverse(example_dag: PackageDAG) -> None: 92 | def sort_map_values(m: dict[str, Any]) -> dict[str, Any]: 93 | return {k: sorted(v) for k, v in m.items()} 94 | 95 | t1 = example_dag.reverse() 96 | expected = {"a": [], "b": ["a", "f"], "c": ["a"], "d": ["b", "c"], "e": ["c", "d", "g"], "f": ["g"], "g": []} 97 | assert isinstance(t1, ReversedPackageDAG) 98 | assert sort_map_values(expected) == sort_map_values(dag_to_dict(t1)) 99 | assert all(isinstance(k, ReqPackage) for k in t1) 100 | assert all(isinstance(v, DistPackage) for v in chain.from_iterable(t1.values())) 101 | 102 | # testing reversal of ReversedPackageDAG instance 103 | expected = {"a": ["b", "c"], "b": ["d"], "c": ["d", "e"], "d": ["e"], "e": [], "f": ["b"], "g": ["e", "f"]} 104 | t2 = t1.reverse() 105 | assert isinstance(t2, PackageDAG) 106 | assert sort_map_values(expected) == sort_map_values(dag_to_dict(t2)) 107 | assert all(isinstance(k, 
DistPackage) for k in t2) 108 | assert all(isinstance(v, ReqPackage) for v in chain.from_iterable(t2.values())) 109 | 110 | 111 | def test_package_dag_from_pkgs(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> None: 112 | # when pip's _vendor.packaging.requirements.Requirement's requires() gives a lowercased package name but the actual 113 | # package name in PyPI is mixed case, expect the mixed case version 114 | 115 | graph: MockGraph = { 116 | ("examplePy", "1.2.3"): [("hellopy", [(">=", "2.0.0")])], 117 | ("HelloPy", "2.2.0"): [], 118 | } 119 | package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 120 | parent_key = "examplepy" 121 | c = package_dag.get_children(parent_key) 122 | assert len(c) == 1 123 | assert c[0].project_name == "HelloPy" 124 | 125 | 126 | def test_package_dag_from_pkgs_uses_pep503normalize(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> None: 127 | # ensure that requirement gets matched with a dists even when it's key needs pep503 normalization to match 128 | 129 | graph: MockGraph = { 130 | ("parent-package", "1.2.3"): [("flufl.lock", [(">=", "2.0.0")])], 131 | ("flufl-lock", "2.2.0"): [], 132 | } 133 | package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 134 | parent_key = "parent-package" 135 | c = package_dag.get_children(parent_key) 136 | assert c[0].dist 137 | assert c[0].key == "flufl-lock" 138 | 139 | 140 | def test_package_from_pkgs_given_invalid_requirements( 141 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], capfd: pytest.CaptureFixture[str] 142 | ) -> None: 143 | graph: MockGraph = { 144 | ("a-package", "1.2.3"): [("BAD**requirement", [(">=", "2.0.0")])], 145 | } 146 | package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 147 | assert len(package_dag) == 1 148 | out, err = capfd.readouterr() 149 | assert not out 150 | assert err == ( 151 | "Warning!!! 
Invalid requirement strings found for the following distributions:\na-package\n " 152 | 'Skipping "BAD**requirement>=2.0.0"\n------------------------------------------------------------------------\n' 153 | ) 154 | -------------------------------------------------------------------------------- /tests/_models/test_package.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from importlib.metadata import PackageNotFoundError 4 | from typing import TYPE_CHECKING, Any 5 | from unittest.mock import MagicMock, Mock 6 | 7 | import pytest 8 | from packaging.specifiers import SpecifierSet 9 | 10 | from pipdeptree._models import DistPackage, ReqPackage 11 | from pipdeptree._models.package import Package 12 | 13 | if TYPE_CHECKING: 14 | from pytest_mock import MockerFixture 15 | 16 | 17 | def sort_map_values(m: dict[str, Any]) -> dict[str, Any]: 18 | return {k: sorted(v) for k, v in m.items()} 19 | 20 | 21 | def test_guess_version_setuptools(mocker: MockerFixture) -> None: 22 | mocker.patch("pipdeptree._models.package.version", side_effect=PackageNotFoundError) 23 | r = MagicMock() 24 | r.name = "setuptools" 25 | result = ReqPackage(r).installed_version 26 | assert result == "?" 
27 | 28 | 29 | def test_package_as_frozen_repr(mocker: MockerFixture) -> None: 30 | foo = Mock(metadata={"Name": "foo"}, version="1.2.3") 31 | dp = DistPackage(foo) 32 | expected = "test" 33 | mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) 34 | assert Package.as_frozen_repr(dp.unwrap()) == expected 35 | 36 | 37 | def test_dist_package_requires() -> None: 38 | foo = Mock( 39 | metadata={"Name": "foo"}, 40 | requires=["bar", "baz >=2.7.2"], 41 | ) 42 | dp = DistPackage(foo) 43 | reqs = list(dp.requires()) 44 | assert len(reqs) == 2 45 | 46 | 47 | def test_dist_package_requires_with_environment_markers_that_eval_to_false() -> None: 48 | foo = Mock( 49 | metadata={"Name": "foo"}, 50 | requires=['foo ; sys_platform == "NoexistOS"', "bar >=2.7.2 ; extra == 'testing'"], 51 | ) 52 | dp = DistPackage(foo) 53 | reqs = list(dp.requires()) 54 | assert len(reqs) == 0 55 | 56 | 57 | def test_dist_package_render_as_root() -> None: 58 | foo = Mock(metadata={"Name": "foo"}, version="20.4.1") 59 | dp = DistPackage(foo) 60 | assert dp.render_as_root(frozen=False) == "foo==20.4.1" 61 | 62 | 63 | def test_dist_package_render_as_branch() -> None: 64 | foo = Mock(metadata={"Name": "foo"}, version="20.4.1") 65 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 66 | bar_req = MagicMock(version="4.1.0", specifier=[">=4.0"]) 67 | bar_req.name = "bar" 68 | rp = ReqPackage(bar_req, dist=bar) 69 | dp = DistPackage(foo).as_parent_of(rp) 70 | assert dp.render_as_branch(frozen=False) == "foo==20.4.1 [requires: bar>=4.0]" 71 | 72 | 73 | def test_dist_package_render_as_root_with_frozen(mocker: MockerFixture) -> None: 74 | foo = Mock(metadata={"Name": "foo"}, version="1.2.3") 75 | dp = DistPackage(foo) 76 | expected = "test" 77 | mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) 78 | assert dp.render_as_root(frozen=True) == expected 79 | 80 | 81 | def test_dist_package_as_parent_of() -> None: 82 | foo = 
Mock(metadata={"Name": "foo"}, version="20.4.1") 83 | dp = DistPackage(foo) 84 | assert dp.req is None 85 | 86 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 87 | bar_req = MagicMock(version="4.1.0", specifier=[">=4.0"]) 88 | bar_req.name = "bar" 89 | rp = ReqPackage(bar_req, dist=bar) 90 | dp1 = dp.as_parent_of(rp) 91 | assert dp1._obj == dp._obj # noqa: SLF001 92 | assert dp1.req is rp 93 | 94 | dp2 = dp.as_parent_of(None) 95 | assert dp2 is dp 96 | 97 | 98 | def test_dist_package_as_dict() -> None: 99 | foo = Mock(metadata={"Name": "foo"}, version="1.3.2b1") 100 | dp = DistPackage(foo) 101 | result = dp.as_dict() 102 | expected = {"key": "foo", "package_name": "foo", "installed_version": "1.3.2b1"} 103 | assert expected == result 104 | 105 | 106 | @pytest.mark.parametrize( 107 | ("mocked_metadata", "expected_output"), 108 | [ 109 | pytest.param( 110 | Mock(get=Mock(return_value=None), get_all=Mock(return_value=[])), 111 | Package.UNKNOWN_LICENSE_STR, 112 | id="no-license", 113 | ), 114 | pytest.param( 115 | Mock( 116 | get=Mock(return_value=None), 117 | get_all=Mock( 118 | return_value=[ 119 | "License :: OSI Approved :: GNU General Public License v2 (GPLv2)", 120 | "Operating System :: OS Independent", 121 | ] 122 | ), 123 | ), 124 | "(GNU General Public License v2 (GPLv2))", 125 | id="one-license-with-one-non-license", 126 | ), 127 | pytest.param( 128 | Mock( 129 | get=Mock(return_value=None), 130 | get_all=Mock( 131 | return_value=[ 132 | "License :: OSI Approved :: GNU General Public License v2 (GPLv2)", 133 | "License :: OSI Approved :: Apache Software License", 134 | ] 135 | ), 136 | ), 137 | "(GNU General Public License v2 (GPLv2), Apache Software License)", 138 | id="more-than-one-license", 139 | ), 140 | pytest.param( 141 | Mock(get=Mock(return_value="MIT"), get_all=Mock(return_value=[])), 142 | "(MIT)", 143 | id="license-expression", 144 | ), 145 | pytest.param( 146 | Mock( 147 | get=Mock(return_value="MIT"), 148 | get_all=Mock( 149 | 
return_value=[ 150 | "License :: OSI Approved :: MIT License", 151 | ] 152 | ), 153 | ), 154 | "(MIT)", 155 | id="license-expression-with-license-classifier", 156 | ), 157 | ], 158 | ) 159 | def test_dist_package_licenses(mocked_metadata: Mock, expected_output: str, monkeypatch: pytest.MonkeyPatch) -> None: 160 | monkeypatch.setattr("pipdeptree._models.package.metadata", Mock(return_value=mocked_metadata)) 161 | dist = DistPackage(Mock(metadata={"Name": "a"})) 162 | licenses_str = dist.licenses() 163 | 164 | assert licenses_str == expected_output 165 | 166 | 167 | def test_dist_package_licenses_importlib_cant_find_package(monkeypatch: pytest.MonkeyPatch) -> None: 168 | monkeypatch.setattr("pipdeptree._models.package.metadata", Mock(side_effect=PackageNotFoundError())) 169 | dist = DistPackage(Mock(metadata={"Name": "a"})) 170 | licenses_str = dist.licenses() 171 | 172 | assert licenses_str == Package.UNKNOWN_LICENSE_STR 173 | 174 | 175 | def test_dist_package_key_pep503_normalized() -> None: 176 | foobar = Mock(metadata={"Name": "foo.bar"}, version="20.4.1") 177 | dp = DistPackage(foobar) 178 | assert dp.key == "foo-bar" 179 | 180 | 181 | def test_req_package_key_pep503_normalized() -> None: 182 | bar_req = MagicMock(specifier=[">=4.0"]) 183 | bar_req.name = "bar.bar-bar-bar" 184 | rp = ReqPackage(bar_req) 185 | assert rp.key == "bar-bar-bar-bar" 186 | 187 | 188 | def test_req_package_render_as_root() -> None: 189 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 190 | bar_req = MagicMock(specifier=[">=4.0"]) 191 | bar_req.name = "bar" 192 | rp = ReqPackage(bar_req, dist=bar) 193 | assert rp.render_as_root(frozen=False) == "bar==4.1.0" 194 | 195 | 196 | def test_req_package_render_as_root_with_frozen(mocker: MockerFixture) -> None: 197 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 198 | dp = DistPackage(bar) 199 | bar_req = MagicMock(specifier=[">=4.0"]) 200 | bar_req.name = "bar" 201 | rp = ReqPackage(bar_req, dp) 202 | expected = "test" 203 | 
mocker.patch("pipdeptree._models.package.dist_to_frozen_repr", Mock(return_value=expected)) 204 | assert rp.render_as_root(frozen=True) == expected 205 | 206 | 207 | def test_req_package_render_as_branch() -> None: 208 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 209 | bar_req = MagicMock(specifier=[">=4.0"]) 210 | bar_req.name = "bar" 211 | rp = ReqPackage(bar_req, dist=bar) 212 | assert rp.render_as_branch(frozen=False) == "bar [required: >=4.0, installed: 4.1.0]" 213 | 214 | 215 | def test_req_package_is_conflicting_handle_dev_versions() -> None: 216 | # ensure that we can handle development versions when detecting conflicts 217 | # see https://github.com/tox-dev/pipdeptree/issues/393 218 | bar = Mock(metadata={"Name": "bar"}, version="1.2.3.dev0") 219 | bar_req = MagicMock(specifier=SpecifierSet(">1.2.0")) 220 | bar_req.name = "bar" 221 | rp = ReqPackage(bar_req, dist=bar) 222 | assert not rp.is_conflicting() 223 | 224 | 225 | def test_req_package_as_dict() -> None: 226 | bar = Mock(metadata={"Name": "bar"}, version="4.1.0") 227 | bar_req = MagicMock(specifier=[">=4.0"]) 228 | bar_req.name = "bar" 229 | rp = ReqPackage(bar_req, dist=bar) 230 | result = rp.as_dict() 231 | expected = {"key": "bar", "package_name": "bar", "installed_version": "4.1.0", "required_version": ">=4.0"} 232 | assert expected == result 233 | 234 | 235 | def test_req_package_as_dict_with_no_version_spec() -> None: 236 | bar = Mock(key="bar", version="4.1.0") 237 | bar_req = MagicMock(specifier=[]) 238 | bar_req.name = "bar" 239 | rp = ReqPackage(bar_req, dist=bar) 240 | result = rp.as_dict() 241 | expected = {"key": "bar", "package_name": "bar", "installed_version": "4.1.0", "required_version": "Any"} 242 | assert expected == result 243 | -------------------------------------------------------------------------------- /tests/conftest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import locale 4 
| from pathlib import Path 5 | from random import shuffle 6 | from typing import TYPE_CHECKING, Callable 7 | from unittest.mock import Mock 8 | 9 | import pytest 10 | 11 | from pipdeptree._models import PackageDAG 12 | 13 | if TYPE_CHECKING: 14 | from collections.abc import Iterator 15 | 16 | from tests.our_types import MockGraph 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def mock_pkgs() -> Callable[[MockGraph], Iterator[Mock]]: 21 | def func(simple_graph: MockGraph) -> Iterator[Mock]: 22 | for node, children in simple_graph.items(): 23 | nk, nv = node 24 | m = Mock(metadata={"Name": nk}, version=nv) 25 | reqs = [] 26 | for ck, cv in children: 27 | r = ck 28 | for item in cv: 29 | if item: 30 | rs, rv = item 31 | r = r + rs + rv 32 | if item != cv[-1]: 33 | r += "," 34 | reqs.append(r) 35 | m.requires = reqs 36 | yield m 37 | 38 | return func 39 | 40 | 41 | @pytest.fixture 42 | def example_dag(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> PackageDAG: 43 | packages: MockGraph = { 44 | ("a", "3.4.0"): [("b", [(">=", "2.0.0")]), ("c", [(">=", "5.7.1")])], 45 | ("b", "2.3.1"): [("d", [(">=", "2.30"), ("<", "2.42")])], 46 | ("c", "5.10.0"): [("d", [(">=", "2.30")]), ("e", [(">=", "0.12.1")])], 47 | ("d", "2.35"): [("e", [(">=", "0.9.0")])], 48 | ("e", "0.12.1"): [], 49 | ("f", "3.1"): [("b", [(">=", "2.1.0")])], 50 | ("g", "6.8.3rc1"): [("e", [(">=", "0.9.0")]), ("f", [(">=", "3.0.0")])], 51 | } 52 | return PackageDAG.from_pkgs(list(mock_pkgs(packages))) 53 | 54 | 55 | @pytest.fixture 56 | def randomized_example_dag(example_dag: PackageDAG) -> PackageDAG: 57 | """Returns a copy of the package tree fixture with dependencies in randomized order.""" 58 | # Extract the dependency graph from the package tree and randomize it. 
59 | randomized_graph = {} 60 | randomized_nodes = list(example_dag._obj.keys()) # noqa: SLF001 61 | shuffle(randomized_nodes) 62 | for node in randomized_nodes: 63 | edges = example_dag._obj[node].copy() # noqa: SLF001 64 | shuffle(edges) 65 | randomized_graph[node] = edges 66 | assert set(randomized_graph) == set(example_dag._obj) # noqa: SLF001 67 | 68 | # Create a randomized package tree. 69 | randomized_dag = PackageDAG(randomized_graph) 70 | assert len(example_dag) == len(randomized_dag) 71 | return randomized_dag 72 | 73 | 74 | @pytest.fixture 75 | def fake_dist(tmp_path: Path) -> Path: 76 | """Creates a fake site package (that you get using Path.parent) and a fake dist-info called bar-2.4.5.dist-info.""" 77 | fake_site_pkgs = tmp_path / "site-packages" 78 | fake_dist_path = fake_site_pkgs / "bar-2.4.5.dist-info" 79 | fake_dist_path.mkdir(parents=True) 80 | fake_metadata = Path(fake_dist_path) / "METADATA" 81 | with fake_metadata.open("w", encoding=locale.getpreferredencoding(False)) as f: 82 | f.write("Metadata-Version: 2.3\nName: bar\nVersion: 2.4.5\n") 83 | 84 | return fake_dist_path 85 | 86 | 87 | @pytest.fixture 88 | def fake_dist_with_invalid_metadata(tmp_path: Path) -> Path: 89 | "Similar to `fake_dist()`, but creates an invalid METADATA file." 
90 | fake_site_pkgs = tmp_path / "site-packages" 91 | fake_dist_path = fake_site_pkgs / "bar-2.4.5.dist-info" 92 | fake_dist_path.mkdir(parents=True) 93 | fake_metadata = Path(fake_dist_path) / "METADATA" 94 | fake_metadata.touch() 95 | return fake_dist_path 96 | -------------------------------------------------------------------------------- /tests/our_types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | MockGraph = dict[tuple[str, str], list[tuple[str, list[tuple[str, str]]]]] # pragma: no cover 4 | 5 | __all__ = [ 6 | "MockGraph", 7 | ] 8 | -------------------------------------------------------------------------------- /tests/render/test_freeze.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from math import inf 4 | from typing import TYPE_CHECKING 5 | from unittest.mock import PropertyMock 6 | 7 | import pytest 8 | 9 | from pipdeptree._freeze import PipBaseDistributionAdapter 10 | from pipdeptree._render.freeze import render_freeze 11 | 12 | if TYPE_CHECKING: 13 | from pipdeptree._models.dag import PackageDAG 14 | 15 | 16 | @pytest.fixture 17 | def patch_pip_adapter(monkeypatch: pytest.MonkeyPatch) -> None: 18 | """ 19 | Patches `PipBaseDistributionAdapter` such that `editable` returns `False` and `direct_url` returns `None`. 20 | 21 | This will have the pip API always return a frozen req in the "name==version" format. 
22 | """ 23 | monkeypatch.setattr(PipBaseDistributionAdapter, "editable", PropertyMock(return_value=False)) 24 | monkeypatch.setattr(PipBaseDistributionAdapter, "direct_url", PropertyMock(return_value=None)) 25 | 26 | 27 | @pytest.mark.parametrize( 28 | ("list_all", "expected_output"), 29 | [ 30 | ( 31 | True, 32 | [ 33 | "a==3.4.0", 34 | " b==2.3.1", 35 | " d==2.35", 36 | " e==0.12.1", 37 | " c==5.10.0", 38 | " d==2.35", 39 | " e==0.12.1", 40 | " e==0.12.1", 41 | "b==2.3.1", 42 | " d==2.35", 43 | " e==0.12.1", 44 | "c==5.10.0", 45 | " d==2.35", 46 | " e==0.12.1", 47 | " e==0.12.1", 48 | "d==2.35", 49 | " e==0.12.1", 50 | "e==0.12.1", 51 | "f==3.1", 52 | " b==2.3.1", 53 | " d==2.35", 54 | " e==0.12.1", 55 | "g==6.8.3rc1", 56 | " e==0.12.1", 57 | " f==3.1", 58 | " b==2.3.1", 59 | " d==2.35", 60 | " e==0.12.1", 61 | ], 62 | ), 63 | ( 64 | False, 65 | [ 66 | "a==3.4.0", 67 | " b==2.3.1", 68 | " d==2.35", 69 | " e==0.12.1", 70 | " c==5.10.0", 71 | " d==2.35", 72 | " e==0.12.1", 73 | " e==0.12.1", 74 | "g==6.8.3rc1", 75 | " e==0.12.1", 76 | " f==3.1", 77 | " b==2.3.1", 78 | " d==2.35", 79 | " e==0.12.1", 80 | ], 81 | ), 82 | ], 83 | ) 84 | @pytest.mark.usefixtures("patch_pip_adapter") 85 | def test_render_freeze( 86 | example_dag: PackageDAG, 87 | capsys: pytest.CaptureFixture[str], 88 | list_all: bool, 89 | expected_output: list[str], 90 | ) -> None: 91 | render_freeze(example_dag, max_depth=inf, list_all=list_all) 92 | captured = capsys.readouterr() 93 | assert "\n".join(expected_output).strip() == captured.out.strip() 94 | 95 | 96 | @pytest.mark.parametrize( 97 | ("depth", "expected_output"), 98 | [ 99 | ( 100 | 0, 101 | [ 102 | "a==3.4.0", 103 | "b==2.3.1", 104 | "c==5.10.0", 105 | "d==2.35", 106 | "e==0.12.1", 107 | "f==3.1", 108 | "g==6.8.3rc1", 109 | ], 110 | ), 111 | ( 112 | 2, 113 | [ 114 | "a==3.4.0", 115 | " b==2.3.1", 116 | " d==2.35", 117 | " c==5.10.0", 118 | " d==2.35", 119 | " e==0.12.1", 120 | "b==2.3.1", 121 | " d==2.35", 122 | " e==0.12.1", 123 | 
"c==5.10.0", 124 | " d==2.35", 125 | " e==0.12.1", 126 | " e==0.12.1", 127 | "d==2.35", 128 | " e==0.12.1", 129 | "e==0.12.1", 130 | "f==3.1", 131 | " b==2.3.1", 132 | " d==2.35", 133 | "g==6.8.3rc1", 134 | " e==0.12.1", 135 | " f==3.1", 136 | " b==2.3.1", 137 | ], 138 | ), 139 | ], 140 | ) 141 | @pytest.mark.usefixtures("patch_pip_adapter") 142 | def test_render_freeze_given_depth( 143 | example_dag: PackageDAG, 144 | capsys: pytest.CaptureFixture[str], 145 | depth: int, 146 | expected_output: list[str], 147 | ) -> None: 148 | render_freeze(example_dag, max_depth=depth) 149 | captured = capsys.readouterr() 150 | assert "\n".join(expected_output).strip() == captured.out.strip() 151 | -------------------------------------------------------------------------------- /tests/render/test_graphviz.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from textwrap import dedent 5 | from typing import TYPE_CHECKING 6 | 7 | import pytest 8 | 9 | from pipdeptree._render.graphviz import dump_graphviz, print_graphviz 10 | 11 | if TYPE_CHECKING: 12 | from pathlib import Path 13 | 14 | from pytest_mock import MockerFixture 15 | 16 | from pipdeptree._models import PackageDAG 17 | 18 | 19 | def test_render_dot( 20 | capsys: pytest.CaptureFixture[str], 21 | example_dag: PackageDAG, 22 | randomized_example_dag: PackageDAG, 23 | ) -> None: 24 | # Check both the sorted and randomized package tree produces the same sorted graphviz output. 
25 | for package_tree in (example_dag, randomized_example_dag): 26 | output = dump_graphviz(package_tree, output_format="dot") 27 | print_graphviz(output) 28 | out, _ = capsys.readouterr() 29 | assert out == dedent( 30 | """\ 31 | digraph { 32 | \ta -> b [label=">=2.0.0"] 33 | \ta -> c [label=">=5.7.1"] 34 | \ta [label="a\\n3.4.0"] 35 | \tb -> d [label=">=2.30,<2.42"] 36 | \tb [label="b\\n2.3.1"] 37 | \tc -> d [label=">=2.30"] 38 | \tc -> e [label=">=0.12.1"] 39 | \tc [label="c\\n5.10.0"] 40 | \td -> e [label=">=0.9.0"] 41 | \td [label="d\\n2.35"] 42 | \te [label="e\\n0.12.1"] 43 | \tf -> b [label=">=2.1.0"] 44 | \tf [label="f\\n3.1"] 45 | \tg -> e [label=">=0.9.0"] 46 | \tg -> f [label=">=3.0.0"] 47 | \tg [label="g\\n6.8.3rc1"] 48 | } 49 | 50 | """, 51 | ) 52 | 53 | 54 | def test_render_pdf(tmp_path: Path, mocker: MockerFixture, example_dag: PackageDAG) -> None: 55 | output = dump_graphviz(example_dag, output_format="pdf") 56 | res = tmp_path / "file" 57 | with pytest.raises(OSError, match="Bad file"): # noqa: PT012, SIM117 # because we reopen the file 58 | with res.open("wb") as buf: 59 | mocker.patch.object(sys, "stdout", buf) 60 | print_graphviz(output) 61 | assert res.read_bytes()[:4] == b"%PDF" 62 | 63 | 64 | def test_render_svg(capsys: pytest.CaptureFixture[str], example_dag: PackageDAG) -> None: 65 | output = dump_graphviz(example_dag, output_format="svg") 66 | print_graphviz(output) 67 | out, _ = capsys.readouterr() 68 | assert out.startswith("") 71 | -------------------------------------------------------------------------------- /tests/render/test_json_tree.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Callable 4 | 5 | import pytest 6 | 7 | from pipdeptree._models.dag import PackageDAG 8 | from pipdeptree._render.json_tree import render_json_tree 9 | 10 | if TYPE_CHECKING: 11 | from collections.abc import Iterator 12 | from 
unittest.mock import Mock 13 | 14 | from tests.our_types import MockGraph 15 | 16 | 17 | @pytest.mark.parametrize( 18 | ("version_spec_tuple", "expected_version_spec"), 19 | [ 20 | pytest.param((), "Any"), 21 | pytest.param((">=", "2.0.0"), ">=2.0.0"), 22 | ], 23 | ) 24 | def test_json_tree_given_req_package_with_version_spec( 25 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], 26 | version_spec_tuple: tuple[str, str], 27 | expected_version_spec: str, 28 | ) -> None: 29 | graph: MockGraph = { 30 | ("a", "1.2.3"): [("b", [version_spec_tuple])], 31 | ("b", "2.2.0"): [], 32 | } 33 | package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 34 | json_tree_str = render_json_tree(package_dag) 35 | assert json_tree_str.find(expected_version_spec) != -1 36 | -------------------------------------------------------------------------------- /tests/render/test_mermaid.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from textwrap import dedent, indent 4 | from typing import TYPE_CHECKING, Callable 5 | 6 | from pipdeptree._models import PackageDAG 7 | from pipdeptree._render.mermaid import render_mermaid 8 | 9 | if TYPE_CHECKING: 10 | from collections.abc import Iterator 11 | from unittest.mock import Mock 12 | 13 | from tests.our_types import MockGraph 14 | 15 | 16 | def test_render_mermaid(example_dag: PackageDAG, randomized_example_dag: PackageDAG) -> None: 17 | """Check both the sorted and randomized package tree produces the same sorted Mermaid output. 18 | 19 | Rendering a reverse dependency tree should produce the same set of nodes. Edges should have the same version spec 20 | label, but be resorted after swapping node positions. 21 | 22 | `See how this renders `_. 
23 | 24 | """ 25 | 26 | nodes = dedent( 27 | """\ 28 | flowchart TD 29 | classDef missing stroke-dasharray: 5 30 | a["a\\n3.4.0"] 31 | b["b\\n2.3.1"] 32 | c["c\\n5.10.0"] 33 | d["d\\n2.35"] 34 | e["e\\n0.12.1"] 35 | f["f\\n3.1"] 36 | g["g\\n6.8.3rc1"] 37 | """, 38 | ) 39 | dependency_edges = indent( 40 | dedent( 41 | """\ 42 | a -- ">=2.0.0" --> b 43 | a -- ">=5.7.1" --> c 44 | b -- ">=2.30,<2.42" --> d 45 | c -- ">=0.12.1" --> e 46 | c -- ">=2.30" --> d 47 | d -- ">=0.9.0" --> e 48 | f -- ">=2.1.0" --> b 49 | g -- ">=0.9.0" --> e 50 | g -- ">=3.0.0" --> f 51 | """, 52 | ), 53 | " " * 4, 54 | ).rstrip() 55 | reverse_dependency_edges = indent( 56 | dedent( 57 | """\ 58 | b -- ">=2.0.0" --> a 59 | b -- ">=2.1.0" --> f 60 | c -- ">=5.7.1" --> a 61 | d -- ">=2.30" --> c 62 | d -- ">=2.30,<2.42" --> b 63 | e -- ">=0.12.1" --> c 64 | e -- ">=0.9.0" --> d 65 | e -- ">=0.9.0" --> g 66 | f -- ">=3.0.0" --> g 67 | """, 68 | ), 69 | " " * 4, 70 | ).rstrip() 71 | 72 | for package_tree in (example_dag, randomized_example_dag): 73 | output = render_mermaid(package_tree) 74 | assert output.rstrip() == nodes + dependency_edges 75 | reversed_output = render_mermaid(package_tree.reverse()) 76 | assert reversed_output.rstrip() == nodes + reverse_dependency_edges 77 | 78 | 79 | def test_mermaid_reserved_ids(mock_pkgs: Callable[[MockGraph], Iterator[Mock]]) -> None: 80 | graph = {("click", "3.4.0"): [("click-extra", [(">=", "2.0.0")])]} 81 | package_tree = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 82 | output = render_mermaid(package_tree) 83 | assert output == dedent( 84 | """\ 85 | flowchart TD 86 | classDef missing stroke-dasharray: 5 87 | click-extra["click-extra\\n(missing)"]:::missing 88 | click_0["click\\n3.4.0"] 89 | click_0 -.-> click-extra 90 | """, 91 | ) 92 | -------------------------------------------------------------------------------- /tests/render/test_render.py: -------------------------------------------------------------------------------- 1 | from __future__ 
import annotations 2 | 3 | from math import inf 4 | from typing import TYPE_CHECKING 5 | from unittest.mock import ANY 6 | 7 | from pipdeptree.__main__ import main 8 | 9 | if TYPE_CHECKING: 10 | from pytest_mock import MockerFixture 11 | 12 | 13 | def test_json_routing(mocker: MockerFixture) -> None: 14 | render = mocker.patch("pipdeptree._render.render_json") 15 | main(["--json"]) 16 | render.assert_called_once_with(ANY) 17 | 18 | 19 | def test_json_tree_routing(mocker: MockerFixture) -> None: 20 | render = mocker.patch("pipdeptree._render.render_json_tree") 21 | main(["--json-tree"]) 22 | render.assert_called_once_with(ANY) 23 | 24 | 25 | def test_mermaid_routing(mocker: MockerFixture) -> None: 26 | render = mocker.patch("pipdeptree._render.render_mermaid") 27 | main(["--mermaid"]) 28 | render.assert_called_once_with(ANY) 29 | 30 | 31 | def test_grahpviz_routing(mocker: MockerFixture) -> None: 32 | render = mocker.patch("pipdeptree._render.render_graphviz") 33 | main(["--graph-output", "dot"]) 34 | render.assert_called_once_with(ANY, output_format="dot", reverse=False) 35 | 36 | 37 | def test_text_routing(mocker: MockerFixture) -> None: 38 | render = mocker.patch("pipdeptree._render.render_text") 39 | main([]) 40 | render.assert_called_once_with(ANY, encoding="utf-8", max_depth=inf, list_all=False, include_license=False) 41 | 42 | 43 | def test_freeze_routing(mocker: MockerFixture) -> None: 44 | render = mocker.patch("pipdeptree._render.render_freeze") 45 | main(["--freeze"]) 46 | render.assert_called_once_with(ANY, max_depth=inf, list_all=False) 47 | -------------------------------------------------------------------------------- /tests/render/test_text.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from typing import TYPE_CHECKING, Callable 4 | 5 | import pytest 6 | 7 | from pipdeptree._models import PackageDAG 8 | from pipdeptree._models.package import Package 9 | from 
pipdeptree._render.text import render_text 10 | 11 | if TYPE_CHECKING: 12 | from collections.abc import Iterator 13 | from unittest.mock import Mock 14 | 15 | from tests.our_types import MockGraph 16 | 17 | 18 | @pytest.mark.parametrize( 19 | ("list_all", "reverse", "unicode", "expected_output"), 20 | [ 21 | ( 22 | True, 23 | False, 24 | True, 25 | [ 26 | "a==3.4.0", 27 | "├── b [required: >=2.0.0, installed: 2.3.1]", 28 | "│ └── d [required: >=2.30,<2.42, installed: 2.35]", 29 | "│ └── e [required: >=0.9.0, installed: 0.12.1]", 30 | "└── c [required: >=5.7.1, installed: 5.10.0]", 31 | " ├── d [required: >=2.30, installed: 2.35]", 32 | " │ └── e [required: >=0.9.0, installed: 0.12.1]", 33 | " └── e [required: >=0.12.1, installed: 0.12.1]", 34 | "b==2.3.1", 35 | "└── d [required: >=2.30,<2.42, installed: 2.35]", 36 | " └── e [required: >=0.9.0, installed: 0.12.1]", 37 | "c==5.10.0", 38 | "├── d [required: >=2.30, installed: 2.35]", 39 | "│ └── e [required: >=0.9.0, installed: 0.12.1]", 40 | "└── e [required: >=0.12.1, installed: 0.12.1]", 41 | "d==2.35", 42 | "└── e [required: >=0.9.0, installed: 0.12.1]", 43 | "e==0.12.1", 44 | "f==3.1", 45 | "└── b [required: >=2.1.0, installed: 2.3.1]", 46 | " └── d [required: >=2.30,<2.42, installed: 2.35]", 47 | " └── e [required: >=0.9.0, installed: 0.12.1]", 48 | "g==6.8.3rc1", 49 | "├── e [required: >=0.9.0, installed: 0.12.1]", 50 | "└── f [required: >=3.0.0, installed: 3.1]", 51 | " └── b [required: >=2.1.0, installed: 2.3.1]", 52 | " └── d [required: >=2.30,<2.42, installed: 2.35]", 53 | " └── e [required: >=0.9.0, installed: 0.12.1]", 54 | ], 55 | ), 56 | ( 57 | True, 58 | True, 59 | True, 60 | [ 61 | "a==3.4.0", 62 | "b==2.3.1", 63 | "├── a==3.4.0 [requires: b>=2.0.0]", 64 | "└── f==3.1 [requires: b>=2.1.0]", 65 | " └── g==6.8.3rc1 [requires: f>=3.0.0]", 66 | "c==5.10.0", 67 | "└── a==3.4.0 [requires: c>=5.7.1]", 68 | "d==2.35", 69 | "├── b==2.3.1 [requires: d>=2.30,<2.42]", 70 | "│ ├── a==3.4.0 [requires: b>=2.0.0]", 
71 | "│ └── f==3.1 [requires: b>=2.1.0]", 72 | "│ └── g==6.8.3rc1 [requires: f>=3.0.0]", 73 | "└── c==5.10.0 [requires: d>=2.30]", 74 | " └── a==3.4.0 [requires: c>=5.7.1]", 75 | "e==0.12.1", 76 | "├── c==5.10.0 [requires: e>=0.12.1]", 77 | "│ └── a==3.4.0 [requires: c>=5.7.1]", 78 | "├── d==2.35 [requires: e>=0.9.0]", 79 | "│ ├── b==2.3.1 [requires: d>=2.30,<2.42]", 80 | "│ │ ├── a==3.4.0 [requires: b>=2.0.0]", 81 | "│ │ └── f==3.1 [requires: b>=2.1.0]", 82 | "│ │ └── g==6.8.3rc1 [requires: f>=3.0.0]", 83 | "│ └── c==5.10.0 [requires: d>=2.30]", 84 | "│ └── a==3.4.0 [requires: c>=5.7.1]", 85 | "└── g==6.8.3rc1 [requires: e>=0.9.0]", 86 | "f==3.1", 87 | "└── g==6.8.3rc1 [requires: f>=3.0.0]", 88 | "g==6.8.3rc1", 89 | ], 90 | ), 91 | ( 92 | False, 93 | False, 94 | True, 95 | [ 96 | "a==3.4.0", 97 | "├── b [required: >=2.0.0, installed: 2.3.1]", 98 | "│ └── d [required: >=2.30,<2.42, installed: 2.35]", 99 | "│ └── e [required: >=0.9.0, installed: 0.12.1]", 100 | "└── c [required: >=5.7.1, installed: 5.10.0]", 101 | " ├── d [required: >=2.30, installed: 2.35]", 102 | " │ └── e [required: >=0.9.0, installed: 0.12.1]", 103 | " └── e [required: >=0.12.1, installed: 0.12.1]", 104 | "g==6.8.3rc1", 105 | "├── e [required: >=0.9.0, installed: 0.12.1]", 106 | "└── f [required: >=3.0.0, installed: 3.1]", 107 | " └── b [required: >=2.1.0, installed: 2.3.1]", 108 | " └── d [required: >=2.30,<2.42, installed: 2.35]", 109 | " └── e [required: >=0.9.0, installed: 0.12.1]", 110 | ], 111 | ), 112 | ( 113 | False, 114 | True, 115 | True, 116 | [ 117 | "e==0.12.1", 118 | "├── c==5.10.0 [requires: e>=0.12.1]", 119 | "│ └── a==3.4.0 [requires: c>=5.7.1]", 120 | "├── d==2.35 [requires: e>=0.9.0]", 121 | "│ ├── b==2.3.1 [requires: d>=2.30,<2.42]", 122 | "│ │ ├── a==3.4.0 [requires: b>=2.0.0]", 123 | "│ │ └── f==3.1 [requires: b>=2.1.0]", 124 | "│ │ └── g==6.8.3rc1 [requires: f>=3.0.0]", 125 | "│ └── c==5.10.0 [requires: d>=2.30]", 126 | "│ └── a==3.4.0 [requires: c>=5.7.1]", 127 | "└── 
g==6.8.3rc1 [requires: e>=0.9.0]", 128 | ], 129 | ), 130 | ( 131 | True, 132 | False, 133 | False, 134 | [ 135 | "a==3.4.0", 136 | " - b [required: >=2.0.0, installed: 2.3.1]", 137 | " - d [required: >=2.30,<2.42, installed: 2.35]", 138 | " - e [required: >=0.9.0, installed: 0.12.1]", 139 | " - c [required: >=5.7.1, installed: 5.10.0]", 140 | " - d [required: >=2.30, installed: 2.35]", 141 | " - e [required: >=0.9.0, installed: 0.12.1]", 142 | " - e [required: >=0.12.1, installed: 0.12.1]", 143 | "b==2.3.1", 144 | " - d [required: >=2.30,<2.42, installed: 2.35]", 145 | " - e [required: >=0.9.0, installed: 0.12.1]", 146 | "c==5.10.0", 147 | " - d [required: >=2.30, installed: 2.35]", 148 | " - e [required: >=0.9.0, installed: 0.12.1]", 149 | " - e [required: >=0.12.1, installed: 0.12.1]", 150 | "d==2.35", 151 | " - e [required: >=0.9.0, installed: 0.12.1]", 152 | "e==0.12.1", 153 | "f==3.1", 154 | " - b [required: >=2.1.0, installed: 2.3.1]", 155 | " - d [required: >=2.30,<2.42, installed: 2.35]", 156 | " - e [required: >=0.9.0, installed: 0.12.1]", 157 | "g==6.8.3rc1", 158 | " - e [required: >=0.9.0, installed: 0.12.1]", 159 | " - f [required: >=3.0.0, installed: 3.1]", 160 | " - b [required: >=2.1.0, installed: 2.3.1]", 161 | " - d [required: >=2.30,<2.42, installed: 2.35]", 162 | " - e [required: >=0.9.0, installed: 0.12.1]", 163 | ], 164 | ), 165 | ( 166 | True, 167 | True, 168 | False, 169 | [ 170 | "a==3.4.0", 171 | "b==2.3.1", 172 | " - a==3.4.0 [requires: b>=2.0.0]", 173 | " - f==3.1 [requires: b>=2.1.0]", 174 | " - g==6.8.3rc1 [requires: f>=3.0.0]", 175 | "c==5.10.0", 176 | " - a==3.4.0 [requires: c>=5.7.1]", 177 | "d==2.35", 178 | " - b==2.3.1 [requires: d>=2.30,<2.42]", 179 | " - a==3.4.0 [requires: b>=2.0.0]", 180 | " - f==3.1 [requires: b>=2.1.0]", 181 | " - g==6.8.3rc1 [requires: f>=3.0.0]", 182 | " - c==5.10.0 [requires: d>=2.30]", 183 | " - a==3.4.0 [requires: c>=5.7.1]", 184 | "e==0.12.1", 185 | " - c==5.10.0 [requires: e>=0.12.1]", 186 | " - 
a==3.4.0 [requires: c>=5.7.1]", 187 | " - d==2.35 [requires: e>=0.9.0]", 188 | " - b==2.3.1 [requires: d>=2.30,<2.42]", 189 | " - a==3.4.0 [requires: b>=2.0.0]", 190 | " - f==3.1 [requires: b>=2.1.0]", 191 | " - g==6.8.3rc1 [requires: f>=3.0.0]", 192 | " - c==5.10.0 [requires: d>=2.30]", 193 | " - a==3.4.0 [requires: c>=5.7.1]", 194 | " - g==6.8.3rc1 [requires: e>=0.9.0]", 195 | "f==3.1", 196 | " - g==6.8.3rc1 [requires: f>=3.0.0]", 197 | "g==6.8.3rc1", 198 | ], 199 | ), 200 | ( 201 | False, 202 | False, 203 | False, 204 | [ 205 | "a==3.4.0", 206 | " - b [required: >=2.0.0, installed: 2.3.1]", 207 | " - d [required: >=2.30,<2.42, installed: 2.35]", 208 | " - e [required: >=0.9.0, installed: 0.12.1]", 209 | " - c [required: >=5.7.1, installed: 5.10.0]", 210 | " - d [required: >=2.30, installed: 2.35]", 211 | " - e [required: >=0.9.0, installed: 0.12.1]", 212 | " - e [required: >=0.12.1, installed: 0.12.1]", 213 | "g==6.8.3rc1", 214 | " - e [required: >=0.9.0, installed: 0.12.1]", 215 | " - f [required: >=3.0.0, installed: 3.1]", 216 | " - b [required: >=2.1.0, installed: 2.3.1]", 217 | " - d [required: >=2.30,<2.42, installed: 2.35]", 218 | " - e [required: >=0.9.0, installed: 0.12.1]", 219 | ], 220 | ), 221 | ( 222 | False, 223 | True, 224 | False, 225 | [ 226 | "e==0.12.1", 227 | " - c==5.10.0 [requires: e>=0.12.1]", 228 | " - a==3.4.0 [requires: c>=5.7.1]", 229 | " - d==2.35 [requires: e>=0.9.0]", 230 | " - b==2.3.1 [requires: d>=2.30,<2.42]", 231 | " - a==3.4.0 [requires: b>=2.0.0]", 232 | " - f==3.1 [requires: b>=2.1.0]", 233 | " - g==6.8.3rc1 [requires: f>=3.0.0]", 234 | " - c==5.10.0 [requires: d>=2.30]", 235 | " - a==3.4.0 [requires: c>=5.7.1]", 236 | " - g==6.8.3rc1 [requires: e>=0.9.0]", 237 | ], 238 | ), 239 | ], 240 | ) 241 | def test_render_text( 242 | example_dag: PackageDAG, 243 | capsys: pytest.CaptureFixture[str], 244 | list_all: bool, 245 | reverse: bool, 246 | unicode: bool, 247 | expected_output: list[str], 248 | ) -> None: 249 | tree = 
example_dag.reverse() if reverse else example_dag 250 | encoding = "utf-8" if unicode else "ascii" 251 | render_text(tree, max_depth=float("inf"), encoding=encoding, list_all=list_all) 252 | captured = capsys.readouterr() 253 | assert "\n".join(expected_output).strip() == captured.out.strip() 254 | 255 | 256 | @pytest.mark.parametrize( 257 | ("unicode", "level", "expected_output"), 258 | [ 259 | ( 260 | True, 261 | 0, 262 | [ 263 | "a==3.4.0", 264 | "b==2.3.1", 265 | "c==5.10.0", 266 | "d==2.35", 267 | "e==0.12.1", 268 | "f==3.1", 269 | "g==6.8.3rc1", 270 | ], 271 | ), 272 | ( 273 | False, 274 | 0, 275 | [ 276 | "a==3.4.0", 277 | "b==2.3.1", 278 | "c==5.10.0", 279 | "d==2.35", 280 | "e==0.12.1", 281 | "f==3.1", 282 | "g==6.8.3rc1", 283 | ], 284 | ), 285 | ( 286 | True, 287 | 2, 288 | [ 289 | "a==3.4.0", 290 | "├── b [required: >=2.0.0, installed: 2.3.1]", 291 | "│ └── d [required: >=2.30,<2.42, installed: 2.35]", 292 | "└── c [required: >=5.7.1, installed: 5.10.0]", 293 | " ├── d [required: >=2.30, installed: 2.35]", 294 | " └── e [required: >=0.12.1, installed: 0.12.1]", 295 | "b==2.3.1", 296 | "└── d [required: >=2.30,<2.42, installed: 2.35]", 297 | " └── e [required: >=0.9.0, installed: 0.12.1]", 298 | "c==5.10.0", 299 | "├── d [required: >=2.30, installed: 2.35]", 300 | "│ └── e [required: >=0.9.0, installed: 0.12.1]", 301 | "└── e [required: >=0.12.1, installed: 0.12.1]", 302 | "d==2.35", 303 | "└── e [required: >=0.9.0, installed: 0.12.1]", 304 | "e==0.12.1", 305 | "f==3.1", 306 | "└── b [required: >=2.1.0, installed: 2.3.1]", 307 | " └── d [required: >=2.30,<2.42, installed: 2.35]", 308 | "g==6.8.3rc1", 309 | "├── e [required: >=0.9.0, installed: 0.12.1]", 310 | "└── f [required: >=3.0.0, installed: 3.1]", 311 | " └── b [required: >=2.1.0, installed: 2.3.1]", 312 | ], 313 | ), 314 | ( 315 | False, 316 | 2, 317 | [ 318 | "a==3.4.0", 319 | " - b [required: >=2.0.0, installed: 2.3.1]", 320 | " - d [required: >=2.30,<2.42, installed: 2.35]", 321 | " - c 
[required: >=5.7.1, installed: 5.10.0]", 322 | " - d [required: >=2.30, installed: 2.35]", 323 | " - e [required: >=0.12.1, installed: 0.12.1]", 324 | "b==2.3.1", 325 | " - d [required: >=2.30,<2.42, installed: 2.35]", 326 | " - e [required: >=0.9.0, installed: 0.12.1]", 327 | "c==5.10.0", 328 | " - d [required: >=2.30, installed: 2.35]", 329 | " - e [required: >=0.9.0, installed: 0.12.1]", 330 | " - e [required: >=0.12.1, installed: 0.12.1]", 331 | "d==2.35", 332 | " - e [required: >=0.9.0, installed: 0.12.1]", 333 | "e==0.12.1", 334 | "f==3.1", 335 | " - b [required: >=2.1.0, installed: 2.3.1]", 336 | " - d [required: >=2.30,<2.42, installed: 2.35]", 337 | "g==6.8.3rc1", 338 | " - e [required: >=0.9.0, installed: 0.12.1]", 339 | " - f [required: >=3.0.0, installed: 3.1]", 340 | " - b [required: >=2.1.0, installed: 2.3.1]", 341 | ], 342 | ), 343 | ], 344 | ) 345 | def test_render_text_given_depth( 346 | capsys: pytest.CaptureFixture[str], 347 | unicode: str, 348 | level: int, 349 | expected_output: list[str], 350 | example_dag: PackageDAG, 351 | ) -> None: 352 | render_text(example_dag, max_depth=level, encoding="utf-8" if unicode else "ascii") 353 | captured = capsys.readouterr() 354 | assert "\n".join(expected_output).strip() == captured.out.strip() 355 | 356 | 357 | @pytest.mark.parametrize( 358 | ("level", "encoding", "expected_output"), 359 | [ 360 | ( 361 | 0, 362 | "utf-8", 363 | [ 364 | "a==3.4.0", 365 | "b==2.3.1", 366 | "c==5.10.0", 367 | "d==2.35", 368 | "e==0.12.1", 369 | "f==3.1", 370 | "g==6.8.3rc1", 371 | ], 372 | ), 373 | ( 374 | 2, 375 | "utf-8", 376 | [ 377 | "a==3.4.0", 378 | "├── b [required: >=2.0.0, installed: 2.3.1]", 379 | "│ └── d [required: >=2.30,<2.42, installed: 2.35]", 380 | "└── c [required: >=5.7.1, installed: 5.10.0]", 381 | " ├── d [required: >=2.30, installed: 2.35]", 382 | " └── e [required: >=0.12.1, installed: 0.12.1]", 383 | "b==2.3.1", 384 | "└── d [required: >=2.30,<2.42, installed: 2.35]", 385 | " └── e [required: >=0.9.0, 
installed: 0.12.1]", 386 | "c==5.10.0", 387 | "├── d [required: >=2.30, installed: 2.35]", 388 | "│ └── e [required: >=0.9.0, installed: 0.12.1]", 389 | "└── e [required: >=0.12.1, installed: 0.12.1]", 390 | "d==2.35", 391 | "└── e [required: >=0.9.0, installed: 0.12.1]", 392 | "e==0.12.1", 393 | "f==3.1", 394 | "└── b [required: >=2.1.0, installed: 2.3.1]", 395 | " └── d [required: >=2.30,<2.42, installed: 2.35]", 396 | "g==6.8.3rc1", 397 | "├── e [required: >=0.9.0, installed: 0.12.1]", 398 | "└── f [required: >=3.0.0, installed: 3.1]", 399 | " └── b [required: >=2.1.0, installed: 2.3.1]", 400 | ], 401 | ), 402 | ( 403 | 2, 404 | "ascii", 405 | [ 406 | "a==3.4.0", 407 | " - b [required: >=2.0.0, installed: 2.3.1]", 408 | " - d [required: >=2.30,<2.42, installed: 2.35]", 409 | " - c [required: >=5.7.1, installed: 5.10.0]", 410 | " - d [required: >=2.30, installed: 2.35]", 411 | " - e [required: >=0.12.1, installed: 0.12.1]", 412 | "b==2.3.1", 413 | " - d [required: >=2.30,<2.42, installed: 2.35]", 414 | " - e [required: >=0.9.0, installed: 0.12.1]", 415 | "c==5.10.0", 416 | " - d [required: >=2.30, installed: 2.35]", 417 | " - e [required: >=0.9.0, installed: 0.12.1]", 418 | " - e [required: >=0.12.1, installed: 0.12.1]", 419 | "d==2.35", 420 | " - e [required: >=0.9.0, installed: 0.12.1]", 421 | "e==0.12.1", 422 | "f==3.1", 423 | " - b [required: >=2.1.0, installed: 2.3.1]", 424 | " - d [required: >=2.30,<2.42, installed: 2.35]", 425 | "g==6.8.3rc1", 426 | " - e [required: >=0.9.0, installed: 0.12.1]", 427 | " - f [required: >=3.0.0, installed: 3.1]", 428 | " - b [required: >=2.1.0, installed: 2.3.1]", 429 | ], 430 | ), 431 | ], 432 | ) 433 | def test_render_text_encoding( 434 | capsys: pytest.CaptureFixture[str], 435 | level: int, 436 | encoding: str, 437 | expected_output: list[str], 438 | example_dag: PackageDAG, 439 | ) -> None: 440 | render_text(example_dag, max_depth=level, encoding=encoding, list_all=True) 441 | captured = capsys.readouterr() 442 | assert 
"\n".join(expected_output).strip() == captured.out.strip() 443 | 444 | 445 | def test_render_text_list_all_and_packages_options_used( 446 | capsys: pytest.CaptureFixture[str], 447 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], 448 | ) -> None: 449 | graph: MockGraph = { 450 | ("examplePy", "1.2.3"): [("hellopy", [(">=", "2.0.0")]), ("worldpy", [(">=", "0.0.2")])], 451 | ("HelloPy", "2.0.0"): [], 452 | ("worldpy", "0.0.2"): [], 453 | ("anotherpy", "0.1.2"): [("hellopy", [(">=", "2.0.0")])], 454 | ("YetAnotherPy", "3.1.2"): [], 455 | } 456 | package_dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 457 | 458 | # NOTE: Mimicking the --packages option being used here. 459 | package_dag = package_dag.filter_nodes(["examplePy"], None) 460 | 461 | render_text(package_dag, max_depth=float("inf"), encoding="utf-8", list_all=True) 462 | captured = capsys.readouterr() 463 | expected_output = [ 464 | "examplePy==1.2.3", 465 | "├── HelloPy [required: >=2.0.0, installed: 2.0.0]", 466 | "└── worldpy [required: >=0.0.2, installed: 0.0.2]", 467 | "HelloPy==2.0.0", 468 | "worldpy==0.0.2", 469 | ] 470 | 471 | assert "\n".join(expected_output).strip() == captured.out.strip() 472 | 473 | 474 | @pytest.mark.parametrize( 475 | ("encoding", "expected_output"), 476 | [ 477 | ( 478 | "utf-8", 479 | [ 480 | "a==3.4.0 (TEST)", 481 | "└── c [required: ==1.0.0, installed: 1.0.0]", 482 | "b==2.3.1 (TEST)", 483 | "c==1.0.0 (TEST)", 484 | ], 485 | ), 486 | ( 487 | "ascii", 488 | [ 489 | "a==3.4.0 (TEST)", 490 | " - c [required: ==1.0.0, installed: 1.0.0]", 491 | "b==2.3.1 (TEST)", 492 | "c==1.0.0 (TEST)", 493 | ], 494 | ), 495 | ], 496 | ) 497 | def test_render_text_with_license_info( 498 | encoding: str, 499 | expected_output: str, 500 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], 501 | capsys: pytest.CaptureFixture[str], 502 | monkeypatch: pytest.MonkeyPatch, 503 | ) -> None: 504 | graph: MockGraph = { 505 | ("a", "3.4.0"): [("c", [("==", "1.0.0")])], 506 | ("b", "2.3.1"): [], 507 | 
("c", "1.0.0"): [], 508 | } 509 | dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 510 | monkeypatch.setattr(Package, "licenses", lambda _: "(TEST)") 511 | 512 | render_text(dag, max_depth=float("inf"), encoding=encoding, include_license=True) 513 | captured = capsys.readouterr() 514 | assert "\n".join(expected_output).strip() == captured.out.strip() 515 | 516 | 517 | @pytest.mark.parametrize( 518 | ("encoding", "expected_output"), 519 | [ 520 | ( 521 | "utf-8", 522 | [ 523 | "a==3.4.0 (TEST)", 524 | "b==2.3.1 (TEST)", 525 | "└── a==3.4.0 [requires: b==2.3.1]", 526 | "c==1.0.0 (TEST)", 527 | "├── a==3.4.0 [requires: c==1.0.0]", 528 | "└── b==2.3.1 [requires: c==1.0.0]", 529 | " └── a==3.4.0 [requires: b==2.3.1]", 530 | ], 531 | ), 532 | ( 533 | "ascii", 534 | [ 535 | "a==3.4.0 (TEST)", 536 | "b==2.3.1 (TEST)", 537 | " - a==3.4.0 [requires: b==2.3.1]", 538 | "c==1.0.0 (TEST)", 539 | " - a==3.4.0 [requires: c==1.0.0]", 540 | " - b==2.3.1 [requires: c==1.0.0]", 541 | " - a==3.4.0 [requires: b==2.3.1]", 542 | ], 543 | ), 544 | ], 545 | ) 546 | def test_render_text_with_license_info_and_reversed_tree( 547 | encoding: str, 548 | expected_output: str, 549 | mock_pkgs: Callable[[MockGraph], Iterator[Mock]], 550 | capsys: pytest.CaptureFixture[str], 551 | monkeypatch: pytest.MonkeyPatch, 552 | ) -> None: 553 | graph: MockGraph = { 554 | ("a", "3.4.0"): [("b", [("==", "2.3.1")]), ("c", [("==", "1.0.0")])], 555 | ("b", "2.3.1"): [("c", [("==", "1.0.0")])], 556 | ("c", "1.0.0"): [], 557 | } 558 | dag = PackageDAG.from_pkgs(list(mock_pkgs(graph))) 559 | dag = dag.reverse() 560 | monkeypatch.setattr(Package, "licenses", lambda _: "(TEST)") 561 | 562 | render_text(dag, max_depth=float("inf"), encoding=encoding, include_license=True) 563 | captured = capsys.readouterr() 564 | assert "\n".join(expected_output).strip() == captured.out.strip() 565 | -------------------------------------------------------------------------------- /tests/test_cli.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import argparse 4 | from typing import Any 5 | 6 | import pytest 7 | 8 | from pipdeptree._cli import EnumAction, build_parser, get_options 9 | from pipdeptree._warning import WarningType 10 | 11 | 12 | def test_parser_default() -> None: 13 | parser = build_parser() 14 | args = parser.parse_args([]) 15 | assert not args.json 16 | assert args.output_format is None 17 | 18 | 19 | def test_parser_j() -> None: 20 | parser = build_parser() 21 | args = parser.parse_args(["-j"]) 22 | assert args.json 23 | assert args.output_format is None 24 | 25 | 26 | def test_parser_json() -> None: 27 | parser = build_parser() 28 | args = parser.parse_args(["--json"]) 29 | assert args.json 30 | assert args.output_format is None 31 | 32 | 33 | def test_parser_json_tree() -> None: 34 | parser = build_parser() 35 | args = parser.parse_args(["--json-tree"]) 36 | assert args.json_tree 37 | assert not args.json 38 | assert args.output_format is None 39 | 40 | 41 | def test_parser_mermaid() -> None: 42 | parser = build_parser() 43 | args = parser.parse_args(["--mermaid"]) 44 | assert args.mermaid 45 | assert not args.json 46 | assert args.output_format is None 47 | 48 | 49 | def test_parser_pdf() -> None: 50 | parser = build_parser() 51 | args = parser.parse_args(["--graph-output", "pdf"]) 52 | assert args.output_format == "pdf" 53 | assert not args.json 54 | 55 | 56 | def test_parser_svg() -> None: 57 | parser = build_parser() 58 | args = parser.parse_args(["--graph-output", "svg"]) 59 | assert args.output_format == "svg" 60 | assert not args.json 61 | 62 | 63 | @pytest.mark.parametrize( 64 | ("should_be_error", "depth_arg", "expected_value"), 65 | [ 66 | (True, ["-d", "-1"], None), 67 | (True, ["--depth", "string"], None), 68 | (False, ["-d", "0"], 0), 69 | (False, ["--depth", "8"], 8), 70 | (False, [], float("inf")), 71 | ], 72 | ) 73 | def 
test_parser_depth(should_be_error: bool, depth_arg: list[str], expected_value: float | None) -> None: 74 | parser = build_parser() 75 | 76 | if should_be_error: 77 | with pytest.raises(SystemExit): 78 | parser.parse_args(depth_arg) 79 | else: 80 | args = parser.parse_args(depth_arg) 81 | assert args.depth == expected_value 82 | 83 | 84 | @pytest.mark.parametrize( 85 | "args", 86 | [ 87 | pytest.param(["--exclude", "py", "--all"], id="exclude-all"), 88 | pytest.param(["-e", "py", "--packages", "py2"], id="exclude-packages"), 89 | pytest.param(["-e", "py", "-p", "py2", "-a"], id="exclude-packages-all"), 90 | ], 91 | ) 92 | def test_parser_get_options_exclude_combine_not_supported(args: list[str], capsys: pytest.CaptureFixture[str]) -> None: 93 | with pytest.raises(SystemExit, match="2"): 94 | get_options(args) 95 | 96 | out, err = capsys.readouterr() 97 | assert not out 98 | assert "cannot use --exclude with --packages or --all" in err 99 | 100 | 101 | def test_parser_get_options_exclude_only() -> None: 102 | parsed_args = get_options(["--exclude", "py"]) 103 | assert parsed_args.exclude == "py" 104 | 105 | 106 | def test_parser_get_options_license_and_freeze_together_not_supported(capsys: pytest.CaptureFixture[str]) -> None: 107 | with pytest.raises(SystemExit, match="2"): 108 | get_options(["--license", "--freeze"]) 109 | 110 | out, err = capsys.readouterr() 111 | assert not out 112 | assert "cannot use --license with --freeze" in err 113 | 114 | 115 | @pytest.mark.parametrize( 116 | "args", 117 | [ 118 | pytest.param(["--path", "/random/path", "--local-only"], id="path-with-local"), 119 | pytest.param(["--path", "/random/path", "--user-only"], id="path-with-user"), 120 | ], 121 | ) 122 | def test_parser_get_options_path_with_either_local_or_user_not_supported( 123 | args: list[str], capsys: pytest.CaptureFixture[str] 124 | ) -> None: 125 | with pytest.raises(SystemExit, match="2"): 126 | get_options(args) 127 | 128 | out, err = capsys.readouterr() 129 | assert not 
out 130 | assert "cannot use --path with --user-only or --local-only" in err 131 | 132 | 133 | @pytest.mark.parametrize(("bad_type"), [None, str]) 134 | def test_enum_action_type_argument(bad_type: Any) -> None: 135 | with pytest.raises(TypeError, match="type must be a subclass of Enum"): 136 | EnumAction(["--test"], "test", type=bad_type) 137 | 138 | 139 | def test_enum_action_default_argument_not_str() -> None: 140 | with pytest.raises(TypeError, match="default must be defined with a string value"): 141 | EnumAction(["--test"], "test", type=WarningType) 142 | 143 | 144 | def test_enum_action_default_argument_not_a_valid_choice() -> None: 145 | with pytest.raises(ValueError, match="default value should be among the enum choices"): 146 | EnumAction(["--test"], "test", type=WarningType, default="bad-warning-type") 147 | 148 | 149 | def test_enum_action_call_with_value() -> None: 150 | action = EnumAction(["--test"], "test", type=WarningType, default="silence") 151 | namespace = argparse.Namespace() 152 | action(argparse.ArgumentParser(), namespace, "suppress") 153 | assert getattr(namespace, "test", None) == WarningType.SUPPRESS 154 | 155 | 156 | def test_enum_action_call_without_value() -> None: 157 | # ensures that we end up using the default value in case no value is specified (currently we pass nargs='?' 
when 158 | # creating the --warn option, which is why this test exists) 159 | action = EnumAction(["--test"], "test", type=WarningType, default="silence") 160 | namespace = argparse.Namespace() 161 | action(argparse.ArgumentParser(), namespace, None) 162 | assert getattr(namespace, "test", None) == WarningType.SILENCE 163 | -------------------------------------------------------------------------------- /tests/test_detect_env.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | from subprocess import CompletedProcess # noqa: S404 5 | from typing import TYPE_CHECKING 6 | 7 | import pytest 8 | 9 | from pipdeptree._detect_env import detect_active_interpreter 10 | 11 | if TYPE_CHECKING: 12 | from pytest_mock import MockFixture 13 | 14 | 15 | @pytest.mark.parametrize(("env_var"), ["VIRTUAL_ENV", "CONDA_PREFIX"]) 16 | def test_detect_active_interpreter_using_env_vars(tmp_path: Path, mocker: MockFixture, env_var: str) -> None: 17 | mocker.patch("pipdeptree._detect_env.os.environ", {env_var: str(tmp_path)}) 18 | mocker.patch("pipdeptree._detect_env.Path.exists", return_value=True) 19 | 20 | actual_path = detect_active_interpreter() 21 | 22 | assert actual_path.startswith(str(tmp_path)) 23 | 24 | 25 | def test_detect_active_interpreter_poetry(tmp_path: Path, mocker: MockFixture) -> None: 26 | faked_result = CompletedProcess("", 0, stdout=str(tmp_path)) 27 | mocker.patch("pipdeptree._detect_env.subprocess.run", return_value=faked_result) 28 | mocker.patch("pipdeptree._detect_env.os.environ", {}) 29 | 30 | actual_path = detect_active_interpreter() 31 | 32 | assert str(tmp_path) == actual_path 33 | 34 | 35 | def test_detect_active_interpreter_non_supported_python_implementation( 36 | tmp_path: Path, 37 | mocker: MockFixture, 38 | ) -> None: 39 | mocker.patch("pipdeptree._detect_env.os.environ", {"VIRTUAL_ENV": str(tmp_path)}) 40 | 
mocker.patch("pipdeptree._detect_env.Path.exists", return_value=True) 41 | mocker.patch("pipdeptree._detect_env.platform.python_implementation", return_value="NotSupportedPythonImpl") 42 | 43 | with pytest.raises(SystemExit): 44 | detect_active_interpreter() 45 | 46 | 47 | def test_detect_active_interpreter_non_existent_path( 48 | mocker: MockFixture, 49 | ) -> None: 50 | fake_path = str(Path(*("i", "dont", "exist"))) 51 | mocker.patch("pipdeptree._detect_env.os.environ", {"VIRTUAL_ENV": fake_path}) 52 | 53 | with pytest.raises(SystemExit): 54 | detect_active_interpreter() 55 | -------------------------------------------------------------------------------- /tests/test_discovery.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import site 4 | import sys 5 | from pathlib import Path 6 | from typing import TYPE_CHECKING 7 | from unittest.mock import Mock 8 | 9 | import virtualenv 10 | 11 | from pipdeptree.__main__ import main 12 | from pipdeptree._discovery import get_installed_distributions 13 | 14 | if TYPE_CHECKING: 15 | import pytest 16 | from pytest_mock import MockerFixture 17 | 18 | 19 | def test_local_only(tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: 20 | venv_path = str(tmp_path / "venv") 21 | result = virtualenv.cli_run([venv_path, "--activators", ""]) 22 | venv_site_packages = site.getsitepackages([venv_path]) 23 | fake_dist = Path(venv_site_packages[0]) / "foo-1.2.5.dist-info" 24 | fake_dist.mkdir() 25 | fake_metadata = Path(fake_dist) / "METADATA" 26 | with fake_metadata.open("w") as f: 27 | f.write("Metadata-Version: 2.3\nName: foo\nVersion: 1.2.5\n") 28 | 29 | cmd = [str(result.creator.exe.parent / "python3"), "--local-only"] 30 | mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) 31 | sys_path = sys.path.copy() 32 | mock_path = sys_path + venv_site_packages 33 | mocker.patch("pipdeptree._discovery.sys.path", mock_path) 34 | 
mocker.patch("pipdeptree._discovery.sys.argv", cmd) 35 | main() 36 | out, _ = capfd.readouterr() 37 | found = {i.split("==")[0] for i in out.splitlines()} 38 | expected = {"foo", "pip", "setuptools"} 39 | if sys.version_info >= (3, 12): 40 | expected -= {"setuptools"} # pragma: no cover 41 | 42 | assert found == expected 43 | 44 | 45 | def test_user_only(fake_dist: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: 46 | # Make a fake user site. 47 | fake_user_site = str(fake_dist.parent) 48 | mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) 49 | 50 | # Add fake user site directory into a fake sys.path (normal environments will have the user site in sys.path). 51 | fake_sys_path = [*sys.path, fake_user_site] 52 | mocker.patch("pipdeptree._discovery.sys.path", fake_sys_path) 53 | 54 | cmd = ["", "--user-only"] 55 | mocker.patch("pipdeptree.__main__.sys.argv", cmd) 56 | main() 57 | 58 | out, err = capfd.readouterr() 59 | assert not err 60 | found = {i.split("==")[0] for i in out.splitlines()} 61 | expected = {"bar"} 62 | 63 | assert found == expected 64 | 65 | 66 | def test_user_only_when_in_virtual_env( 67 | tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] 68 | ) -> None: 69 | # ensures that we follow `pip list` by not outputting anything when --user-only is set and pipdeptree is running in 70 | # a virtual environment 71 | 72 | # Create a virtual environment and mock sys.path to point to the venv's site packages. 
73 | venv_path = str(tmp_path / "venv") 74 | virtualenv.cli_run([venv_path, "--activators", ""]) 75 | venv_site_packages = site.getsitepackages([venv_path]) 76 | mocker.patch("pipdeptree._discovery.sys.path", venv_site_packages) 77 | mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) 78 | 79 | cmd = ["", "--user-only"] 80 | mocker.patch("pipdeptree.__main__.sys.argv", cmd) 81 | main() 82 | 83 | out, err = capfd.readouterr() 84 | assert not err 85 | 86 | # Here we expect 1 element because print() adds a newline. 87 | found = out.splitlines() 88 | assert len(found) == 1 89 | assert not found[0] 90 | 91 | 92 | def test_user_only_when_in_virtual_env_and_system_site_pkgs_enabled( 93 | tmp_path: Path, fake_dist: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] 94 | ) -> None: 95 | # ensures that we provide user site metadata when --user-only is set and we're in a virtual env with system site 96 | # packages enabled 97 | 98 | # Make a fake user site directory since we don't know what to expect from the real one. 99 | fake_user_site = str(fake_dist.parent) 100 | mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) 101 | 102 | # Create a temporary virtual environment. Add the fake user site to path (since user site packages should normally 103 | # be there). 
104 | venv_path = str(tmp_path / "venv") 105 | virtualenv.cli_run([venv_path, "--system-site-packages", "--activators", ""]) 106 | venv_site_packages = site.getsitepackages([venv_path]) 107 | mock_path = sys.path + venv_site_packages + [fake_user_site] 108 | mocker.patch("pipdeptree._discovery.sys.path", mock_path) 109 | mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) 110 | 111 | cmd = ["", "--user-only"] 112 | mocker.patch("pipdeptree.__main__.sys.argv", cmd) 113 | main() 114 | 115 | out, err = capfd.readouterr() 116 | assert not err 117 | found = {i.split("==")[0] for i in out.splitlines()} 118 | expected = {"bar"} 119 | 120 | assert found == expected 121 | 122 | 123 | def test_interpreter_query_failure(mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: 124 | cmd = ["", "--python", "i-dont-exist"] 125 | mocker.patch("pipdeptree._discovery.sys.argv", cmd) 126 | 127 | main() 128 | 129 | _, err = capfd.readouterr() 130 | assert err.startswith("Failed to query custom interpreter") 131 | 132 | 133 | def test_duplicate_metadata(mocker: MockerFixture, capfd: pytest.CaptureFixture[str]) -> None: 134 | mocker.patch( 135 | "pipdeptree._discovery.distributions", 136 | Mock( 137 | return_value=[ 138 | Mock(metadata={"Name": "foo"}, version="1.2.5", locate_file=Mock(return_value="/path/1")), 139 | Mock(metadata={"Name": "foo"}, version="5.9.0", locate_file=Mock(return_value="/path/2")), 140 | ] 141 | ), 142 | ) 143 | 144 | dists = get_installed_distributions() 145 | assert len(dists) == 1 146 | # we expect it to use the first distribution found 147 | assert dists[0].version == "1.2.5" 148 | 149 | _, err = capfd.readouterr() 150 | expected = ( 151 | 'Warning!!! 
Duplicate package metadata found:\n"/path/2"\n foo 5.9.0 ' 152 | ' (using 1.2.5, "/path/1")\nNOTE: This warning isn\'t a failure warning.\n---------------------------------' 153 | "---------------------------------------\n" 154 | ) 155 | assert err == expected 156 | 157 | 158 | def test_invalid_metadata( 159 | mocker: MockerFixture, capfd: pytest.CaptureFixture[str], fake_dist_with_invalid_metadata: Path 160 | ) -> None: 161 | fake_site_dir = str(fake_dist_with_invalid_metadata.parent) 162 | mocked_sys_path = [fake_site_dir] 163 | mocker.patch("pipdeptree._discovery.sys.path", mocked_sys_path) 164 | 165 | dists = get_installed_distributions() 166 | 167 | assert len(dists) == 0 168 | out, err = capfd.readouterr() 169 | assert not out 170 | assert err == ( 171 | "Warning!!! Missing or invalid metadata found in the following site dirs:\n" 172 | f"{fake_site_dir}\n" 173 | "------------------------------------------------------------------------\n" 174 | ) 175 | 176 | 177 | def test_paths(fake_dist: Path) -> None: 178 | fake_site_dir = str(fake_dist.parent) 179 | mocked_path = [fake_site_dir] 180 | 181 | dists = get_installed_distributions(supplied_paths=mocked_path) 182 | assert len(dists) == 1 183 | assert dists[0].metadata["Name"] == "bar" 184 | 185 | 186 | def test_paths_when_in_virtual_env(tmp_path: Path, fake_dist: Path) -> None: 187 | # tests to ensure that we use only the user-supplied path, not paths in the virtual env 188 | fake_site_dir = str(fake_dist.parent) 189 | mocked_path = [fake_site_dir] 190 | 191 | venv_path = str(tmp_path / "venv") 192 | s = virtualenv.cli_run([venv_path, "--activators", ""]) 193 | 194 | dists = get_installed_distributions(interpreter=str(s.creator.exe), supplied_paths=mocked_path) 195 | assert len(dists) == 1 196 | assert dists[0].metadata["Name"] == "bar" 197 | -------------------------------------------------------------------------------- /tests/test_freeze.py: 
-------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from unittest.mock import Mock 4 | 5 | from pipdeptree._freeze import dist_to_frozen_repr 6 | 7 | 8 | def test_dist_to_frozen_repr() -> None: 9 | foo = Mock(metadata={"Name": "foo"}, version="20.4.1") 10 | foo.read_text = Mock(return_value=None) 11 | expected = "foo==20.4.1" 12 | assert dist_to_frozen_repr(foo) == expected 13 | -------------------------------------------------------------------------------- /tests/test_non_host.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from platform import python_implementation 5 | from typing import TYPE_CHECKING 6 | from unittest.mock import Mock 7 | 8 | import pytest 9 | import virtualenv 10 | 11 | from pipdeptree.__main__ import main 12 | 13 | if TYPE_CHECKING: 14 | from pathlib import Path 15 | 16 | from pytest_mock import MockerFixture 17 | 18 | 19 | @pytest.fixture(scope="session") 20 | def expected_venv_pkgs() -> frozenset[str]: 21 | implementation = python_implementation() 22 | if implementation == "CPython": # pragma: cpython cover 23 | expected = {"pip", "setuptools"} 24 | elif implementation == "PyPy": # pragma: pypy cover 25 | expected = {"cffi", "greenlet", "pip", "hpy", "readline", "setuptools"} 26 | if sys.version_info >= (3, 10): # pragma: >=3.10 cover 27 | expected -= {"readline"} 28 | else: # pragma: no cover 29 | raise ValueError(implementation) 30 | if sys.version_info >= (3, 12): # pragma: >=3.12 cover 31 | expected -= {"setuptools"} 32 | 33 | return frozenset(expected) 34 | 35 | 36 | @pytest.mark.parametrize("args_joined", [True, False]) 37 | def test_custom_interpreter( 38 | tmp_path: Path, 39 | mocker: MockerFixture, 40 | monkeypatch: pytest.MonkeyPatch, 41 | capfd: pytest.CaptureFixture[str], 42 | args_joined: bool, 43 | expected_venv_pkgs: frozenset[str], 44 | ) -> None: 45 | 
# Delete $PYTHONPATH so that it cannot be passed to the custom interpreter process (since we don't know what 46 | # distribution metadata to expect when it's used). 47 | monkeypatch.delenv("PYTHONPATH", False) 48 | 49 | monkeypatch.chdir(tmp_path) 50 | result = virtualenv.cli_run([str(tmp_path / "venv"), "--activators", ""]) 51 | py = str(result.creator.exe.relative_to(tmp_path)) 52 | cmd = ["", f"--python={result.creator.exe}"] if args_joined else ["", "--python", py] 53 | cmd += ["--all", "--depth", "0"] 54 | mocker.patch("pipdeptree._discovery.sys.argv", cmd) 55 | main() 56 | out, _ = capfd.readouterr() 57 | found = {i.split("==")[0] for i in out.splitlines()} 58 | 59 | assert expected_venv_pkgs == found, out 60 | 61 | 62 | def test_custom_interpreter_with_local_only( 63 | tmp_path: Path, 64 | mocker: MockerFixture, 65 | capfd: pytest.CaptureFixture[str], 66 | ) -> None: 67 | venv_path = str(tmp_path / "venv") 68 | result = virtualenv.cli_run([venv_path, "--system-site-packages", "--activators", ""]) 69 | 70 | cmd = ["", f"--python={result.creator.exe}", "--local-only"] 71 | mocker.patch("pipdeptree._discovery.sys.prefix", venv_path) 72 | mocker.patch("pipdeptree._discovery.sys.argv", cmd) 73 | main() 74 | out, _ = capfd.readouterr() 75 | found = {i.split("==")[0] for i in out.splitlines()} 76 | expected = {"pip", "setuptools"} 77 | if sys.version_info >= (3, 12): # pragma: >=3.12 cover 78 | expected -= {"setuptools"} 79 | assert expected == found, out 80 | 81 | 82 | def test_custom_interpreter_with_user_only( 83 | tmp_path: Path, mocker: MockerFixture, capfd: pytest.CaptureFixture[str] 84 | ) -> None: 85 | # ensures there is no output when --user-only and --python are passed 86 | 87 | venv_path = str(tmp_path / "venv") 88 | result = virtualenv.cli_run([venv_path, "--activators", ""]) 89 | 90 | cmd = ["", f"--python={result.creator.exe}", "--user-only"] 91 | mocker.patch("pipdeptree.__main__.sys.argv", cmd) 92 | main() 93 | out, err = capfd.readouterr() 94 | 
assert not err 95 | 96 | # Here we expect 1 element because print() adds a newline. 97 | found = out.splitlines() 98 | assert len(found) == 1 99 | assert not found[0] 100 | 101 | 102 | def test_custom_interpreter_with_user_only_and_system_site_pkgs_enabled( 103 | tmp_path: Path, 104 | fake_dist: Path, 105 | mocker: MockerFixture, 106 | monkeypatch: pytest.MonkeyPatch, 107 | capfd: pytest.CaptureFixture[str], 108 | ) -> None: 109 | # ensures that we provide user site metadata when --user-only and --python are passed and the custom interpreter has 110 | # system site packages enabled 111 | 112 | # Make a fake user site directory since we don't know what to expect from the real one. 113 | fake_user_site = str(fake_dist.parent) 114 | mocker.patch("pipdeptree._discovery.site.getusersitepackages", Mock(return_value=fake_user_site)) 115 | 116 | # Create a temporary virtual environment. 117 | venv_path = str(tmp_path / "venv") 118 | result = virtualenv.cli_run([venv_path, "--activators", ""]) 119 | 120 | # Use $PYTHONPATH to add the fake user site into the custom interpreter's environment so that it will include it in 121 | # its sys.path. 
122 | monkeypatch.setenv("PYTHONPATH", str(fake_user_site)) 123 | 124 | cmd = ["", f"--python={result.creator.exe}", "--user-only"] 125 | mocker.patch("pipdeptree.__main__.sys.argv", cmd) 126 | main() 127 | 128 | out, err = capfd.readouterr() 129 | assert not err 130 | found = {i.split("==")[0] for i in out.splitlines()} 131 | expected = {"bar"} 132 | 133 | assert expected == found 134 | 135 | 136 | def test_custom_interpreter_ensure_pythonpath_envar_is_honored( 137 | tmp_path: Path, 138 | mocker: MockerFixture, 139 | monkeypatch: pytest.MonkeyPatch, 140 | capfd: pytest.CaptureFixture[str], 141 | expected_venv_pkgs: frozenset[str], 142 | ) -> None: 143 | # ensures that we honor $PYTHONPATH when passing it to the custom interpreter process 144 | venv_path = str(tmp_path / "venv") 145 | result = virtualenv.cli_run([venv_path, "--activators", ""]) 146 | 147 | another_path = tmp_path / "another-path" 148 | fake_dist = another_path / "foo-1.2.3.dist-info" 149 | fake_dist.mkdir(parents=True) 150 | fake_metadata = fake_dist / "METADATA" 151 | with fake_metadata.open("w") as f: 152 | f.write("Metadata-Version: 2.3\nName: foo\nVersion: 1.2.3\n") 153 | cmd = ["", f"--python={result.creator.exe}", "--all", "--depth", "0"] 154 | mocker.patch("pipdeptree._discovery.sys.argv", cmd) 155 | monkeypatch.setenv("PYTHONPATH", str(another_path)) 156 | main() 157 | out, _ = capfd.readouterr() 158 | found = {i.split("==")[0] for i in out.splitlines()} 159 | assert {*expected_venv_pkgs, "foo"} == found, out 160 | -------------------------------------------------------------------------------- /tests/test_pipdeptree.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import sys 4 | from importlib import metadata 5 | from subprocess import CompletedProcess, check_call # noqa: S404 6 | from typing import TYPE_CHECKING 7 | 8 | import pytest 9 | 10 | from pipdeptree.__main__ import main 11 | 12 | if TYPE_CHECKING: 
    from pathlib import Path

    from pytest_mock import MockFixture


def test_main() -> None:
    """Smoke test: the module is runnable via `python -m pipdeptree`."""
    check_call([sys.executable, "-m", "pipdeptree", "--help"])


def test_console_script() -> None:
    """The installed `pipdeptree` console-script entry point loads and runs --help."""
    try:
        dist = metadata.distribution("pipdeptree")
    except Exception as e:  # noqa: BLE001 # pragma: no cover
        pytest.fail(f"Unexpected error when retrieving pipdeptree metadata: {e}")

    entry_points = dist.entry_points
    assert len(entry_points) == 1

    # 3.11+ supports name-based lookup on EntryPoints; older versions only index by position.
    if sys.version_info >= (3, 11):  # pragma: >=3.11
        entry_point = entry_points["pipdeptree"]
    else:
        entry_point = entry_points[0]

    try:
        pipdeptree = entry_point.load()
    except Exception as e:  # noqa: BLE001 # pragma: no cover
        pytest.fail(f"Unexpected error: {e}")

    # --help exits with status 0 via SystemExit.
    with pytest.raises(SystemExit, match="0"):
        pipdeptree(["", "--help"])


def test_main_log_resolved(tmp_path: Path, mocker: MockFixture, capsys: pytest.CaptureFixture[str]) -> None:
    """With `--python auto`, the resolved interpreter path is logged to stderr."""
    mocker.patch("sys.argv", ["", "--python", "auto"])
    mocker.patch("pipdeptree.__main__.detect_active_interpreter", return_value=str(tmp_path))
    # Stub the subprocess used to query the interpreter's sys.path so main() succeeds offline.
    mock_subprocess_run = mocker.patch("subprocess.run")
    valid_sys_path = str([str(tmp_path)])
    mock_subprocess_run.return_value = CompletedProcess(
        args=["python", "-c", "import sys; print(sys.path)"],
        returncode=0,
        stdout=valid_sys_path,
        stderr="",
    )

    main()

    captured = capsys.readouterr()
    assert captured.err.startswith(f"(resolved python: {tmp_path!s}")
--------------------------------------------------------------------------------
/tests/test_validate.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from typing import TYPE_CHECKING, Callable

import pytest

from pipdeptree._models import PackageDAG
from pipdeptree._validate import conflicting_deps, cyclic_deps, render_conflicts_text, render_cycles_text, validate

if TYPE_CHECKING:
    from collections.abc import Iterator
    from unittest.mock import Mock

    from tests.our_types import MockGraph


# Each mpkgs graph maps (name, version) -> [(dep_name, [(op, version), ...]), ...].
@pytest.mark.parametrize(
    ("mpkgs", "expected_keys", "expected_output"),
    [
        pytest.param(
            {
                ("a", "1.0.1"): [("b", [(">=", "2.0.0")])],
                ("b", "2.3.0"): [("a", [(">=", "1.0.1")])],
                ("c", "4.5.0"): [("d", [("==", "2.0")])],
                ("d", "2.0"): [],
            },
            [["a", "b", "a"], ["b", "a", "b"]],
            ["* b => a => b", "* a => b => a"],
            id="depth-of-2",
        ),
        pytest.param(
            {
                ("a", "1.0.1"): [("b", [(">=", "2.0.0")]), ("e", [("==", "2.0")])],
                ("b", "2.3.0"): [("c", [(">=", "4.5.0")])],
                ("c", "4.5.0"): [("d", [("==", "1.0.1")])],
                ("d", "1.0.0"): [("a", [("==", "1.0.1")]), ("e", [("==", "2.0")])],
                ("e", "2.0"): [],
            },
            [
                ["b", "c", "d", "a", "b"],
                ["c", "d", "a", "b", "c"],
                ["d", "a", "b", "c", "d"],
                ["a", "b", "c", "d", "a"],
            ],
            [
                "* b => c => d => a => b",
                "* c => d => a => b => c",
                "* d => a => b => c => d",
                "* a => b => c => d => a",
            ],
            id="depth-greater-than-2",
        ),
        pytest.param(
            {("a", "1.0.1"): [("b", [(">=", "2.0.0")])], ("b", "2.0.0"): []},
            [],
            [],
            id="no-cycle",
        ),
        pytest.param(
            {
                ("a", "1.0.1"): [("b", [(">=", "2.0.0")])],
            },
            [],
            [],
            id="dependency-not-installed",
        ),
        pytest.param({("a", "1.0.1"): []}, [], [], id="no-dependencies"),
    ],
)
def test_cyclic_deps(
    capsys: pytest.CaptureFixture[str],
    mpkgs: MockGraph,
    expected_keys: list[list[str]],
    expected_output: list[str],
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]],
) -> None:
    """cyclic_deps finds every rotation of a cycle; render_cycles_text prints them to stderr."""
    tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs)))
    result = cyclic_deps(tree)
    result_keys = [[dep.key for dep in deps] for deps in result]
    # Order of discovered cycles is not guaranteed, so compare sorted.
    assert sorted(expected_keys) == sorted(result_keys)
    render_cycles_text(result)
    captured = capsys.readouterr()
    assert "\n".join(expected_output).strip() == captured.err.strip()


@pytest.mark.parametrize(
    ("mpkgs", "expected_keys", "expected_output"),
    [
        (
            {("a", "1.0.1"): [("b", [(">=", "2.3.0")])], ("b", "1.9.1"): []},
            {"a": ["b"]},
            [
                "* a==1.0.1",
                " - b [required: >=2.3.0, installed: 1.9.1]",
            ],
        ),
        (
            {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "7.0")])], ("c", "8.0.1"): []},
            {"a": ["c"]},
            [
                "* a==1.0.1",
                " - c [required: >=9.4.1, installed: 8.0.1]",
            ],
        ),
        (
            # Dependency "c" is not installed at all: installed version renders as "?".
            {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "9.4.0")])]},
            {"a": ["c"], "b": ["c"]},
            [
                "* a==1.0.1",
                " - c [required: >=9.4.1, installed: ?]",
                "* b==2.3.0",
                " - c [required: >=9.4.0, installed: ?]",
            ],
        ),
        (
            # All requirements satisfied: no conflicts reported.
            {("a", "1.0.1"): [("c", [(">=", "9.4.1")])], ("b", "2.3.0"): [("c", [(">=", "7.0")])], ("c", "9.4.1"): []},
            {},
            [],
        ),
    ],
)
def test_conflicting_deps(
    capsys: pytest.CaptureFixture[str],
    mpkgs: MockGraph,
    expected_keys: dict[str, list[str]],
    expected_output: list[str],
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]],
) -> None:
    """conflicting_deps maps each package to its unsatisfied deps; render prints them to stderr."""
    tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs)))
    result = conflicting_deps(tree)
    result_keys = {k.key: [v.key for v in vs] for k, vs in result.items()}
    assert expected_keys == result_keys
    render_conflicts_text(result)
    captured = capsys.readouterr()
    assert "\n".join(expected_output).strip() == captured.err.strip()


@pytest.mark.parametrize(
    ("mpkgs", "expected_output"),
    [
        (
            {("a", "1.0.1"): [("b", [(">=", "2.3.0")])], ("b", "1.9.1"): []},
            [
                "Warning!!! Possibly conflicting dependencies found:",
                "* a==1.0.1",
                " - b [required: >=2.3.0, installed: 1.9.1]",
                "------------------------------------------------------------------------",
            ],
        ),
        (
            {
                ("a", "1.0.1"): [("b", [(">=", "2.0.0")])],
                ("b", "2.3.0"): [("a", [(">=", "1.0.1")])],
                ("c", "4.5.0"): [],
            },
            [
                "Warning!!! Cyclic dependencies found:",
                "* b => a => b",
                "* a => b => a",
                "------------------------------------------------------------------------",
            ],
        ),
    ],
)
def test_validate(
    capsys: pytest.CaptureFixture[str],
    mock_pkgs: Callable[[MockGraph], Iterator[Mock]],
    mpkgs: MockGraph,
    expected_output: list[str],
) -> None:
    """validate() writes warnings for conflicts/cycles to stderr and nothing to stdout."""
    tree = PackageDAG.from_pkgs(list(mock_pkgs(mpkgs)))
    validate(tree)
    out, err = capsys.readouterr()
    assert len(out) == 0
    assert "\n".join(expected_output).strip() == err.strip()
--------------------------------------------------------------------------------
/tests/test_warning.py:
--------------------------------------------------------------------------------
from __future__ import annotations

from typing import TYPE_CHECKING

from pipdeptree._warning import WarningPrinter, WarningType

if TYPE_CHECKING:
    import pytest


def test_warning_printer_print_single_line(capsys: pytest.CaptureFixture[str]) -> None:
    # Use WarningType.FAIL so we can verify that WarningPrinter remembers it has warned before.
    warning_printer = WarningPrinter(WarningType.FAIL)
    warning_printer.print_single_line("test")
    # With FAIL severity the printer must record that a failure-level warning occurred.
    assert warning_printer.has_warned_with_failure()
    out, err = capsys.readouterr()
    # Warnings go to stderr only.
    assert len(out) == 0
    assert err == "test\n"
--------------------------------------------------------------------------------
/tox.toml:
--------------------------------------------------------------------------------
requires = [ "tox>=4.2", "tox-uv>=1.11.3" ]
env_list = [
  "fix",
  "3.13",
  "3.12",
  "3.11",
  "3.10",
  "3.9",
  "type",
  "pkg_meta",
]
skip_missing_interpreters = true

# Base configuration shared by every per-interpreter test environment.
[env_run_base]
description = "run the unit tests with pytest under {base_python}"
package = "wheel"
wheel_build_env = ".pkg"
extras = [ "graphviz", "test" ]
pass_env = [ "DIFF_AGAINST", "PYTEST_*" ]
set_env.COVERAGE_FILE = "{work_dir}/.coverage.{env_name}"
commands = [
  [
    "python",
    "-m",
    "pytest",
    "{tty:--color=yes}",
    # posargs replace the default coverage-enabled invocation when supplied.
    { replace = "posargs", extend = true, default = [
      "--cov",
      "{env_site_packages_dir}{/}pipdeptree",
      "--cov",
      "{tox_root}{/}tests",
      "--cov-config=pyproject.toml",
      "--no-cov-on-fail",
      "--cov-report",
      "term-missing:skip-covered",
      "--cov-context=test",
      "--cov-report",
      "html:{env_tmp_dir}{/}htmlcov",
      "--cov-report",
      "xml:{work_dir}{/}coverage.{env_name}.xml",
      "--junitxml",
      "{work_dir}{/}junit.{env_name}.xml",
      "tests",
    ] },
  ],
  # Enforce 100% coverage on changed lines relative to the comparison branch.
  [
    "diff-cover",
    "--compare-branch",
    "{env:DIFF_AGAINST:origin/main}",
    "{work_dir}{/}coverage.{env_name}.xml",
    "--fail-under",
    "100",
  ],
]

[env.fix]
description = "format the code base to adhere to our styles, and complain about what we cannot do automatically"
skip_install = true
deps = [ "pre-commit-uv>=4.1.3" ]
commands = [ [ "pre-commit", "run", "--all-files", "--show-diff-on-failure" ] ]

[env.type]
description = "run type check on code base"
deps = [ "mypy==1.11.2" ]
commands = [ [ "mypy", "src" ], [ "mypy", "tests" ] ]

[env.pkg_meta]
description = "check that the long description is valid"
skip_install = true
deps = [ "check-wheel-contents>=0.6", "twine>=5.1.1", "uv>=0.4.10" ]
commands = [
  [ "uv", "build", "--sdist", "--wheel", "--out-dir", "{env_tmp_dir}", "." ],
  [ "twine", "check", "{env_tmp_dir}{/}*" ],
  [ "check-wheel-contents", "--no-config", "{env_tmp_dir}" ],
]

[env.dev]
description = "generate a DEV environment"
package = "editable"
commands = [ [ "uv", "pip", "tree" ], [ "python", "-c", "import sys; print(sys.executable)" ] ]
--------------------------------------------------------------------------------